/*! @name @videojs/http-streaming @version 2.6.1 @license Apache-2.0 */
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

var _assertThisInitialized = require('@babel/runtime/helpers/assertThisInitialized');
var _inheritsLoose = require('@babel/runtime/helpers/inheritsLoose');
var document = require('global/document');
var window = require('global/window');
var _resolveUrl = require('@videojs/vhs-utils/cjs/resolve-url.js');
var videojs = require('video.js');
var m3u8Parser = require('m3u8-parser');
var mediaTypes_js = require('@videojs/vhs-utils/cjs/media-types.js');
var mpdParser = require('mpd-parser');
var parseSidx = require('mux.js/lib/tools/parse-sidx');
var id3Helpers = require('@videojs/vhs-utils/cjs/id3-helpers');
var containers = require('@videojs/vhs-utils/cjs/containers');
var byteHelpers = require('@videojs/vhs-utils/cjs/byte-helpers');
var tsInspector = require('mux.js/lib/tools/ts-inspector.js');
var clock = require('mux.js/lib/utils/clock');
var mp4probe = require('mux.js/lib/mp4/probe');
var codecs_js = require('@videojs/vhs-utils/cjs/codecs.js');

function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }

var _assertThisInitialized__default = /*#__PURE__*/_interopDefaultLegacy(_assertThisInitialized);
var _inheritsLoose__default = /*#__PURE__*/_interopDefaultLegacy(_inheritsLoose);
var document__default = /*#__PURE__*/_interopDefaultLegacy(document);
var window__default = /*#__PURE__*/_interopDefaultLegacy(window);
var _resolveUrl__default = /*#__PURE__*/_interopDefaultLegacy(_resolveUrl);
var videojs__default = /*#__PURE__*/_interopDefaultLegacy(videojs);
var parseSidx__default = /*#__PURE__*/_interopDefaultLegacy(parseSidx);
var tsInspector__default = /*#__PURE__*/_interopDefaultLegacy(tsInspector);
var mp4probe__default = /*#__PURE__*/_interopDefaultLegacy(mp4probe);
/**
 * @file resolve-url.js - Handling how URLs are resolved and manipulated
 */
var resolveUrl = _resolveUrl__default['default'];
/**
 * Checks whether an xhr request was redirected and returns the correct url depending
 * on the `handleManifestRedirects` option
 *
 * @api private
 *
 * @param {string} url - the url being requested
 * @param {XMLHttpRequest} req - xhr request result
 *
 * @return {string}
 */

var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
  // To understand how the responseURL below is set and generated:
  // - https://fetch.spec.whatwg.org/#concept-response-url
  // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
  if (handleManifestRedirect && req && req.responseURL && url !== req.responseURL) {
    return req.responseURL;
  }

  return url;
};
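
// Illustrative usage sketch (not part of the library; values are
// hypothetical): with redirect handling enabled, a request whose final
// responseURL differs from the requested url resolves to the redirect
// target; otherwise the original url is kept.
//
//   resolveManifestRedirect(true, 'https://example.com/master.m3u8', {
//     responseURL: 'https://cdn.example.com/master.m3u8'
//   });
//   // => 'https://cdn.example.com/master.m3u8'
//
//   resolveManifestRedirect(false, 'https://example.com/master.m3u8', req);
//   // => 'https://example.com/master.m3u8' (option disabled, url unchanged)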

var logger = function logger(source) {
  if (videojs__default['default'].log.debug) {
    return videojs__default['default'].log.debug.bind(videojs__default['default'], 'VHS:', source + " >");
  }

  return function () {};
};

var log = videojs__default['default'].log;
var createPlaylistID = function createPlaylistID(index, uri) {
  return index + "-" + uri;
};
/**
 * Parses a given m3u8 playlist
 *
 * @param {string} manifestString
 *        The downloaded manifest string
 * @param {Object[]} [customTagParsers]
 *        An array of custom tag parsers for the m3u8-parser instance
 * @param {Object[]} [customTagMappers]
 *        An array of custom tag mappers for the m3u8-parser instance
 * @return {Object}
 *         The manifest object
 */

var parseManifest = function parseManifest(_ref) {
  var onwarn = _ref.onwarn,
      oninfo = _ref.oninfo,
      manifestString = _ref.manifestString,
      _ref$customTagParsers = _ref.customTagParsers,
      customTagParsers = _ref$customTagParsers === void 0 ? [] : _ref$customTagParsers,
      _ref$customTagMappers = _ref.customTagMappers,
      customTagMappers = _ref$customTagMappers === void 0 ? [] : _ref$customTagMappers;
  var parser = new m3u8Parser.Parser();

  if (onwarn) {
    parser.on('warn', onwarn);
  }

  if (oninfo) {
    parser.on('info', oninfo);
  }

  customTagParsers.forEach(function (customParser) {
    return parser.addParser(customParser);
  });
  customTagMappers.forEach(function (mapper) {
    return parser.addTagMapper(mapper);
  });
  parser.push(manifestString);
  parser.end();
  return parser.manifest;
};
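
// Illustrative usage sketch (not part of the library): parse a minimal,
// hypothetical media playlist string into a manifest object.
//
//   var manifest = parseManifest({
//     manifestString: [
//       '#EXTM3U',
//       '#EXT-X-TARGETDURATION:10',
//       '#EXTINF:10,',
//       'segment-0.ts',
//       '#EXT-X-ENDLIST'
//     ].join('\n'),
//     onwarn: function (warn) { console.warn(warn.message); }
//   });
//   // manifest.segments.length === 1, manifest.endList === true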
/**
 * Loops through all supported media groups in master and calls the provided
 * callback for each group
 *
 * @param {Object} master
 *        The parsed master manifest object
 * @param {Function} callback
 *        Callback to call for each media group
 */

var forEachMediaGroup = function forEachMediaGroup(master, callback) {
  ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
    for (var groupKey in master.mediaGroups[mediaType]) {
      for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
        var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
        callback(mediaProperties, mediaType, groupKey, labelKey);
      }
    }
  });
};
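
// Illustrative usage sketch (not part of the library): log every AUDIO and
// SUBTITLES rendition in a parsed master manifest (`master` is assumed to
// be a parsed master manifest object).
//
//   forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
//     console.log(mediaType + '/' + groupKey + '/' + labelKey, properties.uri);
//   });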
/**
 * Adds properties and attributes to the playlist to keep consistent functionality for
 * playlists throughout VHS.
 *
 * @param {Object} config
 *        Arguments object
 * @param {Object} config.playlist
 *        The media playlist
 * @param {string} [config.uri]
 *        The uri to the media playlist (if media playlist is not from within a master
 *        playlist)
 * @param {string} config.id
 *        ID to use for the playlist
 */

var setupMediaPlaylist = function setupMediaPlaylist(_ref2) {
  var playlist = _ref2.playlist,
      uri = _ref2.uri,
      id = _ref2.id;
  playlist.id = id;

  if (uri) {
    // For media playlists, m3u8-parser does not have access to a URI, as HLS media
    // playlists do not contain their own source URI, but one is needed for consistency in
    // VHS.
    playlist.uri = uri;
  } // For HLS master playlists, even though certain attributes MUST be defined, the
  // stream may still be played without them.
  // For HLS media playlists, m3u8-parser does not attach an attributes object to the
  // manifest.
  //
  // To avoid undefined reference errors through the project, and make the code easier
  // to write/read, add an empty attributes object for these cases.


  playlist.attributes = playlist.attributes || {};
};
/**
 * Adds ID, resolvedUri, and attributes properties to each playlist of the master,
 * where necessary. In addition, creates playlist IDs for each playlist and adds
 * ID and URI keyed references to the master's playlists array.
 *
 * @param {Object} master
 *        The master playlist
 */

var setupMediaPlaylists = function setupMediaPlaylists(master) {
  var i = master.playlists.length;

  while (i--) {
    var playlist = master.playlists[i];
    setupMediaPlaylist({
      playlist: playlist,
      id: createPlaylistID(i, playlist.uri)
    });
    playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
    master.playlists[playlist.id] = playlist; // URI reference added for backwards compatibility

    master.playlists[playlist.uri] = playlist; // Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,
    // the stream can be played without it. Although an attributes property may have been
    // added to the playlist to prevent undefined references, issue a warning to fix the
    // manifest.

    if (!playlist.attributes.BANDWIDTH) {
      log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
    }
  }
};
/**
 * Adds resolvedUri properties to each media group.
 *
 * @param {Object} master
 *        The master playlist
 */

var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
  forEachMediaGroup(master, function (properties) {
    if (properties.uri) {
      properties.resolvedUri = resolveUrl(master.uri, properties.uri);
    }
  });
};
/**
 * Creates a master playlist wrapper to insert a sole media playlist into.
 *
 * @param {Object} media
 *        Media playlist
 * @param {string} uri
 *        The media URI
 *
 * @return {Object}
 *         Master playlist
 */

var masterForMedia = function masterForMedia(media, uri) {
  var id = createPlaylistID(0, uri);
  var master = {
    mediaGroups: {
      'AUDIO': {},
      'VIDEO': {},
      'CLOSED-CAPTIONS': {},
      'SUBTITLES': {}
    },
    uri: window__default['default'].location.href,
    resolvedUri: window__default['default'].location.href,
    playlists: [{
      uri: uri,
      id: id,
      resolvedUri: uri,
      // m3u8-parser does not attach an attributes property to media playlists so make
      // sure that the property is attached to avoid undefined reference errors
      attributes: {}
    }]
  }; // set up ID reference

  master.playlists[id] = master.playlists[0]; // URI reference added for backwards compatibility

  master.playlists[uri] = master.playlists[0];
  return master;
};
/**
 * Does an in-place update of the master manifest to add updated playlist URI references
 * as well as other properties needed by VHS that aren't included by the parser.
 *
 * @param {Object} master
 *        Master manifest object
 * @param {string} uri
 *        The source URI
 */

var addPropertiesToMaster = function addPropertiesToMaster(master, uri) {
  master.uri = uri;

  for (var i = 0; i < master.playlists.length; i++) {
    if (!master.playlists[i].uri) {
      // Set up phony URIs for the playlists since playlists are referenced by their URIs
      // throughout VHS, but some formats (e.g., DASH) don't have external URIs
      // TODO: consider adding dummy URIs in mpd-parser
      var phonyUri = "placeholder-uri-" + i;
      master.playlists[i].uri = phonyUri;
    }
  }

  forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
    if (!properties.playlists || !properties.playlists.length || properties.playlists[0].uri) {
      return;
    } // Set up phony URIs for the media group playlists since playlists are referenced by
    // their URIs throughout VHS, but some formats (e.g., DASH) don't have external URIs


    var phonyUri = "placeholder-uri-" + mediaType + "-" + groupKey + "-" + labelKey;
    var id = createPlaylistID(0, phonyUri);
    properties.playlists[0].uri = phonyUri;
    properties.playlists[0].id = id; // setup ID and URI references (URI for backwards compatibility)

    master.playlists[id] = properties.playlists[0];
    master.playlists[phonyUri] = properties.playlists[0];
  });
  setupMediaPlaylists(master);
  resolveMediaGroupUris(master);
};

var mergeOptions = videojs__default['default'].mergeOptions,
    EventTarget = videojs__default['default'].EventTarget;
/**
 * Returns a new array of segments that is the result of merging
 * properties from an older list of segments onto an updated
 * list. No properties on the updated playlist will be overridden.
 *
 * @param {Array} original the outdated list of segments
 * @param {Array} update the updated list of segments
 * @param {number=} offset the index of the first update
 * segment in the original segment list. For non-live playlists,
 * this should always be zero and does not need to be
 * specified. For live playlists, it should be the difference
 * between the media sequence numbers in the original and updated
 * playlists.
 * @return a list of merged segment objects
 */

var updateSegments = function updateSegments(original, update, offset) {
  var result = update.slice();
  offset = offset || 0;
  var length = Math.min(original.length, update.length + offset);

  for (var i = offset; i < length; i++) {
    result[i - offset] = mergeOptions(original[i], result[i - offset]);
  }

  return result;
};
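
// Illustrative sketch (not part of the library; segment objects are
// hypothetical): merging a live refresh whose media sequence advanced by
// one. Properties from the old list (e.g. a resolved `start` time) carry
// over onto the matching updated segments.
//
//   var original = [{ uri: 's1.ts', start: 0 }, { uri: 's2.ts', start: 10 }];
//   var update = [{ uri: 's2.ts' }, { uri: 's3.ts' }];
//   updateSegments(original, update, 1);
//   // => [{ uri: 's2.ts', start: 10 }, { uri: 's3.ts' }]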
var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
  if (!segment.resolvedUri) {
    segment.resolvedUri = resolveUrl(baseUri, segment.uri);
  }

  if (segment.key && !segment.key.resolvedUri) {
    segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);
  }

  if (segment.map && !segment.map.resolvedUri) {
    segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);
  }
}; // consider the playlist unchanged if the playlist object is the same or
// the number of segments is equal, the media sequence number is unchanged,
// and this playlist hasn't become the end of the playlist

var isPlaylistUnchanged = function isPlaylistUnchanged(a, b) {
  return a === b || a.segments && b.segments && a.segments.length === b.segments.length && a.endList === b.endList && a.mediaSequence === b.mediaSequence;
};
/**
 * Returns a new master playlist that is the result of merging an
 * updated media playlist into the original version. If the
 * updated media playlist does not match any of the playlist
 * entries in the original master playlist, null is returned.
 *
 * @param {Object} master a parsed master M3U8 object
 * @param {Object} media a parsed media M3U8 object
 * @return {Object} a new object that represents the original
 * master playlist with the updated media playlist merged in, or
 * null if the merge produced no change.
 */

var updateMaster = function updateMaster(master, media, unchangedCheck) {
  if (unchangedCheck === void 0) {
    unchangedCheck = isPlaylistUnchanged;
  }

  var result = mergeOptions(master, {});
  var playlist = result.playlists[media.id];

  if (!playlist) {
    return null;
  }

  if (unchangedCheck(playlist, media)) {
    return null;
  }

  var mergedPlaylist = mergeOptions(playlist, media); // if the update could overlap existing segment information, merge the two segment lists

  if (playlist.segments) {
    mergedPlaylist.segments = updateSegments(playlist.segments, media.segments, media.mediaSequence - playlist.mediaSequence);
  } // resolve any segment URIs to prevent us from having to do it later


  mergedPlaylist.segments.forEach(function (segment) {
    resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
  }); // TODO Right now in the playlists array there are two references to each playlist, one
  // that is referenced by index, and one by URI. The index reference may no longer be
  // necessary.

  for (var i = 0; i < result.playlists.length; i++) {
    if (result.playlists[i].id === media.id) {
      result.playlists[i] = mergedPlaylist;
    }
  }

  result.playlists[media.id] = mergedPlaylist; // URI reference added for backwards compatibility

  result.playlists[media.uri] = mergedPlaylist;
  return result;
};
/**
 * Calculates the time to wait before refreshing a live playlist
 *
 * @param {Object} media
 *        The current media
 * @param {boolean} update
 *        True if there were any updates from the last refresh, false otherwise
 * @return {number}
 *         The time in ms to wait before refreshing the live playlist
 */

var refreshDelay = function refreshDelay(media, update) {
  var lastSegment = media.segments[media.segments.length - 1];
  var delay;

  if (update && lastSegment && lastSegment.duration) {
    delay = lastSegment.duration * 1000;
  } else {
    // if the playlist is unchanged since the last reload or last segment duration
    // cannot be determined, try again after half the target duration
    delay = (media.targetDuration || 10) * 500;
  }

  return delay;
};
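
// Worked example (illustrative, hypothetical values): an updated playlist
// whose last segment lasts 6 seconds is refreshed after 6000 ms; an
// unchanged playlist with a 10 second target duration is retried after
// half the target duration, 10 * 500 = 5000 ms.
//
//   refreshDelay({ segments: [{ duration: 6 }], targetDuration: 10 }, true);  // => 6000
//   refreshDelay({ segments: [{ duration: 6 }], targetDuration: 10 }, false); // => 5000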
/**
 * Load a playlist from a remote location
 *
 * @class PlaylistLoader
 * @extends Stream
 * @param {string|Object} src url or object of manifest
 * @param {boolean} withCredentials the withCredentials xhr option
 */

var PlaylistLoader = /*#__PURE__*/function (_EventTarget) {
  _inheritsLoose__default['default'](PlaylistLoader, _EventTarget);

  function PlaylistLoader(src, vhs, options) {
    var _this;

    if (options === void 0) {
      options = {};
    }

    _this = _EventTarget.call(this) || this;

    if (!src) {
      throw new Error('A non-empty playlist URL or object is required');
    }

    _this.logger_ = logger('PlaylistLoader');
    var _options = options,
        _options$withCredenti = _options.withCredentials,
        withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
        _options$handleManife = _options.handleManifestRedirects,
        handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
    _this.src = src;
    _this.vhs_ = vhs;
    _this.withCredentials = withCredentials;
    _this.handleManifestRedirects = handleManifestRedirects;
    var vhsOptions = vhs.options_;
    _this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];
    _this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || []; // initialize the loader state

    _this.state = 'HAVE_NOTHING'; // live playlist staleness timeout

    _this.on('mediaupdatetimeout', function () {
      if (_this.state !== 'HAVE_METADATA') {
        // only refresh the media playlist if no other activity is going on
        return;
      }

      _this.state = 'HAVE_CURRENT_METADATA';
      _this.request = _this.vhs_.xhr({
        uri: resolveUrl(_this.master.uri, _this.media().uri),
        withCredentials: _this.withCredentials
      }, function (error, req) {
        // disposed
        if (!_this.request) {
          return;
        }

        if (error) {
          return _this.playlistRequestError(_this.request, _this.media(), 'HAVE_METADATA');
        }

        _this.haveMetadata({
          playlistString: _this.request.responseText,
          url: _this.media().uri,
          id: _this.media().id
        });
      });
    });

    return _this;
  }

  var _proto = PlaylistLoader.prototype;

  _proto.playlistRequestError = function playlistRequestError(xhr, playlist, startingState) {
    var uri = playlist.uri,
        id = playlist.id; // any in-flight request is now finished

    this.request = null;

    if (startingState) {
      this.state = startingState;
    }

    this.error = {
      playlist: this.master.playlists[id],
      status: xhr.status,
      message: "HLS playlist request error at URL: " + uri + ".",
      responseText: xhr.responseText,
      code: xhr.status >= 500 ? 4 : 2
    };
    this.trigger('error');
  }
  /**
   * Update the playlist loader's state in response to a new or updated playlist.
   *
   * @param {string} [playlistString]
   *        Playlist string (if playlistObject is not provided)
   * @param {Object} [playlistObject]
   *        Playlist object (if playlistString is not provided)
   * @param {string} url
   *        URL of playlist
   * @param {string} id
   *        ID to use for playlist
   */
  ;

  _proto.haveMetadata = function haveMetadata(_ref) {
    var _this2 = this;

    var playlistString = _ref.playlistString,
        playlistObject = _ref.playlistObject,
        url = _ref.url,
        id = _ref.id;
    // any in-flight request is now finished
    this.request = null;
    this.state = 'HAVE_METADATA';
    var playlist = playlistObject || parseManifest({
      onwarn: function onwarn(_ref2) {
        var message = _ref2.message;
        return _this2.logger_("m3u8-parser warn for " + id + ": " + message);
      },
      oninfo: function oninfo(_ref3) {
        var message = _ref3.message;
        return _this2.logger_("m3u8-parser info for " + id + ": " + message);
      },
      manifestString: playlistString,
      customTagParsers: this.customTagParsers,
      customTagMappers: this.customTagMappers
    });
    playlist.lastRequest = Date.now();
    setupMediaPlaylist({
      playlist: playlist,
      uri: url,
      id: id
    }); // merge this playlist into the master

    var update = updateMaster(this.master, playlist);
    this.targetDuration = playlist.targetDuration;

    if (update) {
      this.master = update;
      this.media_ = this.master.playlists[id];
    } else {
      this.trigger('playlistunchanged');
    } // refresh live playlists after a target duration passes


    if (!this.media().endList) {
      window__default['default'].clearTimeout(this.mediaUpdateTimeout);
      this.mediaUpdateTimeout = window__default['default'].setTimeout(function () {
        _this2.trigger('mediaupdatetimeout');
      }, refreshDelay(this.media(), !!update));
    }

    this.trigger('loadedplaylist');
  }
  /**
   * Abort any outstanding work and clean up.
   */
  ;

  _proto.dispose = function dispose() {
    this.trigger('dispose');
    this.stopRequest();
    window__default['default'].clearTimeout(this.mediaUpdateTimeout);
    window__default['default'].clearTimeout(this.finalRenditionTimeout);
    this.off();
  };

  _proto.stopRequest = function stopRequest() {
    if (this.request) {
      var oldRequest = this.request;
      this.request = null;
      oldRequest.onreadystatechange = null;
      oldRequest.abort();
    }
  }
  /**
   * When called without any arguments, returns the currently
   * active media playlist. When called with a single argument,
   * triggers the playlist loader to asynchronously switch to the
   * specified media playlist. Calling this method while the
   * loader is in the HAVE_NOTHING state causes an error to be
   * emitted but otherwise has no effect.
   *
   * @param {Object=} playlist the parsed media playlist
   * object to switch to
   * @param {boolean=} shouldDelay whether we should delay the request by half target duration
   *
   * @return {Playlist} the current loaded media
   */
  ;

  _proto.media = function media(playlist, shouldDelay) {
    var _this3 = this;

    // getter
    if (!playlist) {
      return this.media_;
    } // setter


    if (this.state === 'HAVE_NOTHING') {
      throw new Error('Cannot switch media playlist from ' + this.state);
    } // find the playlist object if the target playlist has been
    // specified by URI


    if (typeof playlist === 'string') {
      if (!this.master.playlists[playlist]) {
        throw new Error('Unknown playlist URI: ' + playlist);
      }

      playlist = this.master.playlists[playlist];
    }

    window__default['default'].clearTimeout(this.finalRenditionTimeout);

    if (shouldDelay) {
      var delay = playlist.targetDuration / 2 * 1000 || 5 * 1000;
      this.finalRenditionTimeout = window__default['default'].setTimeout(this.media.bind(this, playlist, false), delay);
      return;
    }

    var startingState = this.state;
    var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to fully loaded playlists immediately

    if (this.master.playlists[playlist.id].endList || // handle the case of a playlist object (e.g., if using vhs-json with a resolved
    // media playlist or, for the case of demuxed audio, a resolved audio media group)
    playlist.endList && playlist.segments.length) {
      // abort outstanding playlist requests
      if (this.request) {
        this.request.onreadystatechange = null;
        this.request.abort();
        this.request = null;
      }

      this.state = 'HAVE_METADATA';
      this.media_ = playlist; // trigger media change if the active media has been updated

      if (mediaChange) {
        this.trigger('mediachanging');

        if (startingState === 'HAVE_MASTER') {
          // The initial playlist was a master manifest, and the first media selected was
          // also provided (in the form of a resolved playlist object) as part of the
          // source object (rather than just a URL). Therefore, since the media playlist
          // doesn't need to be requested, loadedmetadata won't trigger as part of the
          // normal flow, and needs an explicit trigger here.
          this.trigger('loadedmetadata');
        } else {
          this.trigger('mediachange');
        }
      }

      return;
    } // switching to the active playlist is a no-op


    if (!mediaChange) {
      return;
    }

    this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist request

    if (this.request) {
      if (playlist.resolvedUri === this.request.url) {
        // requesting to switch to the same playlist multiple times
        // has no effect after the first
        return;
      }

      this.request.onreadystatechange = null;
      this.request.abort();
      this.request = null;
    } // request the new playlist


    if (this.media_) {
      this.trigger('mediachanging');
    }

    this.request = this.vhs_.xhr({
      uri: playlist.resolvedUri,
      withCredentials: this.withCredentials
    }, function (error, req) {
      // disposed
      if (!_this3.request) {
        return;
      }

      playlist.lastRequest = Date.now();
      playlist.resolvedUri = resolveManifestRedirect(_this3.handleManifestRedirects, playlist.resolvedUri, req);

      if (error) {
        return _this3.playlistRequestError(_this3.request, playlist, startingState);
      }

      _this3.haveMetadata({
        playlistString: req.responseText,
        url: playlist.uri,
        id: playlist.id
      }); // fire loadedmetadata the first time a media playlist is loaded


      if (startingState === 'HAVE_MASTER') {
        _this3.trigger('loadedmetadata');
      } else {
        _this3.trigger('mediachange');
      }
    });
  }
  /**
   * pause loading of the playlist
   */
  ;

  _proto.pause = function pause() {
    this.stopRequest();
    window__default['default'].clearTimeout(this.mediaUpdateTimeout);

    if (this.state === 'HAVE_NOTHING') {
      // If we pause the loader before any data has been retrieved, it's as if we never
      // started, so reset to an unstarted state.
      this.started = false;
    } // Need to restore state now that no activity is happening


    if (this.state === 'SWITCHING_MEDIA') {
      // if the loader was in the process of switching media, it should either return to
      // HAVE_MASTER or HAVE_METADATA depending on if the loader has loaded a media
      // playlist yet. This is determined by the existence of loader.media_
      if (this.media_) {
        this.state = 'HAVE_METADATA';
      } else {
        this.state = 'HAVE_MASTER';
      }
    } else if (this.state === 'HAVE_CURRENT_METADATA') {
      this.state = 'HAVE_METADATA';
    }
  }
  /**
   * start loading of the playlist
   */
  ;

  _proto.load = function load(shouldDelay) {
    var _this4 = this;

    window__default['default'].clearTimeout(this.mediaUpdateTimeout);
    var media = this.media();

    if (shouldDelay) {
      var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
      this.mediaUpdateTimeout = window__default['default'].setTimeout(function () {
        return _this4.load();
      }, delay);
      return;
    }

    if (!this.started) {
      this.start();
      return;
    }

    if (media && !media.endList) {
      this.trigger('mediaupdatetimeout');
    } else {
      this.trigger('loadedplaylist');
    }
  }
  /**
   * start loading of the playlist
   */
  ;

  _proto.start = function start() {
    var _this5 = this;

    this.started = true;

    if (typeof this.src === 'object') {
      // in the case of an entirely constructed manifest object (meaning there's no actual
      // manifest on a server), default the uri to the page's href
      if (!this.src.uri) {
        this.src.uri = window__default['default'].location.href;
      } // resolvedUri is added on internally after the initial request. Since there's no
      // request for pre-resolved manifests, add on resolvedUri here.


      this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first
      // request can be skipped (since the top level of the manifest, at a minimum, is
      // already available as a parsed manifest object). However, if the manifest object
      // represents a master playlist, some media playlists may need to be resolved before
      // the starting segment list is available. Therefore, go directly to setup of the
      // initial playlist, and let the normal flow continue from there.
      //
      // Note that the call to setup is asynchronous, as other sections of VHS may assume
      // that the first request is asynchronous.

      setTimeout(function () {
        _this5.setupInitialPlaylist(_this5.src);
      }, 0);
      return;
    } // request the specified URL


    this.request = this.vhs_.xhr({
      uri: this.src,
      withCredentials: this.withCredentials
    }, function (error, req) {
      // disposed
      if (!_this5.request) {
        return;
      } // clear the loader's request reference


      _this5.request = null;

      if (error) {
        _this5.error = {
          status: req.status,
          message: "HLS playlist request error at URL: " + _this5.src + ".",
          responseText: req.responseText,
          // MEDIA_ERR_NETWORK
          code: 2
        };

        if (_this5.state === 'HAVE_NOTHING') {
          _this5.started = false;
        }

        return _this5.trigger('error');
      }

      _this5.src = resolveManifestRedirect(_this5.handleManifestRedirects, _this5.src, req);
      var manifest = parseManifest({
        manifestString: req.responseText,
        customTagParsers: _this5.customTagParsers,
        customTagMappers: _this5.customTagMappers
      });

      _this5.setupInitialPlaylist(manifest);
    });
  };

  _proto.srcUri = function srcUri() {
    return typeof this.src === 'string' ? this.src : this.src.uri;
  }
  /**
   * Given a manifest object that's either a master or media playlist, trigger the proper
   * events and set the state of the playlist loader.
   *
   * If the manifest object represents a master playlist, `loadedplaylist` will be
   * triggered to allow listeners to select a playlist. If none is selected, the loader
   * will default to the first one in the playlists array.
   *
   * If the manifest object represents a media playlist, `loadedplaylist` will be
   * triggered followed by `loadedmetadata`, as the only available playlist is loaded.
   *
   * In the case of a media playlist, a master playlist object wrapper with one playlist
   * will be created so that all logic can handle playlists in the same fashion (as an
   * assumed manifest object schema).
   *
   * @param {Object} manifest
   *        The parsed manifest object
   */
  ;

  _proto.setupInitialPlaylist = function setupInitialPlaylist(manifest) {
    this.state = 'HAVE_MASTER';

    if (manifest.playlists) {
      this.master = manifest;
      addPropertiesToMaster(this.master, this.srcUri()); // If the initial master playlist has playlists with segments already resolved,
      // then resolve URIs in advance, as they are usually done after a playlist request,
      // which may not happen if the playlist is resolved.

      manifest.playlists.forEach(function (playlist) {
        if (playlist.segments) {
          playlist.segments.forEach(function (segment) {
            resolveSegmentUris(segment, playlist.resolvedUri);
          });
        }
      });
      this.trigger('loadedplaylist');

      if (!this.request) {
        // no media playlist was specifically selected so start
        // from the first listed one
        this.media(this.master.playlists[0]);
      }

      return;
    } // In order to support media playlists passed in as vhs-json, the case where the uri
    // is not provided as part of the manifest should be considered, and an appropriate
    // default used.


    var uri = this.srcUri() || window__default['default'].location.href;
    this.master = masterForMedia(manifest, uri);
    this.haveMetadata({
      playlistObject: manifest,
      url: uri,
      id: this.master.playlists[0].id
    });
    this.trigger('loadedmetadata');
  };

  return PlaylistLoader;
}(EventTarget);

/**
 * ranges
 *
 * Utilities for working with TimeRanges.
 *
 */

var TIME_FUDGE_FACTOR = 1 / 30; // Comparisons between time values such as current time and the end of the buffered range
// can be misleading because of precision differences or when the current media has poorly
// aligned audio and video, which can cause values to be slightly off from what you would
// expect. This value is what we consider to be safe to use in such comparisons to account
// for these scenarios.

var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;

var filterRanges = function filterRanges(timeRanges, predicate) {
  var results = [];
  var i;

  if (timeRanges && timeRanges.length) {
    // Search for ranges that match the predicate
    for (i = 0; i < timeRanges.length; i++) {
      if (predicate(timeRanges.start(i), timeRanges.end(i))) {
        results.push([timeRanges.start(i), timeRanges.end(i)]);
      }
    }
  }

  return videojs__default['default'].createTimeRanges(results);
};
/**
 * Attempts to find the buffered TimeRange that contains the specified
 * time.
 *
 * @param {TimeRanges} buffered - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @return {TimeRanges} a new TimeRanges object
 */


var findRange = function findRange(buffered, time) {
  return filterRanges(buffered, function (start, end) {
    return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;
  });
};
/**
 * Returns the TimeRanges that begin later than the specified time.
 *
 * @param {TimeRanges} timeRanges - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @return {TimeRanges} a new TimeRanges object.
 */

var findNextRange = function findNextRange(timeRanges, time) {
  return filterRanges(timeRanges, function (start) {
    return start - TIME_FUDGE_FACTOR >= time;
  });
};
/**
 * Returns gaps within a list of TimeRanges
 *
 * @param {TimeRanges} buffered - the TimeRanges object
 * @return {TimeRanges} a TimeRanges object of gaps
 */

var findGaps = function findGaps(buffered) {
  if (buffered.length < 2) {
    return videojs__default['default'].createTimeRanges();
  }

  var ranges = [];

  for (var i = 1; i < buffered.length; i++) {
    var start = buffered.end(i - 1);
    var end = buffered.start(i);
    ranges.push([start, end]);
  }

  return videojs__default['default'].createTimeRanges(ranges);
};
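
// Illustrative sketch (not part of the library): for a buffer with two
// disjoint ranges [0, 10] and [15, 20], the single gap is [10, 15].
// `videojs` here is the video.js default export.
//
//   var buffered = videojs.createTimeRanges([[0, 10], [15, 20]]);
//   var gaps = findGaps(buffered);
//   // gaps.start(0) === 10, gaps.end(0) === 15
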
/**
 * Calculate the intersection of two TimeRanges
 *
 * @param {TimeRanges} bufferA
 * @param {TimeRanges} bufferB
 * @return {TimeRanges} The intersection of `bufferA` with `bufferB`
 */

var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
  var start = null;
  var end = null;
  var arity = 0;
  var extents = [];
  var ranges = [];

  if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
    return videojs__default['default'].createTimeRange();
  } // Handle the case where we have both buffers and create an
  // intersection of the two


  var count = bufferA.length; // A) Gather up all start and end times

  while (count--) {
    extents.push({
      time: bufferA.start(count),
      type: 'start'
    });
    extents.push({
      time: bufferA.end(count),
      type: 'end'
    });
  }

  count = bufferB.length;

  while (count--) {
    extents.push({
      time: bufferB.start(count),
      type: 'start'
    });
    extents.push({
      time: bufferB.end(count),
      type: 'end'
    });
  } // B) Sort them by time


  extents.sort(function (a, b) {
    return a.time - b.time;
  }); // C) Go along one by one incrementing arity for start and decrementing
  // arity for ends

  for (count = 0; count < extents.length; count++) {
    if (extents[count].type === 'start') {
      arity++; // D) If arity is ever incremented to 2 we are entering an
      // overlapping range

      if (arity === 2) {
        start = extents[count].time;
      }
    } else if (extents[count].type === 'end') {
      arity--; // E) If arity is ever decremented to 1 we are leaving an
      // overlapping range

      if (arity === 1) {
        end = extents[count].time;
      }
    } // F) Record overlapping ranges


    if (start !== null && end !== null) {
      ranges.push([start, end]);
      start = null;
      end = null;
    }
  }

  return videojs__default['default'].createTimeRanges(ranges);
};
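
// Illustrative sketch (not part of the library): intersecting [0, 10]
// with [5, 15] yields the overlapping span [5, 10]. `videojs` here is
// the video.js default export.
//
//   var a = videojs.createTimeRanges([[0, 10]]);
//   var b = videojs.createTimeRanges([[5, 15]]);
//   var overlap = bufferIntersection(a, b);
//   // overlap.start(0) === 5, overlap.end(0) === 10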
/**
 * Gets a human readable string for a TimeRange
 *
 * @param {TimeRange} range
 * @return {string} a human readable string
 */

var printableRange = function printableRange(range) {
  var strArr = [];

  if (!range || !range.length) {
    return '';
  }

  for (var i = 0; i < range.length; i++) {
    strArr.push(range.start(i) + ' => ' + range.end(i));
  }

  return strArr.join(', ');
};
/**
 * Calculates the amount of time left in seconds until the player hits the end of the
 * buffer and causes a rebuffer
 *
 * @param {TimeRange} buffered
 *        The state of the buffer
 * @param {number} currentTime
 *        The current time of the player
 * @param {number} playbackRate
 *        The current playback rate of the player. Defaults to 1.
 * @return {number}
 *         Time until the player has to start rebuffering in seconds.
 * @function timeUntilRebuffer
 */

var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime, playbackRate) {
  if (playbackRate === void 0) {
    playbackRate = 1;
  }

  var bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
  return (bufferedEnd - currentTime) / playbackRate;
};
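
// Worked example (illustrative, hypothetical values): with the buffer
// ending at 30s, the playhead at 24s, and a 2x playback rate, the player
// rebuffers in (30 - 24) / 2 = 3 seconds. `videojs` here is the video.js
// default export.
//
//   var buffered = videojs.createTimeRanges([[0, 30]]);
//   timeUntilRebuffer(buffered, 24, 2); // => 3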
/**
 * Converts a TimeRanges object into an array representation
 *
 * @param {TimeRanges} timeRanges
 * @return {Array}
 */

var timeRangesToArray = function timeRangesToArray(timeRanges) {
  var timeRangesList = [];

  for (var i = 0; i < timeRanges.length; i++) {
    timeRangesList.push({
      start: timeRanges.start(i),
      end: timeRanges.end(i)
    });
  }

  return timeRangesList;
};
/**
 * Determines if two time range objects are different.
 *
 * @param {TimeRange} a
 *        the first time range object to check
 *
 * @param {TimeRange} b
 *        the second time range object to check
 *
 * @return {Boolean}
 *         Whether the time range objects differ
 */

var isRangeDifferent = function isRangeDifferent(a, b) {
  // same object
  if (a === b) {
    return false;
  } // one or the other is undefined


  if (!a && b || !b && a) {
    return true;
  } // length is different


  if (a.length !== b.length) {
    return true;
  } // see if any start/end pair is different


  for (var i = 0; i < a.length; i++) {
    if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
      return true;
    }
  } // if the length and every pair is the same
  // this is the same time range


  return false;
};

/**
 * @file playlist.js
 *
 * Playlist related utilities.
 */
var createTimeRange = videojs__default['default'].createTimeRange;
/**
 * walk backward until we find a duration we can use
 * or return a failure
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {number} endSequence the mediaSequence to stop walking on
 */

var backwardDuration = function backwardDuration(playlist, endSequence) {
  var result = 0;
  var i = endSequence - playlist.mediaSequence; // if a start time is available for segment immediately following
  // the interval, use it

  var segment = playlist.segments[i]; // Walk backward until we find the latest segment with timeline
  // information that is earlier than endSequence

  if (segment) {
    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start,
        precise: true
      };
    }

    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - segment.duration,
        precise: true
      };
    }
  }

  while (i--) {
    segment = playlist.segments[i];

    if (typeof segment.end !== 'undefined') {
      return {
        result: result + segment.end,
        precise: true
      };
    }

    result += segment.duration;

    if (typeof segment.start !== 'undefined') {
      return {
        result: result + segment.start,
        precise: true
      };
    }
  }

  return {
    result: result,
    precise: false
  };
};
/**
 * walk forward until we find a duration we can use
 * or return a failure
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {number} endSequence the mediaSequence to stop walking on
 */


var forwardDuration = function forwardDuration(playlist, endSequence) {
  var result = 0;
  var segment;
  var i = endSequence - playlist.mediaSequence; // Walk forward until we find the earliest segment with timeline
  // information

  for (; i < playlist.segments.length; i++) {
    segment = playlist.segments[i];

    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start - result,
        precise: true
      };
    }

    result += segment.duration;

    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - result,
        precise: true
      };
    }
  } // indicate we didn't find a useful duration estimate


  return {
    result: -1,
    precise: false
  };
};
/**
 * Calculate the media duration from the segments associated with a
 * playlist. The duration of a subinterval of the available segments
 * may be calculated by specifying an end index.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper boundary
 * for the playlist. Defaults to playlist length.
 * @param {number} expired the amount of time that has dropped
 * off the front of the playlist in a live scenario
 * @return {number} the duration between the first available segment
 * and end index.
 */


var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
  if (typeof endSequence === 'undefined') {
    endSequence = playlist.mediaSequence + playlist.segments.length;
  }

  if (endSequence < playlist.mediaSequence) {
    return 0;
  } // do a backward walk to estimate the duration


  var backward = backwardDuration(playlist, endSequence);

  if (backward.precise) {
    // if we were able to base our duration estimate on timing
    // information provided directly from the Media Source, return
    // it
    return backward.result;
  } // walk forward to see if a precise duration estimate can be made
  // that way


  var forward = forwardDuration(playlist, endSequence);

  if (forward.precise) {
    // we found a segment that has been buffered and so its
    // position is known precisely
    return forward.result;
  } // return the less-precise, playlist-based duration estimate


  return backward.result + expired;
};
/**
 * Calculates the duration of a playlist. If a start and end index
 * are specified, the duration will be for the subset of the media
 * timeline between those two indices. The total duration for live
 * playlists is always Infinity.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper
 * boundary for the playlist. Defaults to the playlist media
 * sequence number plus its length.
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @return {number} the duration between the start index and end
 * index.
 */


var duration = function duration(playlist, endSequence, expired) {
  if (!playlist) {
    return 0;
  }

  if (typeof expired !== 'number') {
    expired = 0;
  } // if a slice of the total duration is not requested, use
  // playlist-level duration indicators when they're present


  if (typeof endSequence === 'undefined') {
    // if present, use the duration specified in the playlist
    if (playlist.totalDuration) {
      return playlist.totalDuration;
    } // duration should be Infinity for live playlists


    if (!playlist.endList) {
      return window__default['default'].Infinity;
    }
  } // calculate the total duration based on the segment durations


  return intervalDuration(playlist, endSequence, expired);
};
/**
 * Calculate the time between two indices in the current playlist.
 * Neither the start index nor the end index needs to be within the
 * current playlist, in which case the targetDuration of the playlist
 * is used to approximate the durations of the segments.
 *
 * @param {Object} playlist a media playlist object
 * @param {number} startIndex
 * @param {number} endIndex
 * @return {number} the number of seconds between startIndex and endIndex
 */

var sumDurations = function sumDurations(playlist, startIndex, endIndex) {
  var durations = 0;

  if (startIndex > endIndex) {
    var _ref = [endIndex, startIndex];
    startIndex = _ref[0];
    endIndex = _ref[1];
  }

  if (startIndex < 0) {
    for (var i = startIndex; i < Math.min(0, endIndex); i++) {
      durations += playlist.targetDuration;
    }

    startIndex = 0;
  }

  for (var _i = startIndex; _i < endIndex; _i++) {
    durations += playlist.segments[_i].duration;
  }

  return durations;
};
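
// Worked example (illustrative, hypothetical values): summing from index
// -1 to 2 counts one targetDuration (10s) for the out-of-playlist index
// plus the first two segment durations, 10 + 9 + 8 = 27.
//
//   var playlist = { targetDuration: 10, segments: [{ duration: 9 }, { duration: 8 }] };
//   sumDurations(playlist, -1, 2); // => 27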
/**
 * Determines the media index of the segment corresponding to the safe edge of the live
 * window which is the duration of the last segment plus 2 target durations from the end
 * of the playlist.
 *
 * A liveEdgePadding can be provided which will be used instead of calculating the safe live edge.
 * This corresponds to suggestedPresentationDelay in DASH manifests.
 *
 * @param {Object} playlist
 *        a media playlist object
 * @param {number} [liveEdgePadding]
 *        A number in seconds indicating how far from the end we want to be.
 *        If provided, this value is used instead of calculating the safe live index from the target durations.
 *        Corresponds to suggestedPresentationDelay in DASH manifests.
 * @return {number}
 *         The media index of the segment at the safe live point. 0 if there is no "safe"
 *         point.
 * @function safeLiveIndex
 */

var safeLiveIndex = function safeLiveIndex(playlist, liveEdgePadding) {
  if (!playlist.segments.length) {
    return 0;
  }

  var i = playlist.segments.length;
  var lastSegmentDuration = playlist.segments[i - 1].duration || playlist.targetDuration;
  var safeDistance = typeof liveEdgePadding === 'number' ? liveEdgePadding : lastSegmentDuration + playlist.targetDuration * 2;

  if (safeDistance === 0) {
    return i;
  }

  var distanceFromEnd = 0;

  while (i--) {
    distanceFromEnd += playlist.segments[i].duration;

    if (distanceFromEnd >= safeDistance) {
      break;
    }
  }

  return Math.max(0, i);
};
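
// Worked example (illustrative, hypothetical values): with five 10-second
// segments and a 10-second target duration, the safe distance defaults to
// 10 + 10 * 2 = 30 seconds, which is reached three segments from the end,
// so the safe live point is index 2.
//
//   var playlist = {
//     targetDuration: 10,
//     segments: [10, 10, 10, 10, 10].map(function (duration) {
//       return { duration: duration };
//     })
//   };
//   safeLiveIndex(playlist); // => 2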
/**
 * Calculates the playlist end time
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @param {boolean=} useSafeLiveEnd a boolean value indicating whether or not the
 * playlist end calculation should consider the safe live end
 * (truncate the playlist end by three segments). This is normally
 * used for calculating the end of the playlist's seekable range.
 * This takes into account the value of liveEdgePadding.
 * Setting liveEdgePadding to 0 is equivalent to setting this to false.
 * @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
 * If this is provided, it is used in the safe live end calculation.
 * Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
 * Corresponds to suggestedPresentationDelay in DASH manifests.
 * @return {number} the end time of playlist
 * @function playlistEnd
 */

var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
  if (!playlist || !playlist.segments) {
    return null;
  }

  if (playlist.endList) {
    return duration(playlist);
  }

  if (expired === null) {
    return null;
  }

  expired = expired || 0;
  var endSequence = useSafeLiveEnd ? safeLiveIndex(playlist, liveEdgePadding) : playlist.segments.length;
  return intervalDuration(playlist, playlist.mediaSequence + endSequence, expired);
};
/**
 * Calculates the interval of time that is currently seekable in a
 * playlist. The returned time ranges are relative to the earliest
 * moment in the specified playlist that is still available. A full
 * seekable implementation for live streams would need to offset
 * these values by the duration of content that has expired from the
 * stream.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
 * Corresponds to suggestedPresentationDelay in DASH manifests.
 * @return {TimeRanges} the periods of time that are valid targets
 * for seeking
 */

var seekable = function seekable(playlist, expired, liveEdgePadding) {
  var useSafeLiveEnd = true;
  var seekableStart = expired || 0;
  var seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);

  if (seekableEnd === null) {
    return createTimeRange();
  }

  return createTimeRange(seekableStart, seekableEnd);
};
/**
 * Determine the index and estimated starting time of the segment that
 * contains a specified playback position in a media playlist.
 *
 * @param {Object} playlist the media playlist to query
 * @param {number} currentTime The number of seconds since the earliest
 * possible position to determine the containing segment for
 * @param {number} startIndex
 * @param {number} startTime
 * @return {Object}
 */

var getMediaInfoForTime = function getMediaInfoForTime(playlist, currentTime, startIndex, startTime) {
  var i;
  var segment;
  var numSegments = playlist.segments.length;
  var time = currentTime - startTime;

  if (time < 0) {
    // Walk backward from startIndex in the playlist, adding durations
    // until we find a segment that contains `time` and return it
    if (startIndex > 0) {
      for (i = startIndex - 1; i >= 0; i--) {
        segment = playlist.segments[i];
        time += segment.duration + TIME_FUDGE_FACTOR;

        if (time > 0) {
          return {
            mediaIndex: i,
            startTime: startTime - sumDurations(playlist, startIndex, i)
          };
        }
      }
    } // We were unable to find a good segment within the playlist
    // so select the first segment


    return {
      mediaIndex: 0,
      startTime: currentTime
    };
  } // When startIndex is negative, we first walk forward to first segment
  // adding target durations. If we "run out of time" before getting to
  // the first segment, return the first segment


  if (startIndex < 0) {
    for (i = startIndex; i < 0; i++) {
      time -= playlist.targetDuration;

      if (time < 0) {
        return {
          mediaIndex: 0,
          startTime: currentTime
        };
      }
    }

    startIndex = 0;
  } // Walk forward from startIndex in the playlist, subtracting durations
  // until we find a segment that contains `time` and return it


  for (i = startIndex; i < numSegments; i++) {
    segment = playlist.segments[i];
    time -= segment.duration + TIME_FUDGE_FACTOR;

    if (time < 0) {
      return {
        mediaIndex: i,
        startTime: startTime + sumDurations(playlist, startIndex, i)
      };
    }
  } // We are out of possible candidates so load the last one...


  return {
    mediaIndex: numSegments - 1,
    startTime: currentTime
  };
};
/**
 * Check whether the playlist is blacklisted or not.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is blacklisted or not
 * @function isBlacklisted
 */

var isBlacklisted = function isBlacklisted(playlist) {
  return playlist.excludeUntil && playlist.excludeUntil > Date.now();
};
/**
 * Check whether the playlist is compatible with current playback configuration or has
 * been blacklisted permanently for being incompatible.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is incompatible or not
 * @function isIncompatible
 */

var isIncompatible = function isIncompatible(playlist) {
  return playlist.excludeUntil && playlist.excludeUntil === Infinity;
};
/**
 * Check whether the playlist is enabled or not.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is enabled or not
 * @function isEnabled
 */

var isEnabled = function isEnabled(playlist) {
  var blacklisted = isBlacklisted(playlist);
  return !playlist.disabled && !blacklisted;
};
/**
 * Check whether the playlist has been manually disabled through the representations api.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is disabled manually or not
 * @function isDisabled
 */

var isDisabled = function isDisabled(playlist) {
  return playlist.disabled;
};
/**
 * Returns whether the current playlist is an AES encrypted HLS stream
 *
 * @return {boolean} true if it's an AES encrypted HLS stream
 */

var isAes = function isAes(media) {
  for (var i = 0; i < media.segments.length; i++) {
    if (media.segments[i].key) {
      return true;
    }
  }

  return false;
};
/**
 * Checks if the playlist has a value for the specified attribute
 *
 * @param {string} attr
 *        Attribute to check for
 * @param {Object} playlist
 *        The media playlist object
 * @return {boolean}
 *         Whether the playlist contains a value for the attribute or not
 * @function hasAttribute
 */

var hasAttribute = function hasAttribute(attr, playlist) {
  return playlist.attributes && playlist.attributes[attr];
};
/**
 * Estimates the time required to complete a segment download from the specified playlist
 *
 * @param {number} segmentDuration
 *        Duration of requested segment
 * @param {number} bandwidth
 *        Current measured bandwidth of the player
 * @param {Object} playlist
 *        The media playlist object
 * @param {number=} bytesReceived
 *        Number of bytes already received for the request. Defaults to 0
 * @return {number|NaN}
 *         The estimated time to request the segment. NaN if bandwidth information for
 *         the given playlist is unavailable
 * @function estimateSegmentRequestTime
 */

var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist, bytesReceived) {
  if (bytesReceived === void 0) {
    bytesReceived = 0;
  }

  if (!hasAttribute('BANDWIDTH', playlist)) {
    return NaN;
  }

  var size = segmentDuration * playlist.attributes.BANDWIDTH;
  return (size - bytesReceived * 8) / bandwidth;
};
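
// Worked example (illustrative, hypothetical values): a 4-second segment
// from a 1,000,000 bits/s rendition is about 4,000,000 bits; at a measured
// bandwidth of 2,000,000 bits/s the request should take roughly
// 4000000 / 2000000 = 2 seconds. bytesReceived is multiplied by 8 to
// convert bytes to bits before subtracting.
//
//   var playlist = { attributes: { BANDWIDTH: 1000000 } };
//   estimateSegmentRequestTime(4, 2000000, playlist); // => 2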
1727/*
1728 * Returns whether the current playlist is the lowest rendition
1729 *
1730 * @return {Boolean} true if on lowest rendition
1731 */
1732
1733var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
1734 if (master.playlists.length === 1) {
1735 return true;
1736 }
1737
1738 var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
1739 return master.playlists.filter(function (playlist) {
1740 if (!isEnabled(playlist)) {
1741 return false;
1742 }
1743
1744 return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
1745 }).length === 0;
1746}; // exports
1747
1748var Playlist = {
1749 duration: duration,
1750 seekable: seekable,
1751 safeLiveIndex: safeLiveIndex,
1752 getMediaInfoForTime: getMediaInfoForTime,
1753 isEnabled: isEnabled,
1754 isDisabled: isDisabled,
1755 isBlacklisted: isBlacklisted,
1756 isIncompatible: isIncompatible,
1757 playlistEnd: playlistEnd,
1758 isAes: isAes,
1759 hasAttribute: hasAttribute,
1760 estimateSegmentRequestTime: estimateSegmentRequestTime,
1761 isLowestEnabledRendition: isLowestEnabledRendition
1762};
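// Hedged usage sketch (editor illustration): how the exported Playlist helpers
// compose. The master/media objects below are minimal, hypothetical stubs.
var examplePlaylistHelpers = function () {
  var media = { attributes: { BANDWIDTH: 1000000 }, segments: [] };
  var master = { playlists: [media, { attributes: { BANDWIDTH: 2000000 } }] };
  var enabled = Playlist.isEnabled(media); // => true (not disabled, not blacklisted)

  return enabled && Playlist.isLowestEnabledRendition(master, media); // => true
};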
1763
1764/**
1765 * @file xhr.js
1766 */
1767var videojsXHR = videojs__default['default'].xhr,
1768 mergeOptions$1 = videojs__default['default'].mergeOptions;
1769
1770var callbackWrapper = function callbackWrapper(request, error, response, callback) {
1771 var reqResponse = request.responseType === 'arraybuffer' ? request.response : request.responseText;
1772
1773 if (!error && reqResponse) {
1774 request.responseTime = Date.now();
1775 request.roundTripTime = request.responseTime - request.requestTime;
1776 request.bytesReceived = reqResponse.byteLength || reqResponse.length;
1777
1778 if (!request.bandwidth) {
1779 request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
1780 }
1781 }
1782
1783 if (response.headers) {
1784 request.responseHeaders = response.headers;
1785 } // videojs.xhr now uses a specific code on the error
1786 // object to signal that a request has timed out instead
1787 // of setting a boolean on the request object
1788
1789
1790 if (error && error.code === 'ETIMEDOUT') {
1791 request.timedout = true;
1792 } // videojs.xhr no longer considers status codes outside of 200 and 0
1793 // (for file uris) to be errors, but the old XHR did, so emulate that
1794 // behavior. Status 206 may be used in response to byterange requests.
1795
1796
1797 if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
1798 error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
1799 }
1800
1801 callback(error, request);
1802};
1803
1804var xhrFactory = function xhrFactory() {
1805 var xhr = function XhrFunction(options, callback) {
1806 // Add a default timeout
1807 options = mergeOptions$1({
1808 timeout: 45e3
1809 }, options); // Allow an optional user-specified function to modify the option
1810 // object before we construct the xhr request
1811
1812 var beforeRequest = XhrFunction.beforeRequest || videojs__default['default'].Vhs.xhr.beforeRequest;
1813
1814 if (beforeRequest && typeof beforeRequest === 'function') {
1815 var newOptions = beforeRequest(options);
1816
1817 if (newOptions) {
1818 options = newOptions;
1819 }
1820 } // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overridden
1821 // TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11
1822
1823
1824 var xhrMethod = videojs__default['default'].Vhs.xhr.original === true ? videojsXHR : videojs__default['default'].Vhs.xhr;
1825 var request = xhrMethod(options, function (error, response) {
1826 return callbackWrapper(request, error, response, callback);
1827 });
1828 var originalAbort = request.abort;
1829
1830 request.abort = function () {
1831 request.aborted = true;
1832 return originalAbort.apply(request, arguments);
1833 };
1834
1835 request.uri = options.uri;
1836 request.requestTime = Date.now();
1837 return request;
1838 };
1839
1840 xhr.original = true;
1841 return xhr;
1842};
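// Hedged usage sketch (editor illustration): a beforeRequest hook attached to an
// instance from xhrFactory can rewrite options before every request. The header
// name below is hypothetical.
var exampleXhrBeforeRequest = function (vhsXhr) {
  vhsXhr.beforeRequest = function (options) {
    options.headers = options.headers || {};
    options.headers['X-Example-Token'] = 'demo'; // hypothetical auth header
    return options; // returning a (new) options object replaces the original
  };
};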
1843/**
1844 * Turns segment byterange into a string suitable for use in
1845 * HTTP Range requests
1846 *
1847 * @param {Object} byterange - an object with two values defining the start and end
1848 * of a byte-range
1849 */
1850
1851
1852var byterangeStr = function byterangeStr(byterange) {
1853 // `byterangeEnd` is one less than `offset + length` because the HTTP range
1854 // header uses inclusive ranges
1855 var byterangeEnd = byterange.offset + byterange.length - 1;
1856 var byterangeStart = byterange.offset;
1857 return 'bytes=' + byterangeStart + '-' + byterangeEnd;
1858};
1859/**
1860 * Defines headers for use in the xhr request for a particular segment.
1861 *
1862 * @param {Object} segment - a simplified copy of the segmentInfo object
1863 * from SegmentLoader
1864 */
1865
1866
1867var segmentXhrHeaders = function segmentXhrHeaders(segment) {
1868 var headers = {};
1869
1870 if (segment.byterange) {
1871 headers.Range = byterangeStr(segment.byterange);
1872 }
1873
1874 return headers;
1875};
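// Hedged sketch (editor illustration): HTTP Range headers are inclusive, so a
// byterange of { offset: 100, length: 50 } covers bytes 100 through 149.
var exampleRangeHeaders = function () {
  return segmentXhrHeaders({ byterange: { offset: 100, length: 50 } });
  // => { Range: 'bytes=100-149' }
};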
1876
1877/**
1878 * @file bin-utils.js
1879 */
1880
1881/**
1882 * convert a TimeRange to text
1883 *
1884 * @param {TimeRange} range the timerange to use for conversion
1885 * @param {number} i the iterator on the range to convert
1886 * @return {string} the range in string format
1887 */
1888var textRange = function textRange(range, i) {
1889 return range.start(i) + '-' + range.end(i);
1890};
1891/**
1892 * format a number as hex string
1893 *
1894 * @param {number} e the byte to format
1895 * @param {number} i the index of the byte within its row (a space is added after every second byte)
1896 * @return {string} the hex formatted number as a string
1897 */
1898
1899
1900var formatHexString = function formatHexString(e, i) {
1901 var value = e.toString(16);
1902 return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
1903};
1904
1905var formatAsciiString = function formatAsciiString(e) {
1906 if (e >= 0x20 && e < 0x7e) {
1907 return String.fromCharCode(e);
1908 }
1909
1910 return '.';
1911};
1912/**
1913 * Creates an object for sending to a web worker, converting any properties that are
1914 * TypedArrays into plain objects with separate buffer, byteOffset, and byteLength properties.
1915 *
1916 * @param {Object} message
1917 * Object of properties and values to send to the web worker
1918 * @return {Object}
1919 * Modified message with TypedArray values expanded
1920 * @function createTransferableMessage
1921 */
1922
1923
1924var createTransferableMessage = function createTransferableMessage(message) {
1925 var transferable = {};
1926 Object.keys(message).forEach(function (key) {
1927 var value = message[key];
1928
1929 if (ArrayBuffer.isView(value)) {
1930 transferable[key] = {
1931 bytes: value.buffer,
1932 byteOffset: value.byteOffset,
1933 byteLength: value.byteLength
1934 };
1935 } else {
1936 transferable[key] = value;
1937 }
1938 });
1939 return transferable;
1940};
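// Hedged sketch (editor illustration): a Uint8Array property is expanded so its
// underlying ArrayBuffer can be listed in a worker postMessage transfer list.
var exampleTransferableMessage = function () {
  var data = new Uint8Array([1, 2, 3]);

  return createTransferableMessage({ action: 'push', data: data });
  // => { action: 'push', data: { bytes: <ArrayBuffer>, byteOffset: 0, byteLength: 3 } }
};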
1941/**
1942 * Returns a unique string identifier for a media initialization
1943 * segment.
1944 *
1945 * @param {Object} initSegment
1946 * the init segment object.
1947 *
1948 * @return {string} the generated init segment id
1949 */
1950
1951var initSegmentId = function initSegmentId(initSegment) {
1952 var byterange = initSegment.byterange || {
1953 length: Infinity,
1954 offset: 0
1955 };
1956 return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
1957};
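// Hedged sketch (editor illustration): the id is just the byterange plus URI, so
// initSegmentId({ byterange: { length: 700, offset: 0 }, resolvedUri: 'init.mp4' })
// yields '700,0,init.mp4'; with no byterange it falls back to 'Infinity,0,<uri>'.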
1958/**
1959 * Returns a unique string identifier for a media segment key.
1960 *
1961 * @param {Object} key the encryption key
1962 * @return {string} the unique id for the media segment key.
1963 */
1964
1965var segmentKeyId = function segmentKeyId(key) {
1966 return key.resolvedUri;
1967};
1968/**
1969 * utils to help dump binary data to the console
1970 *
1971 * @param {Array|TypedArray} data
1972 * data to dump to a string
1973 *
1974 * @return {string} the data as a hex string.
1975 */
1976
1977var hexDump = function hexDump(data) {
1978 var bytes = Array.prototype.slice.call(data);
1979 var step = 16;
1980 var result = '';
1981 var hex;
1982 var ascii;
1983
1984 for (var j = 0; j < bytes.length / step; j++) {
1985 hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
1986 ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
1987 result += hex + ' ' + ascii + '\n';
1988 }
1989
1990 return result;
1991};
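// Hedged sketch (editor illustration): bytes are grouped in pairs, 16 per row,
// followed by a printable-ASCII column, e.g.
// hexDump(new Uint8Array([72, 105, 33])) => '4869 21 Hi!\n'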
1992var tagDump = function tagDump(_ref) {
1993 var bytes = _ref.bytes;
1994 return hexDump(bytes);
1995};
1996var textRanges = function textRanges(ranges) {
1997 var result = '';
1998 var i;
1999
2000 for (i = 0; i < ranges.length; i++) {
2001 result += textRange(ranges, i) + ' ';
2002 }
2003
2004 return result;
2005};
2006
2007var utils = /*#__PURE__*/Object.freeze({
2008 __proto__: null,
2009 createTransferableMessage: createTransferableMessage,
2010 initSegmentId: initSegmentId,
2011 segmentKeyId: segmentKeyId,
2012 hexDump: hexDump,
2013 tagDump: tagDump,
2014 textRanges: textRanges
2015});
2016
2017// TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
2018// 25% was arbitrarily chosen, and may need to be refined over time.
2019
2020var SEGMENT_END_FUDGE_PERCENT = 0.25;
2021/**
2022 * Converts a player time (any time that can be gotten/set from player.currentTime(),
2023 * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
2024 * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
2025 *
2026 * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
2027 * point" (a point where we have a mapping from program time to player time, with player
2028 * time being the post transmux start of the segment).
2029 *
2030 * For more details, see [this doc](../../docs/program-time-from-player-time.md).
2031 *
2032 * @param {number} playerTime the player time
2033 * @param {Object} segment the segment which contains the player time
2034 * @return {Date} program time
2035 */
2036
2037var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
2038 if (!segment.dateTimeObject) {
2039 // Can't convert without an "anchor point" for the program time (i.e., a time that can
2040 // be used to map the start of a segment with a real world time).
2041 return null;
2042 }
2043
2044 var transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
2045 var transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart; // get the start of the content from before old content is prepended
2046
2047 var startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
2048 var offsetFromSegmentStart = playerTime - startOfSegment;
2049 return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
2050};
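// Hedged sketch (editor illustration): with a segment anchored at midnight UTC
// whose post-transmux start is 10s including 2s of prepended content, player
// time 15s lands 3s into the segment.
var examplePlayerTimeToProgramTime = function () {
  var segment = {
    dateTimeObject: new Date('2021-01-01T00:00:00.000Z'),
    videoTimingInfo: {
      transmuxerPrependedSeconds: 2,
      transmuxedPresentationStart: 10
    }
  };

  return playerTimeToProgramTime(15, segment); // => 2021-01-01T00:00:03.000Z
};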
2051var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
2052 return videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart - videoTimingInfo.transmuxerPrependedSeconds;
2053};
2054/**
2055 * Finds a segment that contains the requested time, given as an ISO-8601 string. The
2056 * returned segment might be an estimate or an accurate match.
2057 *
2058 * @param {string} programTime The ISO-8601 programTime to find a match for
2059 * @param {Object} playlist A playlist object to search within
2060 */
2061
2062var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
2063 // Assumptions:
2064 // - verifyProgramDateTimeTags has already been run
2065 // - live streams have been started
2066 var dateTimeObject;
2067
2068 try {
2069 dateTimeObject = new Date(programTime);
2070 } catch (e) {
2071 return null;
2072 }
2073
2074 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
2075 return null;
2076 }
2077
2078 var segment = playlist.segments[0];
2079
2080 if (dateTimeObject < segment.dateTimeObject) {
2081 // Requested time is before stream start.
2082 return null;
2083 }
2084
2085 for (var i = 0; i < playlist.segments.length - 1; i++) {
2086 segment = playlist.segments[i];
2087 var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;
2088
2089 if (dateTimeObject < nextSegmentStart) {
2090 break;
2091 }
2092 }
2093
2094 var lastSegment = playlist.segments[playlist.segments.length - 1];
2095 var lastSegmentStart = lastSegment.dateTimeObject;
2096 var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
2097 var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
2098
2099 if (dateTimeObject > lastSegmentEnd) {
2100 // Beyond the end of the stream, or our best guess of the end of the stream.
2101 return null;
2102 }
2103
2104 if (dateTimeObject > lastSegmentStart) {
2105 segment = lastSegment;
2106 }
2107
2108 return {
2109 segment: segment,
2110 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
2111 // Since all segments have accurate date time objects, the segment selected should
2112 // be accurate. However, unless the segment has been transmuxed at some point
2113 // (determined by the presence of the videoTimingInfo object), the segment's "player
2114 // time" (its start time in the player) can only be considered an estimate.
2115 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
2116 };
2117};
2118/**
2119 * Finds a segment that contains the given player time (in seconds).
2120 *
2121 * @param {number} time The player time to find a match for
2122 * @param {Object} playlist A playlist object to search within
2123 */
2124
2125var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
2126 // Assumptions:
2127 // - there will always be a segment.duration
2128 // - we can start from zero
2129 // - segments are in time order
2130 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
2131 return null;
2132 }
2133
2134 var segmentEnd = 0;
2135 var segment;
2136
2137 for (var i = 0; i < playlist.segments.length; i++) {
2138 segment = playlist.segments[i]; // videoTimingInfo is set after the segment is downloaded and transmuxed, and
2139 // should contain the most accurate values we have for the segment's player times.
2140 //
2141 // Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
2142 // back to an estimate based on the manifest derived (inaccurate) segment.duration, to
2143 // calculate an end value.
2144
2145 segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
2146
2147 if (time <= segmentEnd) {
2148 break;
2149 }
2150 }
2151
2152 var lastSegment = playlist.segments[playlist.segments.length - 1];
2153
2154 if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
2155 // The time requested is beyond the stream end.
2156 return null;
2157 }
2158
2159 if (time > segmentEnd) {
2160 // The time is within or beyond the last segment.
2161 //
2162 // Check to see if the time is beyond a reasonable guess of the end of the stream.
2163 if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
2164 // Technically, because the duration value is only an estimate, the time may still
2165 // exist in the last segment, however, there isn't enough information to make even
2166 // a reasonable estimate.
2167 return null;
2168 }
2169
2170 segment = lastSegment;
2171 }
2172
2173 return {
2174 segment: segment,
2175 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
2176 // Because videoTimingInfo is only set after transmux, it is the only way to get
2177 // accurate timing values.
2178 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
2179 };
2180};
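// Hedged sketch (editor illustration): before any segment is transmuxed, segment
// ends are accumulated from manifest durations, so the result is only an estimate.
var exampleFindSegmentForPlayerTime = function () {
  var playlist = { segments: [{ duration: 10 }, { duration: 10 }] };

  return findSegmentForPlayerTime(15, playlist);
  // => { segment: <second segment>, estimatedStart: 10, type: 'estimate' }
};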
2181/**
2182 * Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
2183 * If the offset returned is positive, the programTime occurs after the
2184 * comparisonTimestamp.
2185 * If the offset is negative, the programTime occurs before the comparisonTimestamp.
2186 *
2187 * @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
2188 * @param {string} programTime The programTime as an ISO-8601 string
2189 * @return {number} offset
2190 */
2191
2192var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
2193 var segmentDateTime;
2194 var programDateTime;
2195
2196 try {
2197 segmentDateTime = new Date(comparisonTimeStamp);
2198 programDateTime = new Date(programTime);
2199 } catch (e) {// TODO handle error
2200 }
2201
2202 var segmentTimeEpoch = segmentDateTime.getTime();
2203 var programTimeEpoch = programDateTime.getTime();
2204 return (programTimeEpoch - segmentTimeEpoch) / 1000;
2205};
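// Hedged sketch (editor illustration): the program time below is 30s after the
// comparison timestamp, so the offset is positive.
// getOffsetFromTimestamp('2021-01-01T00:00:00Z', '2021-01-01T00:00:30Z') // => 30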
2206/**
2207 * Checks that all segments in this playlist have programDateTime tags.
2208 *
2209 * @param {Object} playlist A playlist object
2210 */
2211
2212var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
2213 if (!playlist.segments || playlist.segments.length === 0) {
2214 return false;
2215 }
2216
2217 for (var i = 0; i < playlist.segments.length; i++) {
2218 var segment = playlist.segments[i];
2219
2220 if (!segment.dateTimeObject) {
2221 return false;
2222 }
2223 }
2224
2225 return true;
2226};
2227/**
2228 * Returns the programTime of the media given a playlist and a playerTime.
2229 * The playlist must have programDateTime tags for a programDateTime tag to be returned.
2230 * If the segments containing the time requested have not been buffered yet, an estimate
2231 * may be returned to the callback.
2232 *
2233 * @param {Object} args
2234 * @param {Object} args.playlist A playlist object to search within
2235 * @param {number} args.time A playerTime in seconds
2236 * @param {Function} args.callback(err, programTime) The callback to receive the result
2237 * @return {string} err.message A detailed error message
2238 * @return {Object} programTime
2239 * @return {number} programTime.mediaSeconds The streamTime in seconds
2240 * @return {string} programTime.programDateTime The programTime as an ISO-8601 String
2241 */
2242
2243var getProgramTime = function getProgramTime(_ref) {
2244 var playlist = _ref.playlist,
2245 _ref$time = _ref.time,
2246 time = _ref$time === void 0 ? undefined : _ref$time,
2247 callback = _ref.callback;
2248
2249 if (!callback) {
2250 throw new Error('getProgramTime: callback must be provided');
2251 }
2252
2253 if (!playlist || time === undefined) {
2254 return callback({
2255 message: 'getProgramTime: playlist and time must be provided'
2256 });
2257 }
2258
2259 var matchedSegment = findSegmentForPlayerTime(time, playlist);
2260
2261 if (!matchedSegment) {
2262 return callback({
2263 message: 'valid programTime was not found'
2264 });
2265 }
2266
2267 if (matchedSegment.type === 'estimate') {
2268 return callback({
2269 message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
2270 seekTime: matchedSegment.estimatedStart
2271 });
2272 }
2273
2274 var programTimeObject = {
2275 mediaSeconds: time
2276 };
2277 var programTime = playerTimeToProgramTime(time, matchedSegment.segment);
2278
2279 if (programTime) {
2280 programTimeObject.programDateTime = programTime.toISOString();
2281 }
2282
2283 return callback(null, programTimeObject);
2284};
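// Hedged usage sketch (editor illustration): the callback either receives an
// error (possibly with a seekTime hint when only an estimate exists) or a
// programTime object.
var exampleGetProgramTime = function (playlist) {
  getProgramTime({
    playlist: playlist,
    time: 42,
    callback: function (err, programTime) {
      if (err) {
        return; // err.seekTime, when present, is a player time to seek to first
      }
      // programTime.mediaSeconds === 42
      // programTime.programDateTime is an ISO-8601 string, when available
    }
  });
};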
2285/**
2286 * Seeks in the player to a time that matches the given programTime ISO-8601 string.
2287 *
2288 * @param {Object} args
2289 * @param {string} args.programTime A programTime to seek to as an ISO-8601 String
2290 * @param {Object} args.playlist A playlist to look within
2291 * @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
2292 * @param {Function} args.seekTo A method to perform a seek
2293 * @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
2294 * @param {Object} args.tech The tech to seek on
2295 * @param {Function} args.callback(err, newTime) A callback to return the new time to
2296 * @return {string} err.message A detailed error message
2297 * @return {number} newTime The exact time that was seeked to in seconds
2298 */
2299
2300var seekToProgramTime = function seekToProgramTime(_ref2) {
2301 var programTime = _ref2.programTime,
2302 playlist = _ref2.playlist,
2303 _ref2$retryCount = _ref2.retryCount,
2304 retryCount = _ref2$retryCount === void 0 ? 2 : _ref2$retryCount,
2305 seekTo = _ref2.seekTo,
2306 _ref2$pauseAfterSeek = _ref2.pauseAfterSeek,
2307 pauseAfterSeek = _ref2$pauseAfterSeek === void 0 ? true : _ref2$pauseAfterSeek,
2308 tech = _ref2.tech,
2309 callback = _ref2.callback;
2310
2311 if (!callback) {
2312 throw new Error('seekToProgramTime: callback must be provided');
2313 }
2314
2315 if (typeof programTime === 'undefined' || !playlist || !seekTo) {
2316 return callback({
2317 message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
2318 });
2319 }
2320
2321 if (!playlist.endList && !tech.hasStarted_) {
2322 return callback({
2323 message: 'player must be playing a live stream to start buffering'
2324 });
2325 }
2326
2327 if (!verifyProgramDateTimeTags(playlist)) {
2328 return callback({
2329 message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
2330 });
2331 }
2332
2333 var matchedSegment = findSegmentForProgramTime(programTime, playlist); // no match
2334
2335 if (!matchedSegment) {
2336 return callback({
2337 message: programTime + " was not found in the stream"
2338 });
2339 }
2340
2341 var segment = matchedSegment.segment;
2342 var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);
2343
2344 if (matchedSegment.type === 'estimate') {
2345 // we've run out of retries
2346 if (retryCount === 0) {
2347 return callback({
2348 message: programTime + " is not buffered yet. Try again"
2349 });
2350 }
2351
2352 seekTo(matchedSegment.estimatedStart + mediaOffset);
2353 tech.one('seeked', function () {
2354 seekToProgramTime({
2355 programTime: programTime,
2356 playlist: playlist,
2357 retryCount: retryCount - 1,
2358 seekTo: seekTo,
2359 pauseAfterSeek: pauseAfterSeek,
2360 tech: tech,
2361 callback: callback
2362 });
2363 });
2364 return;
2365 } // Since the segment.start value is determined from the buffered end or ending time
2366 // of the prior segment, the seekToTime doesn't need to account for any transmuxer
2367 // modifications.
2368
2369
2370 var seekToTime = segment.start + mediaOffset;
2371
2372 var seekedCallback = function seekedCallback() {
2373 return callback(null, tech.currentTime());
2374 }; // listen for seeked event
2375
2376
2377 tech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this state
2378
2379 if (pauseAfterSeek) {
2380 tech.pause();
2381 }
2382
2383 seekTo(seekToTime);
2384};
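// Hedged usage sketch (editor illustration): wiring seekToProgramTime to a
// player. The player/tech accessors shown are assumptions, not part of this file.
var exampleSeekToProgramTime = function (player, playlist) {
  seekToProgramTime({
    programTime: '2021-01-01T00:00:30.000Z',
    playlist: playlist,
    seekTo: player.currentTime.bind(player),
    tech: player.tech(true), // pauseAfterSeek defaults to true, retryCount to 2
    callback: function (err, newTime) {
      // on success, newTime is the exact time (in seconds) that was seeked to
    }
  });
};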
2385
2386// calls back only once the request readyState is DONE (4), which will only happen if the request is complete.
2387
2388var callbackOnCompleted = function callbackOnCompleted(request, cb) {
2389 if (request.readyState === 4) {
2390 return cb();
2391 }
2392
2393 return;
2394};
2395
2396var containerRequest = function containerRequest(uri, xhr, cb) {
2397 var bytes = [];
2398 var id3Offset;
2399 var finished = false;
2400
2401 var endRequestAndCallback = function endRequestAndCallback(err, req, type, _bytes) {
2402 req.abort();
2403 finished = true;
2404 return cb(err, req, type, _bytes);
2405 };
2406
2407 var progressListener = function progressListener(error, request) {
2408 if (finished) {
2409 return;
2410 }
2411
2412 if (error) {
2413 return endRequestAndCallback(error, request, '', bytes);
2414 } // grab the new part of the content that was just downloaded
2415
2416
2417 var newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytes
2418
2419 bytes = byteHelpers.concatTypedArrays(bytes, byteHelpers.stringToBytes(newPart, true));
2420 id3Offset = id3Offset || id3Helpers.getId3Offset(bytes); // we need at least 10 bytes to determine a type
2421 // or we need at least two bytes after an id3Offset
2422
2423 if (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {
2424 return callbackOnCompleted(request, function () {
2425 return endRequestAndCallback(error, request, '', bytes);
2426 });
2427 }
2428
2429 var type = containers.detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data
2430 // to see the second sync byte, wait until we have enough data
2431 // before declaring it ts
2432
2433 if (type === 'ts' && bytes.length < 188) {
2434 return callbackOnCompleted(request, function () {
2435 return endRequestAndCallback(error, request, '', bytes);
2436 });
2437 } // this may be an unsynced ts segment, so
2438 // wait for 376 bytes (two ts packets) before deciding there is no container
2439
2440
2441 if (!type && bytes.length < 376) {
2442 return callbackOnCompleted(request, function () {
2443 return endRequestAndCallback(error, request, '', bytes);
2444 });
2445 }
2446
2447 return endRequestAndCallback(null, request, type, bytes);
2448 };
2449
2450 var options = {
2451 uri: uri,
2452 beforeSend: function beforeSend(request) {
2453 // this forces the browser to pass the bytes to us unprocessed
2454 request.overrideMimeType('text/plain; charset=x-user-defined');
2455 request.addEventListener('progress', function (_ref) {
2456 var total = _ref.total,
2457 loaded = _ref.loaded;
2458 return callbackWrapper(request, null, {
2459 statusCode: request.status
2460 }, progressListener);
2461 });
2462 }
2463 };
2464 var request = xhr(options, function (error, response) {
2465 return callbackWrapper(request, error, response, progressListener);
2466 });
2467 return request;
2468};
2469
2470var EventTarget$1 = videojs__default['default'].EventTarget,
2471 mergeOptions$2 = videojs__default['default'].mergeOptions;
2472
2473var dashPlaylistUnchanged = function dashPlaylistUnchanged(a, b) {
2474 if (!isPlaylistUnchanged(a, b)) {
2475 return false;
2476 } // For dash, the check above will often return true even when the playlist has
2477 // actually changed, because mediaSequence isn't a dash concept and we often just
2478 // set it to 1, so two different playlists with the same number of segments can
2479 // look identical. We therefore need to verify that the underlying segments
2480 // haven't changed as well.
2481 // If the sidx changed, then the playlists are different.
2482
2483
2484 if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
2485 return false;
2486 } else if (!a.sidx && b.sidx || a.sidx && !b.sidx) {
2487 return false;
2488 } // one or the other does not have segments
2489 // there was a change.
2490
2491
2492 if (a.segments && !b.segments || !a.segments && b.segments) {
2493 return false;
2494 } // neither has segments, so nothing changed
2495
2496
2497 if (!a.segments && !b.segments) {
2498 return true;
2499 } // check segments themselves
2500
2501
2502 for (var i = 0; i < a.segments.length; i++) {
2503 var aSegment = a.segments[i];
2504 var bSegment = b.segments[i]; // if uris are different between segments there was a change
2505
2506 if (aSegment.uri !== bSegment.uri) {
2507 return false;
2508 } // neither segment has a byterange, there will be no byterange change.
2509
2510
2511 if (!aSegment.byterange && !bSegment.byterange) {
2512 continue;
2513 }
2514
2515 var aByterange = aSegment.byterange;
2516 var bByterange = bSegment.byterange; // if byterange only exists on one of the segments, there was a change.
2517
2518 if (aByterange && !bByterange || !aByterange && bByterange) {
2519 return false;
2520 } // if both segments have byterange with different offsets, there was a change.
2521
2522
2523 if (aByterange.offset !== bByterange.offset || aByterange.length !== bByterange.length) {
2524 return false;
2525 }
2526 } // if everything was the same with segments, this is the same playlist.
2527
2528
2529 return true;
2530};
2531/**
2532 * Parses the master XML string and updates playlist URI references.
2533 *
2534 * @param {Object} config
2535 * Object of arguments
2536 * @param {string} config.masterXml
2537 * The mpd XML
2538 * @param {string} config.srcUrl
2539 * The mpd URL
2540 * @param {number} config.clientOffset
2541 * A time difference between server and client
2542 * @param {Object} config.sidxMapping
2543 * SIDX mappings for moof/mdat URIs and byte ranges
2544 * @return {Object}
2545 * The parsed mpd manifest object
2546 */
2547
2548
2549var parseMasterXml = function parseMasterXml(_ref) {
2550 var masterXml = _ref.masterXml,
2551 srcUrl = _ref.srcUrl,
2552 clientOffset = _ref.clientOffset,
2553 sidxMapping = _ref.sidxMapping;
2554 var master = mpdParser.parse(masterXml, {
2555 manifestUri: srcUrl,
2556 clientOffset: clientOffset,
2557 sidxMapping: sidxMapping
2558 });
2559 addPropertiesToMaster(master, srcUrl);
2560 return master;
2561};
2562var generateSidxKey = function generateSidxKey(sidxInfo) {
2563 // the byterange end is inclusive, so it is one less than offset + length
2564 var sidxByteRangeEnd = sidxInfo.byterange.offset + sidxInfo.byterange.length - 1;
2565 return sidxInfo.uri + '-' + sidxInfo.byterange.offset + '-' + sidxByteRangeEnd;
2566};
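// Hedged sketch (editor illustration):
// generateSidxKey({ uri: 'seg.mp4', byterange: { offset: 0, length: 600 } })
// => 'seg.mp4-0-599'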
2567/**
2568 * Returns a new master manifest that is the result of merging an updated master manifest
2569 * into the original version.
2570 *
2571 * @param {Object} oldMaster
2572 * The old parsed mpd object
2573 * @param {Object} newMaster
2574 * The updated parsed mpd object
2575 * @return {Object}
2576 * A new object representing the original master manifest with the updated media
2577 * playlists merged in
2578 */
2579
2580var updateMaster$1 = function updateMaster$1(oldMaster, newMaster, sidxMapping) {
2581 var noChanges = true;
2582 var update = mergeOptions$2(oldMaster, {
2583 // These are top level properties that can be updated
2584 duration: newMaster.duration,
2585 minimumUpdatePeriod: newMaster.minimumUpdatePeriod
2586 }); // First update the playlists in playlist list
2587
2588 for (var i = 0; i < newMaster.playlists.length; i++) {
2589 var playlist = newMaster.playlists[i];
2590
2591 if (playlist.sidx) {
2592 var sidxKey = generateSidxKey(playlist.sidx);
2593
2594 if (sidxMapping && sidxMapping[sidxKey]) {
2595 mpdParser.addSidxSegmentsToPlaylist(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
2596 }
2597 }
2598
2599 var playlistUpdate = updateMaster(update, playlist, dashPlaylistUnchanged);
2600
2601 if (playlistUpdate) {
2602 update = playlistUpdate;
2603 noChanges = false;
2604 }
2605 } // Then update media group playlists
2606
2607
2608 forEachMediaGroup(newMaster, function (properties, type, group, label) {
2609 if (properties.playlists && properties.playlists.length) {
2610 var id = properties.playlists[0].id;
2611
2612 var _playlistUpdate = updateMaster(update, properties.playlists[0], dashPlaylistUnchanged);
2613
2614 if (_playlistUpdate) {
2615 update = _playlistUpdate; // update the playlist reference within media groups
2616
2617 update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
2618 noChanges = false;
2619 }
2620 }
2621 });
2622
2623 if (newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
2624 noChanges = false;
2625 }
2626
2627 if (noChanges) {
2628 return null;
2629 }
2630
2631 return update;
2632}; // SIDX should be equivalent if the URI and byteranges of the SIDX match.
2633// If the SIDXs have maps, the two maps should match,
2634// both `a` and `b` missing SIDXs is considered matching.
2635// If `a` or `b` but not both have a map, they aren't matching.
2636
2637var equivalentSidx = function equivalentSidx(a, b) {
2638 var neitherMap = Boolean(!a.map && !b.map);
2639 var equivalentMap = neitherMap || Boolean(a.map && b.map && a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length);
2640 return equivalentMap && a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;
2641}; // exported for testing
2642
2643
2644var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
2645 var newSidxMapping = {};
2646
2647 for (var id in playlists) {
2648 var playlist = playlists[id];
2649 var currentSidxInfo = playlist.sidx;
2650
2651 if (currentSidxInfo) {
2652 var key = generateSidxKey(currentSidxInfo);
2653
2654 if (!oldSidxMapping[key]) {
2655 break;
2656 }
2657
2658 var savedSidxInfo = oldSidxMapping[key].sidxInfo;
2659
2660 if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
2661 newSidxMapping[key] = oldSidxMapping[key];
2662 }
2663 }
2664 }
2665
2666 return newSidxMapping;
2667};
2668/**
2669 * A function that filters out changed items as they need to be requested separately.
2670 *
2671 * The method is exported for testing
2672 *
2673 * @param {Object} master the parsed mpd XML returned via mpd-parser
2674 * @param {Object} oldSidxMapping the SIDX to compare against
2675 */
2676
2677var filterChangedSidxMappings = function filterChangedSidxMappings(master, oldSidxMapping) {
2678 var videoSidx = compareSidxEntry(master.playlists, oldSidxMapping);
2679 var mediaGroupSidx = videoSidx;
2680 forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
2681 if (properties.playlists && properties.playlists.length) {
2682 var playlists = properties.playlists;
2683 mediaGroupSidx = mergeOptions$2(mediaGroupSidx, compareSidxEntry(playlists, oldSidxMapping));
2684 }
2685 });
2686 return mediaGroupSidx;
2687};
2688
2689var DashPlaylistLoader = /*#__PURE__*/function (_EventTarget) {
2690 _inheritsLoose__default['default'](DashPlaylistLoader, _EventTarget);
2691
2692 // DashPlaylistLoader must accept either a src url or a playlist because subsequent
2693 // playlist loader setups from media groups will expect to be able to pass a playlist
2694 // (since there aren't external URLs to media playlists with DASH)
2695 function DashPlaylistLoader(srcUrlOrPlaylist, vhs, options, masterPlaylistLoader) {
2696 var _this;
2697
2698 if (options === void 0) {
2699 options = {};
2700 }
2701
2702 _this = _EventTarget.call(this) || this;
2703 _this.masterPlaylistLoader_ = masterPlaylistLoader || _assertThisInitialized__default['default'](_this);
2704
2705 if (!masterPlaylistLoader) {
2706 _this.isMaster_ = true;
2707 }
2708
2709 var _options = options,
2710 _options$withCredenti = _options.withCredentials,
2711 withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
2712 _options$handleManife = _options.handleManifestRedirects,
2713 handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
2714 _this.vhs_ = vhs;
2715 _this.withCredentials = withCredentials;
2716 _this.handleManifestRedirects = handleManifestRedirects;
2717
2718 if (!srcUrlOrPlaylist) {
2719 throw new Error('A non-empty playlist URL or object is required');
2720 } // event naming?
2721
2722
2723 _this.on('minimumUpdatePeriod', function () {
2724 _this.refreshXml_();
2725 }); // live playlist staleness timeout
2726
2727
2728 _this.on('mediaupdatetimeout', function () {
2729 _this.refreshMedia_(_this.media().id);
2730 });
2731
2732 _this.state = 'HAVE_NOTHING';
2733 _this.loadedPlaylists_ = {}; // initialize the loader state
2734 // The masterPlaylistLoader will be created with a string
2735
2736 if (_this.isMaster_) {
2737 _this.masterPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes
2738 // once multi-period is refactored
2739
2740 _this.masterPlaylistLoader_.sidxMapping_ = {};
2741 } else {
2742 _this.childPlaylist_ = srcUrlOrPlaylist;
2743 }
2744
2745 return _this;
2746 }
2747
2748 var _proto = DashPlaylistLoader.prototype;
2749
2750 _proto.requestErrored_ = function requestErrored_(err, request, startingState) {
2751 // disposed
2752 if (!this.request) {
2753 return true;
2754 } // pending request is cleared
2755
2756
2757 this.request = null;
2758
2759 if (err) {
2760 // use the provided error object or create one
2761 // based on the request/response
2762 this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
2763 status: request.status,
2764 message: 'DASH request error at URL: ' + request.uri,
2765 response: request.response,
2766 // MEDIA_ERR_NETWORK
2767 code: 2
2768 };
2769
2770 if (startingState) {
2771 this.state = startingState;
2772 }
2773
2774 this.trigger('error');
2775 return true;
2776 }
2777 }
2778 /**
2779 * Verify that the container of the sidx segment can be parsed
2780 * and if it can, get and parse that segment.
2781 */
2782 ;
2783
2784 _proto.addSidxSegments_ = function addSidxSegments_(playlist, startingState, cb) {
2785 var _this2 = this;
2786
2787 var sidxKey = playlist.sidx && generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.
2788
2789 if (!playlist.sidx || !sidxKey || this.masterPlaylistLoader_.sidxMapping_[sidxKey]) {
2790 // keep this function async
2791 this.mediaRequest_ = window__default['default'].setTimeout(function () {
2792 return cb(false);
2793 }, 0);
2794 return;
2795 } // resolve the segment URL relative to the playlist
2796
2797
2798 var uri = resolveManifestRedirect(this.handleManifestRedirects, playlist.sidx.resolvedUri);
2799 var sidxMapping = this.masterPlaylistLoader_.sidxMapping_;
2800 sidxMapping[sidxKey] = {
2801 sidxInfo: playlist.sidx
2802 };
2803
2804 var fin = function fin(err, request) {
2805 if (_this2.requestErrored_(err, request, startingState)) {
2806 return;
2807 }
2808
2809 var sidx = parseSidx__default['default'](byteHelpers.toUint8(request.response).subarray(8));
2810 sidxMapping[sidxKey].sidx = sidx;
2811 mpdParser.addSidxSegmentsToPlaylist(playlist, sidx, playlist.sidx.resolvedUri);
2812 return cb(true);
2813 };
2814
2815 this.request = containerRequest(uri, this.vhs_.xhr, function (err, request, container, bytes) {
2816 if (err) {
2817 return fin(err, request);
2818 }
2819
2820 if (!container || container !== 'mp4') {
2821 return fin({
2822 status: request.status,
2823 message: "Unsupported " + (container || 'unknown') + " container type for sidx segment at URL: " + uri,
2824 // response is just bytes in this case
2825 // but we really don't want to return that.
2826 response: '',
2827 playlist: playlist,
2828 internal: true,
2829 blacklistDuration: Infinity,
2830 // MEDIA_ERR_NETWORK
2831 code: 2
2832 }, request);
2833 } // if we already downloaded the sidx bytes in the container request, use them
2834
2835
2836 var _playlist$sidx$bytera = playlist.sidx.byterange,
2837 offset = _playlist$sidx$bytera.offset,
2838 length = _playlist$sidx$bytera.length;
2839
2840 if (bytes.length >= length + offset) {
2841 return fin(err, {
2842 response: bytes.subarray(offset, offset + length),
2843 status: request.status,
2844 uri: request.uri
2845 });
2846 } // otherwise request sidx bytes
2847
2848
2849 _this2.request = _this2.vhs_.xhr({
2850 uri: uri,
2851 responseType: 'arraybuffer',
2852 headers: segmentXhrHeaders({
2853 byterange: playlist.sidx.byterange
2854 })
2855 }, fin);
2856 });
2857 };
2858
2859 _proto.dispose = function dispose() {
2860 this.trigger('dispose');
2861 this.stopRequest();
2862 this.loadedPlaylists_ = {};
2863 window__default['default'].clearTimeout(this.minimumUpdatePeriodTimeout_);
2864 window__default['default'].clearTimeout(this.mediaRequest_);
2865 window__default['default'].clearTimeout(this.mediaUpdateTimeout);
2866 this.off();
2867 };
2868
2869 _proto.hasPendingRequest = function hasPendingRequest() {
2870 return this.request || this.mediaRequest_;
2871 };
2872
2873 _proto.stopRequest = function stopRequest() {
2874 if (this.request) {
2875 var oldRequest = this.request;
2876 this.request = null;
2877 oldRequest.onreadystatechange = null;
2878 oldRequest.abort();
2879 }
2880 };
2881
2882 _proto.media = function media(playlist) {
2883 var _this3 = this;
2884
2885 // getter
2886 if (!playlist) {
2887 return this.media_;
2888 } // setter
2889
2890
2891 if (this.state === 'HAVE_NOTHING') {
2892 throw new Error('Cannot switch media playlist from ' + this.state);
2893 }
2894
2895 var startingState = this.state; // find the playlist object if the target playlist has been specified by URI
2896
2897 if (typeof playlist === 'string') {
2898 if (!this.masterPlaylistLoader_.master.playlists[playlist]) {
2899 throw new Error('Unknown playlist URI: ' + playlist);
2900 }
2901
2902 playlist = this.masterPlaylistLoader_.master.playlists[playlist];
2903 }
2904
2905 var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediately
2906
2907 if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
2908 this.state = 'HAVE_METADATA';
2909 this.media_ = playlist; // trigger media change if the active media has been updated
2910
2911 if (mediaChange) {
2912 this.trigger('mediachanging');
2913 this.trigger('mediachange');
2914 }
2915
2916 return;
2917 } // switching to the active playlist is a no-op
2918
2919
2920 if (!mediaChange) {
2921 return;
2922 } // switching from an already loaded playlist
2923
2924
2925 if (this.media_) {
2926 this.trigger('mediachanging');
2927 }
2928
2929 this.addSidxSegments_(playlist, startingState, function (sidxChanged) {
2930 // everything is ready just continue to haveMetadata
2931 _this3.haveMetadata({
2932 startingState: startingState,
2933 playlist: playlist
2934 });
2935 });
2936 };
2937
2938 _proto.haveMetadata = function haveMetadata(_ref2) {
2939 var startingState = _ref2.startingState,
2940 playlist = _ref2.playlist;
2941 this.state = 'HAVE_METADATA';
2942 this.loadedPlaylists_[playlist.id] = playlist;
2943 this.mediaRequest_ = null; // This will trigger loadedplaylist
2944
2945 this.refreshMedia_(playlist.id); // fire loadedmetadata the first time a media playlist is loaded
2946 // to resolve setup of media groups
2947
2948 if (startingState === 'HAVE_MASTER') {
2949 this.trigger('loadedmetadata');
2950 } else {
2951 // trigger media change if the active media has been updated
2952 this.trigger('mediachange');
2953 }
2954 };
2955
2956 _proto.pause = function pause() {
2957 this.stopRequest();
2958 window__default['default'].clearTimeout(this.mediaUpdateTimeout);
2959 window__default['default'].clearTimeout(this.minimumUpdatePeriodTimeout_);
2960
2961 if (this.state === 'HAVE_NOTHING') {
2962 // If we pause the loader before any data has been retrieved, it's as if we never
2963 // started, so reset to an unstarted state.
2964 this.started = false;
2965 }
2966 };
2967
2968 _proto.load = function load(isFinalRendition) {
2969 var _this4 = this;
2970
2971 window__default['default'].clearTimeout(this.mediaUpdateTimeout);
2972 window__default['default'].clearTimeout(this.minimumUpdatePeriodTimeout_);
2973 var media = this.media();
2974
2975 if (isFinalRendition) {
2976 var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
2977 this.mediaUpdateTimeout = window__default['default'].setTimeout(function () {
2978 return _this4.load();
2979 }, delay);
2980 return;
2981 } // because the playlists are internal to the manifest, load should either load the
2982 // main manifest, or do nothing but trigger an event
2983
2984
2985 if (!this.started) {
2986 this.start();
2987 return;
2988 }
2989
2990 if (media && !media.endList) {
2991 this.trigger('mediaupdatetimeout');
2992 } else {
2993 this.trigger('loadedplaylist');
2994 }
2995 };
2996
2997 _proto.start = function start() {
2998 var _this5 = this;
2999
3000 this.started = true; // We don't need to request the master manifest again
3001 // Call this asynchronously to match the xhr request behavior below
3002
3003 if (!this.isMaster_) {
3004 this.mediaRequest_ = window__default['default'].setTimeout(function () {
3005 return _this5.haveMaster_();
3006 }, 0);
3007 return;
3008 }
3009
3010 this.requestMaster_(function (req, masterChanged) {
3011 _this5.haveMaster_();
3012
3013 if (!_this5.hasPendingRequest() && !_this5.media_) {
3014 _this5.media(_this5.masterPlaylistLoader_.master.playlists[0]);
3015 }
3016 });
3017 };
3018
3019 _proto.requestMaster_ = function requestMaster_(cb) {
3020 var _this6 = this;
3021
3022 this.request = this.vhs_.xhr({
3023 uri: this.masterPlaylistLoader_.srcUrl,
3024 withCredentials: this.withCredentials
3025 }, function (error, req) {
3026 if (_this6.requestErrored_(error, req)) {
3027 if (_this6.state === 'HAVE_NOTHING') {
3028 _this6.started = false;
3029 }
3030
3031 return;
3032 }
3033
3034 var masterChanged = req.responseText !== _this6.masterPlaylistLoader_.masterXml_;
3035 _this6.masterPlaylistLoader_.masterXml_ = req.responseText;
3036
3037 if (req.responseHeaders && req.responseHeaders.date) {
3038 _this6.masterLoaded_ = Date.parse(req.responseHeaders.date);
3039 } else {
3040 _this6.masterLoaded_ = Date.now();
3041 }
3042
3043 _this6.masterPlaylistLoader_.srcUrl = resolveManifestRedirect(_this6.handleManifestRedirects, _this6.masterPlaylistLoader_.srcUrl, req);
3044
3045 if (masterChanged) {
3046 _this6.handleMaster_();
3047
3048 _this6.syncClientServerClock_(function () {
3049 return cb(req, masterChanged);
3050 });
3051
3052 return;
3053 }
3054
3055 return cb(req, masterChanged);
3056 });
3057 }
3058 /**
3059 * Parses the master xml for UTCTiming node to sync the client clock to the server
3060 * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
3061 *
3062 * @param {Function} done
3063 * Function to call when clock sync has completed
3064 */
3065 ;
3066
3067 _proto.syncClientServerClock_ = function syncClientServerClock_(done) {
3068 var _this7 = this;
3069
3070 var utcTiming = mpdParser.parseUTCTiming(this.masterPlaylistLoader_.masterXml_); // No UTCTiming element found in the mpd. Use Date header from mpd request as the
3071 // server clock
3072
3073 if (utcTiming === null) {
3074 this.masterPlaylistLoader_.clientOffset_ = this.masterLoaded_ - Date.now();
3075 return done();
3076 }
3077
3078 if (utcTiming.method === 'DIRECT') {
3079 this.masterPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
3080 return done();
3081 }
3082
3083 this.request = this.vhs_.xhr({
3084 uri: resolveUrl(this.masterPlaylistLoader_.srcUrl, utcTiming.value),
3085 method: utcTiming.method,
3086 withCredentials: this.withCredentials
3087 }, function (error, req) {
3088 // disposed
3089 if (!_this7.request) {
3090 return;
3091 }
3092
3093 if (error) {
3094 // sync request failed, fall back to using date header from mpd
3095 // TODO: log warning
3096 _this7.masterPlaylistLoader_.clientOffset_ = _this7.masterLoaded_ - Date.now();
3097 return done();
3098 }
3099
3100 var serverTime;
3101
3102 if (utcTiming.method === 'HEAD') {
3103 if (!req.responseHeaders || !req.responseHeaders.date) {
3104 // expected date header not present, fall back to using date header from mpd
3105 // TODO: log warning
3106 serverTime = _this7.masterLoaded_;
3107 } else {
3108 serverTime = Date.parse(req.responseHeaders.date);
3109 }
3110 } else {
3111 serverTime = Date.parse(req.responseText);
3112 }
3113
3114 _this7.masterPlaylistLoader_.clientOffset_ = serverTime - Date.now();
3115 done();
3116 });
3117 };
3118
3119 _proto.haveMaster_ = function haveMaster_() {
3120 this.state = 'HAVE_MASTER';
3121
3122 if (this.isMaster_) {
3123 // We have the master playlist at this point, so
3124 // trigger this to allow MasterPlaylistController
3125 // to make an initial playlist selection
3126 this.trigger('loadedplaylist');
3127 } else if (!this.media_) {
3128 // no media playlist was specifically selected so select
3129 // the one the child playlist loader was created with
3130 this.media(this.childPlaylist_);
3131 }
3132 };
3133
3134 _proto.handleMaster_ = function handleMaster_() {
3135 // clear media request
3136 this.mediaRequest_ = null;
3137 var newMaster = parseMasterXml({
3138 masterXml: this.masterPlaylistLoader_.masterXml_,
3139 srcUrl: this.masterPlaylistLoader_.srcUrl,
3140 clientOffset: this.masterPlaylistLoader_.clientOffset_,
3141 sidxMapping: this.masterPlaylistLoader_.sidxMapping_
3142 });
3143 var oldMaster = this.masterPlaylistLoader_.master; // if we have an old master to compare the new master against
3144
3145 if (oldMaster) {
3146 newMaster = updateMaster$1(oldMaster, newMaster, this.masterPlaylistLoader_.sidxMapping_);
3147 } // only update master if we have a new master
3148
3149
3150 this.masterPlaylistLoader_.master = newMaster ? newMaster : oldMaster;
3151 var location = this.masterPlaylistLoader_.master.locations && this.masterPlaylistLoader_.master.locations[0];
3152
3153 if (location && location !== this.masterPlaylistLoader_.srcUrl) {
3154 this.masterPlaylistLoader_.srcUrl = location;
3155 } // if the minimumUpdatePeriod was changed, update the minimumUpdatePeriodTimeout_
3156
3157
3158 if (!oldMaster || newMaster && oldMaster.minimumUpdatePeriod !== newMaster.minimumUpdatePeriod) {
3159 this.updateMinimumUpdatePeriodTimeout_();
3160 }
3161
3162 return Boolean(newMaster);
3163 };
3164
3165 _proto.updateMinimumUpdatePeriodTimeout_ = function updateMinimumUpdatePeriodTimeout_() {
3166 var _this8 = this;
3167
3168 // Clear existing timeout
3169 window__default['default'].clearTimeout(this.minimumUpdatePeriodTimeout_);
3170
3171 var createMUPTimeout = function createMUPTimeout(mup) {
3172 _this8.minimumUpdatePeriodTimeout_ = window__default['default'].setTimeout(function () {
3173 _this8.trigger('minimumUpdatePeriod');
3174
3175 createMUPTimeout(mup);
3176 }, mup);
3177 };
3178
3179 var minimumUpdatePeriod = this.masterPlaylistLoader_.master && this.masterPlaylistLoader_.master.minimumUpdatePeriod;
3180
3181 if (minimumUpdatePeriod > 0) {
3182 createMUPTimeout(minimumUpdatePeriod); // If the minimumUpdatePeriod has a value of 0, that indicates that the current
3183 // MPD has no future validity, so a new one will need to be acquired when new
3184 // media segments are to be made available. Thus, we use the target duration
3185 // in this case
3186 } else if (minimumUpdatePeriod === 0) {
3187 // If we haven't yet selected a playlist, wait until then so we know the
3188 // target duration
3189 if (!this.media()) {
3190 this.one('loadedplaylist', function () {
3191 createMUPTimeout(_this8.media().targetDuration * 1000);
3192 });
3193 } else {
3194 createMUPTimeout(this.media().targetDuration * 1000);
3195 }
3196 }
3197 }
3198 /**
3199 * Sends request to refresh the master xml and updates the parsed master manifest
3200 */
3201 ;
3202
3203 _proto.refreshXml_ = function refreshXml_() {
3204 var _this9 = this;
3205
3206 this.requestMaster_(function (req, masterChanged) {
3207 if (!masterChanged) {
3208 return;
3209 }
3210
3211 if (_this9.media_) {
3212 _this9.media_ = _this9.masterPlaylistLoader_.master.playlists[_this9.media_.id];
3213 } // This will filter out updated sidx info from the mapping
3214
3215
3216 _this9.masterPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(_this9.masterPlaylistLoader_.master, _this9.masterPlaylistLoader_.sidxMapping_);
3217
3218 _this9.addSidxSegments_(_this9.media(), _this9.state, function (sidxChanged) {
3219 // TODO: do we need to reload the current playlist?
3220 _this9.refreshMedia_(_this9.media().id);
3221 });
3222 });
3223 }
3224 /**
3225 * Refreshes the media playlist by re-parsing the master xml and updating playlist
3226 * references. If this is an alternate loader, the updated parsed manifest is retrieved
3227 * from the master loader.
3228 */
3229 ;
3230
3231 _proto.refreshMedia_ = function refreshMedia_(mediaID) {
3232 var _this10 = this;
3233
3234 if (!mediaID) {
3235 throw new Error('refreshMedia_ must take a media id');
3236 } // for the master loader we have to reparse the master xml
3237 // to re-create segments based on current timing values,
3238 // which may change the media playlists. We only skip updating
3239 // the master if this is the first time this.media_ is being set,
3240 // since the master was just parsed in that case.
3241
3242
3243 if (this.media_ && this.isMaster_) {
3244 this.handleMaster_();
3245 }
3246
3247 var playlists = this.masterPlaylistLoader_.master.playlists;
3248 var mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];
3249
3250 if (mediaChanged) {
3251 this.media_ = playlists[mediaID];
3252 } else {
3253 this.trigger('playlistunchanged');
3254 }
3255
3256 if (!this.media().endList) {
3257 this.mediaUpdateTimeout = window__default['default'].setTimeout(function () {
3258 _this10.trigger('mediaupdatetimeout');
3259 }, refreshDelay(this.media(), Boolean(mediaChanged)));
3260 }
3261
3262 this.trigger('loadedplaylist');
3263 };
3264
3265 return DashPlaylistLoader;
3266}(EventTarget$1);
3267
3268var Config = {
3269 GOAL_BUFFER_LENGTH: 30,
3270 MAX_GOAL_BUFFER_LENGTH: 60,
3271 BACK_BUFFER_LENGTH: 30,
3272 GOAL_BUFFER_LENGTH_RATE: 1,
3273 // 0.5 MB/s
3274 INITIAL_BANDWIDTH: 4194304,
3275 // A fudge factor to apply to advertised playlist bitrates to account for
3276 // temporary fluctuations in client bandwidth
3277 BANDWIDTH_VARIANCE: 1.2,
3278 // How much of the buffer must be filled before we consider upswitching
3279 BUFFER_LOW_WATER_LINE: 0,
3280 MAX_BUFFER_LOW_WATER_LINE: 30,
3281 // TODO: Remove this when experimentalBufferBasedABR is removed
3282 EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
3283 BUFFER_LOW_WATER_LINE_RATE: 1,
3284 // If the buffer is greater than the high water line, we won't switch down
3285 BUFFER_HIGH_WATER_LINE: 30
3286};
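// Hedged sketch (editor illustration): these defaults are typically tuned via
// the matching properties VHS exposes on videojs.Vhs (an assumption based on the
// VHS docs), rather than by editing this object, e.g.:
// videojs.Vhs.GOAL_BUFFER_LENGTH = 60; // buffer up to ~60s ahead of the playhead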
3287
3288var stringToArrayBuffer = function stringToArrayBuffer(string) {
3289 var view = new Uint8Array(new ArrayBuffer(string.length));
3290
3291 for (var i = 0; i < string.length; i++) {
3292 view[i] = string.charCodeAt(i);
3293 }
3294
3295 return view.buffer;
3296};
3297
3298/* global Blob, BlobBuilder, Worker */
3299// unify worker interface
3300var browserWorkerPolyFill = function browserWorkerPolyFill(workerObj) {
3301 // node only supports on/off
3302 workerObj.on = workerObj.addEventListener;
3303 workerObj.off = workerObj.removeEventListener;
3304 return workerObj;
3305};
3306
3307var createObjectURL = function createObjectURL(str) {
3308 try {
3309 return URL.createObjectURL(new Blob([str], {
3310 type: 'application/javascript'
3311 }));
3312 } catch (e) {
3313 var blob = new BlobBuilder();
3314 blob.append(str);
3315 return URL.createObjectURL(blob.getBlob());
3316 }
3317};
3318
3319var factory = function factory(code) {
3320 return function () {
3321 var objectUrl = createObjectURL(code);
3322 var worker = browserWorkerPolyFill(new Worker(objectUrl));
3323 worker.objURL = objectUrl;
3324 var terminate = worker.terminate;
3325 worker.on = worker.addEventListener;
3326 worker.off = worker.removeEventListener;
3327
3328 worker.terminate = function () {
3329 URL.revokeObjectURL(objectUrl);
3330 return terminate.call(this);
3331 };
3332
3333 return worker;
3334 };
3335};
3336var transform = function transform(code) {
3337 return "var browserWorkerPolyFill = " + browserWorkerPolyFill.toString() + ";\n" + 'browserWorkerPolyFill(self);\n' + code;
3338};
3339
3340var getWorkerString = function getWorkerString(fn) {
3341 return fn.toString().replace(/^function.+?{/, '').slice(0, -1);
3342};
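// Hedged usage sketch (editor illustration): the round-trip is transform ->
// factory -> Worker. The worker body below is hypothetical.
var exampleWorkerFactory = function () {
  var code = transform('self.onmessage = function (e) { self.postMessage(e.data); };');
  var createWorker = factory(code);
  var worker = createWorker(); // Blob-backed Worker with on/off aliases

  worker.terminate(); // also revokes the underlying object URL
  return worker;
};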
3343
3344/* rollup-plugin-worker-factory start for worker!/Users/gkatsevman/p/http-streaming-release/src/transmuxer-worker.js */
3345var workerCode = transform(getWorkerString(function () {
3346 /**
3347 * mux.js
3348 *
3349 * Copyright (c) Brightcove
3350 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
3351 *
3352 * A lightweight readable stream implementation that handles event dispatching.
3353 * Objects that inherit from streams should call init in their constructors.
3354 */
3355
3356 var Stream = function Stream() {
3357 this.init = function () {
3358 var listeners = {};
3359 /**
3360 * Add a listener for a specified event type.
3361 * @param type {string} the event name
3362 * @param listener {function} the callback to be invoked when an event of
3363 * the specified type occurs
3364 */
3365
3366 this.on = function (type, listener) {
3367 if (!listeners[type]) {
3368 listeners[type] = [];
3369 }
3370
3371 listeners[type] = listeners[type].concat(listener);
3372 };
3373 /**
3374 * Remove a listener for a specified event type.
3375 * @param type {string} the event name
3376 * @param listener {function} a function previously registered for this
3377 * type of event through `on`
3378 */
3379
3380
3381 this.off = function (type, listener) {
3382 var index;
3383
3384 if (!listeners[type]) {
3385 return false;
3386 }
3387
3388 index = listeners[type].indexOf(listener);
3389 listeners[type] = listeners[type].slice();
3390 listeners[type].splice(index, 1);
3391 return index > -1;
3392 };
3393 /**
3394 * Trigger an event of the specified type on this stream. Any additional
3395 * arguments to this function are passed as parameters to event listeners.
3396 * @param type {string} the event name
3397 */
3398
3399
3400 this.trigger = function (type) {
3401 var callbacks, i, length, args;
3402 callbacks = listeners[type];
3403
3404 if (!callbacks) {
3405 return;
3406 } // Slicing the arguments on every invocation of this method
3407 // can add a significant amount of overhead. Avoid the
3408 // intermediate object creation for the common case of a
3409 // single callback argument
3410
3411
3412 if (arguments.length === 2) {
3413 length = callbacks.length;
3414
3415 for (i = 0; i < length; ++i) {
3416 callbacks[i].call(this, arguments[1]);
3417 }
3418 } else {
3419 args = [];
3420
3421
3422 for (i = 1; i < arguments.length; ++i) {
3423 args.push(arguments[i]);
3424 }
3425
3426 length = callbacks.length;
3427
3428 for (i = 0; i < length; ++i) {
3429 callbacks[i].apply(this, args);
3430 }
3431 }
3432 };
3433 /**
3434 * Destroys the stream and cleans up.
3435 */
3436
3437
3438 this.dispose = function () {
3439 listeners = {};
3440 };
3441 };
3442 };
3443 /**
3444 * Forwards all `data` events on this stream to the destination stream. The
3445 * destination stream should provide a method `push` to receive the data
3446 * events as they arrive.
3447 * @param destination {stream} the stream that will receive all `data` events
3448 * @param autoFlush {boolean} if false, we will not call `flush` on the destination
3449 * when the current stream emits a 'done' event
3450 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
3451 */
3452
3453
3454 Stream.prototype.pipe = function (destination) {
3455 this.on('data', function (data) {
3456 destination.push(data);
3457 });
3458 this.on('done', function (flushSource) {
3459 destination.flush(flushSource);
3460 });
3461 this.on('partialdone', function (flushSource) {
3462 destination.partialFlush(flushSource);
3463 });
3464 this.on('endedtimeline', function (flushSource) {
3465 destination.endTimeline(flushSource);
3466 });
3467 this.on('reset', function (flushSource) {
3468 destination.reset(flushSource);
3469 });
3470 return destination;
3471 }; // Default stream functions that are expected to be overridden to perform
3472 // actual work. These are provided by the prototype as a sort of no-op
3473 // implementation so that we don't have to check for their existence in the
3474 // `pipe` function above.
3475
3476
3477 Stream.prototype.push = function (data) {
3478 this.trigger('data', data);
3479 };
3480
3481 Stream.prototype.flush = function (flushSource) {
3482 this.trigger('done', flushSource);
3483 };
3484
3485 Stream.prototype.partialFlush = function (flushSource) {
3486 this.trigger('partialdone', flushSource);
3487 };
3488
3489 Stream.prototype.endTimeline = function (flushSource) {
3490 this.trigger('endedtimeline', flushSource);
3491 };
3492
3493 Stream.prototype.reset = function (flushSource) {
3494 this.trigger('reset', flushSource);
3495 };
3496
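/**
 * Illustrative example (not part of the bundle): a minimal pipeline built
 * on the Stream base class above. `DoubleStream` is a hypothetical stream
 * that re-emits each value doubled.
 *
 *   var DoubleStream = function () {
 *     DoubleStream.prototype.init.call(this);
 *     this.push = function (value) {
 *       this.trigger('data', value * 2);
 *     };
 *   };
 *   DoubleStream.prototype = new Stream();
 *
 *   var doubler = new DoubleStream();
 *   var sink = new Stream();
 *   sink.init();
 *   sink.push = function (value) { console.log(value); };
 *   doubler.pipe(sink);
 *   doubler.push(21); // sink logs 42
 */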
3497 var stream = Stream;
3498 /**
3499 * mux.js
3500 *
3501 * Copyright (c) Brightcove
3502 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
3503 *
3504 * Functions that generate fragmented MP4s suitable for use with Media
3505 * Source Extensions.
3506 */
3507
3508 var UINT32_MAX = Math.pow(2, 32) - 1;
3509 var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants
3510
3511 (function () {
3512 var i;
3513 types = {
3514 avc1: [],
3515 // codingname
3516 avcC: [],
3517 btrt: [],
3518 dinf: [],
3519 dref: [],
3520 esds: [],
3521 ftyp: [],
3522 hdlr: [],
3523 mdat: [],
3524 mdhd: [],
3525 mdia: [],
3526 mfhd: [],
3527 minf: [],
3528 moof: [],
3529 moov: [],
3530 mp4a: [],
3531 // codingname
3532 mvex: [],
3533 mvhd: [],
3534 pasp: [],
3535 sdtp: [],
3536 smhd: [],
3537 stbl: [],
3538 stco: [],
3539 stsc: [],
3540 stsd: [],
3541 stsz: [],
3542 stts: [],
3543 styp: [],
3544 tfdt: [],
3545 tfhd: [],
3546 traf: [],
3547 trak: [],
3548 trun: [],
3549 trex: [],
3550 tkhd: [],
3551 vmhd: []
3552 }; // In environments where Uint8Array is undefined (e.g., IE8), skip setup so that we
3553 // don't throw an error
3554
3555 if (typeof Uint8Array === 'undefined') {
3556 return;
3557 }
3558
3559 for (i in types) {
3560 if (types.hasOwnProperty(i)) {
3561 types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
3562 }
3563 }
3564
3565 MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
3566 AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
3567 MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
3568 VIDEO_HDLR = new Uint8Array([0x00, // version 0
3569 0x00, 0x00, 0x00, // flags
3570 0x00, 0x00, 0x00, 0x00, // pre_defined
3571 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
3572 0x00, 0x00, 0x00, 0x00, // reserved
3573 0x00, 0x00, 0x00, 0x00, // reserved
3574 0x00, 0x00, 0x00, 0x00, // reserved
3575 0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
3576 ]);
3577 AUDIO_HDLR = new Uint8Array([0x00, // version 0
3578 0x00, 0x00, 0x00, // flags
3579 0x00, 0x00, 0x00, 0x00, // pre_defined
3580 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
3581 0x00, 0x00, 0x00, 0x00, // reserved
3582 0x00, 0x00, 0x00, 0x00, // reserved
3583 0x00, 0x00, 0x00, 0x00, // reserved
3584 0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
3585 ]);
3586 HDLR_TYPES = {
3587 video: VIDEO_HDLR,
3588 audio: AUDIO_HDLR
3589 };
3590 DREF = new Uint8Array([0x00, // version 0
3591 0x00, 0x00, 0x00, // flags
3592 0x00, 0x00, 0x00, 0x01, // entry_count
3593 0x00, 0x00, 0x00, 0x0c, // entry_size
3594 0x75, 0x72, 0x6c, 0x20, // 'url' type
3595 0x00, // version 0
3596 0x00, 0x00, 0x01 // entry_flags
3597 ]);
3598 SMHD = new Uint8Array([0x00, // version
3599 0x00, 0x00, 0x00, // flags
3600 0x00, 0x00, // balance, 0 means centered
3601 0x00, 0x00 // reserved
3602 ]);
3603 STCO = new Uint8Array([0x00, // version
3604 0x00, 0x00, 0x00, // flags
3605 0x00, 0x00, 0x00, 0x00 // entry_count
3606 ]);
3607 STSC = STCO;
3608 STSZ = new Uint8Array([0x00, // version
3609 0x00, 0x00, 0x00, // flags
3610 0x00, 0x00, 0x00, 0x00, // sample_size
3611 0x00, 0x00, 0x00, 0x00 // sample_count
3612 ]);
3613 STTS = STCO;
3614 VMHD = new Uint8Array([0x00, // version
3615 0x00, 0x00, 0x01, // flags
3616 0x00, 0x00, // graphicsmode
3617 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
3618 ]);
3619 })();
3620
3621 box = function box(type) {
3622 var payload = [],
3623 size = 0,
3624 i,
3625 result,
3626 view;
3627
3628 for (i = 1; i < arguments.length; i++) {
3629 payload.push(arguments[i]);
3630 }
3631
3632 i = payload.length; // calculate the total size we need to allocate
3633
3634 while (i--) {
3635 size += payload[i].byteLength;
3636 }
3637
3638 result = new Uint8Array(size + 8);
3639 view = new DataView(result.buffer, result.byteOffset, result.byteLength);
3640 view.setUint32(0, result.byteLength);
3641 result.set(type, 4); // copy the payload into the result
3642
3643 for (i = 0, size = 8; i < payload.length; i++) {
3644 result.set(payload[i], size);
3645 size += payload[i].byteLength;
3646 }
3647
3648 return result;
3649 };
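/**
 * Illustrative example (not part of the bundle): every box produced by
 * `box` is a 32-bit length, a four-byte type, then the payload, so nested
 * boxes are plain composition.
 *
 *   var inner = box(types.dref, DREF);       // 8 + DREF.byteLength bytes
 *   var outer = box(types.dinf, inner);      // same bytes dinf() returns
 *   new DataView(outer.buffer).getUint32(0); // === outer.byteLength
 */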
3650
3651 dinf = function dinf() {
3652 return box(types.dinf, box(types.dref, DREF));
3653 };
3654
3655 esds = function esds(track) {
3656 return box(types.esds, new Uint8Array([0x00, // version
3657 0x00, 0x00, 0x00, // flags
3658 // ES_Descriptor
3659 0x03, // tag, ES_DescrTag
3660 0x19, // length
3661 0x00, 0x00, // ES_ID
3662 0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
3663 // DecoderConfigDescriptor
3664 0x04, // tag, DecoderConfigDescrTag
3665 0x11, // length
3666 0x40, // object type
3667 0x15, // streamType
3668 0x00, 0x06, 0x00, // bufferSizeDB
3669 0x00, 0x00, 0xda, 0xc0, // maxBitrate
3670 0x00, 0x00, 0xda, 0xc0, // avgBitrate
3671 // DecoderSpecificInfo
3672 0x05, // tag, DecoderSpecificInfoTag
3673 0x02, // length
3674 // ISO/IEC 14496-3, AudioSpecificConfig
3675 // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
3676 track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
3677 ]));
3678 };
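/**
 * Worked example (illustrative): for AAC LC (audioobjecttype 2) at
 * 44100 Hz (samplingfrequencyindex 4), stereo (channelcount 2), the two
 * AudioSpecificConfig bytes computed above work out to:
 *
 *   2 << 3 | 4 >>> 1           // 0x12
 *   (4 << 7 | 2 << 3) & 0xFF   // 0x10 (the Uint8Array keeps only the low byte)
 */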
3679
3680 ftyp = function ftyp() {
3681 return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
3682 };
3683
3684 hdlr = function hdlr(type) {
3685 return box(types.hdlr, HDLR_TYPES[type]);
3686 };
3687
3688 mdat = function mdat(data) {
3689 return box(types.mdat, data);
3690 };
3691
3692 mdhd = function mdhd(track) {
3693 var result = new Uint8Array([0x00, // version 0
3694 0x00, 0x00, 0x00, // flags
3695 0x00, 0x00, 0x00, 0x02, // creation_time
3696 0x00, 0x00, 0x00, 0x03, // modification_time
3697 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
3698 track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
3699 0x55, 0xc4, // 'und' language (undetermined)
3700 0x00, 0x00]); // Use the sample rate from the track metadata, when it is
3701 // defined. The sample rate can be parsed out of an ADTS header, for
3702 // instance.
3703
3704 if (track.samplerate) {
3705 result[12] = track.samplerate >>> 24 & 0xFF;
3706 result[13] = track.samplerate >>> 16 & 0xFF;
3707 result[14] = track.samplerate >>> 8 & 0xFF;
3708 result[15] = track.samplerate & 0xFF;
3709 }
3710
3711 return box(types.mdhd, result);
3712 };
3713
3714 mdia = function mdia(track) {
3715 return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
3716 };
3717
3718 mfhd = function mfhd(sequenceNumber) {
3719 return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
3720 (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
3721 ]));
3722 };
3723
3724 minf = function minf(track) {
3725 return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
3726 };
3727
3728 moof = function moof(sequenceNumber, tracks) {
3729 var trackFragments = [],
3730 i = tracks.length; // build traf boxes for each track fragment
3731
3732 while (i--) {
3733 trackFragments[i] = traf(tracks[i]);
3734 }
3735
3736 return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
3737 };
3738 /**
3739 * Returns a movie box.
3740 * @param tracks {array} the tracks associated with this movie
3741 * @see ISO/IEC 14496-12:2012(E), section 8.2.1
3742 */
3743
3744
3745 moov = function moov(tracks) {
3746 var i = tracks.length,
3747 boxes = [];
3748
3749 while (i--) {
3750 boxes[i] = trak(tracks[i]);
3751 }
3752
3753 return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
3754 };
3755
3756 mvex = function mvex(tracks) {
3757 var i = tracks.length,
3758 boxes = [];
3759
3760 while (i--) {
3761 boxes[i] = trex(tracks[i]);
3762 }
3763
3764 return box.apply(null, [types.mvex].concat(boxes));
3765 };
3766
3767 mvhd = function mvhd(duration) {
3768 var bytes = new Uint8Array([0x00, // version 0
3769 0x00, 0x00, 0x00, // flags
3770 0x00, 0x00, 0x00, 0x01, // creation_time
3771 0x00, 0x00, 0x00, 0x02, // modification_time
3772 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
3773 (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
3774 0x00, 0x01, 0x00, 0x00, // 1.0 rate
3775 0x01, 0x00, // 1.0 volume
3776 0x00, 0x00, // reserved
3777 0x00, 0x00, 0x00, 0x00, // reserved
3778 0x00, 0x00, 0x00, 0x00, // reserved
3779 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
3780 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
3781 0xff, 0xff, 0xff, 0xff // next_track_ID
3782 ]);
3783 return box(types.mvhd, bytes);
3784 };
3785
3786 sdtp = function sdtp(track) {
3787 var samples = track.samples || [],
3788 bytes = new Uint8Array(4 + samples.length),
3789 flags,
3790 i; // leave the full box header (4 bytes) all zero
3791 // write the sample table
3792
3793 for (i = 0; i < samples.length; i++) {
3794 flags = samples[i].flags;
3795 bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
3796 }
3797
3798 return box(types.sdtp, bytes);
3799 };
3800
3801 stbl = function stbl(track) {
3802 return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
3803 };
3804
3805 (function () {
3806 var videoSample, audioSample;
3807
3808 stsd = function stsd(track) {
3809 return box(types.stsd, new Uint8Array([0x00, // version 0
3810 0x00, 0x00, 0x00, // flags
3811 0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
3812 };
3813
3814 videoSample = function videoSample(track) {
3815 var sps = track.sps || [],
3816 pps = track.pps || [],
3817 sequenceParameterSets = [],
3818 pictureParameterSets = [],
3819 i,
3820 avc1Box; // assemble the SPSs
3821
3822 for (i = 0; i < sps.length; i++) {
3823 sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
3824 sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength
3825
3826 sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
3827 } // assemble the PPSs
3828
3829
3830 for (i = 0; i < pps.length; i++) {
3831 pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
3832 pictureParameterSets.push(pps[i].byteLength & 0xFF);
3833 pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
3834 }
3835
3836 avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
3837 0x00, 0x01, // data_reference_index
3838 0x00, 0x00, // pre_defined
3839 0x00, 0x00, // reserved
3840 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
3841 (track.width & 0xff00) >> 8, track.width & 0xff, // width
3842 (track.height & 0xff00) >> 8, track.height & 0xff, // height
3843 0x00, 0x48, 0x00, 0x00, // horizresolution
3844 0x00, 0x48, 0x00, 0x00, // vertresolution
3845 0x00, 0x00, 0x00, 0x00, // reserved
3846 0x00, 0x01, // frame_count
3847 0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
3848 0x00, 0x18, // depth = 24
3849 0x11, 0x11 // pre_defined = -1 (spec value 0xFFFF; 0x1111 is what this muxer writes)
3850 ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
3851 track.profileIdc, // AVCProfileIndication
3852 track.profileCompatibility, // profile_compatibility
3853 track.levelIdc, // AVCLevelIndication
3854 0xff // lengthSizeMinusOne, hard-coded to 4 bytes
3855 ].concat([sps.length], // numOfSequenceParameterSets
3856 sequenceParameterSets, // "SPS"
3857 [pps.length], // numOfPictureParameterSets
3858 pictureParameterSets // "PPS"
3859 ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
3860 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
3861 0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
3862 ]))];
3863
3864 if (track.sarRatio) {
3865 var hSpacing = track.sarRatio[0],
3866 vSpacing = track.sarRatio[1];
3867 avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
3868 }
3869
3870 return box.apply(null, avc1Box);
3871 };
3872
3873 audioSample = function audioSample(track) {
3874 return box(types.mp4a, new Uint8Array([// SampleEntry, ISO/IEC 14496-12
3875 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
3876 0x00, 0x01, // data_reference_index
3877 // AudioSampleEntry, ISO/IEC 14496-12
3878 0x00, 0x00, 0x00, 0x00, // reserved
3879 0x00, 0x00, 0x00, 0x00, // reserved
3880 (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount
3881 (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
3882 0x00, 0x00, // pre_defined
3883 0x00, 0x00, // reserved
3884 (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16
3885 // MP4AudioSampleEntry, ISO/IEC 14496-14
3886 ]), esds(track));
3887 };
3888 })();
3889
3890 tkhd = function tkhd(track) {
3891 var result = new Uint8Array([0x00, // version 0
3892 0x00, 0x00, 0x07, // flags
3893 0x00, 0x00, 0x00, 0x00, // creation_time
3894 0x00, 0x00, 0x00, 0x00, // modification_time
3895 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
3896 0x00, 0x00, 0x00, 0x00, // reserved
3897 (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
3898 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
3899 0x00, 0x00, // layer
3900 0x00, 0x00, // alternate_group
3901 0x01, 0x00, // non-audio track volume
3902 0x00, 0x00, // reserved
3903 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
3904 (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
3905 (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
3906 ]);
3907 return box(types.tkhd, result);
3908 };
3909 /**
3910 * Generate a track fragment (traf) box. A traf box collects metadata
3911 * about tracks in a movie fragment (moof) box.
3912 */
3913
3914
3915 traf = function traf(track) {
3916 var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
3917 trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
3918 0x00, 0x00, 0x3a, // flags
3919 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
3920 0x00, 0x00, 0x00, 0x01, // sample_description_index
3921 0x00, 0x00, 0x00, 0x00, // default_sample_duration
3922 0x00, 0x00, 0x00, 0x00, // default_sample_size
3923 0x00, 0x00, 0x00, 0x00 // default_sample_flags
3924 ]));
3925 upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / (UINT32_MAX + 1));
3926 lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % (UINT32_MAX + 1));
3927 trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
3928 0x00, 0x00, 0x00, // flags
3929 // baseMediaDecodeTime
3930 upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of
3931 // the containing moof to the first payload byte of the associated
3932 // mdat
3933
3934 dataOffset = 32 + // tfhd
3935 20 + // tfdt
3936 8 + // traf header
3937 16 + // mfhd
3938 8 + // moof header
3939 8; // mdat header
3940 // audio tracks require less metadata
3941
3942 if (track.type === 'audio') {
3943 trackFragmentRun = trun(track, dataOffset);
3944 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
3945 } // video tracks should contain an independent and disposable samples
3946 // box (sdtp)
3947 // generate one and adjust offsets to match
3948
3949
3950 sampleDependencyTable = sdtp(track);
3951 trackFragmentRun = trun(track, sampleDependencyTable.length + dataOffset);
3952 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
3953 };
3954 /**
3955 * Generate a track box.
3956 * @param track {object} a track definition
3957 * @return {Uint8Array} the track box
3958 */
3959
3960
3961 trak = function trak(track) {
3962 track.duration = track.duration || 0xffffffff;
3963 return box(types.trak, tkhd(track), mdia(track));
3964 };
3965
3966 trex = function trex(track) {
3967 var result = new Uint8Array([0x00, // version 0
3968 0x00, 0x00, 0x00, // flags
3969 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
3970 0x00, 0x00, 0x00, 0x01, // default_sample_description_index
3971 0x00, 0x00, 0x00, 0x00, // default_sample_duration
3972 0x00, 0x00, 0x00, 0x00, // default_sample_size
3973 0x00, 0x01, 0x00, 0x01 // default_sample_flags
3974 ]); // the last two bytes of default_sample_flags is the sample
3975 // degradation priority, a hint about the importance of this sample
3976 // relative to others. Lower the degradation priority for all sample
3977 // types other than video.
3978
3979 if (track.type !== 'video') {
3980 result[result.length - 1] = 0x00;
3981 }
3982
3983 return box(types.trex, result);
3984 };
3985
3986 (function () {
3987 var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a
3988 // duration is present for the first sample, it will be present for
3989 // all subsequent samples.
3990 // see ISO/IEC 14496-12:2012, Section 8.8.8.1
3991
3992 trunHeader = function trunHeader(samples, offset) {
3993 var durationPresent = 0,
3994 sizePresent = 0,
3995 flagsPresent = 0,
3996 compositionTimeOffset = 0; // trun flag constants
3997
3998 if (samples.length) {
3999 if (samples[0].duration !== undefined) {
4000 durationPresent = 0x1;
4001 }
4002
4003 if (samples[0].size !== undefined) {
4004 sizePresent = 0x2;
4005 }
4006
4007 if (samples[0].flags !== undefined) {
4008 flagsPresent = 0x4;
4009 }
4010
4011 if (samples[0].compositionTimeOffset !== undefined) {
4012 compositionTimeOffset = 0x8;
4013 }
4014 }
4015
4016 return [0x00, // version 0
4017 0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags
4018 (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
4019 (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
4020 ];
4021 };
4022
4023 videoTrun = function videoTrun(track, offset) {
4024 var bytesOffset, bytes, header, samples, sample, i;
4025 samples = track.samples || [];
4026 offset += 8 + 12 + 16 * samples.length;
4027 header = trunHeader(samples, offset);
4028 bytes = new Uint8Array(header.length + samples.length * 16);
4029 bytes.set(header);
4030 bytesOffset = header.length;
4031
4032 for (i = 0; i < samples.length; i++) {
4033 sample = samples[i];
4034 bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
4035 bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
4036 bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
4037 bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
4038
4039 bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
4040 bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
4041 bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
4042 bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
4043
4044 bytes[bytesOffset++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
4045 bytes[bytesOffset++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
4046 bytes[bytesOffset++] = (sample.flags.degradationPriority & 0xFF00) >>> 8;
4047 bytes[bytesOffset++] = sample.flags.degradationPriority & 0xFF; // sample_flags (degradation_priority is 16 bits, big-endian)
4048
4049 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
4050 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
4051 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
4052 bytes[bytesOffset++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
4053 }
4054
4055 return box(types.trun, bytes);
4056 };
4057
4058 audioTrun = function audioTrun(track, offset) {
4059 var bytes, bytesOffset, header, samples, sample, i;
4060 samples = track.samples || [];
4061 offset += 8 + 12 + 8 * samples.length;
4062 header = trunHeader(samples, offset);
4063 bytes = new Uint8Array(header.length + samples.length * 8);
4064 bytes.set(header);
4065 bytesOffset = header.length;
4066
4067 for (i = 0; i < samples.length; i++) {
4068 sample = samples[i];
4069 bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
4070 bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
4071 bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
4072 bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
4073
4074 bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
4075 bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
4076 bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
4077 bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
4078 }
4079
4080 return box(types.trun, bytes);
4081 };
4082
4083 trun = function trun(track, offset) {
4084 if (track.type === 'audio') {
4085 return audioTrun(track, offset);
4086 }
4087
4088 return videoTrun(track, offset);
4089 };
4090 })();
4091
4092 var mp4Generator = {
4093 ftyp: ftyp,
4094 mdat: mdat,
4095 moof: moof,
4096 moov: moov,
4097 initSegment: function initSegment(tracks) {
4098 var fileType = ftyp(),
4099 movie = moov(tracks),
4100 result;
4101 result = new Uint8Array(fileType.byteLength + movie.byteLength);
4102 result.set(fileType);
4103 result.set(movie, fileType.byteLength);
4104 return result;
4105 }
4106 };
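/**
 * Illustrative usage (not part of the bundle): generating an init segment
 * for a single AAC audio track. The field values below are hypothetical
 * but cover everything mdhd/tkhd/esds read above.
 *
 *   var audioTrack = {
 *     id: 1,
 *     type: 'audio',
 *     duration: 0,               // trak() promotes 0 to 0xffffffff
 *     audioobjecttype: 2,        // AAC LC
 *     samplingfrequencyindex: 4, // 44100 Hz
 *     samplerate: 44100,
 *     channelcount: 2,
 *     samplesize: 16
 *   };
 *   var init = mp4Generator.initSegment([audioTrack]); // Uint8Array: ftyp + moov
 */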
4107 /**
4108 * mux.js
4109 *
4110 * Copyright (c) Brightcove
4111 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4112 */
4113 // Convert an array of nal units into an array of frames with each frame being
4114 // composed of the nal units that make up that frame
4115 // Also keep track of cumulative data about the frame from the nal units such
4116 // as the frame duration, starting pts, etc.
4117
4118 var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
4119 var i,
4120 currentNal,
4121 currentFrame = [],
4122 frames = []; // TODO added for LHLS, make sure this is OK
4123
4124 frames.byteLength = 0;
4125 frames.nalCount = 0;
4126 frames.duration = 0;
4127 currentFrame.byteLength = 0;
4128
4129 for (i = 0; i < nalUnits.length; i++) {
4130 currentNal = nalUnits[i]; // Split on 'aud'-type nal units
4131
4132 if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
4133 // Since the very first nal unit is expected to be an AUD
4134 // only push to the frames array when currentFrame is not empty
4135 if (currentFrame.length) {
4136 currentFrame.duration = currentNal.dts - currentFrame.dts; // TODO added for LHLS, make sure this is OK
4137
4138 frames.byteLength += currentFrame.byteLength;
4139 frames.nalCount += currentFrame.length;
4140 frames.duration += currentFrame.duration;
4141 frames.push(currentFrame);
4142 }
4143
4144 currentFrame = [currentNal];
4145 currentFrame.byteLength = currentNal.data.byteLength;
4146 currentFrame.pts = currentNal.pts;
4147 currentFrame.dts = currentNal.dts;
4148 } else {
4149 // Specifically flag key frames for ease of use later
4150 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
4151 currentFrame.keyFrame = true;
4152 }
4153
4154 currentFrame.duration = currentNal.dts - currentFrame.dts;
4155 currentFrame.byteLength += currentNal.data.byteLength;
4156 currentFrame.push(currentNal);
4157 }
4158 } // For the last frame, use the duration of the previous frame if we
4159 // have nothing better to go on
4160
4161
4162 if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
4163 currentFrame.duration = frames[frames.length - 1].duration;
4164 } // Push the final frame
4165 // TODO added for LHLS, make sure this is OK
4166
4167
4168 frames.byteLength += currentFrame.byteLength;
4169 frames.nalCount += currentFrame.length;
4170 frames.duration += currentFrame.duration;
4171 frames.push(currentFrame);
4172 return frames;
4173 }; // Convert an array of frames into an array of Gop with each Gop being composed
4174 // of the frames that make up that Gop
4175 // Also keep track of cumulative data about the Gop from the frames such as the
4176 // Gop duration, starting pts, etc.
4177
4178
4179 var groupFramesIntoGops = function groupFramesIntoGops(frames) {
4180 var i,
4181 currentFrame,
4182 currentGop = [],
4183 gops = []; // We must pre-set some of the values on the Gop since we
4184 // keep running totals of these values
4185
4186 currentGop.byteLength = 0;
4187 currentGop.nalCount = 0;
4188 currentGop.duration = 0;
4189 currentGop.pts = frames[0].pts;
4190 currentGop.dts = frames[0].dts; // store some metadata about all the Gops
4191
4192 gops.byteLength = 0;
4193 gops.nalCount = 0;
4194 gops.duration = 0;
4195 gops.pts = frames[0].pts;
4196 gops.dts = frames[0].dts;
4197
4198 for (i = 0; i < frames.length; i++) {
4199 currentFrame = frames[i];
4200
4201 if (currentFrame.keyFrame) {
4202 // Since the very first frame is expected to be a keyframe
4203 // only push to the gops array when currentGop is not empty
4204 if (currentGop.length) {
4205 gops.push(currentGop);
4206 gops.byteLength += currentGop.byteLength;
4207 gops.nalCount += currentGop.nalCount;
4208 gops.duration += currentGop.duration;
4209 }
4210
4211 currentGop = [currentFrame];
4212 currentGop.nalCount = currentFrame.length;
4213 currentGop.byteLength = currentFrame.byteLength;
4214 currentGop.pts = currentFrame.pts;
4215 currentGop.dts = currentFrame.dts;
4216 currentGop.duration = currentFrame.duration;
4217 } else {
4218 currentGop.duration += currentFrame.duration;
4219 currentGop.nalCount += currentFrame.length;
4220 currentGop.byteLength += currentFrame.byteLength;
4221 currentGop.push(currentFrame);
4222 }
4223 }
4224
4225 if (gops.length && currentGop.duration <= 0) {
4226 currentGop.duration = gops[gops.length - 1].duration;
4227 }
4228
4229 gops.byteLength += currentGop.byteLength;
4230 gops.nalCount += currentGop.nalCount;
4231 gops.duration += currentGop.duration; // push the final Gop
4232
4233 gops.push(currentGop);
4234 return gops;
4235 };
4236 /*
4237 * Search for the first keyframe in the GOPs and throw away all frames
4238 * until that keyframe. Then extend the duration of the pulled keyframe
4239 * and pull the PTS and DTS of the keyframe so that it covers the time
4240 * range of the frames that were disposed.
4241 *
4242 * @param {Array} gops video GOPs
4243 * @returns {Array} modified video GOPs
4244 */
4245
4246
4247 var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
4248 var currentGop;
4249
4250 if (!gops[0][0].keyFrame && gops.length > 1) {
4251 // Remove the first GOP
4252 currentGop = gops.shift();
4253 gops.byteLength -= currentGop.byteLength;
4254 gops.nalCount -= currentGop.nalCount; // Extend the first frame of what is now the
4255 // first gop to cover the time period of the
4256 // frames we just removed
4257
4258 gops[0][0].dts = currentGop.dts;
4259 gops[0][0].pts = currentGop.pts;
4260 gops[0][0].duration += currentGop.duration;
4261 }
4262
4263 return gops;
4264 };
4265 /**
4266 * Default sample object
4267 * see ISO/IEC 14496-12:2012, section 8.6.4.3
4268 */
4269
4270
4271 var createDefaultSample = function createDefaultSample() {
4272 return {
4273 size: 0,
4274 flags: {
4275 isLeading: 0,
4276 dependsOn: 1,
4277 isDependedOn: 0,
4278 hasRedundancy: 0,
4279 degradationPriority: 0,
4280 isNonSyncSample: 1
4281 }
4282 };
4283 };
4284 /*
4285 * Collates information from a video frame into an object for eventual
4286 * entry into an MP4 sample table.
4287 *
4288 * @param {Object} frame the video frame
4289 * @param {Number} dataOffset the byte offset to position the sample
4290 * @return {Object} object containing sample table info for a frame
4291 */
4292
4293
4294 var sampleForFrame = function sampleForFrame(frame, dataOffset) {
4295 var sample = createDefaultSample();
4296 sample.dataOffset = dataOffset;
4297 sample.compositionTimeOffset = frame.pts - frame.dts;
4298 sample.duration = frame.duration;
4299 sample.size = 4 * frame.length; // Space for nal unit size
4300
4301 sample.size += frame.byteLength;
4302
4303 if (frame.keyFrame) {
4304 sample.flags.dependsOn = 2;
4305 sample.flags.isNonSyncSample = 0;
4306 }
4307
4308 return sample;
4309 }; // generate the track's sample table from an array of gops
4310
4311
4312 var generateSampleTable = function generateSampleTable(gops, baseDataOffset) {
4313 var h,
4314 i,
4315 sample,
4316 currentGop,
4317 currentFrame,
4318 dataOffset = baseDataOffset || 0,
4319 samples = [];
4320
4321 for (h = 0; h < gops.length; h++) {
4322 currentGop = gops[h];
4323
4324 for (i = 0; i < currentGop.length; i++) {
4325 currentFrame = currentGop[i];
4326 sample = sampleForFrame(currentFrame, dataOffset);
4327 dataOffset += sample.size;
4328 samples.push(sample);
4329 }
4330 }
4331
4332 return samples;
4333 }; // generate the track's raw mdat data from an array of gops
4334
4335
4336 var concatenateNalData = function concatenateNalData(gops) {
4337 var h,
4338 i,
4339 j,
4340 currentGop,
4341 currentFrame,
4342 currentNal,
4343 dataOffset = 0,
4344 nalsByteLength = gops.byteLength,
4345 numberOfNals = gops.nalCount,
4346 totalByteLength = nalsByteLength + 4 * numberOfNals,
4347 data = new Uint8Array(totalByteLength),
4348 view = new DataView(data.buffer); // For each Gop..
4349
4350 for (h = 0; h < gops.length; h++) {
4351 currentGop = gops[h]; // For each Frame..
4352
4353 for (i = 0; i < currentGop.length; i++) {
4354 currentFrame = currentGop[i]; // For each NAL..
4355
4356 for (j = 0; j < currentFrame.length; j++) {
4357 currentNal = currentFrame[j];
4358 view.setUint32(dataOffset, currentNal.data.byteLength);
4359 dataOffset += 4;
4360 data.set(currentNal.data, dataOffset);
4361 dataOffset += currentNal.data.byteLength;
4362 }
4363 }
4364 }
4365
4366 return data;
4367 }; // generate the track's sample table from a frame
4368
4369
4370 var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
4371 var sample,
4372 dataOffset = baseDataOffset || 0,
4373 samples = [];
4374 sample = sampleForFrame(frame, dataOffset);
4375 samples.push(sample);
4376 return samples;
4377 }; // generate the track's raw mdat data from a frame
4378
4379
4380 var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
4381 var i,
4382 currentNal,
4383 dataOffset = 0,
4384 nalsByteLength = frame.byteLength,
4385 numberOfNals = frame.length,
4386 totalByteLength = nalsByteLength + 4 * numberOfNals,
4387 data = new Uint8Array(totalByteLength),
4388 view = new DataView(data.buffer); // For each NAL..
4389
4390 for (i = 0; i < frame.length; i++) {
4391 currentNal = frame[i];
4392 view.setUint32(dataOffset, currentNal.data.byteLength);
4393 dataOffset += 4;
4394 data.set(currentNal.data, dataOffset);
4395 dataOffset += currentNal.data.byteLength;
4396 }
4397
4398 return data;
4399 };
4400
4401 var frameUtils = {
4402 groupNalsIntoFrames: groupNalsIntoFrames,
4403 groupFramesIntoGops: groupFramesIntoGops,
4404 extendFirstKeyFrame: extendFirstKeyFrame,
4405 generateSampleTable: generateSampleTable,
4406 concatenateNalData: concatenateNalData,
4407 generateSampleTableForFrame: generateSampleTableForFrame,
4408 concatenateNalDataForFrame: concatenateNalDataForFrame
4409 };
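/**
 * Illustrative flow (not part of the bundle): the call order a video
 * segment pipeline would typically use. The NAL unit objects below are
 * hypothetical minimal stand-ins.
 *
 *   var nalUnits = [
 *     { nalUnitType: 'access_unit_delimiter_rbsp', data: new Uint8Array(2), pts: 0, dts: 0 },
 *     { nalUnitType: 'slice_layer_without_partitioning_rbsp_idr', data: new Uint8Array(100), pts: 3000, dts: 3000 }
 *   ];
 *   var frames = frameUtils.groupNalsIntoFrames(nalUnits);
 *   var gops = frameUtils.extendFirstKeyFrame(frameUtils.groupFramesIntoGops(frames));
 *   var samples = frameUtils.generateSampleTable(gops, 0); // for trun()
 *   var nalData = frameUtils.concatenateNalData(gops);     // for mdat()
 */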
4410 /**
4411 * mux.js
4412 *
4413 * Copyright (c) Brightcove
4414 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4415 */
4416
4417 var highPrefix = [33, 16, 5, 32, 164, 27];
4418 var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
4419
4420 var zeroFill = function zeroFill(count) {
4421 var a = [];
4422
4423 while (count--) {
4424 a.push(0);
4425 }
4426
4427 return a;
4428 };
4429
4430 var makeTable = function makeTable(metaTable) {
4431 return Object.keys(metaTable).reduce(function (obj, key) {
4432 obj[key] = new Uint8Array(metaTable[key].reduce(function (arr, part) {
4433 return arr.concat(part);
4434 }, []));
4435 return obj;
4436 }, {});
4437 };
4438
4439 var silence;
4440
4441 var silence_1 = function silence_1() {
4442 if (!silence) {
4443 // Frames-of-silence to use for filling in missing AAC frames
4444 var coneOfSilence = {
4445 96000: [highPrefix, [227, 64], zeroFill(154), [56]],
4446 88200: [highPrefix, [231], zeroFill(170), [56]],
4447 64000: [highPrefix, [248, 192], zeroFill(240), [56]],
4448 48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
4449 44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
4450 32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
4451 24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
4452 16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
4453 12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
4454 11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
4455 8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
4456 };
4457 silence = makeTable(coneOfSilence);
4458 }
4459
4460 return silence;
4461 };
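/**
 * Illustrative example (not part of the bundle): looking up a pregenerated
 * silent AAC frame for a track's sample rate.
 *
 *   var silentFrame = silence_1()[44100]; // Uint8Array, or undefined for
 *                                         // sample rates not in the table
 */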
4462 /**
4463 * mux.js
4464 *
4465 * Copyright (c) Brightcove
4466 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4467 */
4468
4469
4470 var ONE_SECOND_IN_TS = 90000,
4471 // 90kHz clock
4472 secondsToVideoTs,
4473 secondsToAudioTs,
4474 videoTsToSeconds,
4475 audioTsToSeconds,
4476 audioTsToVideoTs,
4477 videoTsToAudioTs,
4478 metadataTsToSeconds;
4479
4480 secondsToVideoTs = function secondsToVideoTs(seconds) {
4481 return seconds * ONE_SECOND_IN_TS;
4482 };
4483
4484 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
4485 return seconds * sampleRate;
4486 };
4487
4488 videoTsToSeconds = function videoTsToSeconds(timestamp) {
4489 return timestamp / ONE_SECOND_IN_TS;
4490 };
4491
4492 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
4493 return timestamp / sampleRate;
4494 };
4495
4496 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
4497 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
4498 };
4499
4500 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
4501 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
4502 };
4503 /**
4504 * Adjust ID3 tag or caption timing information by the timeline pts values
4505 * (if keepOriginalTimestamps is false) and convert to seconds
4506 */
4507
4508
4509 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
4510 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
4511 };
4512
4513 var clock = {
4514 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS,
4515 secondsToVideoTs: secondsToVideoTs,
4516 secondsToAudioTs: secondsToAudioTs,
4517 videoTsToSeconds: videoTsToSeconds,
4518 audioTsToSeconds: audioTsToSeconds,
4519 audioTsToVideoTs: audioTsToVideoTs,
4520 videoTsToAudioTs: videoTsToAudioTs,
4521 metadataTsToSeconds: metadataTsToSeconds
4522 };
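/**
 * Illustrative conversions (not part of the bundle) using the 90kHz
 * MPEG-TS clock helpers above:
 *
 *   clock.secondsToVideoTs(2);            // 180000 ticks
 *   clock.videoTsToSeconds(45000);        // 0.5 seconds
 *   clock.audioTsToVideoTs(44100, 44100); // 90000, i.e. one second of audio
 */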
4523 /**
4524 * mux.js
4525 *
4526 * Copyright (c) Brightcove
4527 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4528 */
4529
4530 /**
4531 * Sum the `byteLength` properties of the data in each AAC frame
4532 */
4533
4534 var sumFrameByteLengths = function sumFrameByteLengths(array) {
4535 var i,
4536 currentObj,
4537 sum = 0; // sum the byteLength of each frame's data
4538
4539 for (i = 0; i < array.length; i++) {
4540 currentObj = array[i];
4541 sum += currentObj.data.byteLength;
4542 }
4543
4544 return sum;
4545 }; // Possibly pad (prefix) the audio track with silence if appending this track
4546 // would lead to the introduction of a gap in the audio buffer
4547
4548
4549 var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
4550 var baseMediaDecodeTimeTs,
4551 frameDuration = 0,
4552 audioGapDuration = 0,
4553 audioFillFrameCount = 0,
4554 audioFillDuration = 0,
4555 silentFrame,
4556 i,
4557 firstFrame;
4558
4559 if (!frames.length) {
4560 return;
4561 }
4562
4563 baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfills
4564
4565 frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));
4566
4567 if (audioAppendStartTs && videoBaseMediaDecodeTime) {
4568 // insert the shortest possible amount (audio gap or audio to video gap)
4569 audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gap
4570
4571 audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
4572 audioFillDuration = audioFillFrameCount * frameDuration;
4573 } // don't attempt to fill gaps smaller than a single frame or larger
4574 // than a half second
4575
4576
4577 if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
4578 return;
4579 }
4580
4581 silentFrame = silence_1()[track.samplerate];
4582
4583 if (!silentFrame) {
4584 // we don't have a silent frame pregenerated for the sample rate, so use a frame
4585 // from the content instead
4586 silentFrame = frames[0].data;
4587 }
4588
4589 for (i = 0; i < audioFillFrameCount; i++) {
4590 firstFrame = frames[0];
4591 frames.splice(0, 0, {
4592 data: silentFrame,
4593 dts: firstFrame.dts - frameDuration,
4594 pts: firstFrame.pts - frameDuration
4595 });
4596 }
4597
4598 track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
4599 return audioFillDuration;
4600 }; // If the audio segment extends before the earliest allowed dts
4601 // value, remove AAC frames until the segment starts at or after the
4602 // earliest allowed DTS so that we don't end up with a negative
4603 // baseMediaDecodeTime for the audio track
4604
4605
4606 var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
4607 if (track.minSegmentDts >= earliestAllowedDts) {
4608 return adtsFrames;
4609 } // We will need to recalculate the earliest segment Dts
4610
4611
4612 track.minSegmentDts = Infinity;
4613 return adtsFrames.filter(function (currentFrame) {
4614 // If this is an allowed frame, keep it and record its DTS
4615 if (currentFrame.dts >= earliestAllowedDts) {
4616 track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
4617 track.minSegmentPts = track.minSegmentDts;
4618 return true;
4619 } // Otherwise, discard it
4620
4621
4622 return false;
4623 });
4624 }; // generate the track's sample table from an array of ADTS frames
4625
4626
4627 var generateSampleTable$1 = function generateSampleTable(frames) {
4628 var i,
4629 currentFrame,
4630 samples = [];
4631
4632 for (i = 0; i < frames.length; i++) {
4633 currentFrame = frames[i];
4634 samples.push({
4635 size: currentFrame.data.byteLength,
4636 duration: 1024 // For AAC audio, each frame contains 1024 samples
4637
4638 });
4639 }
4640
4641 return samples;
4642 }; // concatenate the track's raw mdat data from an array of ADTS frames
4643
4644
4645 var concatenateFrameData = function concatenateFrameData(frames) {
4646 var i,
4647 currentFrame,
4648 dataOffset = 0,
4649 data = new Uint8Array(sumFrameByteLengths(frames));
4650
4651 for (i = 0; i < frames.length; i++) {
4652 currentFrame = frames[i];
4653 data.set(currentFrame.data, dataOffset);
4654 dataOffset += currentFrame.data.byteLength;
4655 }
4656
4657 return data;
4658 };
4659
4660 var audioFrameUtils = {
4661 prefixWithSilence: prefixWithSilence,
4662 trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
4663 generateSampleTable: generateSampleTable$1,
4664 concatenateFrameData: concatenateFrameData
4665 };
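/**
 * Illustrative example (not part of the bundle): trimming AAC frames that
 * start before the earliest allowed DTS. The track and frame objects are
 * hypothetical minimal stand-ins.
 *
 *   var track = { minSegmentDts: -1024 };
 *   var frames = [
 *     { dts: -1024, data: new Uint8Array(10) },
 *     { dts: 0, data: new Uint8Array(10) }
 *   ];
 *   var kept = audioFrameUtils.trimAdtsFramesByEarliestDts(frames, track, 0);
 *   // kept.length === 1 and track.minSegmentDts === 0
 */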
4666 /**
4667 * mux.js
4668 *
4669 * Copyright (c) Brightcove
4670 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4671 */
4672
4673 var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS;
4674 /**
4675 * Store information about the start and end of the track and the
4676 * duration for each frame/sample we process in order to calculate
4677 * the baseMediaDecodeTime
4678 */
4679
4680 var collectDtsInfo = function collectDtsInfo(track, data) {
4681 if (typeof data.pts === 'number') {
4682 if (track.timelineStartInfo.pts === undefined) {
4683 track.timelineStartInfo.pts = data.pts;
4684 }
4685
4686 if (track.minSegmentPts === undefined) {
4687 track.minSegmentPts = data.pts;
4688 } else {
4689 track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
4690 }
4691
4692 if (track.maxSegmentPts === undefined) {
4693 track.maxSegmentPts = data.pts;
4694 } else {
4695 track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
4696 }
4697 }
4698
4699 if (typeof data.dts === 'number') {
4700 if (track.timelineStartInfo.dts === undefined) {
4701 track.timelineStartInfo.dts = data.dts;
4702 }
4703
4704 if (track.minSegmentDts === undefined) {
4705 track.minSegmentDts = data.dts;
4706 } else {
4707 track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
4708 }
4709
4710 if (track.maxSegmentDts === undefined) {
4711 track.maxSegmentDts = data.dts;
4712 } else {
4713 track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
4714 }
4715 }
4716 };
4717 /**
4718 * Clear values used to calculate the baseMediaDecodeTime between
4719 * tracks
4720 */
4721
4722
4723 var clearDtsInfo = function clearDtsInfo(track) {
4724 delete track.minSegmentDts;
4725 delete track.maxSegmentDts;
4726 delete track.minSegmentPts;
4727 delete track.maxSegmentPts;
4728 };
4729 /**
4730 * Calculate the track's baseMediaDecodeTime based on the earliest
4731 * DTS the transmuxer has ever seen and the minimum DTS for the
4732 * current track
4733 * @param track {object} track metadata configuration
4734 * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
4735 * in the source; false to adjust the first segment to start at 0.
4736 */
4737
4738
4739 var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
4740 var baseMediaDecodeTime,
4741 scale,
4742 minSegmentDts = track.minSegmentDts; // Optionally adjust the time so the first segment starts at zero.
4743
4744 if (!keepOriginalTimestamps) {
4745 minSegmentDts -= track.timelineStartInfo.dts;
4746 } // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
4747 // we want the start of the first segment to be placed
4748
4749
4750 baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime; // Add to that the distance this segment is from the very first
4751
4752 baseMediaDecodeTime += minSegmentDts; // baseMediaDecodeTime must not become negative
4753
4754 baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
4755
4756 if (track.type === 'audio') {
4757 // Audio has a different clock equal to the sampling_rate so we need to
4758 // scale the PTS values into the clock rate of the track
4759 scale = track.samplerate / ONE_SECOND_IN_TS$1;
4760 baseMediaDecodeTime *= scale;
4761 baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
4762 }
4763
4764 return baseMediaDecodeTime;
4765 };
4766
4767 var trackDecodeInfo = {
4768 clearDtsInfo: clearDtsInfo,
4769 calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
4770 collectDtsInfo: collectDtsInfo
4771 };
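/**
 * Illustrative example (not part of the bundle): how these helpers
 * cooperate across a segment. The timestamps are hypothetical.
 *
 *   var track = { type: 'video', timelineStartInfo: { baseMediaDecodeTime: 0 } };
 *   trackDecodeInfo.collectDtsInfo(track, { pts: 90000, dts: 90000 });
 *   trackDecodeInfo.collectDtsInfo(track, { pts: 99000, dts: 99000 });
 *   // with keepOriginalTimestamps === false the first segment is shifted to 0:
 *   trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, false); // 0
 *   trackDecodeInfo.clearDtsInfo(track); // reset before the next segment
 */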
4772 /**
4773 * mux.js
4774 *
4775 * Copyright (c) Brightcove
4776 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4777 *
4778 * Reads in-band caption information from a video elementary
4779 * stream. Captions must follow the CEA-708 standard for injection
4780 * into an MPEG-2 transport streams.
4781 * @see https://en.wikipedia.org/wiki/CEA-708
4782 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
4783 */
4784 // Supplemental enhancement information (SEI) NAL units have a payload
4785 // type field to indicate how they are to be interpreted. CEA-708 caption
4786 // content is always transmitted with payload type 0x04.
4787
4788 var USER_DATA_REGISTERED_ITU_T_T35 = 4,
4789 RBSP_TRAILING_BITS = 128;
4790 /**
4791 * Parse a supplemental enhancement information (SEI) NAL unit.
4792 * Stops parsing once a message of type ITU T T35 has been found.
4793 *
4794 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
4795 * @return {object} the parsed SEI payload
4796 * @see Rec. ITU-T H.264, 7.3.2.3.1
4797 */
4798
4799 var parseSei = function parseSei(bytes) {
4800 var i = 0,
4801 result = {
4802 payloadType: -1,
4803 payloadSize: 0
4804 },
4805 payloadType = 0,
4806 payloadSize = 0; // go through the sei_rbsp parsing each individual sei_message
4807
4808 while (i < bytes.byteLength) {
4809 // stop once we have hit the end of the sei_rbsp
4810 if (bytes[i] === RBSP_TRAILING_BITS) {
4811 break;
4812 } // Parse payload type
4813
4814
4815 while (bytes[i] === 0xFF) {
4816 payloadType += 255;
4817 i++;
4818 }
4819
4820 payloadType += bytes[i++]; // Parse payload size
4821
4822 while (bytes[i] === 0xFF) {
4823 payloadSize += 255;
4824 i++;
4825 }
4826
4827 payloadSize += bytes[i++]; // this sei_message is a 608/708 caption so save it and break
4828 // there can only ever be one caption message in a frame's sei
4829
4830 if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
4831 var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);
4832
4833 if (userIdentifier === 'GA94') {
4834 result.payloadType = payloadType;
4835 result.payloadSize = payloadSize;
4836 result.payload = bytes.subarray(i, i + payloadSize);
4837 break;
4838 } else {
4839 result.payload = void 0;
4840 }
4841 } // skip the payload and parse the next message
4842
4843
4844 i += payloadSize;
4845 payloadType = 0;
4846 payloadSize = 0;
4847 }
4848
4849 return result;
4850 }; // see ANSI/SCTE 128-1 (2013), section 8.1
4851
4852
4853 var parseUserData = function parseUserData(sei) {
4854 // itu_t_t35_country_code must be 181 (United States) for
4855 // captions
4856 if (sei.payload[0] !== 181) {
4857 return null;
4858 } // itu_t_t35_provider_code should be 49 (ATSC) for captions
4859
4860
4861 if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
4862 return null;
4863 } // the user_identifier should be "GA94" to indicate ATSC1 data
4864
4865
4866 if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
4867 return null;
4868 } // finally, user_data_type_code should be 0x03 for caption data
4869
4870
4871 if (sei.payload[7] !== 0x03) {
4872 return null;
4873 } // return the user_data_type_structure and strip the trailing
4874 // marker bits
4875
4876
4877 return sei.payload.subarray(8, sei.payload.length - 1);
4878 }; // see CEA-708-D, section 4.4
4879
4880
4881 var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
4882 var results = [],
4883 i,
4884 count,
4885 offset,
4886 data; // if this is just filler, return immediately
4887
4888 if (!(userData[0] & 0x40)) {
4889 return results;
4890 } // parse out the cc_data_1 and cc_data_2 fields
4891
4892
4893 count = userData[0] & 0x1f;
4894
4895 for (i = 0; i < count; i++) {
4896 offset = i * 3;
4897 data = {
4898 type: userData[offset + 2] & 0x03,
4899 pts: pts
4900 }; // capture cc data when cc_valid is 1
4901
4902 if (userData[offset + 2] & 0x04) {
4903 data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
4904 results.push(data);
4905 }
4906 }
4907
4908 return results;
4909 };
4910
4911 var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
4912 var length = data.byteLength,
4913 emulationPreventionBytesPositions = [],
4914 i = 1,
4915 newLength,
4916 newData; // Find all `Emulation Prevention Bytes`
4917
4918 while (i < length - 2) {
4919 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
4920 emulationPreventionBytesPositions.push(i + 2);
4921 i += 2;
4922 } else {
4923 i++;
4924 }
4925 } // If no Emulation Prevention Bytes were found just return the original
4926 // array
4927
4928
4929 if (emulationPreventionBytesPositions.length === 0) {
4930 return data;
4931 } // Create a new array to hold the NAL unit data
4932
4933
4934 newLength = length - emulationPreventionBytesPositions.length;
4935 newData = new Uint8Array(newLength);
4936 var sourceIndex = 0;
4937
4938 for (i = 0; i < newLength; sourceIndex++, i++) {
4939 if (sourceIndex === emulationPreventionBytesPositions[0]) {
4940 // Skip this byte
4941 sourceIndex++; // Remove this position index
4942
4943 emulationPreventionBytesPositions.shift();
4944 }
4945
4946 newData[i] = data[sourceIndex];
4947 }
4948
4949 return newData;
4950 }; // exports
4951
4952
4953 var captionPacketParser = {
4954 parseSei: parseSei,
4955 parseUserData: parseUserData,
4956 parseCaptionPackets: parseCaptionPackets,
4957 discardEmulationPreventionBytes: discardEmulationPreventionBytes,
4958 USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
4959 }; // Link To Transport
4960 // -----------------
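/**
 * Illustrative sketch (not part of the bundle): the parsing chain that
 * CaptionStream.prototype.push performs below, shown standalone.
 * `seiBytes` (an escaped SEI RBSP) and `pts` are hypothetical inputs.
 *
 *   var sei = captionPacketParser.parseSei(
 *     captionPacketParser.discardEmulationPreventionBytes(seiBytes));
 *   if (sei.payload &&
 *       sei.payloadType === captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
 *     var userData = captionPacketParser.parseUserData(sei);
 *     if (userData) {
 *       var packets = captionPacketParser.parseCaptionPackets(pts, userData);
 *     }
 *   }
 */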
4961
4962 var CaptionStream = function CaptionStream(options) {
4963 options = options || {};
4964 CaptionStream.prototype.init.call(this); // parse708captions flag, default to true
4965
4966 this.parse708captions_ = typeof options.parse708captions === 'boolean' ? options.parse708captions : true;
4967 this.captionPackets_ = [];
4968 this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
4969 new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
4970 new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
4971 new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
4972 ];
4973
4974 if (this.parse708captions_) {
4975 this.cc708Stream_ = new Cea708Stream(); // eslint-disable-line no-use-before-define
4976 }
4977
4978 this.reset(); // forward data and done events from CCs to this CaptionStream
4979
4980 this.ccStreams_.forEach(function (cc) {
4981 cc.on('data', this.trigger.bind(this, 'data'));
4982 cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
4983 cc.on('done', this.trigger.bind(this, 'done'));
4984 }, this);
4985
4986 if (this.parse708captions_) {
4987 this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));
4988 this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));
4989 this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));
4990 }
4991 };
4992
4993 CaptionStream.prototype = new stream();
4994
4995 CaptionStream.prototype.push = function (event) {
4996 var sei, userData, newCaptionPackets; // only examine SEI NALs
4997
4998 if (event.nalUnitType !== 'sei_rbsp') {
4999 return;
5000 } // parse the sei
5001
5002
5003 sei = captionPacketParser.parseSei(event.escapedRBSP); // no payload data, skip
5004
5005 if (!sei.payload) {
5006 return;
5007 } // ignore everything but user_data_registered_itu_t_t35
5008
5009
5010 if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
5011 return;
5012 } // parse out the user data payload
5013
5014
5015 userData = captionPacketParser.parseUserData(sei); // ignore unrecognized userData
5016
5017 if (!userData) {
5018 return;
5019 } // Sometimes, the same segment # will be downloaded twice. To stop the
5020 // caption data from being processed twice, we track the latest dts we've
5021 // received and ignore everything with a dts before that. However, since
5022 // data for a specific dts can be split across packets on either side of
5023 // a segment boundary, we need to make sure we *don't* ignore the packets
5024 // from the *next* segment that have dts === this.latestDts_. By constantly
5025 // tracking the number of packets received with dts === this.latestDts_, we
5026 // know how many should be ignored once we start receiving duplicates.
5027
5028
5029 if (event.dts < this.latestDts_) {
5030 // We've started getting older data, so set the flag.
5031 this.ignoreNextEqualDts_ = true;
5032 return;
5033 } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
5034 this.numSameDts_--;
5035
5036 if (!this.numSameDts_) {
5037 // We've received the last duplicate packet, time to start processing again
5038 this.ignoreNextEqualDts_ = false;
5039 }
5040
5041 return;
5042 } // parse out CC data packets and save them for later
5043
5044
5045 newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
5046 this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
5047
5048 if (this.latestDts_ !== event.dts) {
5049 this.numSameDts_ = 0;
5050 }
5051
5052 this.numSameDts_++;
5053 this.latestDts_ = event.dts;
5054 };
5055
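// Editor's note: an illustrative, uninvoked sketch (not part of the original
// bundle) of how a CaptionStream consumer is typically wired up against the
// `push` contract above. The event shape (nalUnitType, escapedRBSP, dts, pts)
// mirrors what `push` expects; the payload bytes are hypothetical.
var exampleCaptionStreamUsage = function () {
  var captions = new CaptionStream({ parse708captions: true });

  captions.on('data', function (caption) {
    // caption.stream is 'CC1'..'CC4' for 608 data or 'cc708_N' for a 708
    // service; caption.text covers [caption.startPts, caption.endPts]
  });

  captions.push({
    nalUnitType: 'sei_rbsp',
    escapedRBSP: new Uint8Array([/* hypothetical SEI payload */]),
    dts: 90000,
    pts: 90000
  });
  captions.flush();
};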
5056 CaptionStream.prototype.flushCCStreams = function (flushType) {
5057 this.ccStreams_.forEach(function (cc) {
5058 return flushType === 'flush' ? cc.flush() : cc.partialFlush();
5059 }, this);
5060 };
5061
5062 CaptionStream.prototype.flushStream = function (flushType) {
5063 // make sure we actually parsed captions before proceeding
5064 if (!this.captionPackets_.length) {
5065 this.flushCCStreams(flushType);
5066 return;
5067 } // In Chrome, the Array#sort function is not stable so add a
5068 // presortIndex that we can use to ensure we get a stable sort
5069
5070
5071 this.captionPackets_.forEach(function (elem, idx) {
5072 elem.presortIndex = idx;
5073 }); // sort caption byte-pairs based on their PTS values
5074
5075 this.captionPackets_.sort(function (a, b) {
5076 if (a.pts === b.pts) {
5077 return a.presortIndex - b.presortIndex;
5078 }
5079
5080 return a.pts - b.pts;
5081 });
5082 this.captionPackets_.forEach(function (packet) {
5083 if (packet.type < 2) {
5084 // Dispatch packet to the right Cea608Stream
5085 this.dispatchCea608Packet(packet);
5086 } else {
5087 // Dispatch packet to the Cea708Stream
5088 this.dispatchCea708Packet(packet);
5089 }
5090 }, this);
5091 this.captionPackets_.length = 0;
5092 this.flushCCStreams(flushType);
5093 };
5094
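// Editor's note: illustrative only (not part of the original bundle). The
// presortIndex assigned above turns Array#sort into a stable sort: packets
// that share a PTS keep their arrival order. An uninvoked sketch of the same
// idiom over an arbitrary packet array:
var exampleStableSortByPts = function (packets) {
  packets.forEach(function (packet, idx) {
    packet.presortIndex = idx;
  });
  return packets.sort(function (a, b) {
    return a.pts === b.pts ? a.presortIndex - b.presortIndex : a.pts - b.pts;
  });
};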
5095 CaptionStream.prototype.flush = function () {
5096 return this.flushStream('flush');
5097 }; // Only called if handling partial data
5098
5099
5100 CaptionStream.prototype.partialFlush = function () {
5101 return this.flushStream('partialFlush');
5102 };
5103
5104 CaptionStream.prototype.reset = function () {
5105 this.latestDts_ = null;
5106 this.ignoreNextEqualDts_ = false;
5107 this.numSameDts_ = 0;
5108 this.activeCea608Channel_ = [null, null];
5109 this.ccStreams_.forEach(function (ccStream) {
5110 ccStream.reset();
5111 });
5112 }; // From the CEA-608 spec:
5113
5114 /*
5115 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
5116 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
5117 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
5118 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
5119 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
5120 * to switch to captioning or Text.
5121 */
5122 // With that in mind, we ignore any data between an XDS control code and a
5123 // subsequent closed-captioning control code.
5124
5125
5126 CaptionStream.prototype.dispatchCea608Packet = function (packet) {
5127 // NOTE: packet.type is the CEA608 field
5128 if (this.setsTextOrXDSActive(packet)) {
5129 this.activeCea608Channel_[packet.type] = null;
5130 } else if (this.setsChannel1Active(packet)) {
5131 this.activeCea608Channel_[packet.type] = 0;
5132 } else if (this.setsChannel2Active(packet)) {
5133 this.activeCea608Channel_[packet.type] = 1;
5134 }
5135
5136 if (this.activeCea608Channel_[packet.type] === null) {
5137 // If we haven't received anything to set the active channel, or the
5138 // packets are Text/XDS data, discard the data; we don't want jumbled
5139 // captions
5140 return;
5141 }
5142
5143 this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
5144 };
5145
5146 CaptionStream.prototype.setsChannel1Active = function (packet) {
5147 return (packet.ccData & 0x7800) === 0x1000;
5148 };
5149
5150 CaptionStream.prototype.setsChannel2Active = function (packet) {
5151 return (packet.ccData & 0x7800) === 0x1800;
5152 };
5153
5154 CaptionStream.prototype.setsTextOrXDSActive = function (packet) {
5155 return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
5156 };
5157
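// Editor's note: an illustrative, uninvoked breakdown (not part of the
// original bundle) of the channel bitmasks above. With the parity bits
// already stripped, the first byte of a control pair selects the channel:
var exampleActiveChannel = function (ccData) {
  if ((ccData & 0x7800) === 0x1000) {
    return 0; // e.g. 0x1420 -> channel 1 becomes active
  }
  if ((ccData & 0x7800) === 0x1800) {
    return 1; // e.g. 0x1c20 -> channel 2 becomes active
  }
  return null; // not a channel-setting control code
};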
5158 CaptionStream.prototype.dispatchCea708Packet = function (packet) {
5159 if (this.parse708captions_) {
5160 this.cc708Stream_.push(packet);
5161 }
5162 }; // ----------------------
5163 // Session to Application
5164 // ----------------------
5165 // This hash maps special and extended character codes to their
5166 // proper Unicode equivalent. The first one-byte key is just a
5167 // non-standard character code. The two-byte keys that follow are
5168 // the extended CEA708 character codes, along with the preceding
5169 // 0x10 extended character byte to distinguish these codes from
5170 // non-extended character codes. Every CEA708 character code that
5171 // is not in this object maps directly to a standard unicode
5172 // character code.
5173 // The transparent space and non-breaking transparent space are
5174 // technically not fully supported since there is no code to
5175 // make them transparent, so they have normal non-transparent
5176 // stand-ins.
5177 // The special closed caption (CC) character isn't a standard
5178 // unicode character, so a fairly similar unicode character was
5179 // chosen in its place.
5180
5181
5182 var CHARACTER_TRANSLATION_708 = {
5183 0x7f: 0x266a,
5184 // ♪
5185 0x1020: 0x20,
5186 // Transparent Space
5187 0x1021: 0xa0,
5188 // Non-breaking Transparent Space
5189 0x1025: 0x2026,
5190 // …
5191 0x102a: 0x0160,
5192 // Š
5193 0x102c: 0x0152,
5194 // Œ
5195 0x1030: 0x2588,
5196 // █
5197 0x1031: 0x2018,
5198 // ‘
5199 0x1032: 0x2019,
5200 // ’
5201 0x1033: 0x201c,
5202 // “
5203 0x1034: 0x201d,
5204 // ”
5205 0x1035: 0x2022,
5206 // •
5207 0x1039: 0x2122,
5208 // ™
5209 0x103a: 0x0161,
5210 // š
5211 0x103c: 0x0153,
5212 // œ
5213 0x103d: 0x2120,
5214 // ℠
5215 0x103f: 0x0178,
5216 // Ÿ
5217 0x1076: 0x215b,
5218 // ⅛
5219 0x1077: 0x215c,
5220 // ⅜
5221 0x1078: 0x215d,
5222 // ⅝
5223 0x1079: 0x215e,
5224 // ⅞
5225 0x107a: 0x23d0,
5226 // ⏐
5227 0x107b: 0x23a4,
5228 // ⎤
5229 0x107c: 0x23a3,
5230 // ⎣
5231 0x107d: 0x23af,
5232 // ⎯
5233 0x107e: 0x23a6,
5234 // ⎦
5235 0x107f: 0x23a1,
5236 // ⎡
5237 0x10a0: 0x3138 // ㄸ (CC char)
5238
5239 };
5240
5241 var get708CharFromCode = function get708CharFromCode(code) {
5242 var newCode = CHARACTER_TRANSLATION_708[code] || code;
5243
5244 if (code & 0x1000 && code === newCode) {
5245 // Invalid extended code
5246 return '';
5247 }
5248
5249 return String.fromCharCode(newCode);
5250 };
5251
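// Editor's note: illustrative, uninvoked examples (not part of the original
// bundle) of the lookup above. Extended codes carry a 0x1000 prefix folded
// into the table key; an unmapped extended code yields an empty string.
var example708CharLookup = function () {
  var ellipsis = get708CharFromCode(0x1025); // '\u2026' via the table above
  var plain = get708CharFromCode(0x41);      // 'A', passes through unchanged
  var invalid = get708CharFromCode(0x1041);  // '' -- unmapped extended code
  return [ellipsis, plain, invalid];
};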
5252 var within708TextBlock = function within708TextBlock(b) {
5253 return 0x20 <= b && b <= 0x7f || 0xa0 <= b && b <= 0xff;
5254 };
5255
5256 var Cea708Window = function Cea708Window(windowNum) {
5257 this.windowNum = windowNum;
5258 this.reset();
5259 };
5260
5261 Cea708Window.prototype.reset = function () {
5262 this.clearText();
5263 this.pendingNewLine = false;
5264 this.winAttr = {};
5265 this.penAttr = {};
5266 this.penLoc = {};
5267 this.penColor = {}; // These default values are arbitrary,
5268 // defineWindow will usually override them
5269
5270 this.visible = 0;
5271 this.rowLock = 0;
5272 this.columnLock = 0;
5273 this.priority = 0;
5274 this.relativePositioning = 0;
5275 this.anchorVertical = 0;
5276 this.anchorHorizontal = 0;
5277 this.anchorPoint = 0;
5278 this.rowCount = 1;
5279 this.virtualRowCount = this.rowCount + 1;
5280 this.columnCount = 41;
5281 this.windowStyle = 0;
5282 this.penStyle = 0;
5283 };
5284
5285 Cea708Window.prototype.getText = function () {
5286 return this.rows.join('\n');
5287 };
5288
5289 Cea708Window.prototype.clearText = function () {
5290 this.rows = [''];
5291 this.rowIdx = 0;
5292 };
5293
5294 Cea708Window.prototype.newLine = function (pts) {
5295 if (this.rows.length >= this.virtualRowCount && typeof this.beforeRowOverflow === 'function') {
5296 this.beforeRowOverflow(pts);
5297 }
5298
5299 if (this.rows.length > 0) {
5300 this.rows.push('');
5301 this.rowIdx++;
5302 } // Show all virtual rows since there's no visible scrolling
5303
5304
5305 while (this.rows.length > this.virtualRowCount) {
5306 this.rows.shift();
5307 this.rowIdx--;
5308 }
5309 };
5310
5311 Cea708Window.prototype.isEmpty = function () {
5312 if (this.rows.length === 0) {
5313 return true;
5314 } else if (this.rows.length === 1) {
5315 return this.rows[0] === '';
5316 }
5317
5318 return false;
5319 };
5320
5321 Cea708Window.prototype.addText = function (text) {
5322 this.rows[this.rowIdx] += text;
5323 };
5324
5325 Cea708Window.prototype.backspace = function () {
5326 if (!this.isEmpty()) {
5327 var row = this.rows[this.rowIdx];
5328 this.rows[this.rowIdx] = row.substr(0, row.length - 1);
5329 }
5330 };
5331
5332 var Cea708Service = function Cea708Service(serviceNum) {
5333 this.serviceNum = serviceNum;
5334 this.text = '';
5335 this.currentWindow = new Cea708Window(-1);
5336 this.windows = [];
5337 };
5338 /**
5339 * Initialize service windows
5340 * Must be run before service use
5341 *
5342 * @param {Integer} pts PTS value
5343 * @param {Function} beforeRowOverflow Function to execute before row overflow of a window
5344 */
5345
5346
5347 Cea708Service.prototype.init = function (pts, beforeRowOverflow) {
5348 this.startPts = pts;
5349
5350 for (var win = 0; win < 8; win++) {
5351 this.windows[win] = new Cea708Window(win);
5352
5353 if (typeof beforeRowOverflow === 'function') {
5354 this.windows[win].beforeRowOverflow = beforeRowOverflow;
5355 }
5356 }
5357 };
5358 /**
5359 * Set current window of service to be affected by commands
5360 *
5361 * @param {Integer} windowNum Window number
5362 */
5363
5364
5365 Cea708Service.prototype.setCurrentWindow = function (windowNum) {
5366 this.currentWindow = this.windows[windowNum];
5367 };
5368
5369 var Cea708Stream = function Cea708Stream() {
5370 Cea708Stream.prototype.init.call(this);
5371 var self = this;
5372 this.current708Packet = null;
5373 this.services = {};
5374
5375 this.push = function (packet) {
5376 if (packet.type === 3) {
5377 // 708 packet start
5378 self.new708Packet();
5379 self.add708Bytes(packet);
5380 } else {
5381 if (self.current708Packet === null) {
5382 // This should only happen at the start of a file if there's no packet start.
5383 self.new708Packet();
5384 }
5385
5386 self.add708Bytes(packet);
5387 }
5388 };
5389 };
5390
5391 Cea708Stream.prototype = new stream();
5392 /**
5393 * Push current 708 packet, create new 708 packet.
5394 */
5395
5396 Cea708Stream.prototype.new708Packet = function () {
5397 if (this.current708Packet !== null) {
5398 this.push708Packet();
5399 }
5400
5401 this.current708Packet = {
5402 data: [],
5403 ptsVals: []
5404 };
5405 };
5406 /**
5407 * Add pts and both bytes from packet into current 708 packet.
5408 */
5409
5410
5411 Cea708Stream.prototype.add708Bytes = function (packet) {
5412 var data = packet.ccData;
5413 var byte0 = data >>> 8;
5414 var byte1 = data & 0xff; // I would just keep a list of packets instead of bytes, but it isn't clear in the spec
5415 // that service blocks will always line up with byte pairs.
5416
5417 this.current708Packet.ptsVals.push(packet.pts);
5418 this.current708Packet.data.push(byte0);
5419 this.current708Packet.data.push(byte1);
5420 };
5421 /**
5422 * Parse completed 708 packet into service blocks and push each service block.
5423 */
5424
5425
5426 Cea708Stream.prototype.push708Packet = function () {
5427 var packet708 = this.current708Packet;
5428 var packetData = packet708.data;
5429 var serviceNum = null;
5430 var blockSize = null;
5431 var i = 0;
5432 var b = packetData[i++];
5433 packet708.seq = b >> 6;
5434 packet708.sizeCode = b & 0x3f; // 0b00111111;
5435
5436 for (; i < packetData.length; i++) {
5437 b = packetData[i++];
5438 serviceNum = b >> 5;
5439 blockSize = b & 0x1f; // 0b00011111
5440
5441 if (serviceNum === 7 && blockSize > 0) {
5442 // Extended service num
5443 b = packetData[i++];
5444 serviceNum = b;
5445 }
5446
5447 this.pushServiceBlock(serviceNum, i, blockSize);
5448
5449 if (blockSize > 0) {
5450 i += blockSize - 1;
5451 }
5452 }
5453 };
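// Editor's note: an illustrative, uninvoked decode (not part of the original
// bundle) of a 708 service block header byte as consumed above: the top
// three bits carry the service number and the low five bits the block size.
// A service number of 7 signals that the real (extended) number follows in
// the next byte.
var exampleServiceBlockHeader = function (b) {
  return {
    serviceNum: b >> 5,  // e.g. 0x42 >> 5 === 2
    blockSize: b & 0x1f  // e.g. 0x42 & 0x1f === 2
  };
};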
5454 /**
5455 * Parse service block, execute commands, read text.
5456 *
5457 * Note: While many of these commands serve important purposes,
5458 * many others just parse out the parameters or attributes, but
5459 * nothing is done with them because this is not a full and complete
5460 * implementation of the entire 708 spec.
5461 *
5462 * @param {Integer} serviceNum Service number
5463 * @param {Integer} start Start index of the 708 packet data
5464 * @param {Integer} size Block size
5465 */
5466
5467
5468 Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {
5469 var b;
5470 var i = start;
5471 var packetData = this.current708Packet.data;
5472 var service = this.services[serviceNum];
5473
5474 if (!service) {
5475 service = this.initService(serviceNum, i);
5476 }
5477
5478 for (; i < start + size && i < packetData.length; i++) {
5479 b = packetData[i];
5480
5481 if (within708TextBlock(b)) {
5482 i = this.handleText(i, service);
5483 } else if (b === 0x10) {
5484 i = this.extendedCommands(i, service);
5485 } else if (0x80 <= b && b <= 0x87) {
5486 i = this.setCurrentWindow(i, service);
5487 } else if (0x98 <= b && b <= 0x9f) {
5488 i = this.defineWindow(i, service);
5489 } else if (b === 0x88) {
5490 i = this.clearWindows(i, service);
5491 } else if (b === 0x8c) {
5492 i = this.deleteWindows(i, service);
5493 } else if (b === 0x89) {
5494 i = this.displayWindows(i, service);
5495 } else if (b === 0x8a) {
5496 i = this.hideWindows(i, service);
5497 } else if (b === 0x8b) {
5498 i = this.toggleWindows(i, service);
5499 } else if (b === 0x97) {
5500 i = this.setWindowAttributes(i, service);
5501 } else if (b === 0x90) {
5502 i = this.setPenAttributes(i, service);
5503 } else if (b === 0x91) {
5504 i = this.setPenColor(i, service);
5505 } else if (b === 0x92) {
5506 i = this.setPenLocation(i, service);
5507 } else if (b === 0x8f) {
5508 service = this.reset(i, service);
5509 } else if (b === 0x08) {
5510 // BS: Backspace
5511 service.currentWindow.backspace();
5512 } else if (b === 0x0c) {
5513 // FF: Form feed
5514 service.currentWindow.clearText();
5515 } else if (b === 0x0d) {
5516 // CR: Carriage return
5517 service.currentWindow.pendingNewLine = true;
5518 } else if (b === 0x0e) {
5519 // HCR: Horizontal carriage return
5520 service.currentWindow.clearText();
5521 } else if (b === 0x8d) {
5522 // DLY: Delay, nothing to do
5523 i++;
5524 } // unrecognized command byte: ignore
5525 }
5526 };
5527 /**
5528 * Execute an extended command
5529 *
5530 * @param {Integer} i Current index in the 708 packet
5531 * @param {Service} service The service object to be affected
5532 * @return {Integer} New index after parsing
5533 */
5534
5535
5536 Cea708Stream.prototype.extendedCommands = function (i, service) {
5537 var packetData = this.current708Packet.data;
5538 var b = packetData[++i];
5539
5540 if (within708TextBlock(b)) {
5541 i = this.handleText(i, service, true);
5542 }
5543
5544 return i;
5545 };
5546 /**
5547 * Get PTS value of a given byte index
5548 *
5549 * @param {Integer} byteIndex Index of the byte
5550 * @return {Integer} PTS
5551 */
5552
5553
5554 Cea708Stream.prototype.getPts = function (byteIndex) {
5555 // There's 1 pts value per 2 bytes
5556 return this.current708Packet.ptsVals[Math.floor(byteIndex / 2)];
5557 };
5558 /**
5559 * Initializes a service
5560 *
5561 * @param {Integer} serviceNum Service number
5562 * @return {Service} Initialized service object
5563 */
5564
5565
5566 Cea708Stream.prototype.initService = function (serviceNum, i) {
5567 var self = this;
5568 this.services[serviceNum] = new Cea708Service(serviceNum);
5569 this.services[serviceNum].init(this.getPts(i), function (pts) {
5570 self.flushDisplayed(pts, self.services[serviceNum]);
5571 });
5572 return this.services[serviceNum];
5573 };
5574 /**
5575 * Execute text writing to current window
5576 *
5577 * @param {Integer} i Current index in the 708 packet
5578 * @param {Service} service The service object to be affected
5579 * @return {Integer} New index after parsing
5580 */
5581
5582
5583 Cea708Stream.prototype.handleText = function (i, service, isExtended) {
5584 var packetData = this.current708Packet.data;
5585 var b = packetData[i];
5586 var extended = isExtended ? 0x1000 : 0x0000;
5587 var char = get708CharFromCode(extended | b);
5588 var win = service.currentWindow;
5589
5590 if (win.pendingNewLine && !win.isEmpty()) {
5591 win.newLine(this.getPts(i));
5592 }
5593
5594 win.pendingNewLine = false;
5595 win.addText(char);
5596 return i;
5597 };
5598 /**
5599 * Parse and execute the CW# command.
5600 *
5601 * Set the current window.
5602 *
5603 * @param {Integer} i Current index in the 708 packet
5604 * @param {Service} service The service object to be affected
5605 * @return {Integer} New index after parsing
5606 */
5607
5608
5609 Cea708Stream.prototype.setCurrentWindow = function (i, service) {
5610 var packetData = this.current708Packet.data;
5611 var b = packetData[i];
5612 var windowNum = b & 0x07;
5613 service.setCurrentWindow(windowNum);
5614 return i;
5615 };
5616 /**
5617 * Parse and execute the DF# command.
5618 *
5619 * Define a window and set it as the current window.
5620 *
5621 * @param {Integer} i Current index in the 708 packet
5622 * @param {Service} service The service object to be affected
5623 * @return {Integer} New index after parsing
5624 */
5625
5626
5627 Cea708Stream.prototype.defineWindow = function (i, service) {
5628 var packetData = this.current708Packet.data;
5629 var b = packetData[i];
5630 var windowNum = b & 0x07;
5631 service.setCurrentWindow(windowNum);
5632 var win = service.currentWindow;
5633 b = packetData[++i];
5634 win.visible = (b & 0x20) >> 5; // v
5635
5636 win.rowLock = (b & 0x10) >> 4; // rl
5637
5638 win.columnLock = (b & 0x08) >> 3; // cl
5639
5640 win.priority = b & 0x07; // p
5641
5642 b = packetData[++i];
5643 win.relativePositioning = (b & 0x80) >> 7; // rp
5644
5645 win.anchorVertical = b & 0x7f; // av
5646
5647 b = packetData[++i];
5648 win.anchorHorizontal = b; // ah
5649
5650 b = packetData[++i];
5651 win.anchorPoint = (b & 0xf0) >> 4; // ap
5652
5653 win.rowCount = b & 0x0f; // rc
5654
5655 b = packetData[++i];
5656 win.columnCount = b & 0x3f; // cc
5657
5658 b = packetData[++i];
5659 win.windowStyle = (b & 0x38) >> 3; // ws
5660
5661 win.penStyle = b & 0x07; // ps
5662 // The spec says there are (rowCount+1) "virtual rows"
5663
5664 win.virtualRowCount = win.rowCount + 1;
5665 return i;
5666 };
5667 /**
5668 * Parse and execute the SWA command.
5669 *
5670 * Set attributes of the current window.
5671 *
5672 * @param {Integer} i Current index in the 708 packet
5673 * @param {Service} service The service object to be affected
5674 * @return {Integer} New index after parsing
5675 */
5676
5677
5678 Cea708Stream.prototype.setWindowAttributes = function (i, service) {
5679 var packetData = this.current708Packet.data;
5680 var b = packetData[i];
5681 var winAttr = service.currentWindow.winAttr;
5682 b = packetData[++i];
5683 winAttr.fillOpacity = (b & 0xc0) >> 6; // fo
5684
5685 winAttr.fillRed = (b & 0x30) >> 4; // fr
5686
5687 winAttr.fillGreen = (b & 0x0c) >> 2; // fg
5688
5689 winAttr.fillBlue = b & 0x03; // fb
5690
5691 b = packetData[++i];
5692 winAttr.borderType = (b & 0xc0) >> 6; // bt
5693
5694 winAttr.borderRed = (b & 0x30) >> 4; // br
5695
5696 winAttr.borderGreen = (b & 0x0c) >> 2; // bg
5697
5698 winAttr.borderBlue = b & 0x03; // bb
5699
5700 b = packetData[++i];
5701 winAttr.borderType += (b & 0x80) >> 5; // bt
5702
5703 winAttr.wordWrap = (b & 0x40) >> 6; // ww
5704
5705 winAttr.printDirection = (b & 0x30) >> 4; // pd
5706
5707 winAttr.scrollDirection = (b & 0x0c) >> 2; // sd
5708
5709 winAttr.justify = b & 0x03; // j
5710
5711 b = packetData[++i];
5712 winAttr.effectSpeed = (b & 0xf0) >> 4; // es
5713
5714 winAttr.effectDirection = (b & 0x0c) >> 2; // ed
5715
5716 winAttr.displayEffect = b & 0x03; // de
5717
5718 return i;
5719 };
5720 /**
5721 * Gather text from all displayed windows and push a caption to output.
5722 *
5723 * @param {Integer} pts PTS value at which the displayed text is flushed
5724 * @param {Service} service The service object to be affected
5725 */
5726
5727
5728 Cea708Stream.prototype.flushDisplayed = function (pts, service) {
5729 var displayedText = []; // TODO: Positioning not supported, displaying multiple windows will not necessarily
5730 // display text in the correct order, but sample files so far have not shown any issue.
5731
5732 for (var winId = 0; winId < 8; winId++) {
5733 if (service.windows[winId].visible && !service.windows[winId].isEmpty()) {
5734 displayedText.push(service.windows[winId].getText());
5735 }
5736 }
5737
5738 service.endPts = pts;
5739 service.text = displayedText.join('\n\n');
5740 this.pushCaption(service);
5741 service.startPts = pts;
5742 };
5743 /**
5744 * Push a caption to output if the caption contains text.
5745 *
5746 * @param {Service} service The service object to be affected
5747 */
5748
5749
5750 Cea708Stream.prototype.pushCaption = function (service) {
5751 if (service.text !== '') {
5752 this.trigger('data', {
5753 startPts: service.startPts,
5754 endPts: service.endPts,
5755 text: service.text,
5756 stream: 'cc708_' + service.serviceNum
5757 });
5758 service.text = '';
5759 service.startPts = service.endPts;
5760 }
5761 };
5762 /**
5763 * Parse and execute the DSW command.
5764 *
5765 * Set visible property of windows based on the parsed bitmask.
5766 *
5767 * @param {Integer} i Current index in the 708 packet
5768 * @param {Service} service The service object to be affected
5769 * @return {Integer} New index after parsing
5770 */
5771
5772
5773 Cea708Stream.prototype.displayWindows = function (i, service) {
5774 var packetData = this.current708Packet.data;
5775 var b = packetData[++i];
5776 var pts = this.getPts(i);
5777 this.flushDisplayed(pts, service);
5778
5779 for (var winId = 0; winId < 8; winId++) {
5780 if (b & 0x01 << winId) {
5781 service.windows[winId].visible = 1;
5782 }
5783 }
5784
5785 return i;
5786 };
5787 /**
5788 * Parse and execute the HDW command.
5789 *
5790 * Set visible property of windows based on the parsed bitmask.
5791 *
5792 * @param {Integer} i Current index in the 708 packet
5793 * @param {Service} service The service object to be affected
5794 * @return {Integer} New index after parsing
5795 */
5796
5797
5798 Cea708Stream.prototype.hideWindows = function (i, service) {
5799 var packetData = this.current708Packet.data;
5800 var b = packetData[++i];
5801 var pts = this.getPts(i);
5802 this.flushDisplayed(pts, service);
5803
5804 for (var winId = 0; winId < 8; winId++) {
5805 if (b & 0x01 << winId) {
5806 service.windows[winId].visible = 0;
5807 }
5808 }
5809
5810 return i;
5811 };
5812 /**
5813 * Parse and execute the TGW command.
5814 *
5815 * Set visible property of windows based on the parsed bitmask.
5816 *
5817 * @param {Integer} i Current index in the 708 packet
5818 * @param {Service} service The service object to be affected
5819 * @return {Integer} New index after parsing
5820 */
5821
5822
5823 Cea708Stream.prototype.toggleWindows = function (i, service) {
5824 var packetData = this.current708Packet.data;
5825 var b = packetData[++i];
5826 var pts = this.getPts(i);
5827 this.flushDisplayed(pts, service);
5828
5829 for (var winId = 0; winId < 8; winId++) {
5830 if (b & 0x01 << winId) {
5831 service.windows[winId].visible ^= 1;
5832 }
5833 }
5834
5835 return i;
5836 };
5837 /**
5838 * Parse and execute the CLW command.
5839 *
5840 * Clear text of windows based on the parsed bitmask.
5841 *
5842 * @param {Integer} i Current index in the 708 packet
5843 * @param {Service} service The service object to be affected
5844 * @return {Integer} New index after parsing
5845 */
5846
5847
5848 Cea708Stream.prototype.clearWindows = function (i, service) {
5849 var packetData = this.current708Packet.data;
5850 var b = packetData[++i];
5851 var pts = this.getPts(i);
5852 this.flushDisplayed(pts, service);
5853
5854 for (var winId = 0; winId < 8; winId++) {
5855 if (b & 0x01 << winId) {
5856 service.windows[winId].clearText();
5857 }
5858 }
5859
5860 return i;
5861 };
5862 /**
5863 * Parse and execute the DLW command.
5864 *
5865 * Re-initialize windows based on the parsed bitmask.
5866 *
5867 * @param {Integer} i Current index in the 708 packet
5868 * @param {Service} service The service object to be affected
5869 * @return {Integer} New index after parsing
5870 */
5871
5872
5873 Cea708Stream.prototype.deleteWindows = function (i, service) {
5874 var packetData = this.current708Packet.data;
5875 var b = packetData[++i];
5876 var pts = this.getPts(i);
5877 this.flushDisplayed(pts, service);
5878
5879 for (var winId = 0; winId < 8; winId++) {
5880 if (b & 0x01 << winId) {
5881 service.windows[winId].reset();
5882 }
5883 }
5884
5885 return i;
5886 };
5887 /**
5888 * Parse and execute the SPA command.
5889 *
5890 * Set pen attributes of the current window.
5891 *
5892 * @param {Integer} i Current index in the 708 packet
5893 * @param {Service} service The service object to be affected
5894 * @return {Integer} New index after parsing
5895 */
5896
5897
5898 Cea708Stream.prototype.setPenAttributes = function (i, service) {
5899 var packetData = this.current708Packet.data;
5900 var b = packetData[i];
5901 var penAttr = service.currentWindow.penAttr;
5902 b = packetData[++i];
5903 penAttr.textTag = (b & 0xf0) >> 4; // tt
5904
5905 penAttr.offset = (b & 0x0c) >> 2; // o
5906
5907 penAttr.penSize = b & 0x03; // s
5908
5909 b = packetData[++i];
5910 penAttr.italics = (b & 0x80) >> 7; // i
5911
5912 penAttr.underline = (b & 0x40) >> 6; // u
5913
5914 penAttr.edgeType = (b & 0x38) >> 3; // et
5915
5916 penAttr.fontStyle = b & 0x07; // fs
5917
5918 return i;
5919 };
5920 /**
5921 * Parse and execute the SPC command.
5922 *
5923 * Set pen color of the current window.
5924 *
5925 * @param {Integer} i Current index in the 708 packet
5926 * @param {Service} service The service object to be affected
5927 * @return {Integer} New index after parsing
5928 */
5929
5930
5931 Cea708Stream.prototype.setPenColor = function (i, service) {
5932 var packetData = this.current708Packet.data;
5933 var b = packetData[i];
5934 var penColor = service.currentWindow.penColor;
5935 b = packetData[++i];
5936 penColor.fgOpacity = (b & 0xc0) >> 6; // fo
5937
5938 penColor.fgRed = (b & 0x30) >> 4; // fr
5939
5940 penColor.fgGreen = (b & 0x0c) >> 2; // fg
5941
5942 penColor.fgBlue = b & 0x03; // fb
5943
5944 b = packetData[++i];
5945 penColor.bgOpacity = (b & 0xc0) >> 6; // bo
5946
5947 penColor.bgRed = (b & 0x30) >> 4; // br
5948
5949 penColor.bgGreen = (b & 0x0c) >> 2; // bg
5950
5951 penColor.bgBlue = b & 0x03; // bb
5952
5953 b = packetData[++i];
5954 penColor.edgeRed = (b & 0x30) >> 4; // er
5955
5956 penColor.edgeGreen = (b & 0x0c) >> 2; // eg
5957
5958 penColor.edgeBlue = b & 0x03; // eb
5959
5960 return i;
5961 };
5962 /**
5963 * Parse and execute the SPL command.
5964 *
5965 * Set pen location of the current window.
5966 *
5967 * @param {Integer} i Current index in the 708 packet
5968 * @param {Service} service The service object to be affected
5969 * @return {Integer} New index after parsing
5970 */
5971
5972
5973 Cea708Stream.prototype.setPenLocation = function (i, service) {
5974 var packetData = this.current708Packet.data;
5975 var b = packetData[i];
5976 var penLoc = service.currentWindow.penLoc; // Positioning isn't really supported at the moment, so this essentially just inserts a linebreak
5977
5978 service.currentWindow.pendingNewLine = true;
5979 b = packetData[++i];
5980 penLoc.row = b & 0x0f; // r
5981
5982 b = packetData[++i];
5983 penLoc.column = b & 0x3f; // c
5984
5985 return i;
5986 };
5987 /**
5988 * Execute the RST command.
5989 *
5990 * Reset service to a clean slate. Re-initialize.
5991 *
5992 * @param {Integer} i Current index in the 708 packet
5993 * @param {Service} service The service object to be affected
5994 * @return {Service} Re-initialized service
5995 */
5996
5997
5998 Cea708Stream.prototype.reset = function (i, service) {
5999 var pts = this.getPts(i);
6000 this.flushDisplayed(pts, service);
6001 return this.initService(service.serviceNum, i);
6002 }; // This hash maps non-ASCII, special, and extended character codes to their
6003 // proper Unicode equivalent. The leading one-byte keys are the
6004 // non-standard ASCII characters, which simply map the CEA608 byte
6005 // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
6006 // character codes, but have their MSB bitmasked with 0x03 so that a lookup
6007 // can be performed regardless of the field and data channel on which the
6008 // character code was received.
6009
6010
6011 var CHARACTER_TRANSLATION = {
6012 0x2a: 0xe1,
6013 // á
6014 0x5c: 0xe9,
6015 // é
6016 0x5e: 0xed,
6017 // í
6018 0x5f: 0xf3,
6019 // ó
6020 0x60: 0xfa,
6021 // ú
6022 0x7b: 0xe7,
6023 // ç
6024 0x7c: 0xf7,
6025 // ÷
6026 0x7d: 0xd1,
6027 // Ñ
6028 0x7e: 0xf1,
6029 // ñ
6030 0x7f: 0x2588,
6031 // █
6032 0x0130: 0xae,
6033 // ®
6034 0x0131: 0xb0,
6035 // °
6036 0x0132: 0xbd,
6037 // ½
6038 0x0133: 0xbf,
6039 // ¿
6040 0x0134: 0x2122,
6041 // ™
6042 0x0135: 0xa2,
6043 // ¢
6044 0x0136: 0xa3,
6045 // £
6046 0x0137: 0x266a,
6047 // ♪
6048 0x0138: 0xe0,
6049 // à
6050 0x0139: 0xa0,
6051 // (non-breaking space)
6052 0x013a: 0xe8,
6053 // è
6054 0x013b: 0xe2,
6055 // â
6056 0x013c: 0xea,
6057 // ê
6058 0x013d: 0xee,
6059 // î
6060 0x013e: 0xf4,
6061 // ô
6062 0x013f: 0xfb,
6063 // û
6064 0x0220: 0xc1,
6065 // Á
6066 0x0221: 0xc9,
6067 // É
6068 0x0222: 0xd3,
6069 // Ó
6070 0x0223: 0xda,
6071 // Ú
6072 0x0224: 0xdc,
6073 // Ü
6074 0x0225: 0xfc,
6075 // ü
6076 0x0226: 0x2018,
6077 // ‘
6078 0x0227: 0xa1,
6079 // ¡
6080 0x0228: 0x2a,
6081 // *
6082 0x0229: 0x27,
6083 // '
6084 0x022a: 0x2014,
6085 // —
6086 0x022b: 0xa9,
6087 // ©
6088 0x022c: 0x2120,
6089 // ℠
6090 0x022d: 0x2022,
6091 // •
6092 0x022e: 0x201c,
6093 // “
6094 0x022f: 0x201d,
6095 // ”
6096 0x0230: 0xc0,
6097 // À
6098 0x0231: 0xc2,
6099 // Â
6100 0x0232: 0xc7,
6101 // Ç
6102 0x0233: 0xc8,
6103 // È
6104 0x0234: 0xca,
6105 // Ê
6106 0x0235: 0xcb,
6107 // Ë
6108 0x0236: 0xeb,
6109 // ë
6110 0x0237: 0xce,
6111 // Î
6112 0x0238: 0xcf,
6113 // Ï
6114 0x0239: 0xef,
6115 // ï
6116 0x023a: 0xd4,
6117 // Ô
6118 0x023b: 0xd9,
6119 // Ù
6120 0x023c: 0xf9,
6121 // ù
6122 0x023d: 0xdb,
6123 // Û
6124 0x023e: 0xab,
6125 // «
6126 0x023f: 0xbb,
6127 // »
6128 0x0320: 0xc3,
6129 // Ã
6130 0x0321: 0xe3,
6131 // ã
6132 0x0322: 0xcd,
6133 // Í
6134 0x0323: 0xcc,
6135 // Ì
6136 0x0324: 0xec,
6137 // ì
6138 0x0325: 0xd2,
6139 // Ò
6140 0x0326: 0xf2,
6141 // ò
6142 0x0327: 0xd5,
6143 // Õ
6144 0x0328: 0xf5,
6145 // õ
6146 0x0329: 0x7b,
6147 // {
6148 0x032a: 0x7d,
6149 // }
6150 0x032b: 0x5c,
6151 // \
6152 0x032c: 0x5e,
6153 // ^
6154 0x032d: 0x5f,
6155 // _
6156 0x032e: 0x7c,
6157 // |
6158 0x032f: 0x7e,
6159 // ~
6160 0x0330: 0xc4,
6161 // Ä
6162 0x0331: 0xe4,
6163 // ä
6164 0x0332: 0xd6,
6165 // Ö
6166 0x0333: 0xf6,
6167 // ö
6168 0x0334: 0xdf,
6169 // ß
6170 0x0335: 0xa5,
6171 // ¥
6172 0x0336: 0xa4,
6173 // ¤
6174 0x0337: 0x2502,
6175 // │
6176 0x0338: 0xc5,
6177 // Å
6178 0x0339: 0xe5,
6179 // å
6180 0x033a: 0xd8,
6181 // Ø
6182 0x033b: 0xf8,
6183 // ø
6184 0x033c: 0x250c,
6185 // ┌
6186 0x033d: 0x2510,
6187 // ┐
6188 0x033e: 0x2514,
6189 // └
6190 0x033f: 0x2518 // ┘
6191
6192 };
6193
6194 var getCharFromCode = function getCharFromCode(code) {
6195 if (code === null) {
6196 return '';
6197 }
6198
6199 code = CHARACTER_TRANSLATION[code] || code;
6200 return String.fromCharCode(code);
6201 }; // the index of the last row in a CEA-608 display buffer
6202
6203
6204 var BOTTOM_ROW = 14; // This array is used for mapping PACs -> row #, since there's no way of
6205 // getting it through bit logic.
6206
6207 var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420]; // CEA-608 captions are rendered onto a 32x15 matrix of character
6208 // cells. The "bottom" row is the last element in the outer array.
6209
6210 var createDisplayBuffer = function createDisplayBuffer() {
6211 var result = [],
6212 i = BOTTOM_ROW + 1;
6213
6214 while (i--) {
6215 result.push('');
6216 }
6217
6218 return result;
6219 };
6220
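// Editor's note: illustrative and uninvoked (not part of the original
// bundle). A PAC's row is recovered by masking the full code with 0x1f20
// and finding it in ROWS, e.g. ccData 0x1570 masks to 0x1520 and
// ROWS.indexOf(0x1520) === 5, the sixth caption row.
var examplePacRow = function (ccData) {
  return ROWS.indexOf(ccData & 0x1f20);
};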
6221 var Cea608Stream = function Cea608Stream(field, dataChannel) {
6222 Cea608Stream.prototype.init.call(this);
6223 this.field_ = field || 0;
6224 this.dataChannel_ = dataChannel || 0;
6225 this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);
6226 this.setConstants();
6227 this.reset();
6228
6229 this.push = function (packet) {
6230 var data, swap, char0, char1, text; // remove the parity bits
6231
6232 data = packet.ccData & 0x7f7f; // ignore duplicate control codes; the spec demands they're sent twice
6233
6234 if (data === this.lastControlCode_) {
6235 this.lastControlCode_ = null;
6236 return;
6237 } // Store control codes
6238
6239
6240 if ((data & 0xf000) === 0x1000) {
6241 this.lastControlCode_ = data;
6242 } else if (data !== this.PADDING_) {
6243 this.lastControlCode_ = null;
6244 }
6245
6246 char0 = data >>> 8;
6247 char1 = data & 0xff;
6248
6249 if (data === this.PADDING_) {
6250 return;
6251 } else if (data === this.RESUME_CAPTION_LOADING_) {
6252 this.mode_ = 'popOn';
6253 } else if (data === this.END_OF_CAPTION_) {
6254 // If an EOC is received while in paint-on mode, the displayed caption
6255 // text should be swapped to non-displayed memory as if it was a pop-on
6256 // caption. Because of that, we should explicitly switch back to pop-on
6257 // mode
6258 this.mode_ = 'popOn';
6259 this.clearFormatting(packet.pts); // if a caption was being displayed, it's gone now
6260
6261 this.flushDisplayed(packet.pts); // flip memory
6262
6263 swap = this.displayed_;
6264 this.displayed_ = this.nonDisplayed_;
6265 this.nonDisplayed_ = swap; // start measuring the time to display the caption
6266
6267 this.startPts_ = packet.pts;
6268 } else if (data === this.ROLL_UP_2_ROWS_) {
6269 this.rollUpRows_ = 2;
6270 this.setRollUp(packet.pts);
6271 } else if (data === this.ROLL_UP_3_ROWS_) {
6272 this.rollUpRows_ = 3;
6273 this.setRollUp(packet.pts);
6274 } else if (data === this.ROLL_UP_4_ROWS_) {
6275 this.rollUpRows_ = 4;
6276 this.setRollUp(packet.pts);
6277 } else if (data === this.CARRIAGE_RETURN_) {
6278 this.clearFormatting(packet.pts);
6279 this.flushDisplayed(packet.pts);
6280 this.shiftRowsUp_();
6281 this.startPts_ = packet.pts;
6282 } else if (data === this.BACKSPACE_) {
6283 if (this.mode_ === 'popOn') {
6284 this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
6285 } else {
6286 this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
6287 }
6288 } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
6289 this.flushDisplayed(packet.pts);
6290 this.displayed_ = createDisplayBuffer();
6291 } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
6292 this.nonDisplayed_ = createDisplayBuffer();
6293 } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
6294 if (this.mode_ !== 'paintOn') {
6295 // NOTE: This should be removed when proper caption positioning is
6296 // implemented
6297 this.flushDisplayed(packet.pts);
6298 this.displayed_ = createDisplayBuffer();
6299 }
6300
6301 this.mode_ = 'paintOn';
6302 this.startPts_ = packet.pts; // Append special characters to caption text
6303 } else if (this.isSpecialCharacter(char0, char1)) {
6304 // Bitmask char0 so that we can apply character transformations
6305 // regardless of field and data channel.
6306 // Then byte-shift to the left and OR with char1 so we can pass the
6307 // entire character code to `getCharFromCode`.
6308 char0 = (char0 & 0x03) << 8;
6309 text = getCharFromCode(char0 | char1);
6310 this[this.mode_](packet.pts, text);
6311 this.column_++; // Append extended characters to caption text
6312 } else if (this.isExtCharacter(char0, char1)) {
6313 // Extended characters always follow their "non-extended" equivalents.
6314 // i.e. if a "è" is desired, you'll always receive "eè"; non-compliant
6315 // decoders are supposed to drop the "è", while compliant decoders
6316 // backspace the "e" and insert "è".
6317 // Delete the previous character
6318 if (this.mode_ === 'popOn') {
6319 this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
6320 } else {
6321 this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
6322 } // Bitmask char0 so that we can apply character transformations
6323 // regardless of field and data channel.
6324 // Then byte-shift to the left and OR with char1 so we can pass the
6325 // entire character code to `getCharFromCode`.
6326
6327
6328 char0 = (char0 & 0x03) << 8;
6329 text = getCharFromCode(char0 | char1);
6330 this[this.mode_](packet.pts, text);
6331 this.column_++; // Process mid-row codes
6332 } else if (this.isMidRowCode(char0, char1)) {
6333 // Attributes are not additive, so clear all formatting
6334 this.clearFormatting(packet.pts); // According to the standard, mid-row codes
6335 // should be replaced with spaces, so add one now
6336
6337 this[this.mode_](packet.pts, ' ');
6338 this.column_++;
6339
6340 if ((char1 & 0xe) === 0xe) {
6341 this.addFormatting(packet.pts, ['i']);
6342 }
6343
6344 if ((char1 & 0x1) === 0x1) {
6345 this.addFormatting(packet.pts, ['u']);
6346 } // Detect offset control codes and adjust cursor
6347
6348 } else if (this.isOffsetControlCode(char0, char1)) {
6349 // Cursor position is set by indent PAC (see below) in 4-column
6350 // increments, with an additional offset code of 1-3 to reach any
6351 // of the 32 columns specified by CEA-608. So all we need to do
6352 // here is increment the column cursor by the given offset.
6353 this.column_ += char1 & 0x03; // Detect PACs (Preamble Address Codes)
6354 } else if (this.isPAC(char0, char1)) {
6355 // There's no logic for PAC -> row mapping, so we have to just
6356 // find the row code in an array and use its index :(
6357 var row = ROWS.indexOf(data & 0x1f20); // Configure the caption window if we're in roll-up mode
6358
6359 if (this.mode_ === 'rollUp') {
6360 // This implies that the base row is incorrectly set.
6361 // As per the recommendation in CEA-608 (Base Row Implementation), defer to the number
6362 // of roll-up rows set.
6363 if (row - this.rollUpRows_ + 1 < 0) {
6364 row = this.rollUpRows_ - 1;
6365 }
6366
6367 this.setRollUp(packet.pts, row);
6368 }
6369
6370 if (row !== this.row_) {
6371 // formatting is only persistent for current row
6372 this.clearFormatting(packet.pts);
6373 this.row_ = row;
6374 } // All PACs can apply underline, so detect and apply
6375 // (All odd-numbered second bytes set underline)
6376
6377
6378 if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
6379 this.addFormatting(packet.pts, ['u']);
6380 }
6381
6382 if ((data & 0x10) === 0x10) {
6383 // We've got an indent level code. Each successive even number
6384 // increments the column cursor by 4, so we can get the desired
6385 // column position by bit-shifting to the right (to get n/2)
6386 // and multiplying by 4.
6387 this.column_ = ((data & 0xe) >> 1) * 4;
6388 }
6389
6390 if (this.isColorPAC(char1)) {
6391 // it's a color code, though we only support white, which
6392 // can be either normal or italicized. white italics can be
6393 // either 0x4e or 0x6e depending on the row, so we just
6394 // bitwise-and with 0xe to see if italics should be turned on
6395 if ((char1 & 0xe) === 0xe) {
6396 this.addFormatting(packet.pts, ['i']);
6397 }
6398 } // We have a normal character in char0, and possibly one in char1
6399
6400 } else if (this.isNormalChar(char0)) {
6401 if (char1 === 0x00) {
6402 char1 = null;
6403 }
6404
6405 text = getCharFromCode(char0);
6406 text += getCharFromCode(char1);
6407 this[this.mode_](packet.pts, text);
6408 this.column_ += text.length;
6409 } // finish data processing
6410
6411 };
6412 };
6413
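// Editor's note: an illustrative, uninvoked sketch (not part of the original
// bundle) of the indent PAC math inside `push` above: the even value n in
// the low nibble maps to column (n / 2) * 4, e.g. data 0x1454 gives
// (0x1454 & 0xe) >> 1 === 2, so the cursor lands on column 8.
var examplePacIndentColumn = function (data) {
  return ((data & 0xe) >> 1) * 4;
};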
6414 Cea608Stream.prototype = new stream(); // Trigger a cue point that captures the current state of the
6415 // display buffer
6416
6417 Cea608Stream.prototype.flushDisplayed = function (pts) {
6418 var content = this.displayed_ // remove spaces from the start and end of the string
6419 .map(function (row) {
6420 try {
6421 return row.trim();
6422 } catch (e) {
6423 // Ordinarily, this shouldn't happen. However, caption
6424 // parsing errors should not throw exceptions and
6425 // break playback.
6426 // eslint-disable-next-line no-console
6427 console.error('Skipping malformed caption.');
6428 return '';
6429 }
6430 }) // combine all text rows to display in one cue
6431 .join('\n') // and remove blank rows from the start and end, but not the middle
6432 .replace(/^\n+|\n+$/g, '');
6433
6434 if (content.length) {
6435 this.trigger('data', {
6436 startPts: this.startPts_,
6437 endPts: pts,
6438 text: content,
6439 stream: this.name_
6440 });
6441 }
6442 };
6443 /**
6444 * Zero out the data, used for startup and on seek
6445 */
6446
6447
6448 Cea608Stream.prototype.reset = function () {
6449 this.mode_ = 'popOn'; // When in roll-up mode, the index of the last row that will
6450 // actually display captions. If a caption is shifted to a row
6451 // with a lower index than this, it is cleared from the display
6452 // buffer
6453
6454 this.topRow_ = 0;
6455 this.startPts_ = 0;
6456 this.displayed_ = createDisplayBuffer();
6457 this.nonDisplayed_ = createDisplayBuffer();
6458 this.lastControlCode_ = null; // Track row and column for proper line-breaking and spacing
6459
6460 this.column_ = 0;
6461 this.row_ = BOTTOM_ROW;
6462 this.rollUpRows_ = 2; // This variable holds currently-applied formatting
6463
6464 this.formatting_ = [];
6465 };
6466 /**
6467 * Sets up control code and related constants for this instance
6468 */
6469
6470
6471 Cea608Stream.prototype.setConstants = function () {
6472 // The following attributes have these uses:
6473 // ext_ : char0 for mid-row codes, and the base for extended
6474 // chars (ext_+0, ext_+1, and ext_+2 are char0s for
6475 // extended codes)
6476 // control_: char0 for control codes, except byte-shifted to the
6477 // left so that we can do this.control_ | CONTROL_CODE
6478 // offset_: char0 for tab offset codes
6479 //
6480 // It's also worth noting that control codes, and _only_ control codes,
6481 // differ between field 1 and field 2. Field 2 control codes are always
6482 // their field 1 value plus 1. That's why there's the "| field" on the
6483 // control value.
6484 if (this.dataChannel_ === 0) {
6485 this.BASE_ = 0x10;
6486 this.EXT_ = 0x11;
6487 this.CONTROL_ = (0x14 | this.field_) << 8;
6488 this.OFFSET_ = 0x17;
6489 } else if (this.dataChannel_ === 1) {
6490 this.BASE_ = 0x18;
6491 this.EXT_ = 0x19;
6492 this.CONTROL_ = (0x1c | this.field_) << 8;
6493 this.OFFSET_ = 0x1f;
6494 } // Constants for the LSByte command codes recognized by Cea608Stream. This
6495 // list is not exhaustive. For a more comprehensive listing and semantics see
6496 // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
6497 // Padding
6498
6499
6500 this.PADDING_ = 0x0000; // Pop-on Mode
6501
6502 this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
6503 this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f; // Roll-up Mode
6504
6505 this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
6506 this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
6507 this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
6508 this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d; // paint-on mode
6509
6510 this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29; // Erasure
6511
6512 this.BACKSPACE_ = this.CONTROL_ | 0x21;
6513 this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
6514 this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
6515 };
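// Editor's note: illustrative and uninvoked (not part of the original
// bundle). For CC1 (field 0, data channel 0), CONTROL_ is 0x1400, so the
// derived codes are e.g. END_OF_CAPTION_ 0x142f and ERASE_DISPLAYED_MEMORY_
// 0x142c; the same codes on field 2 are one higher in the first byte
// (0x152f, 0x152c) because of the `| this.field_` above.
var exampleControlConstants = function () {
  var cc1 = new Cea608Stream(0, 0);
  return [cc1.END_OF_CAPTION_.toString(16), cc1.ERASE_DISPLAYED_MEMORY_.toString(16)]; // ['142f', '142c']
};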
6516 /**
6517 * Detects if the 2-byte packet data is a special character
6518 *
6519 * Special characters have a second byte in the range 0x30 to 0x3f,
6520 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
6521 * data channel 2).
6522 *
6523 * @param {Integer} char0 The first byte
6524 * @param {Integer} char1 The second byte
6525 * @return {Boolean} Whether the 2 bytes are a special character
6526 */
6527
6528
6529 Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
6530 return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
6531 };
6532 /**
6533 * Detects if the 2-byte packet data is an extended character
6534 *
6535 * Extended characters have a second byte in the range 0x20 to 0x3f,
6536 * with the first byte being 0x12 or 0x13 (for data channel 1) or
6537 * 0x1a or 0x1b (for data channel 2).
6538 *
6539 * @param {Integer} char0 The first byte
6540 * @param {Integer} char1 The second byte
6541 * @return {Boolean} Whether the 2 bytes are an extended character
6542 */
6543
6544
6545 Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
6546 return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
6547 };
6548 /**
6549 * Detects if the 2-byte packet is a mid-row code
6550 *
6551 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
6552 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
6553 * channel 2).
6554 *
6555 * @param {Integer} char0 The first byte
6556 * @param {Integer} char1 The second byte
6557 * @return {Boolean} Whether the 2 bytes are a mid-row code
6558 */
6559
6560
6561 Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
6562 return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
6563 };
6564 /**
6565 * Detects if the 2-byte packet is an offset control code
6566 *
6567 * Offset control codes have a second byte in the range 0x21 to 0x23,
6568 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
6569 * data channel 2).
6570 *
6571 * @param {Integer} char0 The first byte
6572 * @param {Integer} char1 The second byte
6573 * @return {Boolean} Whether the 2 bytes are an offset control code
6574 */
6575
6576
6577 Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
6578 return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
6579 };
6580 /**
6581 * Detects if the 2-byte packet is a Preamble Address Code
6582 *
6583 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
6584 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
6585 * range 0x40 to 0x7f.
6586 *
6587 * @param {Integer} char0 The first byte
6588 * @param {Integer} char1 The second byte
6589 * @return {Boolean} Whether the 2 bytes are a PAC
6590 */
6591
6592
6593 Cea608Stream.prototype.isPAC = function (char0, char1) {
6594 return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
6595 };
6596 /**
6597 * Detects if a packet's second byte is in the range of a PAC color code
6598 *
6599 * PAC color codes have a second byte in the range 0x40 to 0x4f, or
6600 * 0x60 to 0x7f.
6601 *
6602 * @param {Integer} char1 The second byte
6603 * @return {Boolean} Whether the byte is a color PAC
6604 */
6605
6606
6607 Cea608Stream.prototype.isColorPAC = function (char1) {
6608 return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
6609 };
6610 /**
6611 * Detects if a single byte is in the range of a normal character
6612 *
6613 * Normal text bytes are in the range 0x20 to 0x7f.
6614 *
6615 * @param {Integer} char The byte
6616 * @return {Boolean} Whether the byte is a normal character
6617 */
6618
6619
6620 Cea608Stream.prototype.isNormalChar = function (char) {
6621 return char >= 0x20 && char <= 0x7f;
6622 };
6623 /**
6624 * Configures roll-up
6625 *
6626 * @param {Integer} pts Current PTS
6627 * @param {Integer} newBaseRow Used by PACs to slide the current window to
6628 * a new position
6629 */
6630
6631
6632 Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
6633 // Reset the base row to the bottom row when switching modes
6634 if (this.mode_ !== 'rollUp') {
6635 this.row_ = BOTTOM_ROW;
6636 this.mode_ = 'rollUp'; // Spec says to wipe memories when switching to roll-up
6637
6638 this.flushDisplayed(pts);
6639 this.nonDisplayed_ = createDisplayBuffer();
6640 this.displayed_ = createDisplayBuffer();
6641 }
6642
6643 if (newBaseRow !== undefined && newBaseRow !== this.row_) {
6644 // move currently displayed captions (up or down) to the new base row
6645 for (var i = 0; i < this.rollUpRows_; i++) {
6646 this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
6647 this.displayed_[this.row_ - i] = '';
6648 }
6649 }
6650
6651 if (newBaseRow === undefined) {
6652 newBaseRow = this.row_;
6653 }
6654
6655 this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
6656 }; // Adds the opening HTML tag for the passed character to the caption text,
6657 // and keeps track of it for later closing
6658
6659
6660 Cea608Stream.prototype.addFormatting = function (pts, format) {
6661 this.formatting_ = this.formatting_.concat(format);
6662 var text = format.reduce(function (text, format) {
6663 return text + '<' + format + '>';
6664 }, '');
6665 this[this.mode_](pts, text);
6666 }; // Adds HTML closing tags for current formatting to caption text and
6667 // clears remembered formatting
6668
6669
6670 Cea608Stream.prototype.clearFormatting = function (pts) {
6671 if (!this.formatting_.length) {
6672 return;
6673 }
6674
6675 var text = this.formatting_.reverse().reduce(function (text, format) {
6676 return text + '</' + format + '>';
6677 }, '');
6678 this.formatting_ = [];
6679 this[this.mode_](pts, text);
6680 }; // Mode Implementations
6681
6682
6683 Cea608Stream.prototype.popOn = function (pts, text) {
6684 var baseRow = this.nonDisplayed_[this.row_]; // buffer characters
6685
6686 baseRow += text;
6687 this.nonDisplayed_[this.row_] = baseRow;
6688 };
6689
6690 Cea608Stream.prototype.rollUp = function (pts, text) {
6691 var baseRow = this.displayed_[this.row_];
6692 baseRow += text;
6693 this.displayed_[this.row_] = baseRow;
6694 };
6695
6696 Cea608Stream.prototype.shiftRowsUp_ = function () {
6697 var i; // clear out inactive rows
6698
6699 for (i = 0; i < this.topRow_; i++) {
6700 this.displayed_[i] = '';
6701 }
6702
6703 for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
6704 this.displayed_[i] = '';
6705 } // shift displayed rows up
6706
6707
6708 for (i = this.topRow_; i < this.row_; i++) {
6709 this.displayed_[i] = this.displayed_[i + 1];
6710 } // clear out the bottom row
6711
6712
6713 this.displayed_[this.row_] = '';
6714 };
6715
6716 Cea608Stream.prototype.paintOn = function (pts, text) {
6717 var baseRow = this.displayed_[this.row_];
6718 baseRow += text;
6719 this.displayed_[this.row_] = baseRow;
6720 }; // exports
6721
6722
6723 var captionStream = {
6724 CaptionStream: CaptionStream,
6725 Cea608Stream: Cea608Stream,
6726 Cea708Stream: Cea708Stream
6727 };
6728 /**
6729 * mux.js
6730 *
6731 * Copyright (c) Brightcove
6732 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
6733 */
6734
6735 var streamTypes = {
6736 H264_STREAM_TYPE: 0x1B,
6737 ADTS_STREAM_TYPE: 0x0F,
6738 METADATA_STREAM_TYPE: 0x15
6739 };
6740 var MAX_TS = 8589934592;
6741 var RO_THRESH = 4294967296;
6742 var TYPE_SHARED = 'shared';
6743
6744 var handleRollover = function handleRollover(value, reference) {
6745 var direction = 1;
6746
6747 if (value > reference) {
6748 // If the current timestamp value is greater than our reference timestamp and we detect a
6749 // timestamp rollover, this means the roll over is happening in the opposite direction.
6750 // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
6751 // point will be set to a small number, e.g. 1. The user then seeks backwards over the
6752 // rollover point. In loading this segment, the timestamp values will be very large,
6753 // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
6754 // the time stamp to be `value - 2^33`.
6755 direction = -1;
6756 } // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
6757 // cause an incorrect adjustment.
6758
6759
6760 while (Math.abs(reference - value) > RO_THRESH) {
6761 value += direction * MAX_TS;
6762 }
6763
6764 return value;
6765 };
6766
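// Editor's note: an illustrative, uninvoked example (not part of the
// original bundle) of the rollover math above. MPEG-TS timestamps are
// 33-bit, wrapping at MAX_TS (2^33); a post-rollover value near zero is
// shifted up so the timeline stays monotonic relative to the reference.
var exampleRollover = function () {
  var reference = 8589934000;            // just below 2^33
  return handleRollover(500, reference); // 500 + 2^33 === 8589935092
};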
6767 var TimestampRolloverStream = function TimestampRolloverStream(type) {
6768 var lastDTS, referenceDTS;
6769 TimestampRolloverStream.prototype.init.call(this); // The "shared" type is used in cases where a stream will contain muxed
6770 // video and audio. We could use `undefined` here, but having a string
6771 // makes debugging a little clearer.
6772
6773 this.type_ = type || TYPE_SHARED;
6774
6775 this.push = function (data) {
6776 // Any "shared" rollover streams will accept _all_ data. Otherwise,
6777 // streams will only accept data that matches their type.
6778 if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
6779 return;
6780 }
6781
6782 if (referenceDTS === undefined) {
6783 referenceDTS = data.dts;
6784 }
6785
6786 data.dts = handleRollover(data.dts, referenceDTS);
6787 data.pts = handleRollover(data.pts, referenceDTS);
6788 lastDTS = data.dts;
6789 this.trigger('data', data);
6790 };
6791
6792 this.flush = function () {
6793 referenceDTS = lastDTS;
6794 this.trigger('done');
6795 };
6796
6797 this.endTimeline = function () {
6798 this.flush();
6799 this.trigger('endedtimeline');
6800 };
6801
6802 this.discontinuity = function () {
6803 referenceDTS = void 0;
6804 lastDTS = void 0;
6805 };
6806
6807 this.reset = function () {
6808 this.discontinuity();
6809 this.trigger('reset');
6810 };
6811 };
6812
6813 TimestampRolloverStream.prototype = new stream();
6814 var timestampRolloverStream = {
6815 TimestampRolloverStream: TimestampRolloverStream,
6816 handleRollover: handleRollover
6817 };
6818
6819 var percentEncode = function percentEncode(bytes, start, end) {
6820 var i,
6821 result = '';
6822
6823 for (i = start; i < end; i++) {
6824 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
6825 }
6826
6827 return result;
6828 },
6829 // return the string representation of the specified byte range,
6830 // interpreted as UTF-8.
6831 parseUtf8 = function parseUtf8(bytes, start, end) {
6832 return decodeURIComponent(percentEncode(bytes, start, end));
6833 },
6834 // return the string representation of the specified byte range,
6835 // interpreted as ISO-8859-1.
6836 parseIso88591 = function parseIso88591(bytes, start, end) {
6837 return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
6838 },
6839 parseSyncSafeInteger = function parseSyncSafeInteger(data) {
6840 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
6841 },
6842 tagParsers = {
6843 TXXX: function TXXX(tag) {
6844 var i;
6845
6846 if (tag.data[0] !== 3) {
6847 // ignore frames with unrecognized character encodings
6848 return;
6849 }
6850
6851 for (i = 1; i < tag.data.length; i++) {
6852 if (tag.data[i] === 0) {
6853 // parse the text fields
6854 tag.description = parseUtf8(tag.data, 1, i); // do not include the null terminator in the tag value
6855
6856 tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
6857 break;
6858 }
6859 }
6860
6861 tag.data = tag.value;
6862 },
6863 WXXX: function WXXX(tag) {
6864 var i;
6865
6866 if (tag.data[0] !== 3) {
6867 // ignore frames with unrecognized character encodings
6868 return;
6869 }
6870
6871 for (i = 1; i < tag.data.length; i++) {
6872 if (tag.data[i] === 0) {
6873 // parse the description and URL fields
6874 tag.description = parseUtf8(tag.data, 1, i);
6875 tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
6876 break;
6877 }
6878 }
6879 },
6880 PRIV: function PRIV(tag) {
6881 var i;
6882
6883 for (i = 0; i < tag.data.length; i++) {
6884 if (tag.data[i] === 0) {
6885 // parse the description and URL fields
6886 tag.owner = parseIso88591(tag.data, 0, i);
6887 break;
6888 }
6889 }
6890
6891 tag.privateData = tag.data.subarray(i + 1);
6892 tag.data = tag.privateData;
6893 }
6894 },
6895 _MetadataStream;
6896
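// Editor's note: an illustrative, uninvoked example (not part of the
// original bundle) of `parseSyncSafeInteger` above. ID3 sizes are
// "syncsafe": each byte contributes only 7 bits so no byte can be 0xff.
var exampleSyncSafeInteger = function () {
  return parseSyncSafeInteger([0x00, 0x00, 0x02, 0x01]); // (2 << 7) | 1 === 257
};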
6897 _MetadataStream = function MetadataStream(options) {
6898 var settings = {
6899 debug: !!(options && options.debug),
6900 // the bytes of the program-level descriptor field in MP2T
6901 // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
6902 // program element descriptors"
6903 descriptor: options && options.descriptor
6904 },
6905 // the total size in bytes of the ID3 tag being parsed
6906 tagSize = 0,
6907 // tag data that is not complete enough to be parsed
6908 buffer = [],
6909 // the total number of bytes currently in the buffer
6910 bufferSize = 0,
6911 i;
6912
6913 _MetadataStream.prototype.init.call(this); // calculate the text track in-band metadata track dispatch type
6914 // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
6915
6916
6917 this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);
6918
6919 if (settings.descriptor) {
6920 for (i = 0; i < settings.descriptor.length; i++) {
6921 this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
6922 }
6923 }
6924
6925 this.push = function (chunk) {
6926 var tag, frameStart, frameSize, frame, i, frameHeader;
6927
6928 if (chunk.type !== 'timed-metadata') {
6929 return;
6930 } // if data_alignment_indicator is set in the PES header,
6931 // we must have the start of a new ID3 tag. Assume anything
6932 // remaining in the buffer was malformed and throw it out
6933
6934
6935 if (chunk.dataAlignmentIndicator) {
6936 bufferSize = 0;
6937 buffer.length = 0;
6938 } // ignore events that don't look like ID3 data
6939
6940
6941 if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
6942 if (settings.debug) {
6943 // eslint-disable-next-line no-console
6944 console.log('Skipping unrecognized metadata packet');
6945 }
6946
6947 return;
6948 } // add this chunk to the data we've collected so far
6949
6950
6951 buffer.push(chunk);
6952 bufferSize += chunk.data.byteLength; // grab the size of the entire frame from the ID3 header
6953
6954 if (buffer.length === 1) {
6955 // the tag size is transmitted as a 28-bit sync-safe integer in the
6956 // last four bytes of the ID3 header.
6957 // The most significant bit of each byte is dropped and the
6958 // results concatenated to recover the actual value.
6959 tagSize = parseSyncSafeInteger(chunk.data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more
6960 // convenient for our comparisons to include it
6961
6962 tagSize += 10;
6963 } // if the entire frame has not arrived, wait for more data
6964
6965
6966 if (bufferSize < tagSize) {
6967 return;
6968 } // collect the entire frame so it can be parsed
6969
6970
6971 tag = {
6972 data: new Uint8Array(tagSize),
6973 frames: [],
6974 pts: buffer[0].pts,
6975 dts: buffer[0].dts
6976 };
6977
6978 for (i = 0; i < tagSize;) {
6979 tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
6980 i += buffer[0].data.byteLength;
6981 bufferSize -= buffer[0].data.byteLength;
6982 buffer.shift();
6983 } // find the start of the first frame and the end of the tag
6984
6985
6986 frameStart = 10;
6987
6988 if (tag.data[5] & 0x40) {
6989 // advance the frame start past the extended header
6990 frameStart += 4; // header size field
6991
6992 frameStart += parseSyncSafeInteger(tag.data.subarray(10, 14)); // clip any padding off the end
6993
6994 tagSize -= parseSyncSafeInteger(tag.data.subarray(16, 20));
6995 } // parse one or more ID3 frames
6996 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
6997
6998
6999 do {
7000 // determine the number of bytes in this frame
7001 frameSize = parseSyncSafeInteger(tag.data.subarray(frameStart + 4, frameStart + 8));
7002
7003 if (frameSize < 1) {
7004 // eslint-disable-next-line no-console
7005 return console.log('Malformed ID3 frame encountered. Skipping metadata parsing.');
7006 }
7007
7008 frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);
7009 frame = {
7010 id: frameHeader,
7011 data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
7012 };
7013 frame.key = frame.id;
7014
7015 if (tagParsers[frame.id]) {
7016 tagParsers[frame.id](frame); // handle the special PRIV frame used to indicate the start
7017 // time for raw AAC data
7018
7019 if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
7020 var d = frame.data,
7021 size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
7022 size *= 4;
7023 size += d[7] & 0x03;
7024 frame.timeStamp = size; // in raw AAC, all subsequent data will be timestamped based
7025 // on the value of this frame
7026 // we couldn't have known the appropriate pts and dts before
7027 // parsing this ID3 tag so set those values now
7028
7029 if (tag.pts === undefined && tag.dts === undefined) {
7030 tag.pts = frame.timeStamp;
7031 tag.dts = frame.timeStamp;
7032 }
7033
7034 this.trigger('timestamp', frame);
7035 }
7036 }
7037
7038 tag.frames.push(frame);
7039 frameStart += 10; // advance past the frame header
7040
7041 frameStart += frameSize; // advance past the frame body
7042 } while (frameStart < tagSize);
7043
7044 this.trigger('data', tag);
7045 };
7046 };
7047
7048 _MetadataStream.prototype = new stream();
7049 var metadataStream = _MetadataStream;
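/*
 * Minimal usage sketch for MetadataStream (illustrative only; the chunk
 * shape matches what ElementaryStream emits for the timed-metadata PID):
 *
 *   var metadata = new metadataStream();
 *   metadata.on('data', function(tag) {
 *     // tag.frames is an array of parsed ID3 frames (TXXX, WXXX, PRIV, ...)
 *     tag.frames.forEach(function(frame) {
 *       console.log(frame.id, frame.value || frame.url || frame.owner);
 *     });
 *   });
 *   // chunks with type 'timed-metadata' are buffered until a whole ID3
 *   // tag has arrived, then parsed and emitted as a single 'data' event
 */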
7050 var TimestampRolloverStream$1 = timestampRolloverStream.TimestampRolloverStream; // object types
7051
7052 var _TransportPacketStream, _TransportParseStream, _ElementaryStream; // constants
7053
7054
7055 var MP2T_PACKET_LENGTH = 188,
7056 // bytes
7057 SYNC_BYTE = 0x47;
7058 /**
7059 * Splits an incoming stream of binary data into MPEG-2 Transport
7060 * Stream packets.
7061 */
7062
7063 _TransportPacketStream = function TransportPacketStream() {
7064 var buffer = new Uint8Array(MP2T_PACKET_LENGTH),
7065 bytesInBuffer = 0;
7066
7067 _TransportPacketStream.prototype.init.call(this); // Deliver new bytes to the stream.
7068
7069 /**
7070 * Split a stream of data into M2TS packets
7071 **/
7072
7073
7074 this.push = function (bytes) {
7075 var startIndex = 0,
7076 endIndex = MP2T_PACKET_LENGTH,
7077 everything; // If there are bytes remaining from the last segment, prepend them to the
7078 // bytes that were pushed in
7079
7080 if (bytesInBuffer) {
7081 everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
7082 everything.set(buffer.subarray(0, bytesInBuffer));
7083 everything.set(bytes, bytesInBuffer);
7084 bytesInBuffer = 0;
7085 } else {
7086 everything = bytes;
7087 } // While we have enough data for a packet
7088
7089
7090 while (endIndex < everything.byteLength) {
7091 // Look for a pair of start and end sync bytes in the data.
7092 if (everything[startIndex] === SYNC_BYTE && everything[endIndex] === SYNC_BYTE) {
7093 // We found a packet so emit it and jump one whole packet forward in
7094 // the stream
7095 this.trigger('data', everything.subarray(startIndex, endIndex));
7096 startIndex += MP2T_PACKET_LENGTH;
7097 endIndex += MP2T_PACKET_LENGTH;
7098 continue;
7099 } // If we get here, we have somehow become de-synchronized and we need to step
7100 // forward one byte at a time until we find a pair of sync bytes that denote
7101 // a packet
7102
7103
7104 startIndex++;
7105 endIndex++;
7106 } // If there was some data left over at the end of the segment that couldn't
7107 // possibly be a whole packet, keep it because it might be the start of a packet
7108 // that continues in the next segment
7109
7110
7111 if (startIndex < everything.byteLength) {
7112 buffer.set(everything.subarray(startIndex), 0);
7113 bytesInBuffer = everything.byteLength - startIndex;
7114 }
7115 };
7116 /**
7117 * Flush the packetizer: emit a final buffered packet if a whole one is present, then signal 'done'
7118 **/
7119
7120
7121 this.flush = function () {
7122 // If the buffer contains a whole packet when we are being flushed, emit it
7123 // and empty the buffer. Otherwise hold onto the data because it may be
7124 // important for decoding the next segment
7125 if (bytesInBuffer === MP2T_PACKET_LENGTH && buffer[0] === SYNC_BYTE) {
7126 this.trigger('data', buffer);
7127 bytesInBuffer = 0;
7128 }
7129
7130 this.trigger('done');
7131 };
7132
7133 this.endTimeline = function () {
7134 this.flush();
7135 this.trigger('endedtimeline');
7136 };
7137
7138 this.reset = function () {
7139 bytesInBuffer = 0;
7140 this.trigger('reset');
7141 };
7142 };
7143
7144 _TransportPacketStream.prototype = new stream();
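/*
 * Usage sketch (illustrative only; `segmentBytes` is a placeholder for a
 * Uint8Array of transport-stream data): feed arbitrarily chunked bytes in
 * and receive aligned 188-byte packets out. Alignment is recovered by
 * scanning for 0x47 sync bytes exactly one packet length apart.
 *
 *   var packetizer = new _TransportPacketStream();
 *   packetizer.on('data', function(packet) {
 *     // packet is a Uint8Array of MP2T_PACKET_LENGTH (188) bytes
 *     // beginning with SYNC_BYTE (0x47)
 *   });
 *   packetizer.push(segmentBytes); // any Uint8Array, any chunking
 *   packetizer.flush();            // emits a final buffered packet, if whole
 */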
7145 /**
7146 * Accepts an MP2T TransportPacketStream and emits data events with parsed
7147 * forms of the individual transport stream packets.
7148 */
7149
7150 _TransportParseStream = function TransportParseStream() {
7151 var parsePsi, parsePat, parsePmt, self;
7152
7153 _TransportParseStream.prototype.init.call(this);
7154
7155 self = this;
7156 this.packetsWaitingForPmt = [];
7157 this.programMapTable = undefined;
7158
7159 parsePsi = function parsePsi(payload, psi) {
7160 var offset = 0; // PSI packets may be split into multiple sections and those
7161 // sections may be split into multiple packets. If a PSI
7162 // section starts in this packet, the payload_unit_start_indicator
7163 // will be true and the first byte of the payload will indicate
7164 // the offset from the current position to the start of the
7165 // section.
7166
7167 if (psi.payloadUnitStartIndicator) {
7168 offset += payload[offset] + 1;
7169 }
7170
7171 if (psi.type === 'pat') {
7172 parsePat(payload.subarray(offset), psi);
7173 } else {
7174 parsePmt(payload.subarray(offset), psi);
7175 }
7176 };
7177
7178 parsePat = function parsePat(payload, pat) {
7179 pat.section_number = payload[7]; // eslint-disable-line camelcase
7180
7181 pat.last_section_number = payload[8]; // eslint-disable-line camelcase
7182 // skip the PSI header and parse the first PMT entry
7183
7184 self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
7185 pat.pmtPid = self.pmtPid;
7186 };
7187 /**
7188 * Parse out the relevant fields of a Program Map Table (PMT).
7189 * @param payload {Uint8Array} the PMT-specific portion of an MP2T
7190 * packet. The first byte in this array should be the table_id
7191 * field.
7192 * @param pmt {object} the object that should be decorated with
7193 * fields parsed from the PMT.
7194 */
7195
7196
7197 parsePmt = function parsePmt(payload, pmt) {
7198 var sectionLength, tableEnd, programInfoLength, offset; // PMTs can be sent ahead of the time when they should actually
7199 // take effect. We don't believe this should ever be the case
7200 // for HLS but we'll ignore "forward" PMT declarations if we see
7201 // them. Future PMT declarations have the current_next_indicator
7202 // set to zero.
7203
7204 if (!(payload[5] & 0x01)) {
7205 return;
7206 } // overwrite any existing program map table
7207
7208
7209 self.programMapTable = {
7210 video: null,
7211 audio: null,
7212 'timed-metadata': {}
7213 }; // the mapping table ends at the end of the current section
7214
7215 sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
7216 tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
7217 // long the program info descriptors are
7218
7219 programInfoLength = (payload[10] & 0x0f) << 8 | payload[11]; // advance the offset to the first entry in the mapping table
7220
7221 offset = 12 + programInfoLength;
7222
7223 while (offset < tableEnd) {
7224 var streamType = payload[offset];
7225 var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]; // only map a single elementary_pid for audio and video stream types
7226 // TODO: should this be done for metadata too? for now maintain behavior of
7227 // multiple metadata streams
7228
7229 if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
7230 self.programMapTable.video = pid;
7231 } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
7232 self.programMapTable.audio = pid;
7233 } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
7234 // map pid to stream type for metadata streams
7235 self.programMapTable['timed-metadata'][pid] = streamType;
7236 } // move to the next table entry
7237 // skip past the elementary stream descriptors, if present
7238
7239
7240 offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
7241 } // record the map on the packet as well
7242
7243
7244 pmt.programMapTable = self.programMapTable;
7245 };
7246 /**
7247 * Deliver a new MP2T packet to the next stream in the pipeline.
7248 */
7249
7250
7251 this.push = function (packet) {
7252 var result = {},
7253 offset = 4;
7254 result.payloadUnitStartIndicator = !!(packet[1] & 0x40); // pid is a 13-bit field starting at the last bit of packet[1]
7255
7256 result.pid = packet[1] & 0x1f;
7257 result.pid <<= 8;
7258 result.pid |= packet[2]; // if an adaptation field is present, its length is specified by the
7259 // fifth byte of the TS packet header. The adaptation field is
7260 // used to add stuffing to PES packets that don't fill a complete
7261 // TS packet, and to specify some forms of timing and control data
7262 // that we do not currently use.
7263
7264 if ((packet[3] & 0x30) >>> 4 > 0x01) {
7265 offset += packet[offset] + 1;
7266 } // parse the rest of the packet based on the type
7267
7268
7269 if (result.pid === 0) {
7270 result.type = 'pat';
7271 parsePsi(packet.subarray(offset), result);
7272 this.trigger('data', result);
7273 } else if (result.pid === this.pmtPid) {
7274 result.type = 'pmt';
7275 parsePsi(packet.subarray(offset), result);
7276 this.trigger('data', result); // if there are any packets waiting for a PMT to be found, process them now
7277
7278 while (this.packetsWaitingForPmt.length) {
7279 this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
7280 }
7281 } else if (this.programMapTable === undefined) {
7282 // When we have not seen a PMT yet, defer further processing of
7283 // PES packets until one has been parsed
7284 this.packetsWaitingForPmt.push([packet, offset, result]);
7285 } else {
7286 this.processPes_(packet, offset, result);
7287 }
7288 };
7289
7290 this.processPes_ = function (packet, offset, result) {
7291 // set the appropriate stream type
7292 if (result.pid === this.programMapTable.video) {
7293 result.streamType = streamTypes.H264_STREAM_TYPE;
7294 } else if (result.pid === this.programMapTable.audio) {
7295 result.streamType = streamTypes.ADTS_STREAM_TYPE;
7296 } else {
7297 // if not video or audio, it is timed-metadata or unknown
7298 // if unknown, streamType will be undefined
7299 result.streamType = this.programMapTable['timed-metadata'][result.pid];
7300 }
7301
7302 result.type = 'pes';
7303 result.data = packet.subarray(offset);
7304 this.trigger('data', result);
7305 };
7306 };
7307
7308 _TransportParseStream.prototype = new stream();
7309 _TransportParseStream.STREAM_TYPES = {
7310 h264: 0x1b,
7311 adts: 0x0f
7312 };
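/*
 * Worked example (illustrative only): the 13-bit PID spans the low 5 bits
 * of byte 1 plus all of byte 2 of the TS header. For header bytes
 * 0x47 0x41 0x00 0x10:
 *
 *   var pid = (0x41 & 0x1f) << 8 | 0x00; // => 0x100 (256)
 *
 * PID 0 always carries the PAT; the PAT then names the PMT PID, which is
 * why TransportParseStream queues PES packets until a PMT has been parsed.
 */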
7313 /**
7314 * Reconstitutes program elementary stream (PES) packets from parsed
7315 * transport stream packets. That is, if you pipe an
7316 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
7317 * events will be events which capture the bytes for individual PES
7318 * packets plus relevant metadata that has been extracted from the
7319 * container.
7320 */
7321
7322 _ElementaryStream = function ElementaryStream() {
7323 var self = this,
7324 // PES packet fragments
7325 video = {
7326 data: [],
7327 size: 0
7328 },
7329 audio = {
7330 data: [],
7331 size: 0
7332 },
7333 timedMetadata = {
7334 data: [],
7335 size: 0
7336 },
7337 programMapTable,
7338 parsePes = function parsePes(payload, pes) {
7339 var ptsDtsFlags; // get the packet length, this will be 0 for video
7340
7341 pes.packetLength = 6 + (payload[4] << 8 | payload[5]); // find out if this packet starts a new keyframe
7342
7343 pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0; // PES packets may be annotated with a PTS value, or a PTS value
7344 // and a DTS value. Determine what combination of values is
7345 // available to work with.
7346
7347 ptsDtsFlags = payload[7]; // PTS and DTS are normally stored as a 33-bit number. JavaScript
7348 // performs all bitwise operations on 32-bit integers, but it
7349 // supports a much greater integer range (53 bits of precision)
7350 // using standard mathematical operations.
7351 // We construct a 31-bit value using bitwise operators over the 31
7352 // most significant bits and then multiply by 4 (equal to a left-shift
7353 // of 2) before we add the final 2 least significant bits of the
7354 // timestamp (equal to an OR.)
7355
7356 if (ptsDtsFlags & 0xC0) {
7357 // the PTS and DTS are not written out directly. For information
7358 // on how they are encoded, see
7359 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
7360 pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
7361 pes.pts *= 4; // Left shift by 2
7362
7363 pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
7364
7365 pes.dts = pes.pts;
7366
7367 if (ptsDtsFlags & 0x40) {
7368 pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
7369 pes.dts *= 4; // Left shift by 2
7370
7371 pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
7372 }
7373 } // the data section starts immediately after the PES header.
7374 // pes_header_data_length specifies the number of header bytes
7375 // that follow the last byte of the field.
7376
7377
7378 pes.data = payload.subarray(9 + payload[8]);
7379 },
7380
7381 /**
7382 * Pass completely parsed PES packets to the next stream in the pipeline
7383 **/
7384 flushStream = function flushStream(stream, type, forceFlush) {
7385 var packetData = new Uint8Array(stream.size),
7386 event = {
7387 type: type
7388 },
7389 i = 0,
7390 offset = 0,
7391 packetFlushable = false,
7392 fragment; // do nothing if there is not enough buffered data for a complete
7393 // PES header
7394
7395 if (!stream.data.length || stream.size < 9) {
7396 return;
7397 }
7398
7399 event.trackId = stream.data[0].pid; // reassemble the packet
7400
7401 for (i = 0; i < stream.data.length; i++) {
7402 fragment = stream.data[i];
7403 packetData.set(fragment.data, offset);
7404 offset += fragment.data.byteLength;
7405 } // parse assembled packet's PES header
7406
7407
7408 parsePes(packetData, event); // non-video PES packets MUST have a non-zero PES_packet_length
7409 // check that there is enough stream data to fill the packet
7410
7411 packetFlushable = type === 'video' || event.packetLength <= stream.size; // flush pending packets if the conditions are right
7412
7413 if (forceFlush || packetFlushable) {
7414 stream.size = 0;
7415 stream.data.length = 0;
7416 } // only emit packets that are complete. this is to avoid assembling
7417 // incomplete PES packets due to poor segmentation
7418
7419
7420 if (packetFlushable) {
7421 self.trigger('data', event);
7422 }
7423 };
7424
7425 _ElementaryStream.prototype.init.call(this);
7426 /**
7427 * Identifies M2TS packet types and parses PES packets using metadata
7428 * parsed from the PMT
7429 **/
7430
7431
7432 this.push = function (data) {
7433 ({
7434 pat: function pat() {// we have to wait for the PMT to arrive as well before we
7435 // have any meaningful metadata
7436 },
7437 pes: function pes() {
7438 var stream, streamType;
7439
7440 switch (data.streamType) {
7441 case streamTypes.H264_STREAM_TYPE:
7442 stream = video;
7443 streamType = 'video';
7444 break;
7445
7446 case streamTypes.ADTS_STREAM_TYPE:
7447 stream = audio;
7448 streamType = 'audio';
7449 break;
7450
7451 case streamTypes.METADATA_STREAM_TYPE:
7452 stream = timedMetadata;
7453 streamType = 'timed-metadata';
7454 break;
7455
7456 default:
7457 // ignore unknown stream types
7458 return;
7459 } // if a new packet is starting, we can flush the completed
7460 // packet
7461
7462
7463 if (data.payloadUnitStartIndicator) {
7464 flushStream(stream, streamType, true);
7465 } // buffer this fragment until we are sure we've received the
7466 // complete payload
7467
7468
7469 stream.data.push(data);
7470 stream.size += data.data.byteLength;
7471 },
7472 pmt: function pmt() {
7473 var event = {
7474 type: 'metadata',
7475 tracks: []
7476 };
7477 programMapTable = data.programMapTable; // translate audio and video streams to tracks
7478
7479 if (programMapTable.video !== null) {
7480 event.tracks.push({
7481 timelineStartInfo: {
7482 baseMediaDecodeTime: 0
7483 },
7484 id: +programMapTable.video,
7485 codec: 'avc',
7486 type: 'video'
7487 });
7488 }
7489
7490 if (programMapTable.audio !== null) {
7491 event.tracks.push({
7492 timelineStartInfo: {
7493 baseMediaDecodeTime: 0
7494 },
7495 id: +programMapTable.audio,
7496 codec: 'adts',
7497 type: 'audio'
7498 });
7499 }
7500
7501 self.trigger('data', event);
7502 }
7503 })[data.type]();
7504 };
7505
7506 this.reset = function () {
7507 video.size = 0;
7508 video.data.length = 0;
7509 audio.size = 0;
7510 audio.data.length = 0;
7511 this.trigger('reset');
7512 };
7513 /**
7514 * Flush any remaining input. Video PES packets may be of variable
7515 * length. Normally, the start of a new video packet can trigger the
7516 * finalization of the previous packet. That is not possible if no
7517 * more video is forthcoming, however. In that case, some other
7518 * mechanism (like the end of the file) has to be employed. When it is
7519 * clear that no additional data is forthcoming, calling this method
7520 * will flush the buffered packets.
7521 */
7522
7523
7524 this.flushStreams_ = function () {
7525 // !!THIS ORDER IS IMPORTANT!!
7526 // video first then audio
7527 flushStream(video, 'video');
7528 flushStream(audio, 'audio');
7529 flushStream(timedMetadata, 'timed-metadata');
7530 };
7531
7532 this.flush = function () {
7533 this.flushStreams_();
7534 this.trigger('done');
7535 };
7536 };
7537
7538 _ElementaryStream.prototype = new stream();
7539 var m2ts = {
7540 PAT_PID: 0x0000,
7541 MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH,
7542 TransportPacketStream: _TransportPacketStream,
7543 TransportParseStream: _TransportParseStream,
7544 ElementaryStream: _ElementaryStream,
7545 TimestampRolloverStream: TimestampRolloverStream$1,
7546 CaptionStream: captionStream.CaptionStream,
7547 Cea608Stream: captionStream.Cea608Stream,
7548 Cea708Stream: captionStream.Cea708Stream,
7549 MetadataStream: metadataStream
7550 };
7551
7552 for (var type in streamTypes) {
7553 if (streamTypes.hasOwnProperty(type)) {
7554 m2ts[type] = streamTypes[type];
7555 }
7556 }
7557
7558 var m2ts_1 = m2ts;
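/*
 * Putting the pieces together (illustrative sketch; `tsBytes` is a
 * placeholder Uint8Array, and pipe() is the Stream helper defined earlier
 * in this file, which forwards 'data' to push() and 'done' to flush()):
 *
 *   var packetizer = new m2ts_1.TransportPacketStream();
 *   var parser = new m2ts_1.TransportParseStream();
 *   var elementary = new m2ts_1.ElementaryStream();
 *
 *   packetizer.pipe(parser);
 *   parser.pipe(elementary);
 *   elementary.on('data', function(event) {
 *     // event.type is 'metadata' (track info from the PMT) or a
 *     // reassembled PES packet with pts/dts and its payload bytes
 *   });
 *   packetizer.push(tsBytes);
 *   packetizer.flush();
 */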
7559 var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;
7560
7561 var _AdtsStream;
7562
7563 var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
7564 /*
7565 * Accepts a ElementaryStream and emits data events with parsed
7566 * AAC Audio Frames of the individual packets. Input audio in ADTS
7567 * format is unpacked and re-emitted as AAC frames.
7568 *
7569 * @see http://wiki.multimedia.cx/index.php?title=ADTS
7570 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
7571 */
7572
7573 _AdtsStream = function AdtsStream(handlePartialSegments) {
7574 var buffer,
7575 frameNum = 0;
7576
7577 _AdtsStream.prototype.init.call(this);
7578
7579 this.push = function (packet) {
7580 var i = 0,
7581 frameLength,
7582 protectionSkipBytes,
7583 frameEnd,
7584 oldBuffer,
7585 sampleCount,
7586 adtsFrameDuration;
7587
7588 if (!handlePartialSegments) {
7589 frameNum = 0;
7590 }
7591
7592 if (packet.type !== 'audio') {
7593 // ignore non-audio data
7594 return;
7595 } // Prepend any data in the buffer to the input data so that we can parse
7596 // AAC frames that cross a PES packet boundary
7597
7598
7599 if (buffer) {
7600 oldBuffer = buffer;
7601 buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
7602 buffer.set(oldBuffer);
7603 buffer.set(packet.data, oldBuffer.byteLength);
7604 } else {
7605 buffer = packet.data;
7606 } // unpack any ADTS frames which have been fully received
7607 // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
7608
7609
7610 while (i + 5 < buffer.length) {
7611 // Look for the start of an ADTS header.
7612 if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
7613 // If a valid header was not found, jump one forward and attempt to
7614 // find a valid ADTS header starting at the next byte
7615 i++;
7616 continue;
7617 } // The protection skip bit tells us if we have 2 bytes of CRC data at the
7618 // end of the ADTS header
7619
7620
7621 protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2; // Frame length is a 13 bit integer starting 16 bits from the
7622 // end of the sync sequence
7623
7624 frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
7625 sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
7626 adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2];
7627 frameEnd = i + frameLength; // If we don't have enough data to actually finish this ADTS frame, return
7628 // and wait for more data
7629
7630 if (buffer.byteLength < frameEnd) {
7631 return;
7632 } // Otherwise, deliver the complete AAC frame
7633
7634
7635 this.trigger('data', {
7636 pts: packet.pts + frameNum * adtsFrameDuration,
7637 dts: packet.dts + frameNum * adtsFrameDuration,
7638 sampleCount: sampleCount,
7639 audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
7640 channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
7641 samplerate: ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2],
7642 samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
7643 // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
7644 samplesize: 16,
7645 data: buffer.subarray(i + 7 + protectionSkipBytes, frameEnd)
7646 });
7647 frameNum++; // If the buffer is empty, clear it and return
7648
7649 if (buffer.byteLength === frameEnd) {
7650 buffer = undefined;
7651 return;
7652 } // Remove the finished frame from the buffer and start the process again
7653
7654
7655 buffer = buffer.subarray(frameEnd);
7656 }
7657 };
7658
7659 this.flush = function () {
7660 frameNum = 0;
7661 this.trigger('done');
7662 };
7663
7664 this.reset = function () {
7665 buffer = void 0;
7666 this.trigger('reset');
7667 };
7668
7669 this.endTimeline = function () {
7670 buffer = void 0;
7671 this.trigger('endedtimeline');
7672 };
7673 };
7674
7675 _AdtsStream.prototype = new stream();
7676 var adts = _AdtsStream;
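/*
 * Worked example (illustrative only), for an ADTS header beginning
 * 0xFF 0xF1 0x50 0x80 0x34 0x2F, following the bit layout used above:
 *
 *   // sampling_frequency_index = (0x50 & 0x3c) >>> 2 = 4 => 44100 Hz
 *   // frame_length = (0x80 & 0x03) << 11 | 0x34 << 3 | (0x2F & 0xe0) >> 5
 *   //              = 0 | 416 | 1 = 417 bytes (header plus payload)
 *   // protection absent bit set (0xF1 & 0x01 = 1) => no 2-byte CRC to skip
 */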
7677 /**
7678 * mux.js
7679 *
7680 * Copyright (c) Brightcove
7681 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
7682 */
7683
7684 var ExpGolomb;
7685 /**
7686 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
7687 * scheme used by h264.
7688 */
7689
7690 ExpGolomb = function ExpGolomb(workingData) {
7691 var // the number of bytes left to examine in workingData
7692 workingBytesAvailable = workingData.byteLength,
7693 // the current word being examined
7694 workingWord = 0,
7695 // :uint
7696 // the number of bits left to examine in the current word
7697 workingBitsAvailable = 0; // :uint;
7698 // ():uint
7699
7700 this.length = function () {
7701 return 8 * workingBytesAvailable;
7702 }; // ():uint
7703
7704
7705 this.bitsAvailable = function () {
7706 return 8 * workingBytesAvailable + workingBitsAvailable;
7707 }; // ():void
7708
7709
7710 this.loadWord = function () {
7711 var position = workingData.byteLength - workingBytesAvailable,
7712 workingBytes = new Uint8Array(4),
7713 availableBytes = Math.min(4, workingBytesAvailable);
7714
7715 if (availableBytes === 0) {
7716 throw new Error('no bytes available');
7717 }
7718
7719 workingBytes.set(workingData.subarray(position, position + availableBytes));
7720 workingWord = new DataView(workingBytes.buffer).getUint32(0); // track the amount of workingData that has been processed
7721
7722 workingBitsAvailable = availableBytes * 8;
7723 workingBytesAvailable -= availableBytes;
7724 }; // (count:int):void
7725
7726
7727 this.skipBits = function (count) {
7728 var skipBytes; // :int
7729
7730 if (workingBitsAvailable > count) {
7731 workingWord <<= count;
7732 workingBitsAvailable -= count;
7733 } else {
7734 count -= workingBitsAvailable;
7735 skipBytes = Math.floor(count / 8);
7736 count -= skipBytes * 8;
7737 workingBytesAvailable -= skipBytes;
7738 this.loadWord();
7739 workingWord <<= count;
7740 workingBitsAvailable -= count;
7741 }
7742 }; // (size:int):uint
7743
7744
7745 this.readBits = function (size) {
7746 var bits = Math.min(workingBitsAvailable, size),
7747 // :uint
7748 valu = workingWord >>> 32 - bits; // :uint
7749 // if size > 31, handle error
7750
7751 workingBitsAvailable -= bits;
7752
7753 if (workingBitsAvailable > 0) {
7754 workingWord <<= bits;
7755 } else if (workingBytesAvailable > 0) {
7756 this.loadWord();
7757 }
7758
7759 bits = size - bits;
7760
7761 if (bits > 0) {
7762 return valu << bits | this.readBits(bits);
7763 }
7764
7765 return valu;
7766 }; // ():uint
7767
7768
7769 this.skipLeadingZeros = function () {
7770 var leadingZeroCount; // :uint
7771
7772 for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
7773 if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {
7774 // the first bit of working word is 1
7775 workingWord <<= leadingZeroCount;
7776 workingBitsAvailable -= leadingZeroCount;
7777 return leadingZeroCount;
7778 }
7779 } // we exhausted workingWord and still have not found a 1
7780
7781
7782 this.loadWord();
7783 return leadingZeroCount + this.skipLeadingZeros();
7784 }; // ():void
7785
7786
7787 this.skipUnsignedExpGolomb = function () {
7788 this.skipBits(1 + this.skipLeadingZeros());
7789 }; // ():void
7790
7791
7792 this.skipExpGolomb = function () {
7793 this.skipBits(1 + this.skipLeadingZeros());
7794 }; // ():uint
7795
7796
7797 this.readUnsignedExpGolomb = function () {
7798 var clz = this.skipLeadingZeros(); // :uint
7799
7800 return this.readBits(clz + 1) - 1;
7801 }; // ():int
7802
7803
7804 this.readExpGolomb = function () {
7805 var valu = this.readUnsignedExpGolomb(); // :int
7806
7807 if (0x01 & valu) {
7808 // the number is odd if the low order bit is set
7809 return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
7810 }
7811
7812 return -1 * (valu >>> 1); // divide by two then make it negative
7813 }; // Some convenience functions
7814 // :Boolean
7815
7816
7817 this.readBoolean = function () {
7818 return this.readBits(1) === 1;
7819 }; // ():int
7820
7821
7822 this.readUnsignedByte = function () {
7823 return this.readBits(8);
7824 };
7825
7826 this.loadWord();
7827 };
7828
7829 var expGolomb = ExpGolomb;
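/*
 * Worked example (illustrative only): an unsigned Exp-Golomb code is `k`
 * leading zero bits, a 1, then k more bits; the decoded value is the k+1
 * bits read (starting at the 1) minus one. 0x28 is 0b00101000, so the
 * first codeword is 00101: 2 leading zeros, readBits(3) = 0b101 = 5,
 * value 5 - 1 = 4.
 *
 *   new expGolomb(new Uint8Array([0x28])).readUnsignedExpGolomb(); // 4
 */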
7830
7831 var _H264Stream, _NalByteStream;
7832
7833 var PROFILES_WITH_OPTIONAL_SPS_DATA;
7834 /**
7835 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
7836 */
7837
7838 _NalByteStream = function NalByteStream() {
7839 var syncPoint = 0,
7840 i,
7841 buffer;
7842
7843 _NalByteStream.prototype.init.call(this);
7844 /*
7845 * Scans a byte stream and triggers a data event with the NAL units found.
7846 * @param {Object} data Event received from H264Stream
7847 * @param {Uint8Array} data.data The h264 byte stream to be scanned
7848 *
7849 * @see H264Stream.push
7850 */
7851
7852
7853 this.push = function (data) {
7854 var swapBuffer;
7855
7856 if (!buffer) {
7857 buffer = data.data;
7858 } else {
7859 swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
7860 swapBuffer.set(buffer);
7861 swapBuffer.set(data.data, buffer.byteLength);
7862 buffer = swapBuffer;
7863 }
7864
7865 var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B
7866 // scan for NAL unit boundaries
7867 // a match looks like this:
7868 // 0 0 1 .. NAL .. 0 0 1
7869 // ^ sync point ^ i
7870 // or this:
7871 // 0 0 1 .. NAL .. 0 0 0
7872 // ^ sync point ^ i
7873 // advance the sync point to a NAL start, if necessary
7874
7875 for (; syncPoint < len - 3; syncPoint++) {
7876 if (buffer[syncPoint + 2] === 1) {
7877 // the sync point is properly aligned
7878 i = syncPoint + 5;
7879 break;
7880 }
7881 }
7882
7883 while (i < len) {
7884 // look at the current byte to determine if we've hit the end of
7885 // a NAL unit boundary
7886 switch (buffer[i]) {
7887 case 0:
7888 // skip past non-sync sequences
7889 if (buffer[i - 1] !== 0) {
7890 i += 2;
7891 break;
7892 } else if (buffer[i - 2] !== 0) {
7893 i++;
7894 break;
7895 } // deliver the NAL unit if it isn't empty
7896
7897
7898 if (syncPoint + 3 !== i - 2) {
7899 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
7900 } // drop trailing zeroes
7901
7902
7903 do {
7904 i++;
7905 } while (buffer[i] !== 1 && i < len);
7906
7907 syncPoint = i - 2;
7908 i += 3;
7909 break;
7910
7911 case 1:
7912 // skip past non-sync sequences
7913 if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
7914 i += 3;
7915 break;
7916 } // deliver the NAL unit
7917
7918
7919 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
7920 syncPoint = i - 2;
7921 i += 3;
7922 break;
7923
7924 default:
7925 // the current byte isn't a one or zero, so it cannot be part
7926 // of a sync sequence
7927 i += 3;
7928 break;
7929 }
7930 } // filter out the NAL units that were delivered
7931
7932
7933 buffer = buffer.subarray(syncPoint);
7934 i -= syncPoint;
7935 syncPoint = 0;
7936 };
7937
7938 this.reset = function () {
7939 buffer = null;
7940 syncPoint = 0;
7941 this.trigger('reset');
7942 };
7943
7944 this.flush = function () {
7945 // deliver the last buffered NAL unit
7946 if (buffer && buffer.byteLength > 3) {
7947 this.trigger('data', buffer.subarray(syncPoint + 3));
7948 } // reset the stream state
7949
7950
7951 buffer = null;
7952 syncPoint = 0;
7953 this.trigger('done');
7954 };
7955
7956 this.endTimeline = function () {
7957 this.flush();
7958 this.trigger('endedtimeline');
7959 };
7960 };
7961
7962 _NalByteStream.prototype = new stream(); // values of profile_idc that indicate additional fields are included in the SPS
7963 // see Recommendation ITU-T H.264 (4/2013),
7964 // 7.3.2.1.1 Sequence parameter set data syntax
7965
7966 PROFILES_WITH_OPTIONAL_SPS_DATA = {
7967 100: true,
7968 110: true,
7969 122: true,
7970 244: true,
7971 44: true,
7972 83: true,
7973 86: true,
7974 118: true,
7975 128: true,
7976 138: true,
7977 139: true,
7978 134: true
7979 };
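/*
 * Usage sketch (illustrative only; `annexBBytes` is a placeholder): Annex
 * B framing separates NAL units with 0x00 0x00 0x01 start codes. For the
 * input [0, 0, 1, 0x09, 0xf0, 0, 0, 1, 0x67, ...] the stream emits
 * [0x09, 0xf0] when the second start code is seen, and the trailing
 * 0x67... unit on flush(); the start codes themselves are stripped.
 *
 *   var nals = new _NalByteStream();
 *   nals.on('data', function(nal) {
 *     // nal[0] & 0x1f is the nal_unit_type (5 = IDR slice, 7 = SPS, ...)
 *   });
 *   nals.push({ data: annexBBytes });
 *   nals.flush(); // emits the final buffered NAL unit
 */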
7980 /**
7981 * Accepts input from a ElementaryStream and produces H.264 NAL unit data
7982 * events.
7983 */
7984
7985 _H264Stream = function H264Stream() {
7986 var nalByteStream = new _NalByteStream(),
7987 self,
7988 trackId,
7989 currentPts,
7990 currentDts,
7991 discardEmulationPreventionBytes,
7992 readSequenceParameterSet,
7993 skipScalingList;
7994
7995 _H264Stream.prototype.init.call(this);
7996
7997 self = this;
7998 /*
7999 * Pushes a packet from a stream onto the NalByteStream
8000 *
8001 * @param {Object} packet - A packet received from a stream
8002 * @param {Uint8Array} packet.data - The raw bytes of the packet
8003 * @param {Number} packet.dts - Decode timestamp of the packet
8004 * @param {Number} packet.pts - Presentation timestamp of the packet
8005 * @param {Number} packet.trackId - The id of the h264 track this packet came from
8006 * @param {('video'|'audio')} packet.type - The type of packet
8007 *
8008 */
8009
8010 this.push = function (packet) {
8011 if (packet.type !== 'video') {
8012 return;
8013 }
8014
8015 trackId = packet.trackId;
8016 currentPts = packet.pts;
8017 currentDts = packet.dts;
8018 nalByteStream.push(packet);
8019 };
8020 /*
8021 * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
8022 * for the NALUs to the next stream component.
8023 * Also, preprocess caption and sequence parameter NALUs.
8024 *
8025 * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
8026 * @see NalByteStream.push
8027 */
8028
8029
8030 nalByteStream.on('data', function (data) {
8031 var event = {
8032 trackId: trackId,
8033 pts: currentPts,
8034 dts: currentDts,
8035 data: data
8036 };
8037
8038 switch (data[0] & 0x1f) {
8039 case 0x05:
8040 event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
8041 break;
8042
8043 case 0x06:
8044 event.nalUnitType = 'sei_rbsp';
8045 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
8046 break;
8047
8048 case 0x07:
8049 event.nalUnitType = 'seq_parameter_set_rbsp';
8050 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
8051 event.config = readSequenceParameterSet(event.escapedRBSP);
8052 break;
8053
8054 case 0x08:
8055 event.nalUnitType = 'pic_parameter_set_rbsp';
8056 break;
8057
8058 case 0x09:
8059 event.nalUnitType = 'access_unit_delimiter_rbsp';
8060 break;
8061 } // This triggers data on the H264Stream
8062
8063
8064 self.trigger('data', event);
8065 });
8066 nalByteStream.on('done', function () {
8067 self.trigger('done');
8068 });
8069 nalByteStream.on('partialdone', function () {
8070 self.trigger('partialdone');
8071 });
8072 nalByteStream.on('reset', function () {
8073 self.trigger('reset');
8074 });
8075 nalByteStream.on('endedtimeline', function () {
8076 self.trigger('endedtimeline');
8077 });
8078
8079 this.flush = function () {
8080 nalByteStream.flush();
8081 };
8082
8083 this.partialFlush = function () {
8084 nalByteStream.partialFlush();
8085 };
8086
8087 this.reset = function () {
8088 nalByteStream.reset();
8089 };
8090
8091 this.endTimeline = function () {
8092 nalByteStream.endTimeline();
8093 };
8094 /**
8095 * Advance the ExpGolomb decoder past a scaling list. The scaling
8096 * list is optionally transmitted as part of a sequence parameter
8097 * set and is not relevant to transmuxing.
8098 * @param count {number} the number of entries in this scaling list
8099 * @param expGolombDecoder {object} an ExpGolomb pointed to the
8100 * start of a scaling list
8101 * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
8102 */
8103
8104
8105 skipScalingList = function skipScalingList(count, expGolombDecoder) {
8106 var lastScale = 8,
8107 nextScale = 8,
8108 j,
8109 deltaScale;
8110
8111 for (j = 0; j < count; j++) {
8112 if (nextScale !== 0) {
8113 deltaScale = expGolombDecoder.readExpGolomb();
8114 nextScale = (lastScale + deltaScale + 256) % 256;
8115 }
8116
8117 lastScale = nextScale === 0 ? lastScale : nextScale;
8118 }
8119 };
8120 /**
8121 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
8122 * Sequence Payload"
8123 * @param data {Uint8Array} the bytes of a RBSP from a NAL
8124 * unit
8125 * @return {Uint8Array} the RBSP without any Emulation
8126 * Prevention Bytes
8127 */
8128
8129
8130 discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
8131 var length = data.byteLength,
8132 emulationPreventionBytesPositions = [],
8133 i = 1,
8134 newLength,
8135 newData; // Find all `Emulation Prevention Bytes`
8136
8137 while (i < length - 2) {
8138 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
8139 emulationPreventionBytesPositions.push(i + 2);
8140 i += 2;
8141 } else {
8142 i++;
8143 }
8144 } // If no Emulation Prevention Bytes were found just return the original
8145 // array
8146
8147
8148 if (emulationPreventionBytesPositions.length === 0) {
8149 return data;
8150 } // Create a new array to hold the NAL unit data
8151
8152
8153 newLength = length - emulationPreventionBytesPositions.length;
8154 newData = new Uint8Array(newLength);
8155 var sourceIndex = 0;
8156
8157 for (i = 0; i < newLength; sourceIndex++, i++) {
8158 if (sourceIndex === emulationPreventionBytesPositions[0]) {
8159 // Skip this byte
8160 sourceIndex++; // Remove this position index
8161
8162 emulationPreventionBytesPositions.shift();
8163 }
8164
8165 newData[i] = data[sourceIndex];
8166 }
8167
8168 return newData;
8169 };
8170 /**
8171 * Read a sequence parameter set and return some interesting video
8172 * properties. A sequence parameter set is the H264 metadata that
8173 * describes the properties of upcoming video frames.
8174 * @param data {Uint8Array} the bytes of a sequence parameter set
8175 * @return {object} an object with configuration parsed from the
8176 * sequence parameter set, including the dimensions of the
8177 * associated video frames.
8178 */
8179
8180
8181 readSequenceParameterSet = function readSequenceParameterSet(data) {
8182 var frameCropLeftOffset = 0,
8183 frameCropRightOffset = 0,
8184 frameCropTopOffset = 0,
8185 frameCropBottomOffset = 0,
8186 sarScale = 1,
8187 expGolombDecoder,
8188 profileIdc,
8189 levelIdc,
8190 profileCompatibility,
8191 chromaFormatIdc,
8192 picOrderCntType,
8193 numRefFramesInPicOrderCntCycle,
8194 picWidthInMbsMinus1,
8195 picHeightInMapUnitsMinus1,
8196 frameMbsOnlyFlag,
8197 scalingListCount,
8198 sarRatio,
8199 aspectRatioIdc,
8200 i;
8201 expGolombDecoder = new expGolomb(data);
8202 profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
8203
8204 profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
8205
8206 levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
8207
8208 expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
8209 // some profiles have more optional data we don't need
8210
8211 if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
8212 chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
8213
8214 if (chromaFormatIdc === 3) {
8215 expGolombDecoder.skipBits(1); // separate_colour_plane_flag
8216 }
8217
8218 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
8219
8220 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
8221
8222 expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
8223
8224 if (expGolombDecoder.readBoolean()) {
8225 // seq_scaling_matrix_present_flag
8226 scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
8227
8228 for (i = 0; i < scalingListCount; i++) {
8229 if (expGolombDecoder.readBoolean()) {
8230 // seq_scaling_list_present_flag[ i ]
8231 if (i < 6) {
8232 skipScalingList(16, expGolombDecoder);
8233 } else {
8234 skipScalingList(64, expGolombDecoder);
8235 }
8236 }
8237 }
8238 }
8239 }
8240
8241 expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
8242
8243 picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
8244
8245 if (picOrderCntType === 0) {
8246 expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
8247 } else if (picOrderCntType === 1) {
8248 expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
8249
8250 expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
8251
8252 expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
8253
8254 numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
8255
8256 for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
8257 expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
8258 }
8259 }
8260
8261 expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
8262
8263 expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
8264
8265 picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
8266 picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
8267 frameMbsOnlyFlag = expGolombDecoder.readBits(1);
8268
8269 if (frameMbsOnlyFlag === 0) {
8270 expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
8271 }
8272
8273 expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
8274
8275 if (expGolombDecoder.readBoolean()) {
8276 // frame_cropping_flag
8277 frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
8278 frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
8279 frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
8280 frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
8281 }
8282
8283 if (expGolombDecoder.readBoolean()) {
8284 // vui_parameters_present_flag
8285 if (expGolombDecoder.readBoolean()) {
8286 // aspect_ratio_info_present_flag
8287 aspectRatioIdc = expGolombDecoder.readUnsignedByte();
8288
8289 switch (aspectRatioIdc) {
8290 case 1:
8291 sarRatio = [1, 1];
8292 break;
8293
8294 case 2:
8295 sarRatio = [12, 11];
8296 break;
8297
8298 case 3:
8299 sarRatio = [10, 11];
8300 break;
8301
8302 case 4:
8303 sarRatio = [16, 11];
8304 break;
8305
8306 case 5:
8307 sarRatio = [40, 33];
8308 break;
8309
8310 case 6:
8311 sarRatio = [24, 11];
8312 break;
8313
8314 case 7:
8315 sarRatio = [20, 11];
8316 break;
8317
8318 case 8:
8319 sarRatio = [32, 11];
8320 break;
8321
8322 case 9:
8323 sarRatio = [80, 33];
8324 break;
8325
8326 case 10:
8327 sarRatio = [18, 11];
8328 break;
8329
8330 case 11:
8331 sarRatio = [15, 11];
8332 break;
8333
8334 case 12:
8335 sarRatio = [64, 33];
8336 break;
8337
8338 case 13:
8339 sarRatio = [160, 99];
8340 break;
8341
8342 case 14:
8343 sarRatio = [4, 3];
8344 break;
8345
8346 case 15:
8347 sarRatio = [3, 2];
8348 break;
8349
8350 case 16:
8351 sarRatio = [2, 1];
8352 break;
8353
8354 case 255:
8355 {
8356 sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
8357 break;
8358 }
8359 }
8360
8361 if (sarRatio) {
8362 sarScale = sarRatio[0] / sarRatio[1];
8363 }
8364 }
8365 }
8366
8367 return {
8368 profileIdc: profileIdc,
8369 levelIdc: levelIdc,
8370 profileCompatibility: profileCompatibility,
8371 width: Math.ceil(((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale),
8372 height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
8373 sarRatio: sarRatio
8374 };
8375 };
8376 };
8377
8378 _H264Stream.prototype = new stream();
8379 var h264 = {
8380 H264Stream: _H264Stream,
8381 NalByteStream: _NalByteStream
8382 };
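/*
 * Worked example (illustrative only): for a typical 1080p SPS with
 * pic_width_in_mbs_minus1 = 119, pic_height_in_map_units_minus1 = 67,
 * frame_mbs_only_flag = 1 and frame_crop_bottom_offset = 4, the formulas
 * in readSequenceParameterSet yield:
 *
 *   width  = (119 + 1) * 16 = 1920                // no crop, sarScale = 1
 *   height = (2 - 1) * (67 + 1) * 16 - 4 * 2 = 1080
 */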
8383 /**
8384 * mux.js
8385 *
8386 * Copyright (c) Brightcove
8387 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
8388 *
8389 * Utilities to detect basic properties and metadata about AAC data.
8390 */
8391
8392 var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
8393
8394 var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
8395 var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],
8396 flags = header[byteIndex + 5],
8397 footerPresent = (flags & 16) >> 4; // if we get a negative returnSize clamp it to 0
8398
8399 returnSize = returnSize >= 0 ? returnSize : 0;
8400
8401 if (footerPresent) {
8402 return returnSize + 20;
8403 }
8404
8405 return returnSize + 10;
8406 };
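/*
 * Worked example (illustrative only): for an ID3 header whose size bytes
 * (offsets 6-9) are 0x00 0x00 0x02 0x01, the sync-safe size is 257, so
 * parseId3TagSize returns 257 + 10 header bytes = 267, or 257 + 20 = 277
 * when the footer flag (bit 4 of byte 5) is set.
 */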
8407
8408 var getId3Offset = function getId3Offset(data, offset) {
8409 if (data.length - offset < 10 || data[offset] !== 'I'.charCodeAt(0) || data[offset + 1] !== 'D'.charCodeAt(0) || data[offset + 2] !== '3'.charCodeAt(0)) {
8410 return offset;
8411 }
8412
8413 offset += parseId3TagSize(data, offset);
8414 return getId3Offset(data, offset);
8415 }; // TODO: use vhs-utils
8416
8417
8418 var isLikelyAacData = function isLikelyAacData(data) {
8419 var offset = getId3Offset(data, 0);
8420 return data.length >= offset + 2 && (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0 && // verify that the 2 layer bits are 0, i.e. this
8421 // is AAC data rather than MP3 data.
8422 (data[offset + 1] & 0x16) === 0x10;
8423 };
8424
8425 var parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
8426 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
8427 }; // return a percent-encoded representation of the specified byte range
8428 // @see http://en.wikipedia.org/wiki/Percent-encoding
8429
8430
8431 var percentEncode$1 = function percentEncode(bytes, start, end) {
8432 var i,
8433 result = '';
8434
8435 for (i = start; i < end; i++) {
8436 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
8437 }
8438
8439 return result;
8440 }; // return the string representation of the specified byte range,
8441 // interpreted as ISO-8859-1.
8442
8443
8444 var parseIso88591$1 = function parseIso88591(bytes, start, end) {
8445 return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
8446 };
8447
8448 var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
8449 var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
8450 middle = header[byteIndex + 4] << 3,
8451 highTwo = (header[byteIndex + 3] & 0x3) << 11; // mask byte 3's low two bits before shifting; << binds tighter than &
8452 return highTwo | middle | lowThree;
8453 };
8454
8455 var parseType = function parseType(header, byteIndex) {
8456 if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
8457 return 'timed-metadata';
8458 } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) { // parenthesize the mask: === binds tighter than &
8459 return 'audio';
8460 }
8461
8462 return null;
8463 };
8464
8465 var parseSampleRate = function parseSampleRate(packet) {
8466 var i = 0;
8467
8468 while (i + 5 < packet.length) {
8469 if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
8470 // If a valid header was not found, jump one forward and attempt to
8471 // find a valid ADTS header starting at the next byte
8472 i++;
8473 continue;
8474 }
8475
8476 return ADTS_SAMPLING_FREQUENCIES$1[(packet[i + 2] & 0x3c) >>> 2];
8477 }
8478
8479 return null;
8480 };
8481
8482 var parseAacTimestamp = function parseAacTimestamp(packet) {
8483 var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag
8484
8485 frameStart = 10;
8486
8487 if (packet[5] & 0x40) {
8488 // advance the frame start past the extended header
8489 frameStart += 4; // header size field
8490
8491 frameStart += parseSyncSafeInteger$1(packet.subarray(10, 14));
8492 } // parse one or more ID3 frames
8493 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
8494
8495
8496 do {
8497 // determine the number of bytes in this frame
8498 frameSize = parseSyncSafeInteger$1(packet.subarray(frameStart + 4, frameStart + 8));
8499
8500 if (frameSize < 1) {
8501 return null;
8502 }
8503
8504 frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);
8505
8506 if (frameHeader === 'PRIV') {
8507 frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
8508
8509 for (var i = 0; i < frame.byteLength; i++) {
8510 if (frame[i] === 0) {
8511 var owner = parseIso88591$1(frame, 0, i);
8512
8513 if (owner === 'com.apple.streaming.transportStreamTimestamp') {
8514 var d = frame.subarray(i + 1);
8515 var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
8516 size *= 4;
8517 size += d[7] & 0x03;
8518 return size;
8519 }
8520
8521 break;
8522 }
8523 }
8524 }
8525
8526 frameStart += 10; // advance past the frame header
8527
8528 frameStart += frameSize; // advance past the frame body
8529 } while (frameStart < packet.byteLength);
8530
8531 return null;
8532 };
8533
8534 var utils = {
8535 isLikelyAacData: isLikelyAacData,
8536 parseId3TagSize: parseId3TagSize,
8537 parseAdtsSize: parseAdtsSize,
8538 parseType: parseType,
8539 parseSampleRate: parseSampleRate,
8540 parseAacTimestamp: parseAacTimestamp
8541 };
8542
8543 var _AacStream;
8544 /**
8545 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
8546 */
8547
8548
8549 _AacStream = function AacStream() {
8550 var everything = new Uint8Array(),
8551 timeStamp = 0;
8552
8553 _AacStream.prototype.init.call(this);
8554
8555 this.setTimestamp = function (timestamp) {
8556 timeStamp = timestamp;
8557 };
8558
8559 this.push = function (bytes) {
8560 var frameSize = 0,
8561 byteIndex = 0,
8562 bytesLeft,
8563 chunk,
8564 packet,
8565 tempLength; // If there are bytes remaining from the last segment, prepend them to the
8566 // bytes that were pushed in
8567
8568 if (everything.length) {
8569 tempLength = everything.length;
8569 chunk = everything; // hold the old bytes; the previous code copied from the freshly-zeroed replacement array, losing the remainder
8570 everything = new Uint8Array(bytes.byteLength + tempLength);
8571 everything.set(chunk.subarray(0, tempLength));
8572 everything.set(bytes, tempLength);
8573 } else {
8574 everything = bytes;
8575 }
8576
8577 while (everything.length - byteIndex >= 3) {
8578 if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
8579 // Exit early because we don't have enough to parse
8580 // the ID3 tag header
8581 if (everything.length - byteIndex < 10) {
8582 break;
8583 } // check framesize
8584
8585
8586 frameSize = utils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
8587 // to emit a full packet
8588 // Add to byteIndex to support multiple ID3 tags in sequence
8589
8590 if (byteIndex + frameSize > everything.length) {
8591 break;
8592 }
8593
8594 chunk = {
8595 type: 'timed-metadata',
8596 data: everything.subarray(byteIndex, byteIndex + frameSize)
8597 };
8598 this.trigger('data', chunk);
8599 byteIndex += frameSize;
8600 continue;
8601 } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
8602 // Exit early because we don't have enough to parse
8603 // the ADTS frame header
8604 if (everything.length - byteIndex < 7) {
8605 break;
8606 }
8607
8608 frameSize = utils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
8609 // to emit a full packet
8610
8611 if (byteIndex + frameSize > everything.length) {
8612 break;
8613 }
8614
8615 packet = {
8616 type: 'audio',
8617 data: everything.subarray(byteIndex, byteIndex + frameSize),
8618 pts: timeStamp,
8619 dts: timeStamp
8620 };
8621 this.trigger('data', packet);
8622 byteIndex += frameSize;
8623 continue;
8624 }
8625
8626 byteIndex++;
8627 }
8628
8629 bytesLeft = everything.length - byteIndex;
8630
8631 if (bytesLeft > 0) {
8632 everything = everything.subarray(byteIndex);
8633 } else {
8634 everything = new Uint8Array();
8635 }
8636 };
8637
8638 this.reset = function () {
8639 everything = new Uint8Array();
8640 this.trigger('reset');
8641 };
8642
8643 this.endTimeline = function () {
8644 everything = new Uint8Array();
8645 this.trigger('endedtimeline');
8646 };
8647 };
8648
8649 _AacStream.prototype = new stream();
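/*
 * Usage sketch (illustrative only; `aacBytes` is a placeholder): AacStream
 * splits a raw AAC segment into its interleaved ID3 tags and ADTS frames:
 *
 *   var aacStream = new _AacStream();
 *   aacStream.on('data', function(chunk) {
 *     // chunk.type is 'timed-metadata' (a whole ID3 tag) or 'audio'
 *     // (a whole ADTS frame stamped with the current timestamp)
 *   });
 *   aacStream.setTimestamp(90000); // 90kHz clock, e.g. one second
 *   aacStream.push(aacBytes);
 */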
8650 var aac = _AacStream; // constants
8651
8652 var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];
8653 var audioProperties = AUDIO_PROPERTIES;
8654 var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];
8655 var videoProperties = VIDEO_PROPERTIES;
8656 var H264Stream = h264.H264Stream;
8657 var isLikelyAacData$1 = utils.isLikelyAacData;
8658 var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS; // object types
8659
8660 var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
8661 /**
8662 * Compare two arrays (even typed) for same-ness
8663 */
8664
8665
8666 var arrayEquals = function arrayEquals(a, b) {
8667 var i;
8668
8669 if (a.length !== b.length) {
8670 return false;
8671 } // compare the value of each element in the array
8672
8673
8674 for (i = 0; i < a.length; i++) {
8675 if (a[i] !== b[i]) {
8676 return false;
8677 }
8678 }
8679
8680 return true;
8681 };
8682
8683 var generateSegmentTimingInfo = function generateSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
8684 var ptsOffsetFromDts = startPts - startDts,
8685 decodeDuration = endDts - startDts,
8686 presentationDuration = endPts - startPts; // The PTS and DTS values are based on the actual stream times from the segment,
8687 // however, the player time values will reflect a start from the baseMediaDecodeTime.
8688 // In order to provide relevant values for the player times, base timing info on the
8689 // baseMediaDecodeTime and the DTS and PTS durations of the segment.
8690
8691 return {
8692 start: {
8693 dts: baseMediaDecodeTime,
8694 pts: baseMediaDecodeTime + ptsOffsetFromDts
8695 },
8696 end: {
8697 dts: baseMediaDecodeTime + decodeDuration,
8698 pts: baseMediaDecodeTime + presentationDuration
8699 },
8700 prependedContentDuration: prependedContentDuration,
8701 baseMediaDecodeTime: baseMediaDecodeTime
8702 };
8703 };
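/*
 * Worked example (illustrative only), all values in 90kHz clock ticks:
 * generateSegmentTimingInfo(90000, 900000, 903000, 990000, 996000, 0)
 * rebases the segment's stream times onto baseMediaDecodeTime:
 *
 *   start: { dts: 90000,  pts: 93000  }  // pts leads dts by 3000 ticks
 *   end:   { dts: 180000, pts: 183000 }  // 90000-tick (1s) decode duration
 */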
8704 /**
8705 * Constructs a single-track, ISO BMFF media segment from AAC data
8706 * events. The output of this stream can be fed to a SourceBuffer
8707 * configured with a suitable initialization segment.
8708 * @param track {object} track metadata configuration
8709 * @param options {object} transmuxer options object
8710 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
8711 * in the source; false to adjust the first segment to start at 0.
8712 */
8713
8714
8715 _AudioSegmentStream = function AudioSegmentStream(track, options) {
8716 var adtsFrames = [],
8717 sequenceNumber = 0,
8718 earliestAllowedDts = 0,
8719 audioAppendStartTs = 0,
8720 videoBaseMediaDecodeTime = Infinity;
8721 options = options || {};
8722
8723 _AudioSegmentStream.prototype.init.call(this);
8724
8725 this.push = function (data) {
8726 trackDecodeInfo.collectDtsInfo(track, data);
8727
8728 if (track) {
8729 audioProperties.forEach(function (prop) {
8730 track[prop] = data[prop];
8731 });
8732 } // buffer audio data until end() is called
8733
8734
8735 adtsFrames.push(data);
8736 };
8737
8738 this.setEarliestDts = function (earliestDts) {
8739 earliestAllowedDts = earliestDts;
8740 };
8741
8742 this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
8743 videoBaseMediaDecodeTime = baseMediaDecodeTime;
8744 };
8745
8746 this.setAudioAppendStart = function (timestamp) {
8747 audioAppendStartTs = timestamp;
8748 };
8749
8750 this.flush = function () {
8751 var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed; // return early if no audio data has been observed
8752
8753 if (adtsFrames.length === 0) {
8754 this.trigger('done', 'AudioSegmentStream');
8755 return;
8756 }
8757
8758 frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
8759 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps); // amount of silence prefixed to the audio; note the value is in video clock cycles rather than audio clock cycles
8760
8761 videoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
8762 // samples (that is, adts frames) in the audio data
8763
8764 track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to construct the mdat
8765
8766 mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
8767 adtsFrames = [];
8768 moof = mp4Generator.moof(sequenceNumber, [track]);
8769 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // bump the sequence number for next time
8770
8771 sequenceNumber++;
8772 boxes.set(moof);
8773 boxes.set(mdat, moof.byteLength);
8774 trackDecodeInfo.clearDtsInfo(track);
8775 frameDuration = Math.ceil(ONE_SECOND_IN_TS$3 * 1024 / track.samplerate); // TODO this check was added to maintain backwards compatibility (particularly with
8776 // tests) on adding the timingInfo event. However, it seems unlikely that there's a
8777 // valid use-case where an init segment/data should be triggered without associated
8778 // frames. Leaving for now, but should be looked into.
8779
8780 if (frames.length) {
8781 segmentDuration = frames.length * frameDuration;
8782 this.trigger('segmentTimingInfo', generateSegmentTimingInfo( // The audio track's baseMediaDecodeTime is in audio clock cycles, but the
8783 // frame info is in video clock cycles. Convert to match expectation of
8784 // listeners (that all timestamps will be based on video clock cycles).
8785 clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate), // frame times are already in video clock, as is segment duration
8786 frames[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));
8787 this.trigger('timingInfo', {
8788 start: frames[0].pts,
8789 end: frames[0].pts + segmentDuration
8790 });
8791 }
8792
8793 this.trigger('data', {
8794 track: track,
8795 boxes: boxes
8796 });
8797 this.trigger('done', 'AudioSegmentStream');
8798 };
8799
8800 this.reset = function () {
8801 trackDecodeInfo.clearDtsInfo(track);
8802 adtsFrames = [];
8803 this.trigger('reset');
8804 };
8805 };
8806
8807 _AudioSegmentStream.prototype = new stream();
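// A note on the frameDuration math above (a sketch using the constants from
// this file): every AAC frame decodes to 1024 PCM samples, so on the 90kHz
// MPEG-TS clock one frame lasts ONE_SECOND_IN_TS * 1024 / samplerate ticks.
// For example, at 44100 Hz:
//
//   Math.ceil(90000 * 1024 / 44100); // => 2090 ticks, i.e. ~23.2ms per frame
//
// and the segment duration reported in the segmentTimingInfo event is simply
// frames.length * frameDuration.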
8808 /**
8809 * Constructs a single-track, ISO BMFF media segment from H264 data
8810 * events. The output of this stream can be fed to a SourceBuffer
8811 * configured with a suitable initialization segment.
8812 * @param track {object} track metadata configuration
8813 * @param options {object} transmuxer options object
8814 * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
8815 * gopsToAlignWith list when attempting to align gop pts
8816 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
8817 * in the source; false to adjust the first segment to start at 0.
8818 */
8819
8820 _VideoSegmentStream = function VideoSegmentStream(track, options) {
8821 var sequenceNumber = 0,
8822 nalUnits = [],
8823 gopsToAlignWith = [],
8824 config,
8825 pps;
8826 options = options || {};
8827
8828 _VideoSegmentStream.prototype.init.call(this);
8829
8830 delete track.minPTS;
8831 this.gopCache_ = [];
8832 /**
8833 * Constructs an ISO BMFF segment given H264 nalUnits
8834 * @param {Object} nalUnit A data event representing a nalUnit
8835 * @param {String} nalUnit.nalUnitType
8836 * @param {Object} nalUnit.config Properties for a mp4 track
8837 * @param {Uint8Array} nalUnit.data The nalUnit bytes
8838 * @see lib/codecs/h264.js
8839 **/
8840
8841 this.push = function (nalUnit) {
8842 trackDecodeInfo.collectDtsInfo(track, nalUnit); // record the track config
8843
8844 if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
8845 config = nalUnit.config;
8846 track.sps = [nalUnit.data];
8847 videoProperties.forEach(function (prop) {
8848 track[prop] = config[prop];
8849 }, this);
8850 }
8851
8852 if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
8853 pps = nalUnit.data;
8854 track.pps = [nalUnit.data];
8855 } // buffer video until flush() is called
8856
8857
8858 nalUnits.push(nalUnit);
8859 };
8860 /**
8861 * Pass constructed ISO BMFF track and boxes on to the
8862 * next stream in the pipeline
8863 **/
8864
8865
8866 this.flush = function () {
8867 var frames,
8868 gopForFusion,
8869 gops,
8870 moof,
8871 mdat,
8872 boxes,
8873 prependedContentDuration = 0,
8874 firstGop,
8875 lastGop; // Throw away nalUnits at the start of the byte stream until
8876 // we find the first AUD
8877
8878 while (nalUnits.length) {
8879 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
8880 break;
8881 }
8882
8883 nalUnits.shift();
8884 } // Return early if no video data has been observed
8885
8886
8887 if (nalUnits.length === 0) {
8888 this.resetStream_();
8889 this.trigger('done', 'VideoSegmentStream');
8890 return;
8891 } // Organize the raw nal-units into arrays that represent
8892 // higher-level constructs such as frames and gops
8893 // (group-of-pictures)
8894
8895
8896 frames = frameUtils.groupNalsIntoFrames(nalUnits);
8897 gops = frameUtils.groupFramesIntoGops(frames); // If the first frame of this fragment is not a keyframe we have
8898 // a problem since MSE (on Chrome) requires a leading keyframe.
8899 //
8900 // We have two approaches to repairing this situation:
8901 // 1) GOP-FUSION:
8902 // This is where we keep track of the GOPS (group-of-pictures)
8903 // from previous fragments and attempt to find one that we can
8904 // prepend to the current fragment in order to create a valid
8905 // fragment.
8906 // 2) KEYFRAME-PULLING:
8907 // Here we search for the first keyframe in the fragment and
8908 // throw away all the frames between the start of the fragment
8909 // and that keyframe. We then extend the duration and pull the
8910 // PTS of the keyframe forward so that it covers the time range
8911 // of the frames that were disposed of.
8912 //
8913 // #1 is far preferable to #2 (which can cause "stuttering") but
8914 // requires more things to be just right.
8915
8916 if (!gops[0][0].keyFrame) {
8917 // Search for a gop for fusion from our gopCache
8918 gopForFusion = this.getGopForFusion_(nalUnits[0], track);
8919
8920 if (gopForFusion) {
8921 // in order to provide more accurate timing information about the segment, save
8922 // the number of seconds prepended to the original segment due to GOP fusion
8923 prependedContentDuration = gopForFusion.duration;
8924 gops.unshift(gopForFusion); // Adjust Gops' metadata to account for the inclusion of the
8925 // new gop at the beginning
8926
8927 gops.byteLength += gopForFusion.byteLength;
8928 gops.nalCount += gopForFusion.nalCount;
8929 gops.pts = gopForFusion.pts;
8930 gops.dts = gopForFusion.dts;
8931 gops.duration += gopForFusion.duration;
8932 } else {
8933 // If we didn't find a candidate gop fall back to keyframe-pulling
8934 gops = frameUtils.extendFirstKeyFrame(gops);
8935 }
8936 } // Trim gops to align with gopsToAlignWith
8937
8938
8939 if (gopsToAlignWith.length) {
8940 var alignedGops;
8941
8942 if (options.alignGopsAtEnd) {
8943 alignedGops = this.alignGopsAtEnd_(gops);
8944 } else {
8945 alignedGops = this.alignGopsAtStart_(gops);
8946 }
8947
8948 if (!alignedGops) {
8949 // save all the nals in the last GOP into the gop cache
8950 this.gopCache_.unshift({
8951 gop: gops.pop(),
8952 pps: track.pps,
8953 sps: track.sps
8954 }); // Keep a maximum of 6 GOPs in the cache
8955
8956 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
8957
8958 nalUnits = []; // return early; no gops can be aligned with the desired gopsToAlignWith
8959
8960 this.resetStream_();
8961 this.trigger('done', 'VideoSegmentStream');
8962 return;
8963 } // Some gops were trimmed; clear dts info so minSegmentDts and pts are correct
8964 // when recalculated before sending off to CoalesceStream
8965
8966
8967 trackDecodeInfo.clearDtsInfo(track);
8968 gops = alignedGops;
8969 }
8970
8971 trackDecodeInfo.collectDtsInfo(track, gops); // First, we have to build the index from byte locations to
8972 // samples (that is, frames) in the video data
8973
8974 track.samples = frameUtils.generateSampleTable(gops); // Concatenate the video data and construct the mdat
8975
8976 mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));
8977 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
8978 this.trigger('processedGopsInfo', gops.map(function (gop) {
8979 return {
8980 pts: gop.pts,
8981 dts: gop.dts,
8982 byteLength: gop.byteLength
8983 };
8984 }));
8985 firstGop = gops[0];
8986 lastGop = gops[gops.length - 1];
8987 this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));
8988 this.trigger('timingInfo', {
8989 start: gops[0].pts,
8990 end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
8991 }); // save all the nals in the last GOP into the gop cache
8992
8993 this.gopCache_.unshift({
8994 gop: gops.pop(),
8995 pps: track.pps,
8996 sps: track.sps
8997 }); // Keep a maximum of 6 GOPs in the cache
8998
8999 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
9000
9001 nalUnits = [];
9002 this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
9003 this.trigger('timelineStartInfo', track.timelineStartInfo);
9004 moof = mp4Generator.moof(sequenceNumber, [track]); // it would be great to allocate this array up front instead of
9005 // throwing away hundreds of media segment fragments
9006
9007 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // Bump the sequence number for next time
9008
9009 sequenceNumber++;
9010 boxes.set(moof);
9011 boxes.set(mdat, moof.byteLength);
9012 this.trigger('data', {
9013 track: track,
9014 boxes: boxes
9015 });
9016 this.resetStream_(); // Continue with the flush process now
9017
9018 this.trigger('done', 'VideoSegmentStream');
9019 };
9020
9021 this.reset = function () {
9022 this.resetStream_();
9023 nalUnits = [];
9024 this.gopCache_.length = 0;
9025 gopsToAlignWith.length = 0;
9026 this.trigger('reset');
9027 };
9028
9029 this.resetStream_ = function () {
9030 trackDecodeInfo.clearDtsInfo(track); // reset config and pps because they may differ across segments
9031 // for instance, when we are rendition switching
9032
9033 config = undefined;
9034 pps = undefined;
9035 }; // Search for a candidate Gop for gop-fusion from the gop cache and
9036 // return it or return null if no good candidate was found
9037
9038
9039 this.getGopForFusion_ = function (nalUnit) {
9040 var halfSecond = 45000,
9041 // half a second on the 90kHz MPEG-TS clock (45000 / 90000 = 0.5s)
9042 allowableOverlap = 10000,
9043 // about 3 frames @ 30fps (10000 / 90000 is roughly 0.11s)
9044 nearestDistance = Infinity,
9045 dtsDistance,
9046 nearestGopObj,
9047 currentGop,
9048 currentGopObj,
9049 i; // Search for the GOP nearest to the beginning of this nal unit
9050
9051 for (i = 0; i < this.gopCache_.length; i++) {
9052 currentGopObj = this.gopCache_[i];
9053 currentGop = currentGopObj.gop; // Reject Gops with different SPS or PPS
9054
9055 if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
9056 continue;
9057 } // Reject Gops that would require a negative baseMediaDecodeTime
9058
9059
9060 if (currentGop.dts < track.timelineStartInfo.dts) {
9061 continue;
9062 } // The distance between the end of the gop and the start of the nalUnit
9063
9064
9065 dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration; // Only consider GOPS that start before the nal unit and end within
9066 // a half-second of the nal unit
9067
9068 if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {
9069 // Always use the closest GOP we found if there is more than
9070 // one candidate
9071 if (!nearestGopObj || nearestDistance > dtsDistance) {
9072 nearestGopObj = currentGopObj;
9073 nearestDistance = dtsDistance;
9074 }
9075 }
9076 }
9077
9078 if (nearestGopObj) {
9079 return nearestGopObj.gop;
9080 }
9081
9082 return null;
9083 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
9084 // of gopsToAlignWith starting from the START of the list
9085
9086
9087 this.alignGopsAtStart_ = function (gops) {
9088 var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
9089 byteLength = gops.byteLength;
9090 nalCount = gops.nalCount;
9091 duration = gops.duration;
9092 alignIndex = gopIndex = 0;
9093
9094 while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
9095 align = gopsToAlignWith[alignIndex];
9096 gop = gops[gopIndex];
9097
9098 if (align.pts === gop.pts) {
9099 break;
9100 }
9101
9102 if (gop.pts > align.pts) {
9103 // this current gop starts after the current gop we want to align on, so increment
9104 // align index
9105 alignIndex++;
9106 continue;
9107 } // current gop starts before the current gop we want to align on. so increment gop
9108 // index
9109
9110
9111 gopIndex++;
9112 byteLength -= gop.byteLength;
9113 nalCount -= gop.nalCount;
9114 duration -= gop.duration;
9115 }
9116
9117 if (gopIndex === 0) {
9118 // no gops to trim
9119 return gops;
9120 }
9121
9122 if (gopIndex === gops.length) {
9123 // all gops trimmed, skip appending all gops
9124 return null;
9125 }
9126
9127 alignedGops = gops.slice(gopIndex);
9128 alignedGops.byteLength = byteLength;
9129 alignedGops.duration = duration;
9130 alignedGops.nalCount = nalCount;
9131 alignedGops.pts = alignedGops[0].pts;
9132 alignedGops.dts = alignedGops[0].dts;
9133 return alignedGops;
9134 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
9135 // of gopsToAlignWith starting from the END of the list
9136
9137
9138 this.alignGopsAtEnd_ = function (gops) {
9139 var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
9140 alignIndex = gopsToAlignWith.length - 1;
9141 gopIndex = gops.length - 1;
9142 alignEndIndex = null;
9143 matchFound = false;
9144
9145 while (alignIndex >= 0 && gopIndex >= 0) {
9146 align = gopsToAlignWith[alignIndex];
9147 gop = gops[gopIndex];
9148
9149 if (align.pts === gop.pts) {
9150 matchFound = true;
9151 break;
9152 }
9153
9154 if (align.pts > gop.pts) {
9155 alignIndex--;
9156 continue;
9157 }
9158
9159 if (alignIndex === gopsToAlignWith.length - 1) {
9160 // gop.pts is greater than the last alignment candidate. If no match is found
9161 // by the end of this loop, we still want to append gops that come after this
9162 // point
9163 alignEndIndex = gopIndex;
9164 }
9165
9166 gopIndex--;
9167 }
9168
9169 if (!matchFound && alignEndIndex === null) {
9170 return null;
9171 }
9172
9173 var trimIndex;
9174
9175 if (matchFound) {
9176 trimIndex = gopIndex;
9177 } else {
9178 trimIndex = alignEndIndex;
9179 }
9180
9181 if (trimIndex === 0) {
9182 return gops;
9183 }
9184
9185 var alignedGops = gops.slice(trimIndex);
9186 var metadata = alignedGops.reduce(function (total, gop) {
9187 total.byteLength += gop.byteLength;
9188 total.duration += gop.duration;
9189 total.nalCount += gop.nalCount;
9190 return total;
9191 }, {
9192 byteLength: 0,
9193 duration: 0,
9194 nalCount: 0
9195 });
9196 alignedGops.byteLength = metadata.byteLength;
9197 alignedGops.duration = metadata.duration;
9198 alignedGops.nalCount = metadata.nalCount;
9199 alignedGops.pts = alignedGops[0].pts;
9200 alignedGops.dts = alignedGops[0].dts;
9201 return alignedGops;
9202 };
9203
9204 this.alignGopsWith = function (newGopsToAlignWith) {
9205 gopsToAlignWith = newGopsToAlignWith;
9206 };
9207 };
9208
9209 _VideoSegmentStream.prototype = new stream();
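// Worked example of the gop-fusion window in getGopForFusion_ above
// (hypothetical numbers): given a cached GOP with dts=90000 and
// duration=90000 (it spans one second of the 90kHz clock and ends at 180000),
// an incoming nalUnit with dts=200000 gives
//
//   dtsDistance = 200000 - 90000 - 90000; // => 20000 (~0.22s)
//
// which falls inside [-allowableOverlap, halfSecond] = [-10000, 45000], so
// the cached GOP is a fusion candidate; the closest such candidate wins.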
9210 /**
9211 * A Stream that can combine multiple streams (i.e. audio & video)
9212 * into a single output segment for MSE. Also supports audio-only
9213 * and video-only streams.
9214 * @param options {object} transmuxer options object
9215 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
9216 * in the source; false to adjust the first segment to start at media timeline start.
9217 */
9218
9219 _CoalesceStream = function CoalesceStream(options, metadataStream) {
9220 // Number of Tracks per output segment
9221 // If greater than 1, we combine multiple
9222 // tracks into a single segment
9223 this.numberOfTracks = 0;
9224 this.metadataStream = metadataStream;
9225 options = options || {};
9226
9227 if (typeof options.remux !== 'undefined') {
9228 this.remuxTracks = !!options.remux;
9229 } else {
9230 this.remuxTracks = true;
9231 }
9232
9233 if (typeof options.keepOriginalTimestamps === 'boolean') {
9234 this.keepOriginalTimestamps = options.keepOriginalTimestamps;
9235 } else {
9236 this.keepOriginalTimestamps = false;
9237 }
9238
9239 this.pendingTracks = [];
9240 this.videoTrack = null;
9241 this.pendingBoxes = [];
9242 this.pendingCaptions = [];
9243 this.pendingMetadata = [];
9244 this.pendingBytes = 0;
9245 this.emittedTracks = 0;
9246
9247 _CoalesceStream.prototype.init.call(this); // Take output from multiple
9248
9249
9250 this.push = function (output) {
9251 // buffer incoming captions until the associated video segment
9252 // finishes
9253 if (output.text) {
9254 return this.pendingCaptions.push(output);
9255 } // buffer incoming id3 tags until the final flush
9256
9257
9258 if (output.frames) {
9259 return this.pendingMetadata.push(output);
9260 } // Add this track to the list of pending tracks and store
9261 // important information required for the construction of
9262 // the final segment
9263
9264
9265 this.pendingTracks.push(output.track);
9266 this.pendingBytes += output.boxes.byteLength; // TODO: is there an issue filed against Chrome for this?
9267 // We unshift audio and push video because,
9268 // as of Chrome 75, when switching from
9269 // one init segment to another, if the video
9270 // mdat does not appear after the audio mdat,
9271 // only audio will play for the duration of our transmux.
9272
9273 if (output.track.type === 'video') {
9274 this.videoTrack = output.track;
9275 this.pendingBoxes.push(output.boxes);
9276 }
9277
9278 if (output.track.type === 'audio') {
9279 this.audioTrack = output.track;
9280 this.pendingBoxes.unshift(output.boxes);
9281 }
9282 };
9283 };
9284
9285 _CoalesceStream.prototype = new stream();
9286
9287 _CoalesceStream.prototype.flush = function (flushSource) {
9288 var offset = 0,
9289 event = {
9290 captions: [],
9291 captionStreams: {},
9292 metadata: [],
9293 info: {}
9294 },
9295 caption,
9296 id3,
9297 initSegment,
9298 timelineStartPts = 0,
9299 i;
9300
9301 if (this.pendingTracks.length < this.numberOfTracks) {
9302 if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
9303 // Return because we haven't received a flush from a data-generating
9304 // portion of the segment (meaning that we have only received metadata
9305 // or captions).
9306 return;
9307 } else if (this.remuxTracks) {
9308 // Return until we have enough tracks from the pipeline to remux (if we
9309 // are remuxing audio and video into a single MP4)
9310 return;
9311 } else if (this.pendingTracks.length === 0) {
9312 // In the case where we receive a flush without any data having been
9313 // received we consider it an emitted track for the purposes of coalescing
9314 // `done` events.
9315 // We do this for the case where there is an audio and video track in the
9316 // segment but no audio data. (seen in several playlists with alternate
9317 // audio tracks and no audio present in the main TS segments.)
9318 this.emittedTracks++;
9319
9320 if (this.emittedTracks >= this.numberOfTracks) {
9321 this.trigger('done');
9322 this.emittedTracks = 0;
9323 }
9324
9325 return;
9326 }
9327 }
9328
9329 if (this.videoTrack) {
9330 timelineStartPts = this.videoTrack.timelineStartInfo.pts;
9331 videoProperties.forEach(function (prop) {
9332 event.info[prop] = this.videoTrack[prop];
9333 }, this);
9334 } else if (this.audioTrack) {
9335 timelineStartPts = this.audioTrack.timelineStartInfo.pts;
9336 audioProperties.forEach(function (prop) {
9337 event.info[prop] = this.audioTrack[prop];
9338 }, this);
9339 }
9340
9341 if (this.videoTrack || this.audioTrack) {
9342 if (this.pendingTracks.length === 1) {
9343 event.type = this.pendingTracks[0].type;
9344 } else {
9345 event.type = 'combined';
9346 }
9347
9348 this.emittedTracks += this.pendingTracks.length;
9349 initSegment = mp4Generator.initSegment(this.pendingTracks); // Create a new typed array to hold the init segment
9350
9351 event.initSegment = new Uint8Array(initSegment.byteLength); // Create an init segment containing a moov
9352 // and track definitions
9353
9354 event.initSegment.set(initSegment); // Create a new typed array to hold the moof+mdats
9355
9356 event.data = new Uint8Array(this.pendingBytes); // Append each moof+mdat (one per track) together
9357
9358 for (i = 0; i < this.pendingBoxes.length; i++) {
9359 event.data.set(this.pendingBoxes[i], offset);
9360 offset += this.pendingBoxes[i].byteLength;
9361 } // Translate caption PTS times into second offsets to match the
9362 // video timeline for the segment, and add track info
9363
9364
9365 for (i = 0; i < this.pendingCaptions.length; i++) {
9366 caption = this.pendingCaptions[i];
9367 caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
9368 caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);
9369 event.captionStreams[caption.stream] = true;
9370 event.captions.push(caption);
9371 } // Translate ID3 frame PTS times into second offsets to match the
9372 // video timeline for the segment
9373
9374
9375 for (i = 0; i < this.pendingMetadata.length; i++) {
9376 id3 = this.pendingMetadata[i];
9377 id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);
9378 event.metadata.push(id3);
9379 } // We add this to every single emitted segment even though we only need
9380 // it for the first
9381
9382
9383 event.metadata.dispatchType = this.metadataStream.dispatchType; // Reset stream state
9384
9385 this.pendingTracks.length = 0;
9386 this.videoTrack = null;
9387 this.pendingBoxes.length = 0;
9388 this.pendingCaptions.length = 0;
9389 this.pendingBytes = 0;
9390 this.pendingMetadata.length = 0; // Emit the built segment
9391 // We include captions and ID3 tags for backwards compatibility,
9392 // ideally we should send only video and audio in the data event
9393
9394 this.trigger('data', event); // Emit each caption to the outside world
9395 // Ideally, this would happen immediately on parsing captions,
9396 // but we need to ensure that video data is sent back first
9397 // so that caption timing can be adjusted to match video timing
9398
9399 for (i = 0; i < event.captions.length; i++) {
9400 caption = event.captions[i];
9401 this.trigger('caption', caption);
9402 } // Emit each id3 tag to the outside world
9403 // Ideally, this would happen immediately on parsing the tag,
9404 // but we need to ensure that video data is sent back first
9405 // so that ID3 frame timing can be adjusted to match video timing
9406
9407
9408 for (i = 0; i < event.metadata.length; i++) {
9409 id3 = event.metadata[i];
9410 this.trigger('id3Frame', id3);
9411 }
9412 } // Only emit `done` if all tracks have been flushed and emitted
9413
9414
9415 if (this.emittedTracks >= this.numberOfTracks) {
9416 this.trigger('done');
9417 this.emittedTracks = 0;
9418 }
9419 };
9420
9421 _CoalesceStream.prototype.setRemux = function (val) {
9422 this.remuxTracks = val;
9423 };
9424 /**
9425 * A Stream that expects MP2T binary data as input and produces
9426 * corresponding media segments, suitable for use with Media Source
9427 * Extensions (MSE) implementations that support the ISO BMFF byte
9428 * stream format, like Chrome.
9429 */
9430
9431
9432 _Transmuxer = function Transmuxer(options) {
9433 var self = this,
9434 hasFlushed = true,
9435 videoTrack,
9436 audioTrack;
9437
9438 _Transmuxer.prototype.init.call(this);
9439
9440 options = options || {};
9441 this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
9442 this.transmuxPipeline_ = {};
9443
9444 this.setupAacPipeline = function () {
9445 var pipeline = {};
9446 this.transmuxPipeline_ = pipeline;
9447 pipeline.type = 'aac';
9448 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
9449
9450 pipeline.aacStream = new aac();
9451 pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
9452 pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
9453 pipeline.adtsStream = new adts();
9454 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
9455 pipeline.headOfPipeline = pipeline.aacStream;
9456 pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
9457 pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
9458 pipeline.metadataStream.on('timestamp', function (frame) {
9459 pipeline.aacStream.setTimestamp(frame.timeStamp);
9460 });
9461 pipeline.aacStream.on('data', function (data) {
9462 if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
9463 return;
9464 }
9465
9466 audioTrack = audioTrack || {
9467 timelineStartInfo: {
9468 baseMediaDecodeTime: self.baseMediaDecodeTime
9469 },
9470 codec: 'adts',
9471 type: 'audio'
9472 }; // hook up the audio segment stream to the first track with aac data
9473
9474 pipeline.coalesceStream.numberOfTracks++;
9475 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
9476 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo')); // Set up the final part of the audio pipeline
9477
9478 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream); // emit pmt info
9479
9480 self.trigger('trackinfo', {
9481 hasAudio: !!audioTrack,
9482 hasVideo: !!videoTrack
9483 });
9484 }); // Re-emit any data coming from the coalesce stream to the outside world
9485
9486 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data')); // Let the consumer know we have finished flushing the entire pipeline
9487
9488 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
9489 };
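// For reference, the AAC pipeline assembled above has this shape (comment
// added for readability; it restates the pipe() calls, nothing more):
//
//   aacStream -> audioTimestampRolloverStream -> adtsStream
//                  -> audioSegmentStream -> coalesceStream
//   aacStream -> timedMetadataTimestampRolloverStream -> metadataStream
//                  -> coalesceStream
//
// where the audioSegmentStream link is only attached once the first audio or
// timed-metadata packet reveals an AAC track.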
9490
9491 this.setupTsPipeline = function () {
9492 var pipeline = {};
9493 this.transmuxPipeline_ = pipeline;
9494 pipeline.type = 'ts';
9495 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
9496
9497 pipeline.packetStream = new m2ts_1.TransportPacketStream();
9498 pipeline.parseStream = new m2ts_1.TransportParseStream();
9499 pipeline.elementaryStream = new m2ts_1.ElementaryStream();
9500 pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
9501 pipeline.adtsStream = new adts();
9502 pipeline.h264Stream = new H264Stream();
9503 pipeline.captionStream = new m2ts_1.CaptionStream(options);
9504 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
9505 pipeline.headOfPipeline = pipeline.packetStream; // disassemble MPEG2-TS packets into elementary streams
9506
9507 pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream); // !!THIS ORDER IS IMPORTANT!!
9508 // demux the streams
9509
9510 pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
9511 pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
9512 pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream); // Hook up CEA-608/708 caption stream
9513
9514 pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
9515 pipeline.elementaryStream.on('data', function (data) {
9516 var i;
9517
9518 if (data.type === 'metadata') {
9519 i = data.tracks.length; // scan the tracks listed in the metadata
9520
9521 while (i--) {
9522 if (!videoTrack && data.tracks[i].type === 'video') {
9523 videoTrack = data.tracks[i];
9524 videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
9525 } else if (!audioTrack && data.tracks[i].type === 'audio') {
9526 audioTrack = data.tracks[i];
9527 audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
9528 }
9529 } // hook up the video segment stream to the first track with h264 data
9530
9531
9532 if (videoTrack && !pipeline.videoSegmentStream) {
9533 pipeline.coalesceStream.numberOfTracks++;
9534 pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);
9535 pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
9536 // When video emits timelineStartInfo data after a flush, we forward that
9537 // info to the AudioSegmentStream, if it exists, because video timeline
9538 // data takes precedence. Do not do this if keepOriginalTimestamps is set,
9539 // because this is a particularly subtle form of timestamp alteration.
9540 if (audioTrack && !options.keepOriginalTimestamps) {
9541 audioTrack.timelineStartInfo = timelineStartInfo; // On the first segment we trim AAC frames that exist before the
9542 // very earliest DTS we have seen in video because Chrome will
9543 // interpret any video track with a baseMediaDecodeTime that is
9544 // non-zero as a gap.
9545
9546 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
9547 }
9548 });
9549 pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
9550 pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
9551 pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
9552 if (audioTrack) {
9553 pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
9554 }
9555 });
9556 pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo')); // Set up the final part of the video pipeline
9557
9558 pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
9559 }
9560
9561 if (audioTrack && !pipeline.audioSegmentStream) {
9562 // hook up the audio segment stream to the first track with aac data
9563 pipeline.coalesceStream.numberOfTracks++;
9564 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
9565 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
9566 pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo')); // Set up the final part of the audio pipeline
9567
9568 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
9569 } // emit pmt info
9570
9571
9572 self.trigger('trackinfo', {
9573 hasAudio: !!audioTrack,
9574 hasVideo: !!videoTrack
9575 });
9576 }
9577 }); // Re-emit any data coming from the coalesce stream to the outside world
9578
9579 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
9580 pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
9581 id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
9582 self.trigger('id3Frame', id3Frame);
9583 });
9584 pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption')); // Let the consumer know we have finished flushing the entire pipeline
9585
9586 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
9587 }; // hook up the segment streams once track metadata is delivered
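// For reference, the TS pipeline assembled above has this shape (comment
// added for readability; it restates the pipe() calls, nothing more):
//
//   packetStream -> parseStream -> elementaryStream -> timestampRolloverStream
//     timestampRolloverStream -> h264Stream -> videoSegmentStream -> coalesceStream
//     timestampRolloverStream -> adtsStream -> audioSegmentStream -> coalesceStream
//     timestampRolloverStream -> metadataStream -> coalesceStream
//     h264Stream -> captionStream -> coalesceStream
//
// with the segment streams attached lazily, once the PMT reveals which tracks
// exist.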
9588
9589
9590 this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
9591 var pipeline = this.transmuxPipeline_;
9592
9593 if (!options.keepOriginalTimestamps) {
9594 this.baseMediaDecodeTime = baseMediaDecodeTime;
9595 }
9596
9597 if (audioTrack) {
9598 audioTrack.timelineStartInfo.dts = undefined;
9599 audioTrack.timelineStartInfo.pts = undefined;
9600 trackDecodeInfo.clearDtsInfo(audioTrack);
9601
9602 if (pipeline.audioTimestampRolloverStream) {
9603 pipeline.audioTimestampRolloverStream.discontinuity();
9604 }
9605 }
9606
9607 if (videoTrack) {
9608 if (pipeline.videoSegmentStream) {
9609 pipeline.videoSegmentStream.gopCache_ = [];
9610 }
9611
9612 videoTrack.timelineStartInfo.dts = undefined;
9613 videoTrack.timelineStartInfo.pts = undefined;
9614 trackDecodeInfo.clearDtsInfo(videoTrack);
9615 pipeline.captionStream.reset();
9616 }
9617
9618 if (pipeline.timestampRolloverStream) {
9619 pipeline.timestampRolloverStream.discontinuity();
9620 }
9621 };
9622
9623 this.setAudioAppendStart = function (timestamp) {
9624 if (audioTrack) {
9625 this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
9626 }
9627 };
9628
9629 this.setRemux = function (val) {
9630 var pipeline = this.transmuxPipeline_;
9631 options.remux = val;
9632
9633 if (pipeline && pipeline.coalesceStream) {
9634 pipeline.coalesceStream.setRemux(val);
9635 }
9636 };
9637
9638 this.alignGopsWith = function (gopsToAlignWith) {
9639 if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
9640 this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
9641 }
9642 }; // feed incoming data to the front of the parsing pipeline
9643
9644
9645 this.push = function (data) {
9646 if (hasFlushed) {
9647 var isAac = isLikelyAacData$1(data);
9648
9649 if (isAac && this.transmuxPipeline_.type !== 'aac') {
9650 this.setupAacPipeline();
9651 } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
9652 this.setupTsPipeline();
9653 }
9654
9655 hasFlushed = false;
9656 }
9657
9658 this.transmuxPipeline_.headOfPipeline.push(data);
9659 }; // flush any buffered data
9660
9661
9662 this.flush = function () {
9663 hasFlushed = true; // Start at the top of the pipeline and flush all pending work
9664
9665 this.transmuxPipeline_.headOfPipeline.flush();
9666 };
9667
9668 this.endTimeline = function () {
9669 this.transmuxPipeline_.headOfPipeline.endTimeline();
9670 };
9671
9672 this.reset = function () {
9673 if (this.transmuxPipeline_.headOfPipeline) {
9674 this.transmuxPipeline_.headOfPipeline.reset();
9675 }
9676 }; // Caption data has to be reset when seeking outside buffered range
9677
9678
9679 this.resetCaptions = function () {
9680 if (this.transmuxPipeline_.captionStream) {
9681 this.transmuxPipeline_.captionStream.reset();
9682 }
9683 };
9684 };
9685
9686 _Transmuxer.prototype = new stream();
9687 var transmuxer = {
9688 Transmuxer: _Transmuxer,
9689 VideoSegmentStream: _VideoSegmentStream,
9690 AudioSegmentStream: _AudioSegmentStream,
9691 AUDIO_PROPERTIES: audioProperties,
9692 VIDEO_PROPERTIES: videoProperties,
9693 // exported for testing
9694 generateSegmentTimingInfo: generateSegmentTimingInfo
9695 };
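// Illustrative usage of the full transmuxer above (a sketch, not part of the
// bundle): `tsBytes` is a hypothetical Uint8Array of MPEG-TS data, and the
// SourceBuffer handling is simplified (real code must wait for `updateend`
// between appends):
//
//   var muxer = new transmuxer.Transmuxer({ remux: true });
//   muxer.on('data', function (segment) {
//     // segment.initSegment holds the generated ftyp/moov boxes,
//     // segment.data the concatenated moof/mdat pairs
//     sourceBuffer.appendBuffer(segment.initSegment);
//     sourceBuffer.appendBuffer(segment.data);
//   });
//   muxer.push(tsBytes);
//   muxer.flush();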
9696 /**
9697 * mux.js
9698 *
9699 * Copyright (c) Brightcove
9700 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
9701 */
9702
9703 var codecs = {
9704 Adts: adts,
9705 h264: h264
9706 };
9707 var ONE_SECOND_IN_TS$4 = clock.ONE_SECOND_IN_TS;
9708 /**
9709 * Constructs a single-track, ISO BMFF media segment from AAC data
9710 * events. The output of this stream can be fed to a SourceBuffer
9711 * configured with a suitable initialization segment.
9712 */
9713
9714 var AudioSegmentStream = function AudioSegmentStream(track, options) {
9715 var adtsFrames = [],
9716 sequenceNumber = 0,
9717 earliestAllowedDts = 0,
9718 audioAppendStartTs = 0,
9719 videoBaseMediaDecodeTime = Infinity,
9720 segmentStartPts = null,
9721 segmentEndPts = null;
9722 options = options || {};
9723 AudioSegmentStream.prototype.init.call(this);
9724
9725 this.push = function (data) {
9726 trackDecodeInfo.collectDtsInfo(track, data);
9727
9728 if (track) {
9729 audioProperties.forEach(function (prop) {
9730 track[prop] = data[prop];
9731 });
9732 } // buffer audio data until end() is called
9733
9734
9735 adtsFrames.push(data);
9736 };
9737
9738 this.setEarliestDts = function (earliestDts) {
9739 earliestAllowedDts = earliestDts;
9740 };
9741
9742 this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
9743 videoBaseMediaDecodeTime = baseMediaDecodeTime;
9744 };
9745
9746 this.setAudioAppendStart = function (timestamp) {
9747 audioAppendStartTs = timestamp;
9748 };
9749
9750 this.processFrames_ = function () {
9751 var frames, moof, mdat, boxes, timingInfo; // return early if no audio data has been observed
9752
9753 if (adtsFrames.length === 0) {
9754 return;
9755 }
9756
9757 frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
9758
9759 if (frames.length === 0) {
9760 // return early if all frames were trimmed away, i.e. they all fell
9761 // before the earliest allowed DTS. TODO: should we clear the adtsFrames?
9762 return;
9763 }
9764
9765 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
9766 audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
9767 // samples (that is, adts frames) in the audio data
9768
9769 track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to construct the mdat
9770
9771 mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
9772 adtsFrames = [];
9773 moof = mp4Generator.moof(sequenceNumber, [track]); // bump the sequence number for next time
9774
9775 sequenceNumber++;
9776 track.initSegment = mp4Generator.initSegment([track]); // it would be great to allocate this array up front instead of
9777 // throwing away hundreds of media segment fragments
9778
9779 boxes = new Uint8Array(moof.byteLength + mdat.byteLength);
9780 boxes.set(moof);
9781 boxes.set(mdat, moof.byteLength);
9782 trackDecodeInfo.clearDtsInfo(track);
9783
9784 if (segmentStartPts === null) {
9785 segmentEndPts = segmentStartPts = frames[0].pts;
9786 }
9787
9788 segmentEndPts += frames.length * (ONE_SECOND_IN_TS$4 * 1024 / track.samplerate);
9789 timingInfo = {
9790 start: segmentStartPts
9791 };
9792 this.trigger('timingInfo', timingInfo);
9793 this.trigger('data', {
9794 track: track,
9795 boxes: boxes
9796 });
9797 };
9798
9799 this.flush = function () {
9800 this.processFrames_(); // trigger final timing info
9801
9802 this.trigger('timingInfo', {
9803 start: segmentStartPts,
9804 end: segmentEndPts
9805 });
9806 this.resetTiming_();
9807 this.trigger('done', 'AudioSegmentStream');
9808 };
9809
9810 this.partialFlush = function () {
9811 this.processFrames_();
9812 this.trigger('partialdone', 'AudioSegmentStream');
9813 };
9814
9815 this.endTimeline = function () {
9816 this.flush();
9817 this.trigger('endedtimeline', 'AudioSegmentStream');
9818 };
9819
9820 this.resetTiming_ = function () {
9821 trackDecodeInfo.clearDtsInfo(track);
9822 segmentStartPts = null;
9823 segmentEndPts = null;
9824 };
9825
9826 this.reset = function () {
9827 this.resetTiming_();
9828 adtsFrames = [];
9829 this.trigger('reset');
9830 };
9831 };
9832
9833 AudioSegmentStream.prototype = new stream();
9834 var audioSegmentStream = AudioSegmentStream;
9835
9836 var VideoSegmentStream = function VideoSegmentStream(track, options) {
9837 var sequenceNumber = 0,
9838 nalUnits = [],
9839 frameCache = [],
9840 // gopsToAlignWith = [],
9841 config,
9842 pps,
9843 segmentStartPts = null,
9844 segmentEndPts = null,
9845 gops,
9846 ensureNextFrameIsKeyFrame = true;
9847 options = options || {};
9848 VideoSegmentStream.prototype.init.call(this);
9849
9850 this.push = function (nalUnit) {
9851 trackDecodeInfo.collectDtsInfo(track, nalUnit);
9852
9853 if (typeof track.timelineStartInfo.dts === 'undefined') {
9854 track.timelineStartInfo.dts = nalUnit.dts;
9855 } // record the track config
9856
9857
9858 if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
9859 config = nalUnit.config;
9860 track.sps = [nalUnit.data];
9861 videoProperties.forEach(function (prop) {
9862 track[prop] = config[prop];
9863 }, this);
9864 }
9865
9866 if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
9867 pps = nalUnit.data;
9868 track.pps = [nalUnit.data];
9869 } // buffer video until flush() is called
9870
9871
9872 nalUnits.push(nalUnit);
9873 };
9874
9875 this.processNals_ = function (cacheLastFrame) {
9876 var i;
9877 nalUnits = frameCache.concat(nalUnits); // Throw away nalUnits at the start of the byte stream until
9878 // we find the first AUD
9879
9880 while (nalUnits.length) {
9881 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
9882 break;
9883 }
9884
9885 nalUnits.shift();
9886 } // Return early if no video data has been observed
9887
9888
9889 if (nalUnits.length === 0) {
9890 return;
9891 }
9892
9893 var frames = frameUtils.groupNalsIntoFrames(nalUnits);
9894
9895 if (!frames.length) {
9896 return;
9897 } // note that the frame cache may also protect us from cases where we haven't
9898 // pushed data for the entire first or last frame yet
9899
9900
9901 frameCache = frames[frames.length - 1];
9902
9903 if (cacheLastFrame) {
9904 frames.pop();
9905 frames.duration -= frameCache.duration;
9906 frames.nalCount -= frameCache.length;
9907 frames.byteLength -= frameCache.byteLength;
9908 }
9909
9910 if (!frames.length) {
9911 nalUnits = [];
9912 return;
9913 }
9914
9915 this.trigger('timelineStartInfo', track.timelineStartInfo);
9916
9917 if (ensureNextFrameIsKeyFrame) {
9918 gops = frameUtils.groupFramesIntoGops(frames);
9919
9920 if (!gops[0][0].keyFrame) {
9921 gops = frameUtils.extendFirstKeyFrame(gops);
9922
9923 if (!gops[0][0].keyFrame) {
9924 // we haven't yet gotten a key frame, so reset nal units to wait for more nal
9925 // units
9926 nalUnits = [].concat.apply([], frames).concat(frameCache);
9927 frameCache = [];
9928 return;
9929 }
9930
9931 frames = [].concat.apply([], gops);
9932 frames.duration = gops.duration;
9933 }
9934
9935 ensureNextFrameIsKeyFrame = false;
9936 }
9937
9938 if (segmentStartPts === null) {
9939 segmentStartPts = frames[0].pts;
9940 segmentEndPts = segmentStartPts;
9941 }
9942
9943 segmentEndPts += frames.duration;
9944 this.trigger('timingInfo', {
9945 start: segmentStartPts,
9946 end: segmentEndPts
9947 });
9948
9949 for (i = 0; i < frames.length; i++) {
9950 var frame = frames[i];
9951 track.samples = frameUtils.generateSampleTableForFrame(frame);
9952 var mdat = mp4Generator.mdat(frameUtils.concatenateNalDataForFrame(frame));
9953 trackDecodeInfo.clearDtsInfo(track);
9954 trackDecodeInfo.collectDtsInfo(track, frame);
9955 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
9956 var moof = mp4Generator.moof(sequenceNumber, [track]);
9957 sequenceNumber++;
9958 track.initSegment = mp4Generator.initSegment([track]);
9959 var boxes = new Uint8Array(moof.byteLength + mdat.byteLength);
9960 boxes.set(moof);
9961 boxes.set(mdat, moof.byteLength);
9962 this.trigger('data', {
9963 track: track,
9964 boxes: boxes,
9965 sequence: sequenceNumber,
9966 videoFrameDts: frame.dts,
9967 videoFramePts: frame.pts
9968 });
9969 }
9970
9971 nalUnits = [];
9972 };
9973
9974 this.resetTimingAndConfig_ = function () {
9975 config = undefined;
9976 pps = undefined;
9977 segmentStartPts = null;
9978 segmentEndPts = null;
9979 };
9980
9981 this.partialFlush = function () {
9982 this.processNals_(true);
9983 this.trigger('partialdone', 'VideoSegmentStream');
9984 };
9985
9986 this.flush = function () {
9987 this.processNals_(false); // reset config and pps because they may differ across segments
9988 // for instance, when we are rendition switching
9989
9990 this.resetTimingAndConfig_();
9991 this.trigger('done', 'VideoSegmentStream');
9992 };
9993
9994 this.endTimeline = function () {
9995 this.flush();
9996 this.trigger('endedtimeline', 'VideoSegmentStream');
9997 };
9998
9999 this.reset = function () {
10000 this.resetTimingAndConfig_();
10001 frameCache = [];
10002 nalUnits = [];
10003 ensureNextFrameIsKeyFrame = true;
10004 this.trigger('reset');
10005 };
10006 };
10007
10008 VideoSegmentStream.prototype = new stream();
10009 var videoSegmentStream = VideoSegmentStream;
10010 var isLikelyAacData$2 = utils.isLikelyAacData;
10011
10012 var createPipeline = function createPipeline(object) {
10013 object.prototype = new stream();
10014 object.prototype.init.call(object);
10015 return object;
10016 };
10017
10018 var tsPipeline = function tsPipeline(options) {
10019 var pipeline = {
10020 type: 'ts',
10021 tracks: {
10022 audio: null,
10023 video: null
10024 },
10025 packet: new m2ts_1.TransportPacketStream(),
10026 parse: new m2ts_1.TransportParseStream(),
10027 elementary: new m2ts_1.ElementaryStream(),
10028 timestampRollover: new m2ts_1.TimestampRolloverStream(),
10029 adts: new codecs.Adts(),
10030 h264: new codecs.h264.H264Stream(),
10031 captionStream: new m2ts_1.CaptionStream(options),
10032 metadataStream: new m2ts_1.MetadataStream()
10033 };
10034 pipeline.headOfPipeline = pipeline.packet; // Transport Stream
10035
10036 pipeline.packet.pipe(pipeline.parse).pipe(pipeline.elementary).pipe(pipeline.timestampRollover); // H264
10037
10038 pipeline.timestampRollover.pipe(pipeline.h264); // Hook up CEA-608/708 caption stream
10039
10040 pipeline.h264.pipe(pipeline.captionStream);
10041 pipeline.timestampRollover.pipe(pipeline.metadataStream); // ADTS
10042
10043 pipeline.timestampRollover.pipe(pipeline.adts);
10044 pipeline.elementary.on('data', function (data) {
10045 if (data.type !== 'metadata') {
10046 return;
10047 }
10048
10049 for (var i = 0; i < data.tracks.length; i++) {
10050 if (!pipeline.tracks[data.tracks[i].type]) {
10051 pipeline.tracks[data.tracks[i].type] = data.tracks[i];
10052 pipeline.tracks[data.tracks[i].type].timelineStartInfo.baseMediaDecodeTime = options.baseMediaDecodeTime;
10053 }
10054 }
10055
10056 if (pipeline.tracks.video && !pipeline.videoSegmentStream) {
10057 pipeline.videoSegmentStream = new videoSegmentStream(pipeline.tracks.video, options);
10058 pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
10059 if (pipeline.tracks.audio && !options.keepOriginalTimestamps) {
10060 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - options.baseMediaDecodeTime);
10061 }
10062 });
10063 pipeline.videoSegmentStream.on('timingInfo', pipeline.trigger.bind(pipeline, 'videoTimingInfo'));
10064 pipeline.videoSegmentStream.on('data', function (data) {
10065 pipeline.trigger('data', {
10066 type: 'video',
10067 data: data
10068 });
10069 });
10070 pipeline.videoSegmentStream.on('done', pipeline.trigger.bind(pipeline, 'done'));
10071 pipeline.videoSegmentStream.on('partialdone', pipeline.trigger.bind(pipeline, 'partialdone'));
10072 pipeline.videoSegmentStream.on('endedtimeline', pipeline.trigger.bind(pipeline, 'endedtimeline'));
10073 pipeline.h264.pipe(pipeline.videoSegmentStream);
10074 }
10075
10076 if (pipeline.tracks.audio && !pipeline.audioSegmentStream) {
10077 pipeline.audioSegmentStream = new audioSegmentStream(pipeline.tracks.audio, options);
10078 pipeline.audioSegmentStream.on('data', function (data) {
10079 pipeline.trigger('data', {
10080 type: 'audio',
10081 data: data
10082 });
10083 });
10084 pipeline.audioSegmentStream.on('done', pipeline.trigger.bind(pipeline, 'done'));
10085 pipeline.audioSegmentStream.on('partialdone', pipeline.trigger.bind(pipeline, 'partialdone'));
10086 pipeline.audioSegmentStream.on('endedtimeline', pipeline.trigger.bind(pipeline, 'endedtimeline'));
10087 pipeline.audioSegmentStream.on('timingInfo', pipeline.trigger.bind(pipeline, 'audioTimingInfo'));
10088 pipeline.adts.pipe(pipeline.audioSegmentStream);
10089 } // emit pmt info
10090
10091
10092 pipeline.trigger('trackinfo', {
10093 hasAudio: !!pipeline.tracks.audio,
10094 hasVideo: !!pipeline.tracks.video
10095 });
10096 });
10097 pipeline.captionStream.on('data', function (caption) {
10098 var timelineStartPts;
10099
10100 if (pipeline.tracks.video) {
10101 timelineStartPts = pipeline.tracks.video.timelineStartInfo.pts || 0;
10102 } else {
10103 // This will only happen if we encounter caption packets before
10104 // video data in a segment. This is an unusual/unlikely scenario,
10105 // so we assume the timeline starts at zero for now.
10106 timelineStartPts = 0;
10107 } // Translate caption PTS times into second offsets into the
10108 // video timeline for the segment
10109
10110
10111 caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, options.keepOriginalTimestamps);
10112 caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, options.keepOriginalTimestamps);
10113 pipeline.trigger('caption', caption);
10114 });
10115 pipeline = createPipeline(pipeline);
10116 pipeline.metadataStream.on('data', pipeline.trigger.bind(pipeline, 'id3Frame'));
10117 return pipeline;
10118 };
10119
10120 var aacPipeline = function aacPipeline(options) {
10121 var pipeline = {
10122 type: 'aac',
10123 tracks: {
10124 audio: null
10125 },
10126 metadataStream: new m2ts_1.MetadataStream(),
10127 aacStream: new aac(),
10128 audioRollover: new m2ts_1.TimestampRolloverStream('audio'),
10129 timedMetadataRollover: new m2ts_1.TimestampRolloverStream('timed-metadata'),
10130 adtsStream: new adts(true)
10131 }; // set up the parsing pipeline
10132
10133 pipeline.headOfPipeline = pipeline.aacStream;
10134 pipeline.aacStream.pipe(pipeline.audioRollover).pipe(pipeline.adtsStream);
10135 pipeline.aacStream.pipe(pipeline.timedMetadataRollover).pipe(pipeline.metadataStream);
10136 pipeline.metadataStream.on('timestamp', function (frame) {
10137 pipeline.aacStream.setTimestamp(frame.timeStamp);
10138 });
10139 pipeline.aacStream.on('data', function (data) {
10140 if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
10141 return;
10142 }
10143
10144 pipeline.tracks.audio = pipeline.tracks.audio || {
10145 timelineStartInfo: {
10146 baseMediaDecodeTime: options.baseMediaDecodeTime
10147 },
10148 codec: 'adts',
10149 type: 'audio'
10150 }; // hook up the audio segment stream to the first track with aac data
10151
10152 pipeline.audioSegmentStream = new audioSegmentStream(pipeline.tracks.audio, options);
10153 pipeline.audioSegmentStream.on('data', function (data) {
10154 pipeline.trigger('data', {
10155 type: 'audio',
10156 data: data
10157 });
10158 });
10159 pipeline.audioSegmentStream.on('partialdone', pipeline.trigger.bind(pipeline, 'partialdone'));
10160 pipeline.audioSegmentStream.on('done', pipeline.trigger.bind(pipeline, 'done'));
10161 pipeline.audioSegmentStream.on('endedtimeline', pipeline.trigger.bind(pipeline, 'endedtimeline'));
10162 pipeline.audioSegmentStream.on('timingInfo', pipeline.trigger.bind(pipeline, 'audioTimingInfo')); // Set up the final part of the audio pipeline
10163
10164 pipeline.adtsStream.pipe(pipeline.audioSegmentStream);
10165 pipeline.trigger('trackinfo', {
10166 hasAudio: !!pipeline.tracks.audio,
10167 hasVideo: !!pipeline.tracks.video
10168 });
10169 }); // set the pipeline up as a stream before binding to get access to the trigger function
10170
10171 pipeline = createPipeline(pipeline);
10172 pipeline.metadataStream.on('data', pipeline.trigger.bind(pipeline, 'id3Frame'));
10173 return pipeline;
10174 };
10175
10176 var setupPipelineListeners = function setupPipelineListeners(pipeline, transmuxer) {
10177 pipeline.on('data', transmuxer.trigger.bind(transmuxer, 'data'));
10178 pipeline.on('done', transmuxer.trigger.bind(transmuxer, 'done'));
10179 pipeline.on('partialdone', transmuxer.trigger.bind(transmuxer, 'partialdone'));
10180 pipeline.on('endedtimeline', transmuxer.trigger.bind(transmuxer, 'endedtimeline'));
10181 pipeline.on('audioTimingInfo', transmuxer.trigger.bind(transmuxer, 'audioTimingInfo'));
10182 pipeline.on('videoTimingInfo', transmuxer.trigger.bind(transmuxer, 'videoTimingInfo'));
10183 pipeline.on('trackinfo', transmuxer.trigger.bind(transmuxer, 'trackinfo'));
10184 pipeline.on('id3Frame', function (event) {
10185 // add this to every single emitted segment even though it's only needed for the first
10186 event.dispatchType = pipeline.metadataStream.dispatchType; // keep original time, can be adjusted if needed at a higher level
10187
10188 event.cueTime = clock.videoTsToSeconds(event.pts);
10189 transmuxer.trigger('id3Frame', event);
10190 });
10191 pipeline.on('caption', function (event) {
10192 transmuxer.trigger('caption', event);
10193 });
10194 };
10195
10196 var Transmuxer = function Transmuxer(options) {
10197 var pipeline = null,
10198 hasFlushed = true;
10199 options = options || {};
10200 Transmuxer.prototype.init.call(this);
10201 options.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
10202
10203 this.push = function (bytes) {
10204 if (hasFlushed) {
10205 var isAac = isLikelyAacData$2(bytes);
10206
10207 if (isAac && (!pipeline || pipeline.type !== 'aac')) {
10208 pipeline = aacPipeline(options);
10209 setupPipelineListeners(pipeline, this);
10210 } else if (!isAac && (!pipeline || pipeline.type !== 'ts')) {
10211 pipeline = tsPipeline(options);
10212 setupPipelineListeners(pipeline, this);
10213 }
10214
10215 hasFlushed = false;
10216 }
10217
10218 pipeline.headOfPipeline.push(bytes);
10219 };
10220
10221 this.flush = function () {
10222 if (!pipeline) {
10223 return;
10224 }
10225
10226 hasFlushed = true;
10227 pipeline.headOfPipeline.flush();
10228 };
10229
10230 this.partialFlush = function () {
10231 if (!pipeline) {
10232 return;
10233 }
10234
10235 pipeline.headOfPipeline.partialFlush();
10236 };
10237
10238 this.endTimeline = function () {
10239 if (!pipeline) {
10240 return;
10241 }
10242
10243 pipeline.headOfPipeline.endTimeline();
10244 };
10245
10246 this.reset = function () {
10247 if (!pipeline) {
10248 return;
10249 }
10250
10251 pipeline.headOfPipeline.reset();
10252 };
10253
10254 this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
10255 if (!options.keepOriginalTimestamps) {
10256 options.baseMediaDecodeTime = baseMediaDecodeTime;
10257 }
10258
10259 if (!pipeline) {
10260 return;
10261 }
10262
10263 if (pipeline.tracks.audio) {
10264 pipeline.tracks.audio.timelineStartInfo.dts = undefined;
10265 pipeline.tracks.audio.timelineStartInfo.pts = undefined;
10266 trackDecodeInfo.clearDtsInfo(pipeline.tracks.audio);
10267
10268 if (pipeline.audioRollover) {
10269 pipeline.audioRollover.discontinuity();
10270 }
10271 }
10272
10273 if (pipeline.tracks.video) {
10274 if (pipeline.videoSegmentStream) {
10275 pipeline.videoSegmentStream.gopCache_ = [];
10276 }
10277
10278 pipeline.tracks.video.timelineStartInfo.dts = undefined;
10279 pipeline.tracks.video.timelineStartInfo.pts = undefined;
10280 trackDecodeInfo.clearDtsInfo(pipeline.tracks.video); // pipeline.captionStream.reset();
10281 }
10282
10283 if (pipeline.timestampRollover) {
10284 pipeline.timestampRollover.discontinuity();
10285 }
10286 };
10287
10288 this.setRemux = function (val) {
10289 options.remux = val;
10290
10291 if (pipeline && pipeline.coalesceStream) {
10292 pipeline.coalesceStream.setRemux(val);
10293 }
10294 };
10295
10296 this.setAudioAppendStart = function (audioAppendStart) {
10297 if (!pipeline || !pipeline.tracks.audio || !pipeline.audioSegmentStream) {
10298 return;
10299 }
10300
10301 pipeline.audioSegmentStream.setAudioAppendStart(audioAppendStart);
10302 }; // TODO GOP alignment support
10303 // Support may be a bit trickier than with full segment appends, as GOPs may be split
10304 // and processed in a more granular fashion
10305
10306
10307 this.alignGopsWith = function (gopsToAlignWith) {
10308 return;
10309 };
10310 };
10311
10312 Transmuxer.prototype = new stream();
10313 var transmuxer$1 = Transmuxer;
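// Illustrative usage of the partial transmuxer above (a sketch, not part of
// the bundle; `chunk` is a hypothetical Uint8Array of MPEG-TS bytes):
//
//   var t = new transmuxer$1({ baseMediaDecodeTime: 0 });
//   t.on('data', function (event) {
//     // event.type is 'audio' or 'video'; event.data.boxes holds a single
//     // moof/mdat pair and event.data.track the associated track info
//   });
//   t.push(chunk);
//   t.partialFlush(); // emit whatever complete frames have been seen so far
//   t.flush();        // emit the remainder and trigger 'done'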
10314 /**
10315 * mux.js
10316 *
10317 * Copyright (c) Brightcove
10318 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10319 */
10320
10321 var toUnsigned = function toUnsigned(value) {
10322 return value >>> 0;
10323 };
10324
10325 var toHexString = function toHexString(value) {
10326 return ('00' + value.toString(16)).slice(-2);
10327 };
10328
10329 var bin = {
10330 toUnsigned: toUnsigned,
10331 toHexString: toHexString
10332 };
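/**
 * Illustrative sketch (not part of the pipeline): how the `bin` helpers above
 * behave. `>>> 0` reinterprets a signed 32-bit result as unsigned, which
 * matters because `<<` and `|` produce signed values in JavaScript.
 *
 * @example
 *   bin.toUnsigned(-1); // 4294967295 (0xffffffff)
 *   bin.toUnsigned(0x7fffffff << 1 | 1); // 4294967295, not -1
 *   bin.toHexString(0x0a); // '0a' (zero-padded to two hex digits)
 **/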
10333
10334 var parseType$1 = function parseType(buffer) {
10335 var result = '';
10336 result += String.fromCharCode(buffer[0]);
10337 result += String.fromCharCode(buffer[1]);
10338 result += String.fromCharCode(buffer[2]);
10339 result += String.fromCharCode(buffer[3]);
10340 return result;
10341 };
10342
10343 var parseType_1 = parseType$1;
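/**
 * Illustrative sketch: `parseType` reads the four ASCII bytes that follow an
 * MP4 box's 32-bit size field and returns them as the box type string.
 *
 * @example
 *   parseType_1(new Uint8Array([0x6d, 0x6f, 0x6f, 0x66])); // 'moof'
 *   parseType_1(new Uint8Array([0x6d, 0x64, 0x61, 0x74])); // 'mdat'
 **/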
10344 var toUnsigned$1 = bin.toUnsigned;
10345
10346 var findBox = function findBox(data, path) {
10347 var results = [],
10348 i,
10349 size,
10350 type,
10351 end,
10352 subresults;
10353
10354 if (!path.length) {
10355 // short-circuit the search for empty paths
10356 return null;
10357 }
10358
10359 for (i = 0; i < data.byteLength;) {
10360 size = toUnsigned$1(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);
10361 type = parseType_1(data.subarray(i + 4, i + 8));
10362 end = size > 1 ? i + size : data.byteLength;
10363
10364 if (type === path[0]) {
10365 if (path.length === 1) {
10366 // this is the end of the path and we've found the box we were
10367 // looking for
10368 results.push(data.subarray(i + 8, end));
10369 } else {
10370 // recursively search for the next box along the path
10371 subresults = findBox(data.subarray(i + 8, end), path.slice(1));
10372
10373 if (subresults.length) {
10374 results = results.concat(subresults);
10375 }
10376 }
10377 }
10378
10379 i = end;
10380 } // we've finished searching all of data
10381
10382
10383 return results;
10384 };
10385
10386 var findBox_1 = findBox;
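/**
 * Illustrative sketch: `findBox` walks size/type pairs at one nesting level
 * and recurses into matching boxes, so a path selects nested payloads. The
 * returned arrays exclude the 8-byte size/type header of the final box.
 * `segment` is an assumed Uint8Array of a complete fmp4 segment.
 *
 * @example
 *   var trafs = findBox_1(segment, ['moof', 'traf']); // every traf payload
 *   var tfhds = findBox_1(segment, ['moof', 'traf', 'tfhd']);
 *   findBox_1(segment, []); // null -- empty paths short-circuit
 **/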
10387 var toUnsigned$2 = bin.toUnsigned;
10388
10389 var tfdt = function tfdt(data) {
10390 var result = {
10391 version: data[0],
10392 flags: new Uint8Array(data.subarray(1, 4)),
10393 baseMediaDecodeTime: toUnsigned$2(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7])
10394 };
10395
10396 if (result.version === 1) {
10397 result.baseMediaDecodeTime *= Math.pow(2, 32);
10398 result.baseMediaDecodeTime += toUnsigned$2(data[8] << 24 | data[9] << 16 | data[10] << 8 | data[11]);
10399 }
10400
10401 return result;
10402 };
10403
10404 var parseTfdt = tfdt;
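/**
 * Illustrative sketch: a version 1 tfdt carries a 64-bit baseMediaDecodeTime,
 * which the parser reassembles as high * 2^32 + low. The payload below is
 * hand-built for illustration.
 *
 * @example
 *   parseTfdt(new Uint8Array([
 *     1,          // version
 *     0, 0, 0,    // flags
 *     0, 0, 0, 1, // upper 32 bits
 *     0, 0, 0, 2  // lower 32 bits
 *   ])).baseMediaDecodeTime; // 1 * Math.pow(2, 32) + 2 === 4294967298
 **/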
10405
10406 var parseSampleFlags = function parseSampleFlags(flags) {
10407 return {
10408 isLeading: (flags[0] & 0x0c) >>> 2,
10409 dependsOn: flags[0] & 0x03,
10410 isDependedOn: (flags[1] & 0xc0) >>> 6,
10411 hasRedundancy: (flags[1] & 0x30) >>> 4,
10412 paddingValue: (flags[1] & 0x0e) >>> 1,
10413 isNonSyncSample: flags[1] & 0x01,
10414 degradationPriority: flags[2] << 8 | flags[3]
10415 };
10416 };
10417
10418 var parseSampleFlags_1 = parseSampleFlags;
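/**
 * Illustrative sketch: the four sample_flags bytes decode per
 * ISO-BMFF-12/2015, Section 8.8.3.1. For example, 0x02010000 marks a sample
 * that depends on others (dependsOn === 2) and is not a sync sample
 * (isNonSyncSample === 1) -- the typical flags for a non-key frame.
 *
 * @example
 *   parseSampleFlags_1(new Uint8Array([0x02, 0x01, 0x00, 0x00]));
 *   // { isLeading: 0, dependsOn: 2, isDependedOn: 0, hasRedundancy: 0,
 *   //   paddingValue: 0, isNonSyncSample: 1, degradationPriority: 0 }
 **/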
10419
10420 var trun$1 = function trun(data) {
10421 var result = {
10422 version: data[0],
10423 flags: new Uint8Array(data.subarray(1, 4)),
10424 samples: []
10425 },
10426 view = new DataView(data.buffer, data.byteOffset, data.byteLength),
10427 // Flag interpretation
10428 dataOffsetPresent = result.flags[2] & 0x01,
10429 // compare with 2nd byte of 0x1
10430 firstSampleFlagsPresent = result.flags[2] & 0x04,
10431 // compare with 2nd byte of 0x4
10432 sampleDurationPresent = result.flags[1] & 0x01,
10433 // compare with 2nd byte of 0x100
10434 sampleSizePresent = result.flags[1] & 0x02,
10435 // compare with 2nd byte of 0x200
10436 sampleFlagsPresent = result.flags[1] & 0x04,
10437 // compare with 2nd byte of 0x400
10438 sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
10439 // compare with 2nd byte of 0x800
10440 sampleCount = view.getUint32(4),
10441 offset = 8,
10442 sample;
10443
10444 if (dataOffsetPresent) {
10445 // 32 bit signed integer
10446 result.dataOffset = view.getInt32(offset);
10447 offset += 4;
10448 } // Overrides the flags for the first sample only. The order of
10449 // optional values will be: duration, size, compositionTimeOffset
10450
10451
10452 if (firstSampleFlagsPresent && sampleCount) {
10453 sample = {
10454 flags: parseSampleFlags_1(data.subarray(offset, offset + 4))
10455 };
10456 offset += 4;
10457
10458 if (sampleDurationPresent) {
10459 sample.duration = view.getUint32(offset);
10460 offset += 4;
10461 }
10462
10463 if (sampleSizePresent) {
10464 sample.size = view.getUint32(offset);
10465 offset += 4;
10466 }
10467
10468 if (sampleCompositionTimeOffsetPresent) {
10469 if (result.version === 1) {
10470 sample.compositionTimeOffset = view.getInt32(offset);
10471 } else {
10472 sample.compositionTimeOffset = view.getUint32(offset);
10473 }
10474
10475 offset += 4;
10476 }
10477
10478 result.samples.push(sample);
10479 sampleCount--;
10480 }
10481
10482 while (sampleCount--) {
10483 sample = {};
10484
10485 if (sampleDurationPresent) {
10486 sample.duration = view.getUint32(offset);
10487 offset += 4;
10488 }
10489
10490 if (sampleSizePresent) {
10491 sample.size = view.getUint32(offset);
10492 offset += 4;
10493 }
10494
10495 if (sampleFlagsPresent) {
10496 sample.flags = parseSampleFlags_1(data.subarray(offset, offset + 4));
10497 offset += 4;
10498 }
10499
10500 if (sampleCompositionTimeOffsetPresent) {
10501 if (result.version === 1) {
10502 sample.compositionTimeOffset = view.getInt32(offset);
10503 } else {
10504 sample.compositionTimeOffset = view.getUint32(offset);
10505 }
10506
10507 offset += 4;
10508 }
10509
10510 result.samples.push(sample);
10511 }
10512
10513 return result;
10514 };
10515
10516 var parseTrun = trun$1;
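/**
 * Illustrative sketch: a hand-built version 0 trun payload with flags
 * 0x000301 (data-offset, sample-duration and sample-size present) and a
 * single sample.
 *
 * @example
 *   parseTrun(new Uint8Array([
 *     0,                // version
 *     0x00, 0x03, 0x01, // flags
 *     0, 0, 0, 1,       // sample_count
 *     0, 0, 0, 16,      // data_offset
 *     0, 0, 0x0b, 0xb8, // sample_duration (3000)
 *     0, 0, 0x01, 0x00  // sample_size (256)
 *   ]));
 *   // { version: 0, flags: ..., dataOffset: 16,
 *   //   samples: [{ duration: 3000, size: 256 }] }
 **/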
10517
10518 var tfhd = function tfhd(data) {
10519 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
10520 result = {
10521 version: data[0],
10522 flags: new Uint8Array(data.subarray(1, 4)),
10523 trackId: view.getUint32(4)
10524 },
10525 baseDataOffsetPresent = result.flags[2] & 0x01,
10526 sampleDescriptionIndexPresent = result.flags[2] & 0x02,
10527 defaultSampleDurationPresent = result.flags[2] & 0x08,
10528 defaultSampleSizePresent = result.flags[2] & 0x10,
10529 defaultSampleFlagsPresent = result.flags[2] & 0x20,
10530 durationIsEmpty = result.flags[0] & 0x01, // flag 0x010000; flags[0] holds flag bits 16-23
10531 defaultBaseIsMoof = result.flags[0] & 0x02, // flag 0x020000
10532 i;
10533 i = 8;
10534
10535 if (baseDataOffsetPresent) {
10536 i += 4; // truncate top 4 bytes
10537 // FIXME: should we read the full 64 bits?
10538
10539 result.baseDataOffset = view.getUint32(12);
10540 i += 4;
10541 }
10542
10543 if (sampleDescriptionIndexPresent) {
10544 result.sampleDescriptionIndex = view.getUint32(i);
10545 i += 4;
10546 }
10547
10548 if (defaultSampleDurationPresent) {
10549 result.defaultSampleDuration = view.getUint32(i);
10550 i += 4;
10551 }
10552
10553 if (defaultSampleSizePresent) {
10554 result.defaultSampleSize = view.getUint32(i);
10555 i += 4;
10556 }
10557
10558 if (defaultSampleFlagsPresent) {
10559 result.defaultSampleFlags = view.getUint32(i);
10560 }
10561
10562 if (durationIsEmpty) {
10563 result.durationIsEmpty = true;
10564 }
10565
10566 if (!baseDataOffsetPresent && defaultBaseIsMoof) {
10567 result.baseDataOffsetIsMoof = true;
10568 }
10569
10570 return result;
10571 };
10572
10573 var parseTfhd = tfhd;
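/**
 * Illustrative sketch: a tfhd for track 1 with only the
 * default-sample-duration flag (0x000008) set.
 *
 * @example
 *   parseTfhd(new Uint8Array([
 *     0,                // version
 *     0x00, 0x00, 0x08, // flags
 *     0, 0, 0, 1,       // track_ID
 *     0, 0, 0x0b, 0xb8  // default_sample_duration (3000)
 *   ]));
 *   // { version: 0, flags: ..., trackId: 1, defaultSampleDuration: 3000 }
 **/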
10574 var discardEmulationPreventionBytes$1 = captionPacketParser.discardEmulationPreventionBytes;
10575 var CaptionStream$1 = captionStream.CaptionStream;
10576 /**
10577 * Maps an offset in the mdat to a sample based on the size of the samples.
10578 * Assumes that `parseSamples` has been called first.
10579 *
10580 * @param {Number} offset - The offset into the mdat
10581 * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
10582 * @return {?Object} The matching sample, or null if no match was found.
10583 *
10584 * @see ISO-BMFF-12/2015, Section 8.8.8
10585 **/
10586
10587 var mapToSample = function mapToSample(offset, samples) {
10588 var approximateOffset = offset;
10589
10590 for (var i = 0; i < samples.length; i++) {
10591 var sample = samples[i];
10592
10593 if (approximateOffset < sample.size) {
10594 return sample;
10595 }
10596
10597 approximateOffset -= sample.size;
10598 }
10599
10600 return null;
10601 };
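/**
 * Illustrative sketch: `mapToSample` treats the samples as a contiguous run
 * of bytes in the mdat, so an offset is matched by subtracting sizes until
 * one sample contains it.
 *
 * @example
 *   var samples = [{ size: 100, pts: 0 }, { size: 200, pts: 3000 }];
 *   mapToSample(50, samples);  // first sample (offset inside bytes 0-99)
 *   mapToSample(150, samples); // second sample (offset inside bytes 100-299)
 *   mapToSample(400, samples); // null (past the end)
 **/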
10602 /**
10603 * Finds SEI nal units contained in a Media Data Box.
10604 * Assumes that `parseSamples` has been called first.
10605 *
10606 * @param {Uint8Array} avcStream - The bytes of the mdat
10607 * @param {Object[]} samples - The samples parsed out by `parseSamples`
10608 * @param {Number} trackId - The trackId of this video track
10609 * @return {Object[]} seiNals - the parsed SEI NALUs found.
10610 * The contents of the seiNal should match what is expected by
10611 * CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
10612 *
10613 * @see ISO-BMFF-12/2015, Section 8.1.1
10614 * @see Rec. ITU-T H.264, 7.3.2.3.1
10615 **/
10616
10617
10618 var findSeiNals = function findSeiNals(avcStream, samples, trackId) {
10619 var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
10620 result = [],
10621 seiNal,
10622 i,
10623 length,
10624 lastMatchedSample;
10625
10626 for (i = 0; i + 4 < avcStream.length; i += length) {
10627 length = avcView.getUint32(i);
10628 i += 4; // Bail if this doesn't appear to be an H264 stream
10629
10630 if (length <= 0) {
10631 continue;
10632 }
10633
10634 switch (avcStream[i] & 0x1F) {
10635 case 0x06:
10636 var data = avcStream.subarray(i + 1, i + 1 + length);
10637 var matchingSample = mapToSample(i, samples);
10638 seiNal = {
10639 nalUnitType: 'sei_rbsp',
10640 size: length,
10641 data: data,
10642 escapedRBSP: discardEmulationPreventionBytes$1(data),
10643 trackId: trackId
10644 };
10645
10646 if (matchingSample) {
10647 seiNal.pts = matchingSample.pts;
10648 seiNal.dts = matchingSample.dts;
10649 lastMatchedSample = matchingSample;
10650 } else if (lastMatchedSample) {
10651 // If a matching sample cannot be found, use the last
10652 // sample's values as they should be as close as possible
10653 seiNal.pts = lastMatchedSample.pts;
10654 seiNal.dts = lastMatchedSample.dts;
10655 } else {
10656 // eslint-disable-next-line no-console
10657 console.log("We've encountered a nal unit without data. See mux.js#233.");
10658 break;
10659 }
10660
10661 result.push(seiNal);
10662 break;
10663 }
10664 }
10665
10666 return result;
10667 };
10668 /**
10669 * Parses sample information out of Track Run Boxes and calculates
10670 * the absolute presentation and decode timestamps of each sample.
10671 *
10672 * @param {Array<Uint8Array>} truns - The Track Run boxes to be parsed
10673 * @param {Number} baseMediaDecodeTime - base media decode time from tfdt
10674 * @see ISO-BMFF-12/2015, Section 8.8.12
10675 * @param {Object} tfhd - The parsed Track Fragment Header
10676 * @see inspect.parseTfhd
10677 * @return {Object[]} the parsed samples
10678 *
10679 * @see ISO-BMFF-12/2015, Section 8.8.8
10680 **/
10681
10682
10683 var parseSamples = function parseSamples(truns, baseMediaDecodeTime, tfhd) {
10684 var currentDts = baseMediaDecodeTime;
10685 var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
10686 var defaultSampleSize = tfhd.defaultSampleSize || 0;
10687 var trackId = tfhd.trackId;
10688 var allSamples = [];
10689 truns.forEach(function (trun) {
10690 // Note: We currently do not parse the sample table as well
10691 // as the trun. It's possible some sources will require this.
10692 // moov > trak > mdia > minf > stbl
10693 var trackRun = parseTrun(trun);
10694 var samples = trackRun.samples;
10695 samples.forEach(function (sample) {
10696 if (sample.duration === undefined) {
10697 sample.duration = defaultSampleDuration;
10698 }
10699
10700 if (sample.size === undefined) {
10701 sample.size = defaultSampleSize;
10702 }
10703
10704 sample.trackId = trackId;
10705 sample.dts = currentDts;
10706
10707 if (sample.compositionTimeOffset === undefined) {
10708 sample.compositionTimeOffset = 0;
10709 }
10710
10711 sample.pts = currentDts + sample.compositionTimeOffset;
10712 currentDts += sample.duration;
10713 });
10714 allSamples = allSamples.concat(samples);
10715 });
10716 return allSamples;
10717 };
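/**
 * Worked example of the timestamp math above, assuming a baseMediaDecodeTime
 * of 90000 and two samples with duration 3000 and compositionTimeOffset 0:
 * sample 0 gets dts = 90000 and pts = 90000; currentDts then advances by the
 * duration, so sample 1 gets dts = 93000 and pts = 93000. Samples that omit
 * duration or size fall back to the tfhd defaults parsed above.
 **/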
10718 /**
10719 * Parses out caption nals from an FMP4 segment's video tracks.
10720 *
10721 * @param {Uint8Array} segment - The bytes of a single segment
10722 * @param {Number} videoTrackId - The trackId of a video track in the segment
10723 * @return {Object.<Number, Object[]>} A mapping of video trackId to
10724 * a list of seiNals found in that track
10725 **/
10726
10727
10728 var parseCaptionNals = function parseCaptionNals(segment, videoTrackId) {
10729 // To get the samples
10730 var trafs = findBox_1(segment, ['moof', 'traf']); // To get SEI NAL units
10731
10732 var mdats = findBox_1(segment, ['mdat']);
10733 var captionNals = {};
10734 var mdatTrafPairs = []; // Pair up each traf with a mdat as moofs and mdats are in pairs
10735
10736 mdats.forEach(function (mdat, index) {
10737 var matchingTraf = trafs[index];
10738 mdatTrafPairs.push({
10739 mdat: mdat,
10740 traf: matchingTraf
10741 });
10742 });
10743 mdatTrafPairs.forEach(function (pair) {
10744 var mdat = pair.mdat;
10745 var traf = pair.traf;
10746 var tfhd = findBox_1(traf, ['tfhd']); // Exactly 1 tfhd per traf
10747
10748 var headerInfo = parseTfhd(tfhd[0]);
10749 var trackId = headerInfo.trackId;
10750 var tfdt = findBox_1(traf, ['tfdt']); // Either 0 or 1 tfdt per traf
10751
10752 var baseMediaDecodeTime = tfdt.length > 0 ? parseTfdt(tfdt[0]).baseMediaDecodeTime : 0;
10753 var truns = findBox_1(traf, ['trun']);
10754 var samples;
10755 var seiNals; // Only parse video data for the chosen video track
10756
10757 if (videoTrackId === trackId && truns.length > 0) {
10758 samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
10759 seiNals = findSeiNals(mdat, samples, trackId);
10760
10761 if (!captionNals[trackId]) {
10762 captionNals[trackId] = [];
10763 }
10764
10765 captionNals[trackId] = captionNals[trackId].concat(seiNals);
10766 }
10767 });
10768 return captionNals;
10769 };
10770 /**
10771 * Parses out inband captions from an MP4 container and returns
10772 * caption objects that can be used by WebVTT and the TextTrack API.
10773 * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
10774 * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
10775 * Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
10776 *
10777 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
10778 * @param {Number} trackId - The id of the video track to parse
10779 * @param {Number} timescale - The timescale for the video track from the init segment
10780 *
10781 * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
10782 * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
10783 * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
10784 * @return {String} parsedCaptions[].text - The visible content of the caption
10785 **/
10786
10787
10788 var parseEmbeddedCaptions = function parseEmbeddedCaptions(segment, trackId, timescale) {
10789 var seiNals; // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
10790
10791 if (trackId === null) {
10792 return null;
10793 }
10794
10795 seiNals = parseCaptionNals(segment, trackId);
10796 return {
10797 seiNals: seiNals[trackId],
10798 timescale: timescale
10799 };
10800 };
10801 /**
10802 * Converts SEI NALUs into captions that can be used by video.js
10803 **/
10804
10805
10806 var CaptionParser = function CaptionParser() {
10807 var isInitialized = false;
10808 var captionStream; // Stores segments seen before trackId and timescale are set
10809
10810 var segmentCache; // Stores video track ID of the track being parsed
10811
10812 var trackId; // Stores the timescale of the track being parsed
10813
10814 var timescale; // Stores captions parsed so far
10815
10816 var parsedCaptions; // Stores whether we are receiving partial data or not
10817
10818 var parsingPartial;
10819 /**
10820 * A method to indicate whether a CaptionParser has been initialized
10821 * @returns {Boolean}
10822 **/
10823
10824 this.isInitialized = function () {
10825 return isInitialized;
10826 };
10827 /**
10828 * Initializes the underlying CaptionStream, SEI NAL parsing
10829 * and management, and caption collection
10830 **/
10831
10832
10833 this.init = function (options) {
10834 captionStream = new CaptionStream$1();
10835 isInitialized = true;
10836 parsingPartial = options ? options.isPartial : false; // Collect dispatched captions
10837
10838 captionStream.on('data', function (event) {
10839 // Convert to seconds in the source's timescale
10840 event.startTime = event.startPts / timescale;
10841 event.endTime = event.endPts / timescale;
10842 parsedCaptions.captions.push(event);
10843 parsedCaptions.captionStreams[event.stream] = true;
10844 });
10845 };
10846 /**
10847 * Determines if a new video track will be selected
10848 * or if the timescale changed
10849 * @return {Boolean}
10850 **/
10851
10852
10853 this.isNewInit = function (videoTrackIds, timescales) {
10854 if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
10855 return false;
10856 }
10857
10858 return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
10859 };
10860 /**
10861 * Parses out SEI captions and interacts with underlying
10862 * CaptionStream to return dispatched captions
10863 *
10864 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
10865 * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
10866 * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
10867 * @see parseEmbeddedCaptions
10868 * @see m2ts/caption-stream.js
10869 **/
10870
10871
10872 this.parse = function (segment, videoTrackIds, timescales) {
10873 var parsedData;
10874
10875 if (!this.isInitialized()) {
10876 return null; // This is not likely to be a video segment
10877 } else if (!videoTrackIds || !timescales) {
10878 return null;
10879 } else if (this.isNewInit(videoTrackIds, timescales)) {
10880 // Use the first video track only as there is no
10881 // mechanism to switch to other video tracks
10882 trackId = videoTrackIds[0];
10883 timescale = timescales[trackId]; // If an init segment has not been seen yet, hold onto segment
10884 // data until we have one.
10885 // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
10886 } else if (trackId === null || !timescale) {
10887 segmentCache.push(segment);
10888 return null;
10889 } // Now that a timescale and trackId is set, parse cached segments
10890
10891
10892 while (segmentCache.length > 0) {
10893 var cachedSegment = segmentCache.shift();
10894 this.parse(cachedSegment, videoTrackIds, timescales);
10895 }
10896
10897 parsedData = parseEmbeddedCaptions(segment, trackId, timescale);
10898
10899 if (parsedData === null || !parsedData.seiNals) {
10900 return null;
10901 }
10902
10903 this.pushNals(parsedData.seiNals); // Force the parsed captions to be dispatched
10904
10905 this.flushStream();
10906 return parsedCaptions;
10907 };
10908 /**
10909 * Pushes SEI NALUs onto CaptionStream
10910 * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
10911 * Assumes that `parseCaptionNals` has been called first
10912 * @see m2ts/caption-stream.js
10913 **/
10914
10915
10916 this.pushNals = function (nals) {
10917 if (!this.isInitialized() || !nals || nals.length === 0) {
10918 return null;
10919 }
10920
10921 nals.forEach(function (nal) {
10922 captionStream.push(nal);
10923 });
10924 };
10925 /**
10926 * Flushes underlying CaptionStream to dispatch processed, displayable captions
10927 * @see m2ts/caption-stream.js
10928 **/
10929
10930
10931 this.flushStream = function () {
10932 if (!this.isInitialized()) {
10933 return null;
10934 }
10935
10936 if (!parsingPartial) {
10937 captionStream.flush();
10938 } else {
10939 captionStream.partialFlush();
10940 }
10941 };
10942 /**
10943 * Reset caption buckets for new data
10944 **/
10945
10946
10947 this.clearParsedCaptions = function () {
10948 parsedCaptions.captions = [];
10949 parsedCaptions.captionStreams = {};
10950 };
10951 /**
10952 * Resets underlying CaptionStream
10953 * @see m2ts/caption-stream.js
10954 **/
10955
10956
10957 this.resetCaptionStream = function () {
10958 if (!this.isInitialized()) {
10959 return null;
10960 }
10961
10962 captionStream.reset();
10963 };
10964 /**
10965 * Convenience method to clear all captions flushed from the
10966 * CaptionStream and still being parsed
10967 * @see m2ts/caption-stream.js
10968 **/
10969
10970
10971 this.clearAllCaptions = function () {
10972 this.clearParsedCaptions();
10973 this.resetCaptionStream();
10974 };
10975 /**
10976 * Reset caption parser
10977 **/
10978
10979
10980 this.reset = function () {
10981 segmentCache = [];
10982 trackId = null;
10983 timescale = null;
10984
10985 if (!parsedCaptions) {
10986 parsedCaptions = {
10987 captions: [],
10988 // CC1, CC2, CC3, CC4
10989 captionStreams: {}
10990 };
10991 } else {
10992 this.clearParsedCaptions();
10993 }
10994
10995 this.resetCaptionStream();
10996 };
10997
10998 this.reset();
10999 };
11000
11001 var captionParser = CaptionParser;
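/**
 * Illustrative sketch of the CaptionParser lifecycle. `segmentBytes` is an
 * assumed fmp4 segment, and `trackIds`/`timescales` are assumed to have been
 * extracted from the init segment beforehand (see the probe references in
 * the docs above).
 *
 * @example
 *   var parser = new captionParser();
 *   parser.init();
 *   var parsed = parser.parse(segmentBytes, trackIds, timescales);
 *   if (parsed) {
 *     // parsed.captions -> [{ startTime, endTime, text, stream }, ...]
 *     parser.clearParsedCaptions();
 *   }
 **/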
11002 /* global self */
11003
11004 var typeFromStreamString = function typeFromStreamString(streamString) {
11005 if (streamString === 'AudioSegmentStream') {
11006 return 'audio';
11007 }
11008
11009 return streamString === 'VideoSegmentStream' ? 'video' : '';
11010 };
11011 /**
11012 * Re-emits transmuxer events by converting them into messages to the
11013 * world outside the worker.
11014 *
11015 * @param {Object} transmuxer the transmuxer to wire events on
11016 * @private
11017 */
11018
11019
11020 var wireFullTransmuxerEvents = function wireFullTransmuxerEvents(self, transmuxer) {
11021 transmuxer.on('data', function (segment) {
11022 // transfer ownership of the underlying ArrayBuffer
11023 // instead of doing a copy to save memory
11024 // ArrayBuffers are transferable but generic TypedArrays are not
11025 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
11026 var initArray = segment.initSegment;
11027 segment.initSegment = {
11028 data: initArray.buffer,
11029 byteOffset: initArray.byteOffset,
11030 byteLength: initArray.byteLength
11031 };
11032 var typedArray = segment.data;
11033 segment.data = typedArray.buffer;
11034 self.postMessage({
11035 action: 'data',
11036 segment: segment,
11037 byteOffset: typedArray.byteOffset,
11038 byteLength: typedArray.byteLength
11039 }, [segment.data]);
11040 });
11041 transmuxer.on('done', function (data) {
11042 self.postMessage({
11043 action: 'done'
11044 });
11045 });
11046 transmuxer.on('gopInfo', function (gopInfo) {
11047 self.postMessage({
11048 action: 'gopInfo',
11049 gopInfo: gopInfo
11050 });
11051 });
11052 transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {
11053 var videoSegmentTimingInfo = {
11054 start: {
11055 decode: clock.videoTsToSeconds(timingInfo.start.dts),
11056 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
11057 },
11058 end: {
11059 decode: clock.videoTsToSeconds(timingInfo.end.dts),
11060 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
11061 },
11062 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
11063 };
11064
11065 if (timingInfo.prependedContentDuration) {
11066 videoSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
11067 }
11068
11069 self.postMessage({
11070 action: 'videoSegmentTimingInfo',
11071 videoSegmentTimingInfo: videoSegmentTimingInfo
11072 });
11073 });
11074 transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {
11075 // Note that all times for [audio/video]SegmentTimingInfo events are in video clock
11076 var audioSegmentTimingInfo = {
11077 start: {
11078 decode: clock.videoTsToSeconds(timingInfo.start.dts),
11079 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
11080 },
11081 end: {
11082 decode: clock.videoTsToSeconds(timingInfo.end.dts),
11083 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
11084 },
11085 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
11086 };
11087
11088 if (timingInfo.prependedContentDuration) {
11089 audioSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
11090 }
11091
11092 self.postMessage({
11093 action: 'audioSegmentTimingInfo',
11094 audioSegmentTimingInfo: audioSegmentTimingInfo
11095 });
11096 });
11097 transmuxer.on('id3Frame', function (id3Frame) {
11098 self.postMessage({
11099 action: 'id3Frame',
11100 id3Frame: id3Frame
11101 });
11102 });
11103 transmuxer.on('caption', function (caption) {
11104 self.postMessage({
11105 action: 'caption',
11106 caption: caption
11107 });
11108 });
11109 transmuxer.on('trackinfo', function (trackInfo) {
11110 self.postMessage({
11111 action: 'trackinfo',
11112 trackInfo: trackInfo
11113 });
11114 });
11115 transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
11116 // convert to video TS since we prioritize video time over audio
11117 self.postMessage({
11118 action: 'audioTimingInfo',
11119 audioTimingInfo: {
11120 start: clock.videoTsToSeconds(audioTimingInfo.start),
11121 end: clock.videoTsToSeconds(audioTimingInfo.end)
11122 }
11123 });
11124 });
11125 transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
11126 self.postMessage({
11127 action: 'videoTimingInfo',
11128 videoTimingInfo: {
11129 start: clock.videoTsToSeconds(videoTimingInfo.start),
11130 end: clock.videoTsToSeconds(videoTimingInfo.end)
11131 }
11132 });
11133 });
11134 };
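/**
 * Illustrative note on the transfers above: listing an ArrayBuffer in
 * postMessage's transfer list moves it across contexts instead of copying,
 * leaving the sender's copy detached.
 *
 * @example
 *   var bytes = new Uint8Array([1, 2, 3]);
 *   self.postMessage({ action: 'data', data: bytes.buffer }, [bytes.buffer]);
 *   bytes.buffer.byteLength; // 0 -- detached after the transfer
 **/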
11135
11136 var wirePartialTransmuxerEvents = function wirePartialTransmuxerEvents(self, transmuxer) {
11137 transmuxer.on('data', function (event) {
11138 // transfer ownership of the underlying ArrayBuffer
11139 // instead of doing a copy to save memory
11140 // ArrayBuffers are transferable but generic TypedArrays are not
11141 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
11142 var initSegment = {
11143 data: event.data.track.initSegment.buffer,
11144 byteOffset: event.data.track.initSegment.byteOffset,
11145 byteLength: event.data.track.initSegment.byteLength
11146 };
11147 var boxes = {
11148 data: event.data.boxes.buffer,
11149 byteOffset: event.data.boxes.byteOffset,
11150 byteLength: event.data.boxes.byteLength
11151 };
11152 var segment = {
11153 boxes: boxes,
11154 initSegment: initSegment,
11155 type: event.type,
11156 sequence: event.data.sequence
11157 };
11158
11159 if (typeof event.data.videoFrameDts !== 'undefined') {
11160 segment.videoFrameDtsTime = clock.videoTsToSeconds(event.data.videoFrameDts);
11161 }
11162
11163 if (typeof event.data.videoFramePts !== 'undefined') {
11164 segment.videoFramePtsTime = clock.videoTsToSeconds(event.data.videoFramePts);
11165 }
11166
11167 self.postMessage({
11168 action: 'data',
11169 segment: segment
11170 }, [segment.boxes.data, segment.initSegment.data]);
11171 });
11172 transmuxer.on('id3Frame', function (id3Frame) {
11173 self.postMessage({
11174 action: 'id3Frame',
11175 id3Frame: id3Frame
11176 });
11177 });
11178 transmuxer.on('caption', function (caption) {
11179 self.postMessage({
11180 action: 'caption',
11181 caption: caption
11182 });
11183 });
11184 transmuxer.on('done', function (data) {
11185 self.postMessage({
11186 action: 'done',
11187 type: typeFromStreamString(data)
11188 });
11189 });
11190 transmuxer.on('partialdone', function (data) {
11191 self.postMessage({
11192 action: 'partialdone',
11193 type: typeFromStreamString(data)
11194 });
11195 });
11196 transmuxer.on('endedsegment', function (data) {
11197 self.postMessage({
11198 action: 'endedSegment',
11199 type: typeFromStreamString(data)
11200 });
11201 });
11202 transmuxer.on('trackinfo', function (trackInfo) {
11203 self.postMessage({
11204 action: 'trackinfo',
11205 trackInfo: trackInfo
11206 });
11207 });
11208 transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
11209 // This can happen if flush is called when no
11210 // audio has been processed. This should be an
11211 // unusual case, but if it does occur it should not
11212 // result in valid data being returned
11213 if (audioTimingInfo.start === null) {
11214 self.postMessage({
11215 action: 'audioTimingInfo',
11216 audioTimingInfo: audioTimingInfo
11217 });
11218 return;
11219 } // convert to video TS since we prioritize video time over audio
11220
11221
11222 var timingInfoInSeconds = {
11223 start: clock.videoTsToSeconds(audioTimingInfo.start)
11224 };
11225
11226 if (audioTimingInfo.end) {
11227 timingInfoInSeconds.end = clock.videoTsToSeconds(audioTimingInfo.end);
11228 }
11229
11230 self.postMessage({
11231 action: 'audioTimingInfo',
11232 audioTimingInfo: timingInfoInSeconds
11233 });
11234 });
11235 transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
11236 var timingInfoInSeconds = {
11237 start: clock.videoTsToSeconds(videoTimingInfo.start)
11238 };
11239
11240 if (videoTimingInfo.end) {
11241 timingInfoInSeconds.end = clock.videoTsToSeconds(videoTimingInfo.end);
11242 }
11243
11244 self.postMessage({
11245 action: 'videoTimingInfo',
11246 videoTimingInfo: timingInfoInSeconds
11247 });
11248 });
11249 };
11250 /**
11251 * All incoming messages route through this hash. If no function exists
11252 * to handle an incoming message, then we ignore the message.
11253 *
11254 * @class MessageHandlers
11255 * @param {Object} options the options to initialize with
11256 */
11257
11258
11259 var MessageHandlers = /*#__PURE__*/function () {
11260 function MessageHandlers(self, options) {
11261 this.options = options || {};
11262 this.self = self;
11263 this.init();
11264 }
11265 /**
11266 * initialize our web worker and wire all the events.
11267 */
11268
11269
11270 var _proto = MessageHandlers.prototype;
11271
11272 _proto.init = function init() {
11273 if (this.transmuxer) {
11274 this.transmuxer.dispose();
11275 }
11276
11277 this.transmuxer = this.options.handlePartialData ? new transmuxer$1(this.options) : new transmuxer.Transmuxer(this.options);
11278
11279 if (this.options.handlePartialData) {
11280 wirePartialTransmuxerEvents(this.self, this.transmuxer);
11281 } else {
11282 wireFullTransmuxerEvents(this.self, this.transmuxer);
11283 }
11284 };
11285
11286 _proto.pushMp4Captions = function pushMp4Captions(data) {
11287 if (!this.captionParser) {
11288 this.captionParser = new captionParser();
11289 this.captionParser.init();
11290 }
11291
11292 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
11293 var parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);
11294 this.self.postMessage({
11295 action: 'mp4Captions',
11296 captions: parsed && parsed.captions || [],
11297 data: segment.buffer
11298 }, [segment.buffer]);
11299 };
11300
11301 _proto.clearAllMp4Captions = function clearAllMp4Captions() {
11302 if (this.captionParser) {
11303 this.captionParser.clearAllCaptions();
11304 }
11305 };
11306
11307 _proto.clearParsedMp4Captions = function clearParsedMp4Captions() {
11308 if (this.captionParser) {
11309 this.captionParser.clearParsedCaptions();
11310 }
11311 }
11312 /**
11313 * Adds data (a ts segment) to the start of the transmuxer pipeline for
11314 * processing.
11315 *
11316 * @param {ArrayBuffer} data data to push into the muxer
11317 */
11318 ;
11319
11320 _proto.push = function push(data) {
11321 // Cast array buffer to correct type for transmuxer
11322 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
11323 this.transmuxer.push(segment);
11324 }
11325 /**
11326 * Recreate the transmuxer so that the next segment added via `push`
11327 * starts with a fresh transmuxer.
11328 */
11329 ;
11330
11331 _proto.reset = function reset() {
11332 this.transmuxer.reset();
11333 }
11334 /**
11335 * Set the value that will be used as the `baseMediaDecodeTime` time for the
11336 * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
11337 * set relative to the first based on the PTS values.
11338 *
11339 * @param {Object} data used to set the timestamp offset in the muxer
11340 */
11341 ;
11342
11343 _proto.setTimestampOffset = function setTimestampOffset(data) {
11344 var timestampOffset = data.timestampOffset || 0;
11345 this.transmuxer.setBaseMediaDecodeTime(Math.round(clock.secondsToVideoTs(timestampOffset)));
11346 };
11347
11348 _proto.setAudioAppendStart = function setAudioAppendStart(data) {
11349 this.transmuxer.setAudioAppendStart(Math.ceil(clock.secondsToVideoTs(data.appendStart)));
11350 };
11351
11352 _proto.setRemux = function setRemux(data) {
11353 this.transmuxer.setRemux(data.remux);
11354 }
11355 /**
11356 * Forces the pipeline to finish processing the last segment and emit its
11357 * results.
11358 *
11359 * @param {Object} data event data, not really used
11360 */
11361 ;
11362
11363 _proto.flush = function flush(data) {
11364 this.transmuxer.flush(); // transmuxed done action is fired after both audio/video pipelines are flushed
11365
11366 self.postMessage({
11367 action: 'done',
11368 type: 'transmuxed'
11369 });
11370 };
11371
11372 _proto.partialFlush = function partialFlush(data) {
11373 this.transmuxer.partialFlush(); // transmuxed partialdone action is fired after both audio/video pipelines are flushed
11374
11375 self.postMessage({
11376 action: 'partialdone',
11377 type: 'transmuxed'
11378 });
11379 };
11380
11381 _proto.endTimeline = function endTimeline() {
11382 this.transmuxer.endTimeline(); // transmuxed endedtimeline action is fired after both audio/video pipelines end their
11383 // timelines
11384
11385 self.postMessage({
11386 action: 'endedtimeline',
11387 type: 'transmuxed'
11388 });
11389 };
11390
11391 _proto.alignGopsWith = function alignGopsWith(data) {
11392 this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
11393 };
11394
11395 return MessageHandlers;
11396 }();
11397 /**
11398 * Our web worker interface so that things can talk to mux.js
11399 * that will be running in a web worker. The scope is passed to this by
11400 * webworkify.
11401 *
11402 * @param {Object} self the scope for the web worker
11403 */
11404
11405
11406 self.onmessage = function (event) {
11407 if (event.data.action === 'init' && event.data.options) {
11408 this.messageHandlers = new MessageHandlers(self, event.data.options);
11409 return;
11410 }
11411
11412 if (!this.messageHandlers) {
11413 this.messageHandlers = new MessageHandlers(self);
11414 }
11415
11416 if (event.data && event.data.action && event.data.action !== 'init') {
11417 if (this.messageHandlers[event.data.action]) {
11418 this.messageHandlers[event.data.action](event.data);
11419 }
11420 }
11421 };
11422}));
11423var TransmuxWorker = factory(workerCode);
11424/* rollup-plugin-worker-factory end for worker!/Users/gkatsevman/p/http-streaming-release/src/transmuxer-worker.js */
11425
11426var handleData_ = function handleData_(event, transmuxedData, callback) {
11427 var _event$data$segment = event.data.segment,
11428 type = _event$data$segment.type,
11429 initSegment = _event$data$segment.initSegment,
11430 captions = _event$data$segment.captions,
11431 captionStreams = _event$data$segment.captionStreams,
11432 metadata = _event$data$segment.metadata,
11433 videoFrameDtsTime = _event$data$segment.videoFrameDtsTime,
11434 videoFramePtsTime = _event$data$segment.videoFramePtsTime;
11435 transmuxedData.buffer.push({
11436 captions: captions,
11437 captionStreams: captionStreams,
11438 metadata: metadata
11439 }); // right now, boxes will come back from partial transmuxer, data from full
11440
11441 var boxes = event.data.segment.boxes || {
11442 data: event.data.segment.data
11443 };
11444 var result = {
11445 type: type,
11446 // cast ArrayBuffer to TypedArray
11447 data: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),
11448 initSegment: new Uint8Array(initSegment.data, initSegment.byteOffset, initSegment.byteLength)
11449 };
11450
11451 if (typeof videoFrameDtsTime !== 'undefined') {
11452 result.videoFrameDtsTime = videoFrameDtsTime;
11453 }
11454
11455 if (typeof videoFramePtsTime !== 'undefined') {
11456 result.videoFramePtsTime = videoFramePtsTime;
11457 }
11458
11459 callback(result);
11460};
11461var handleDone_ = function handleDone_(_ref) {
11462 var transmuxedData = _ref.transmuxedData,
11463 callback = _ref.callback;
11464 // Previously we only returned data on data events,
11465 // not on done events. Clear out the buffer to keep that consistent.
11466 transmuxedData.buffer = []; // all buffers should have been flushed from the muxer, so start processing anything we
11467 // have received
11468
11469 callback(transmuxedData);
11470};
11471var handleGopInfo_ = function handleGopInfo_(event, transmuxedData) {
11472 transmuxedData.gopInfo = event.data.gopInfo;
11473};
11474var processTransmux = function processTransmux(options) {
11475 var transmuxer = options.transmuxer,
11476 bytes = options.bytes,
11477 audioAppendStart = options.audioAppendStart,
11478 gopsToAlignWith = options.gopsToAlignWith,
11479 isPartial = options.isPartial,
11480 remux = options.remux,
11481 onData = options.onData,
11482 onTrackInfo = options.onTrackInfo,
11483 onAudioTimingInfo = options.onAudioTimingInfo,
11484 onVideoTimingInfo = options.onVideoTimingInfo,
11485 onVideoSegmentTimingInfo = options.onVideoSegmentTimingInfo,
11486 onAudioSegmentTimingInfo = options.onAudioSegmentTimingInfo,
11487 onId3 = options.onId3,
11488 onCaptions = options.onCaptions,
11489 onDone = options.onDone,
11490 onEndedTimeline = options.onEndedTimeline,
11491 isEndOfTimeline = options.isEndOfTimeline;
11492 var transmuxedData = {
11493 isPartial: isPartial,
11494 buffer: []
11495 };
11496 var waitForEndedTimelineEvent = isEndOfTimeline;
11497
11498 var handleMessage = function handleMessage(event) {
11499 if (transmuxer.currentTransmux !== options) {
11500 // disposed
11501 return;
11502 }
11503
11504 if (event.data.action === 'data') {
11505 handleData_(event, transmuxedData, onData);
11506 }
11507
11508 if (event.data.action === 'trackinfo') {
11509 onTrackInfo(event.data.trackInfo);
11510 }
11511
11512 if (event.data.action === 'gopInfo') {
11513 handleGopInfo_(event, transmuxedData);
11514 }
11515
11516 if (event.data.action === 'audioTimingInfo') {
11517 onAudioTimingInfo(event.data.audioTimingInfo);
11518 }
11519
11520 if (event.data.action === 'videoTimingInfo') {
11521 onVideoTimingInfo(event.data.videoTimingInfo);
11522 }
11523
11524 if (event.data.action === 'videoSegmentTimingInfo') {
11525 onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
11526 }
11527
11528 if (event.data.action === 'audioSegmentTimingInfo') {
11529 onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
11530 }
11531
11532 if (event.data.action === 'id3Frame') {
11533 onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
11534 }
11535
11536 if (event.data.action === 'caption') {
11537 onCaptions(event.data.caption);
11538 }
11539
11540 if (event.data.action === 'endedtimeline') {
11541 waitForEndedTimelineEvent = false;
11542 onEndedTimeline();
11543 } // wait for the transmuxed event since we may have audio and video
11544
11545
11546 if (event.data.type !== 'transmuxed') {
11547 return;
11548 } // If the "endedtimeline" event has not yet fired, and this segment represents the end
11549 // of a timeline, that means there may still be data events before the segment
11550 // processing can be considered complete. In that case, the final event should be
11551 // an "endedtimeline" event with the type "transmuxed."
11552
11553
11554 if (waitForEndedTimelineEvent) {
11555 return;
11556 }
11557
11558 transmuxer.onmessage = null;
11559 handleDone_({
11560 transmuxedData: transmuxedData,
11561 callback: onDone
11562 });
11563 /* eslint-disable no-use-before-define */
11564
11565 dequeue(transmuxer);
11566 /* eslint-enable */
11567 };
11568
11569 transmuxer.onmessage = handleMessage;
11570
11571 if (audioAppendStart) {
11572 transmuxer.postMessage({
11573 action: 'setAudioAppendStart',
11574 appendStart: audioAppendStart
11575 });
11576 } // allow empty arrays to be passed to clear out GOPs
11577
11578
11579 if (Array.isArray(gopsToAlignWith)) {
11580 transmuxer.postMessage({
11581 action: 'alignGopsWith',
11582 gopsToAlignWith: gopsToAlignWith
11583 });
11584 }
11585
11586 if (typeof remux !== 'undefined') {
11587 transmuxer.postMessage({
11588 action: 'setRemux',
11589 remux: remux
11590 });
11591 }
11592
11593 if (bytes.byteLength) {
11594 var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
11595 var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
11596 transmuxer.postMessage({
11597 action: 'push',
11598 // Send the typed-array of data as an ArrayBuffer so that
11599 // it can be sent as a "Transferable" and avoid the costly
11600 // memory copy
11601 data: buffer,
11602 // To recreate the original typed-array, we need information
11603 // about what portion of the ArrayBuffer it was a view into
11604 byteOffset: byteOffset,
11605 byteLength: bytes.byteLength
11606 }, [buffer]);
11607 } // even if we didn't push any bytes, we have to make sure we flush in case we reached
11608 // the end of the segment
11609
11610
11611 transmuxer.postMessage({
11612 action: isPartial ? 'partialFlush' : 'flush'
11613 });
11614
11615 if (isEndOfTimeline) {
11616 transmuxer.postMessage({
11617 action: 'endTimeline'
11618 });
11619 }
11620};
11621var dequeue = function dequeue(transmuxer) {
11622 transmuxer.currentTransmux = null;
11623
11624 if (transmuxer.transmuxQueue.length) {
11625 transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();
11626
11627 if (typeof transmuxer.currentTransmux === 'function') {
11628 transmuxer.currentTransmux();
11629 } else {
11630 processTransmux(transmuxer.currentTransmux);
11631 }
11632 }
11633};
11634var processAction = function processAction(transmuxer, action) {
11635 transmuxer.postMessage({
11636 action: action
11637 });
11638 dequeue(transmuxer);
11639};
11640var enqueueAction = function enqueueAction(action, transmuxer) {
11641 if (!transmuxer.currentTransmux) {
11642 transmuxer.currentTransmux = action;
11643 processAction(transmuxer, action);
11644 return;
11645 }
11646
11647 transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
11648};
11649var reset = function reset(transmuxer) {
11650 enqueueAction('reset', transmuxer);
11651};
11652var endTimeline = function endTimeline(transmuxer) {
11653 enqueueAction('endTimeline', transmuxer);
11654};
11655var transmux = function transmux(options) {
11656 if (!options.transmuxer.currentTransmux) {
11657 options.transmuxer.currentTransmux = options;
11658 processTransmux(options);
11659 return;
11660 }
11661
11662 options.transmuxer.transmuxQueue.push(options);
11663};
11664var createTransmuxer = function createTransmuxer(options) {
11665 var transmuxer = new TransmuxWorker();
11666 transmuxer.currentTransmux = null;
11667 transmuxer.transmuxQueue = [];
11668 var term = transmuxer.terminate;
11669
11670 transmuxer.terminate = function () {
11671 transmuxer.currentTransmux = null;
11672 transmuxer.transmuxQueue.length = 0;
11673 return term.call(transmuxer);
11674 };
11675
11676 transmuxer.postMessage({
11677 action: 'init',
11678 options: options
11679 });
11680 return transmuxer;
11681};
11682var segmentTransmuxer = {
11683 reset: reset,
11684 endTimeline: endTimeline,
11685 transmux: transmux,
11686 createTransmuxer: createTransmuxer
11687};
11688
11689/**
11690 * Probe an mpeg2-ts segment to determine the start time of the segment in its
11691 * internal "media time," as well as whether it contains video and/or audio.
11692 *
11693 * @private
11694 * @param {Uint8Array} bytes - segment bytes
11695 * @param {number} baseStartTime
11696 * Relative reference timestamp used when adjusting frame timestamps for rollover.
11697 * This value should be in seconds, as it's converted to a 90kHz clock within the
11698 * function body.
11699 * @return {Object} The start time of the current segment in "media time" as well as
11700 * whether it contains video and/or audio
11701 */
11702
11703var probeTsSegment = function probeTsSegment(bytes, baseStartTime) {
11704 var timeInfo = tsInspector__default['default'].inspect(bytes, baseStartTime * clock.ONE_SECOND_IN_TS);
11705
11706 if (!timeInfo) {
11707 return null;
11708 }
11709
11710 var result = {
11711 // each type's time info comes back as an array of 2 times, start and end
11712 hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
11713 hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
11714 };
11715
11716 if (result.hasVideo) {
11717 result.videoStart = timeInfo.video[0].ptsTime;
11718 }
11719
11720 if (result.hasAudio) {
11721 result.audioStart = timeInfo.audio[0].ptsTime;
11722 }
11723
11724 return result;
11725};
11726/**
11727 * Combine all segments into a single Uint8Array
11728 *
11729 * @param {Object} segmentObj
11730 * @return {Uint8Array} concatenated bytes
11731 * @private
11732 */
11733
11734var concatSegments = function concatSegments(segmentObj) {
11735 var offset = 0;
11736 var tempBuffer;
11737
11738 if (segmentObj.bytes) {
11739 tempBuffer = new Uint8Array(segmentObj.bytes); // combine the individual segments into one large typed-array
11740
11741 segmentObj.segments.forEach(function (segment) {
11742 tempBuffer.set(segment, offset);
11743 offset += segment.byteLength;
11744 });
11745 }
11746
11747 return tempBuffer;
11748};
11749
11750var REQUEST_ERRORS = {
11751 FAILURE: 2,
11752 TIMEOUT: -101,
11753 ABORTED: -102
11754};
11755/**
11756 * Abort all requests
11757 *
11758 * @param {Object} activeXhrs - an object that tracks all XHR requests
11759 */
11760
11761var abortAll = function abortAll(activeXhrs) {
11762 activeXhrs.forEach(function (xhr) {
11763 xhr.abort();
11764 });
11765};
11766/**
11767 * Gather important bandwidth stats once a request has completed
11768 *
11769 * @param {Object} request - the XHR request from which to gather stats
11770 */
11771
11772
11773var getRequestStats = function getRequestStats(request) {
11774 return {
11775 bandwidth: request.bandwidth,
11776 bytesReceived: request.bytesReceived || 0,
11777 roundTripTime: request.roundTripTime || 0
11778 };
11779};
11780/**
11781 * If possible gather bandwidth stats as a request is in
11782 * progress
11783 *
11784 * @param {Event} progressEvent - an event object from an XHR's progress event
11785 */
11786
11787
11788var getProgressStats = function getProgressStats(progressEvent) {
11789 var request = progressEvent.target;
11790 var roundTripTime = Date.now() - request.requestTime;
11791 var stats = {
11792 bandwidth: Infinity,
11793 bytesReceived: 0,
11794 roundTripTime: roundTripTime || 0
11795 };
11796 stats.bytesReceived = progressEvent.loaded; // This can result in Infinity if stats.roundTripTime is 0 but that is ok
11797 // because we should only use bandwidth stats on progress to determine when to
11798 // abort a request early due to insufficient bandwidth
11799
11800 stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
11801 return stats;
11802};
11803/**
11804 * Handle all error conditions in one place and return an object
11805 * with all the information
11806 *
11807 * @param {Error|null} error - if non-null signals an error occurred with the XHR
11808 * @param {Object} request - the XHR request that possibly generated the error
11809 */
11810
11811
11812var handleErrors = function handleErrors(error, request) {
11813 if (request.timedout) {
11814 return {
11815 status: request.status,
11816 message: 'HLS request timed-out at URL: ' + request.uri,
11817 code: REQUEST_ERRORS.TIMEOUT,
11818 xhr: request
11819 };
11820 }
11821
11822 if (request.aborted) {
11823 return {
11824 status: request.status,
11825 message: 'HLS request aborted at URL: ' + request.uri,
11826 code: REQUEST_ERRORS.ABORTED,
11827 xhr: request
11828 };
11829 }
11830
11831 if (error) {
11832 return {
11833 status: request.status,
11834 message: 'HLS request errored at URL: ' + request.uri,
11835 code: REQUEST_ERRORS.FAILURE,
11836 xhr: request
11837 };
11838 }
11839
11840 return null;
11841};
11842/**
11843 * Handle responses for key data and convert the key data to the correct format
11844 * for the decryption step later
11845 *
11846 * @param {Object} segment - a simplified copy of the segmentInfo object
11847 * from SegmentLoader
11848 * @param {Function} finishProcessingFn - a callback to execute to continue processing
11849 * this request
11850 */
11851
11852
11853var handleKeyResponse = function handleKeyResponse(segment, finishProcessingFn) {
11854 return function (error, request) {
11855 var response = request.response;
11856 var errorObj = handleErrors(error, request);
11857
11858 if (errorObj) {
11859 return finishProcessingFn(errorObj, segment);
11860 }
11861
11862 if (response.byteLength !== 16) {
11863 return finishProcessingFn({
11864 status: request.status,
11865 message: 'Invalid HLS key at URL: ' + request.uri,
11866 code: REQUEST_ERRORS.FAILURE,
11867 xhr: request
11868 }, segment);
11869 }
11870
11871 var view = new DataView(response);
11872 segment.key.bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
11873 return finishProcessingFn(null, segment);
11874 };
11875};
11876/**
11877 * Handle init-segment responses
11878 *
11879 * @param {Object} segment - a simplified copy of the segmentInfo object
11880 * from SegmentLoader
11881 * @param {Function} finishProcessingFn - a callback to execute to continue processing
11882 * this request
11883 */
11884
11885
11886var handleInitSegmentResponse = function handleInitSegmentResponse(_ref) {
11887 var segment = _ref.segment,
11888 finishProcessingFn = _ref.finishProcessingFn;
11889 return function (error, request) {
11890 var response = request.response;
11891 var errorObj = handleErrors(error, request);
11892
11893 if (errorObj) {
11894 return finishProcessingFn(errorObj, segment);
11895 } // stop processing if received empty content
11896
11897
11898 if (response.byteLength === 0) {
11899 return finishProcessingFn({
11900 status: request.status,
11901 message: 'Empty HLS segment content at URL: ' + request.uri,
11902 code: REQUEST_ERRORS.FAILURE,
11903 xhr: request
11904 }, segment);
11905 }
11906
11907 segment.map.bytes = new Uint8Array(request.response);
11908 var type = containers.detectContainerForBytes(segment.map.bytes); // TODO: We should also handle ts init segments here, but we
11909 // only know how to parse mp4 init segments at the moment
11910
11911 if (type !== 'mp4') {
11912 return finishProcessingFn({
11913 status: request.status,
11914 message: "Found unsupported " + (type || 'unknown') + " container for initialization segment at URL: " + request.uri,
11915 code: REQUEST_ERRORS.FAILURE,
11916 internal: true,
11917 xhr: request
11918 }, segment);
11919 }
11920
11921 var tracks = mp4probe__default['default'].tracks(segment.map.bytes);
11922 tracks.forEach(function (track) {
11923 segment.map.tracks = segment.map.tracks || {}; // only support one track of each type for now
11924
11925 if (segment.map.tracks[track.type]) {
11926 return;
11927 }
11928
11929 segment.map.tracks[track.type] = track;
11930
11931 if (typeof track.id === 'number' && track.timescale) {
11932 segment.map.timescales = segment.map.timescales || {};
11933 segment.map.timescales[track.id] = track.timescale;
11934 }
11935 });
11936 return finishProcessingFn(null, segment);
11937 };
11938};
11939/**
11940 * Response handler for segment-requests being sure to set the correct
11941 * property depending on whether the segment is encrypted or not
11942 * Also records and keeps track of stats that are used for ABR purposes
11943 *
11944 * @param {Object} segment - a simplified copy of the segmentInfo object
11945 * from SegmentLoader
11946 * @param {Function} finishProcessingFn - a callback to execute to continue processing
11947 * this request
11948 */
11949
11950
11951var handleSegmentResponse = function handleSegmentResponse(_ref2) {
11952 var segment = _ref2.segment,
11953 finishProcessingFn = _ref2.finishProcessingFn,
11954 responseType = _ref2.responseType;
11955 return function (error, request) {
11956 var response = request.response;
11957 var errorObj = handleErrors(error, request);
11958
11959 if (errorObj) {
11960 return finishProcessingFn(errorObj, segment);
11961 }
11962
11963 var newBytes = // although responseText "should" exist, this guard serves to prevent an error being
11964 // thrown for two primary cases:
11965 // 1. the mime type override stops working, or is not implemented for a specific
11966 // browser
11967 // 2. when using mock XHR libraries like sinon that do not allow the override behavior
11968 responseType === 'arraybuffer' || !request.responseText ? request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0)); // stop processing if received empty content
11969
11970 if (response.byteLength === 0) {
11971 return finishProcessingFn({
11972 status: request.status,
11973 message: 'Empty HLS segment content at URL: ' + request.uri,
11974 code: REQUEST_ERRORS.FAILURE,
11975 xhr: request
11976 }, segment);
11977 }
11978
11979 segment.stats = getRequestStats(request);
11980
11981 if (segment.key) {
11982 segment.encryptedBytes = new Uint8Array(newBytes);
11983 } else {
11984 segment.bytes = new Uint8Array(newBytes);
11985 }
11986
11987 return finishProcessingFn(null, segment);
11988 };
11989};
11990
11991var transmuxAndNotify = function transmuxAndNotify(_ref3) {
11992 var segment = _ref3.segment,
11993 bytes = _ref3.bytes,
11994 isPartial = _ref3.isPartial,
11995 trackInfoFn = _ref3.trackInfoFn,
11996 timingInfoFn = _ref3.timingInfoFn,
11997 videoSegmentTimingInfoFn = _ref3.videoSegmentTimingInfoFn,
11998 audioSegmentTimingInfoFn = _ref3.audioSegmentTimingInfoFn,
11999 id3Fn = _ref3.id3Fn,
12000 captionsFn = _ref3.captionsFn,
12001 isEndOfTimeline = _ref3.isEndOfTimeline,
12002 endedTimelineFn = _ref3.endedTimelineFn,
12003 dataFn = _ref3.dataFn,
12004 doneFn = _ref3.doneFn;
12005 var fmp4Tracks = segment.map && segment.map.tracks || {};
12006 var isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.
12007 // One reason for this is that in the case of full segments, we want to trust start
12008 // times from the probe, rather than the transmuxer.
12009
12010 var audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
12011 var audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
12012 var videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
12013 var videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end'); // Check to see if we are appending a full segment.
12014
12015 if (!isPartial && !segment.lastReachedChar) {
12016 // In the full segment transmuxer, we don't yet have the ability to extract a "proper"
12017 // start time. Meaning cached frame data may corrupt our notion of where this segment
12018 // really starts. To get around this, full segment appends should probe for the info
12019 // needed.
12020 var probeResult = probeTsSegment(bytes, segment.baseStartTime);
12021
12022 if (probeResult) {
12023 trackInfoFn(segment, {
12024 hasAudio: probeResult.hasAudio,
12025 hasVideo: probeResult.hasVideo,
12026 isMuxed: isMuxed
12027 });
12028 trackInfoFn = null;
12029
12030 if (probeResult.hasAudio && !isMuxed) {
12031 audioStartFn(probeResult.audioStart);
12032 }
12033
12034 if (probeResult.hasVideo) {
12035 videoStartFn(probeResult.videoStart);
12036 }
12037
12038 audioStartFn = null;
12039 videoStartFn = null;
12040 }
12041 }
12042
12043 transmux({
12044 bytes: bytes,
12045 transmuxer: segment.transmuxer,
12046 audioAppendStart: segment.audioAppendStart,
12047 gopsToAlignWith: segment.gopsToAlignWith,
12048 isPartial: isPartial,
12049 remux: isMuxed,
12050 onData: function onData(result) {
12051 result.type = result.type === 'combined' ? 'video' : result.type;
12052 dataFn(segment, result);
12053 },
12054 onTrackInfo: function onTrackInfo(trackInfo) {
12055 if (trackInfoFn) {
12056 if (isMuxed) {
12057 trackInfo.isMuxed = true;
12058 }
12059
12060 trackInfoFn(segment, trackInfo);
12061 }
12062 },
12063 onAudioTimingInfo: function onAudioTimingInfo(audioTimingInfo) {
12064 // we only want the first start value we encounter
12065 if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
12066 audioStartFn(audioTimingInfo.start);
12067 audioStartFn = null;
12068 } // we want to continually update the end time
12069
12070
12071 if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
12072 audioEndFn(audioTimingInfo.end);
12073 }
12074 },
12075 onVideoTimingInfo: function onVideoTimingInfo(videoTimingInfo) {
12076 // we only want the first start value we encounter
12077 if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
12078 videoStartFn(videoTimingInfo.start);
12079 videoStartFn = null;
12080 } // we want to continually update the end time
12081
12082
12083 if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
12084 videoEndFn(videoTimingInfo.end);
12085 }
12086 },
12087 onVideoSegmentTimingInfo: function onVideoSegmentTimingInfo(videoSegmentTimingInfo) {
12088 videoSegmentTimingInfoFn(videoSegmentTimingInfo);
12089 },
12090 onAudioSegmentTimingInfo: function onAudioSegmentTimingInfo(audioSegmentTimingInfo) {
12091 audioSegmentTimingInfoFn(audioSegmentTimingInfo);
12092 },
12093 onId3: function onId3(id3Frames, dispatchType) {
12094 id3Fn(segment, id3Frames, dispatchType);
12095 },
12096 onCaptions: function onCaptions(captions) {
12097 captionsFn(segment, [captions]);
12098 },
12099 // if this is a partial transmux, the end of the timeline has not yet been reached
12100 // until the last part of the segment is processed (at which point isPartial will
12101 // be false)
12102 isEndOfTimeline: isEndOfTimeline && !isPartial,
12103 onEndedTimeline: function onEndedTimeline() {
12104 endedTimelineFn();
12105 },
12106 onDone: function onDone(result) {
12107 // To handle partial appends, there won't be a done function passed in (since
12108 // there's still, potentially, more segment data to process), so there's nothing to do.
12109 if (!doneFn || isPartial) {
12110 return;
12111 }
12112
12113 result.type = result.type === 'combined' ? 'video' : result.type;
12114 doneFn(null, segment, result);
12115 }
12116 });
12117};
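/*
 * Illustrative sketch, not part of the library: how a caller might wire up
 * the callback bundle that `transmuxAndNotify` expects. `mySegment`,
 * `segmentBytes`, and every callback body below are hypothetical
 * placeholders; the signatures mirror how the callbacks are invoked above.
 *
 *   transmuxAndNotify({
 *     segment: mySegment,   // must carry a `transmuxer` worker reference
 *     bytes: segmentBytes,  // ArrayBuffer of TS/AAC data
 *     isPartial: false,
 *     trackInfoFn: function (seg, info) { console.log('tracks', info); },
 *     timingInfoFn: function (seg, mediaType, timeType, time) {
 *       console.log(mediaType, timeType, time);
 *     },
 *     videoSegmentTimingInfoFn: function (info) {},
 *     audioSegmentTimingInfoFn: function (info) {},
 *     id3Fn: function (seg, frames, dispatchType) {},
 *     captionsFn: function (seg, captions) {},
 *     isEndOfTimeline: false,
 *     endedTimelineFn: function () {},
 *     dataFn: function (seg, result) { console.log('data', result.type); },
 *     doneFn: function (err, seg, result) { console.log('done', err); }
 *   });
 */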
12118
12119var handleSegmentBytes = function handleSegmentBytes(_ref4) {
12120 var segment = _ref4.segment,
12121 bytes = _ref4.bytes,
12122 isPartial = _ref4.isPartial,
12123 trackInfoFn = _ref4.trackInfoFn,
12124 timingInfoFn = _ref4.timingInfoFn,
12125 videoSegmentTimingInfoFn = _ref4.videoSegmentTimingInfoFn,
12126 audioSegmentTimingInfoFn = _ref4.audioSegmentTimingInfoFn,
12127 id3Fn = _ref4.id3Fn,
12128 captionsFn = _ref4.captionsFn,
12129 isEndOfTimeline = _ref4.isEndOfTimeline,
12130 endedTimelineFn = _ref4.endedTimelineFn,
12131 dataFn = _ref4.dataFn,
12132 doneFn = _ref4.doneFn;
12133 var bytesAsUint8Array = new Uint8Array(bytes); // TODO:
12134 // We should have a handler that fetches the number of bytes required
12135 // to check if something is fmp4. This will allow us to save bandwidth
12136 // because we can only blacklist a playlist and abort requests
12137 // by codec after trackinfo triggers.
12138
12139 if (containers.isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
12140 segment.isFmp4 = true;
12141 var tracks = segment.map.tracks;
12142 var trackInfo = {
12143 isFmp4: true,
12144 hasVideo: !!tracks.video,
12145 hasAudio: !!tracks.audio
12146 }; // if we have an audio track, with a codec that is not set to
12147 // encrypted audio
12148
12149 if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
12150 trackInfo.audioCodec = tracks.audio.codec;
12151 } // if we have a video track, with a codec that is not set to
12152 // encrypted video
12153
12154
12155 if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
12156 trackInfo.videoCodec = tracks.video.codec;
12157 }
12158
12159 if (tracks.video && tracks.audio) {
12160 trackInfo.isMuxed = true;
12161 } // since we don't support appending fmp4 data on progress, we know we have the full
12162 // segment here
12163
12164
12165 trackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start
12166 // time. The end time can be roughly calculated by the receiver using the duration.
12167 //
12168 // Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
12169 // that is the true start of the segment (where the playback engine should begin
12170 // decoding).
12171
12172 var timingInfo = mp4probe__default['default'].startTime(segment.map.timescales, bytesAsUint8Array);
12173
12174 if (trackInfo.hasAudio && !trackInfo.isMuxed) {
12175 timingInfoFn(segment, 'audio', 'start', timingInfo);
12176 }
12177
12178 if (trackInfo.hasVideo) {
12179 timingInfoFn(segment, 'video', 'start', timingInfo);
12180 }
12181
12182 var finishLoading = function finishLoading(captions) {
12183 // if the track still has audio at this point it is only possible
12184 // for it to be audio only. See `tracks.video && tracks.audio` if statement
12185 // above.
12186 // we make sure to use segment.bytes here, as the bytes may have been reassigned when ownership was transferred back from the transmuxer worker
12187 dataFn(segment, {
12188 data: bytes,
12189 type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
12190 });
12191
12192 if (captions && captions.length) {
12193 captionsFn(segment, captions);
12194 }
12195
12196 doneFn(null, segment, {});
12197 }; // Run through the CaptionParser in case there are captions.
12198 // Initialize the CaptionParser if it hasn't been initialized yet
12199
12200
12201 if (!tracks.video || !bytes.byteLength || !segment.transmuxer) {
12202 finishLoading();
12203 return;
12204 }
12205
12206 var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
12207 var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
12208
12209 var listenForCaptions = function listenForCaptions(event) {
12210 if (event.data.action !== 'mp4Captions') {
12211 return;
12212 }
12213
12214 segment.transmuxer.removeEventListener('message', listenForCaptions);
12215 var data = event.data.data; // transfer ownership of bytes back to us.
12216
12217 segment.bytes = bytes = new Uint8Array(data, data.byteOffset || 0, data.byteLength);
12218 finishLoading(event.data.captions);
12219 };
12220
12221 segment.transmuxer.addEventListener('message', listenForCaptions); // transfer ownership of bytes to worker.
12222
12223 segment.transmuxer.postMessage({
12224 action: 'pushMp4Captions',
12225 timescales: segment.map.timescales,
12226 trackIds: [tracks.video.id],
12227 data: buffer,
12228 byteOffset: byteOffset,
12229 byteLength: bytes.byteLength
12230 }, [buffer]);
12231 return;
12232 } // VTT or other segments that don't need processing
12233
12234
12235 if (!segment.transmuxer) {
12236 doneFn(null, segment, {});
12237 return;
12238 }
12239
12240 if (typeof segment.container === 'undefined') {
12241 segment.container = containers.detectContainerForBytes(bytesAsUint8Array);
12242 }
12243
12244 if (segment.container !== 'ts' && segment.container !== 'aac') {
12245 trackInfoFn(segment, {
12246 hasAudio: false,
12247 hasVideo: false
12248 });
12249 doneFn(null, segment, {});
12250 return;
12251 } // ts or aac
12252
12253
12254 transmuxAndNotify({
12255 segment: segment,
12256 bytes: bytes,
12257 isPartial: isPartial,
12258 trackInfoFn: trackInfoFn,
12259 timingInfoFn: timingInfoFn,
12260 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
12261 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
12262 id3Fn: id3Fn,
12263 captionsFn: captionsFn,
12264 isEndOfTimeline: isEndOfTimeline,
12265 endedTimelineFn: endedTimelineFn,
12266 dataFn: dataFn,
12267 doneFn: doneFn
12268 });
12269};
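/*
 * Illustrative sketch (assumption, not library code): the same container
 * checks that drive the dispatch above can be exercised standalone. The
 * `someArrayBuffer` input is hypothetical.
 *
 *   var probeBytes = new Uint8Array(someArrayBuffer);
 *
 *   if (containers.isLikelyFmp4MediaSegment(probeBytes)) {
 *     // fmp4 path: mp4 probe for the start time, worker-based caption parsing
 *   } else {
 *     var container = containers.detectContainerForBytes(probeBytes);
 *     // 'ts' and 'aac' go to the transmuxer; anything else is passed
 *     // through with no audio/video track info
 *   }
 */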
12270/**
12271 * Decrypt the segment via the decryption web worker
12272 *
12273 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
12274 * routines
12275 * @param {Object} segment - a simplified copy of the segmentInfo object
12276 * from SegmentLoader
12277 * @param {Function} trackInfoFn - a callback that receives track info
12278 * @param {Function} timingInfoFn - a callback that receives timing info
12279 * @param {Function} videoSegmentTimingInfoFn
12280 * a callback that receives video timing info based on media times and
12281 * any adjustments made by the transmuxer
12282 * @param {Function} audioSegmentTimingInfoFn
12283 * a callback that receives audio timing info based on media times and
12284 * any adjustments made by the transmuxer
12285 * @param {boolean} isEndOfTimeline
12286 * true if this segment represents the last segment in a timeline
12287 * @param {Function} endedTimelineFn
12288 * a callback made when a timeline is ended, will only be called if
12289 * isEndOfTimeline is true
12290 * @param {Function} dataFn - a callback that is executed when segment bytes are available
12291 * and ready to use
12292 * @param {Function} doneFn - a callback that is executed after decryption has completed
12293 */
12294
12295
12296var decryptSegment = function decryptSegment(_ref5) {
12297 var decryptionWorker = _ref5.decryptionWorker,
12298 segment = _ref5.segment,
12299 trackInfoFn = _ref5.trackInfoFn,
12300 timingInfoFn = _ref5.timingInfoFn,
12301 videoSegmentTimingInfoFn = _ref5.videoSegmentTimingInfoFn,
12302 audioSegmentTimingInfoFn = _ref5.audioSegmentTimingInfoFn,
12303 id3Fn = _ref5.id3Fn,
12304 captionsFn = _ref5.captionsFn,
12305 isEndOfTimeline = _ref5.isEndOfTimeline,
12306 endedTimelineFn = _ref5.endedTimelineFn,
12307 dataFn = _ref5.dataFn,
12308 doneFn = _ref5.doneFn;
12309
12310 var decryptionHandler = function decryptionHandler(event) {
12311 if (event.data.source === segment.requestId) {
12312 decryptionWorker.removeEventListener('message', decryptionHandler);
12313 var decrypted = event.data.decrypted;
12314 segment.bytes = new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength);
12315 handleSegmentBytes({
12316 segment: segment,
12317 bytes: segment.bytes,
12318 isPartial: false,
12319 trackInfoFn: trackInfoFn,
12320 timingInfoFn: timingInfoFn,
12321 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
12322 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
12323 id3Fn: id3Fn,
12324 captionsFn: captionsFn,
12325 isEndOfTimeline: isEndOfTimeline,
12326 endedTimelineFn: endedTimelineFn,
12327 dataFn: dataFn,
12328 doneFn: doneFn
12329 });
12330 }
12331 };
12332
12333 decryptionWorker.addEventListener('message', decryptionHandler);
12334 var keyBytes;
12335
12336 if (segment.key.bytes.slice) {
12337 keyBytes = segment.key.bytes.slice();
12338 } else {
12339 keyBytes = new Uint32Array(Array.prototype.slice.call(segment.key.bytes));
12340 } // this is an encrypted segment
12341 // incrementally decrypt the segment
12342
12343
12344 decryptionWorker.postMessage(createTransferableMessage({
12345 source: segment.requestId,
12346 encrypted: segment.encryptedBytes,
12347 key: keyBytes,
12348 iv: segment.key.iv
12349 }), [segment.encryptedBytes.buffer, keyBytes.buffer]);
12350};
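/*
 * Illustrative sketch: the key-byte normalization above copies the key so
 * the copy's underlying buffer can be transferred to the worker without
 * detaching the loader's own key bytes. `rawKeyBytes` is hypothetical.
 *
 *   var rawKeyBytes = new Uint32Array([0x01, 0x02, 0x03, 0x04]);
 *   var keyBytes = rawKeyBytes.slice ?
 *     rawKeyBytes.slice() :
 *     new Uint32Array(Array.prototype.slice.call(rawKeyBytes));
 *   // keyBytes.buffer may now be listed in postMessage's transfer list
 */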
12351/**
12352 * This function waits for all XHRs to finish (with either success or failure)
12353 * before continuing processing via its callback. The function gathers errors
12354 * from each request into a single errors array so that the error status for
12355 * each request can be examined later.
12356 *
12357 * @param {Object} activeXhrs - an object that tracks all XHR requests
12358 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
12359 * routines
12360 * @param {Function} trackInfoFn - a callback that receives track info
12361 * @param {Function} timingInfoFn - a callback that receives timing info
12362 * @param {Function} videoSegmentTimingInfoFn
12363 * a callback that receives video timing info based on media times and
12364 * any adjustments made by the transmuxer
12365 * @param {Function} audioSegmentTimingInfoFn
12366 * a callback that receives audio timing info based on media times and
12367 * any adjustments made by the transmuxer
12368 * @param {Function} id3Fn - a callback that receives ID3 metadata
12369 * @param {Function} captionsFn - a callback that receives captions
12370 * @param {boolean} isEndOfTimeline
12371 * true if this segment represents the last segment in a timeline
12372 * @param {Function} endedTimelineFn
12373 * a callback made when a timeline is ended, will only be called if
12374 * isEndOfTimeline is true
12375 * @param {Function} dataFn - a callback that is executed when segment bytes are available
12376 * and ready to use
12377 * @param {Function} doneFn - a callback that is executed after all resources have been
12378 * downloaded and any decryption completed
12379 */
12380
12381
12382var waitForCompletion = function waitForCompletion(_ref6) {
12383 var activeXhrs = _ref6.activeXhrs,
12384 decryptionWorker = _ref6.decryptionWorker,
12385 trackInfoFn = _ref6.trackInfoFn,
12386 timingInfoFn = _ref6.timingInfoFn,
12387 videoSegmentTimingInfoFn = _ref6.videoSegmentTimingInfoFn,
12388 audioSegmentTimingInfoFn = _ref6.audioSegmentTimingInfoFn,
12389 id3Fn = _ref6.id3Fn,
12390 captionsFn = _ref6.captionsFn,
12391 isEndOfTimeline = _ref6.isEndOfTimeline,
12392 endedTimelineFn = _ref6.endedTimelineFn,
12393 dataFn = _ref6.dataFn,
12394 doneFn = _ref6.doneFn;
12395 var count = 0;
12396 var didError = false;
12397 return function (error, segment) {
12398 if (didError) {
12399 return;
12400 }
12401
12402 if (error) {
12403 didError = true; // If there are errors, we have to abort any outstanding requests
12404
12405 abortAll(activeXhrs); // Even though the requests above are aborted, and in theory we could wait until we
12406 // handle the aborted events from those requests, there are some cases where we may
12407 // never get an aborted event. For instance, if the network connection is lost and
12408 // there were two requests, the first may have triggered an error immediately, while
12409 // the second request remains unsent. In that case, the aborted algorithm will not
12410 // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
12411 //
12412 // We also can't rely on the ready state of the XHR, since the request that
12413 // triggered the connection error may also show as a ready state of 0 (unsent).
12414 // Therefore, we have to finish this group of requests immediately after the first
12415 // seen error.
12416
12417 return doneFn(error, segment);
12418 }
12419
12420 count += 1;
12421
12422 if (count === activeXhrs.length) {
12423 // Keep track of when *all* of the requests have completed
12424 segment.endOfAllRequests = Date.now();
12425
12426 if (segment.encryptedBytes) {
12427 return decryptSegment({
12428 decryptionWorker: decryptionWorker,
12429 segment: segment,
12430 trackInfoFn: trackInfoFn,
12431 timingInfoFn: timingInfoFn,
12432 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
12433 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
12434 id3Fn: id3Fn,
12435 captionsFn: captionsFn,
12436 isEndOfTimeline: isEndOfTimeline,
12437 endedTimelineFn: endedTimelineFn,
12438 dataFn: dataFn,
12439 doneFn: doneFn
12440 });
12441 } // Otherwise, everything is ready, so just continue
12442
12443
12444 handleSegmentBytes({
12445 segment: segment,
12446 bytes: segment.bytes,
12447 isPartial: false,
12448 trackInfoFn: trackInfoFn,
12449 timingInfoFn: timingInfoFn,
12450 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
12451 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
12452 id3Fn: id3Fn,
12453 captionsFn: captionsFn,
12454 isEndOfTimeline: isEndOfTimeline,
12455 endedTimelineFn: endedTimelineFn,
12456 dataFn: dataFn,
12457 doneFn: doneFn
12458 });
12459 }
12460 };
12461};
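/*
 * Illustrative sketch of the join pattern used by `waitForCompletion`
 * (a restatement for clarity, not library code): one shared callback counts
 * completions, fails fast on the first error, and fires `done` exactly once
 * after every request has reported in.
 *
 *   var joinRequests = function (total, done) {
 *     var count = 0;
 *     var didError = false;
 *     return function (error, result) {
 *       if (didError) {
 *         return;
 *       }
 *       if (error) {
 *         didError = true;
 *         return done(error);
 *       }
 *       count += 1;
 *       if (count === total) {
 *         done(null, result);
 *       }
 *     };
 *   };
 */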
12462/**
12463 * Calls the abort callback if any request within the batch was aborted. Will only call
12464 * the callback once per batch of requests, even if multiple were aborted.
12465 *
12466 * @param {Object} loadendState - state to check to see if the abort function was called
12467 * @param {Function} abortFn - callback to call for abort
12468 */
12469
12470
12471var handleLoadEnd = function handleLoadEnd(_ref7) {
12472 var loadendState = _ref7.loadendState,
12473 abortFn = _ref7.abortFn;
12474 return function (event) {
12475 var request = event.target;
12476
12477 if (request.aborted && abortFn && !loadendState.calledAbortFn) {
12478 abortFn();
12479 loadendState.calledAbortFn = true;
12480 }
12481 };
12482};
12483/**
12484 * Simple progress event callback handler that gathers some stats before
12485 * executing a provided callback with the `segment` object
12486 *
12487 * @param {Object} segment - a simplified copy of the segmentInfo object
12488 * from SegmentLoader
12489 * @param {Function} progressFn - a callback that is executed each time a progress event
12490 * is received
12491 * @param {Function} trackInfoFn - a callback that receives track info
12492 * @param {Function} timingInfoFn - a callback that receives timing info
12493 * @param {Function} videoSegmentTimingInfoFn
12494 * a callback that receives video timing info based on media times and
12495 * any adjustments made by the transmuxer
12496 * @param {Function} audioSegmentTimingInfoFn
12497 * a callback that receives audio timing info based on media times and
12498 * any adjustments made by the transmuxer
12499 * @param {boolean} isEndOfTimeline
12500 * true if this segment represents the last segment in a timeline
12501 * @param {Function} endedTimelineFn
12502 * a callback made when a timeline is ended, will only be called if
12503 * isEndOfTimeline is true
12504 * @param {Function} dataFn - a callback that is executed when segment bytes are available
12505 * and ready to use
12506 * @param {Event} event - the progress event object from XMLHttpRequest
12507 */
12508
12509
12510var handleProgress = function handleProgress(_ref8) {
12511 var segment = _ref8.segment,
12512 progressFn = _ref8.progressFn,
12513 trackInfoFn = _ref8.trackInfoFn,
12514 timingInfoFn = _ref8.timingInfoFn,
12515 videoSegmentTimingInfoFn = _ref8.videoSegmentTimingInfoFn,
12516 audioSegmentTimingInfoFn = _ref8.audioSegmentTimingInfoFn,
12517 id3Fn = _ref8.id3Fn,
12518 captionsFn = _ref8.captionsFn,
12519 isEndOfTimeline = _ref8.isEndOfTimeline,
12520 endedTimelineFn = _ref8.endedTimelineFn,
12521 dataFn = _ref8.dataFn,
12522 handlePartialData = _ref8.handlePartialData;
12523 return function (event) {
12524 var request = event.target;
12525
12526 if (request.aborted) {
12527 return;
12528 } // don't support encrypted segments or fmp4 for now
12529
12530
12531 if (handlePartialData && !segment.key && // although responseText "should" exist, this guard serves to prevent an error being
12532 // thrown on the next check for two primary cases:
12533 // 1. the mime type override stops working, or is not implemented for a specific
12534 // browser
12535 // 2. when using mock XHR libraries like sinon that do not allow the override behavior
12536 request.responseText && // in order to determine if it's an fmp4 we need at least 8 bytes
12537 request.responseText.length >= 8) {
12538 var newBytes = stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));
12539
12540 if (segment.lastReachedChar || !containers.isLikelyFmp4MediaSegment(new Uint8Array(newBytes))) {
12541 segment.lastReachedChar = request.responseText.length;
12542 handleSegmentBytes({
12543 segment: segment,
12544 bytes: newBytes,
12545 isPartial: true,
12546 trackInfoFn: trackInfoFn,
12547 timingInfoFn: timingInfoFn,
12548 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
12549 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
12550 id3Fn: id3Fn,
12551 captionsFn: captionsFn,
12552 isEndOfTimeline: isEndOfTimeline,
12553 endedTimelineFn: endedTimelineFn,
12554 dataFn: dataFn
12555 });
12556 }
12557 }
12558
12559 segment.stats = videojs__default['default'].mergeOptions(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of data
12560
12561 if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
12562 segment.stats.firstBytesReceivedAt = Date.now();
12563 }
12564
12565 return progressFn(event, segment);
12566 };
12567};
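/*
 * Illustrative sketch (hypothetical state, not library code): partial
 * appends track how far into `responseText` the loader has read, so each
 * progress event only converts the unread tail of the response.
 *
 *   var lastReachedChar = 0;
 *   var onProgress = function (request) {
 *     var newText = request.responseText.substring(lastReachedChar);
 *     lastReachedChar = request.responseText.length;
 *     // stringToArrayBuffer(newText) is then handed to the transmuxer
 *   };
 */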
12568/**
12569 * Loads all resources and does any processing necessary for a media-segment
12570 *
12571 * Features:
12572 * decrypts the media-segment if it has a key uri and an iv
12573 * aborts *all* requests if *any* one request fails
12574 *
12575 * The segment object, at minimum, has the following format:
12576 * {
12577 * resolvedUri: String,
12578 * [transmuxer]: Object,
12579 * [byterange]: {
12580 * offset: Number,
12581 * length: Number
12582 * },
12583 * [key]: {
12584 * resolvedUri: String
12585 * [byterange]: {
12586 * offset: Number,
12587 * length: Number
12588 * },
12589 * iv: {
12590 * bytes: Uint32Array
12591 * }
12592 * },
12593 * [map]: {
12594 * resolvedUri: String,
12595 * [byterange]: {
12596 * offset: Number,
12597 * length: Number
12598 * },
12599 * [bytes]: Uint8Array
12600 * }
12601 * }
12602 * ...where [name] denotes optional properties
12603 *
12604 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
12605 * @param {Object} xhrOptions - the base options to provide to all xhr requests
12606 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
12607 * decryption routines
12608 * @param {Object} segment - a simplified copy of the segmentInfo object
12609 * from SegmentLoader
12610 * @param {Function} abortFn - a callback called (only once) if any piece of a request was
12611 * aborted
12612 * @param {Function} progressFn - a callback that receives progress events from the main
12613 * segment's xhr request
12614 * @param {Function} trackInfoFn - a callback that receives track info
12615 * @param {Function} timingInfoFn - a callback that receives timing info
12616 * @param {Function} videoSegmentTimingInfoFn
12617 * a callback that receives video timing info based on media times and
12618 * any adjustments made by the transmuxer
12619 * @param {Function} audioSegmentTimingInfoFn
12620 * a callback that receives audio timing info based on media times and
12621 * any adjustments made by the transmuxer
12622 * @param {Function} id3Fn - a callback that receives ID3 metadata
12623 * @param {Function} captionsFn - a callback that receives captions
12624 * @param {boolean} isEndOfTimeline
12625 * true if this segment represents the last segment in a timeline
12626 * @param {Function} endedTimelineFn
12627 * a callback made when a timeline is ended, will only be called if
12628 * isEndOfTimeline is true
12629 * @param {Function} dataFn - a callback that receives data from the main segment's xhr
12630 * request, transmuxed if needed
12631 * @param {Function} doneFn - a callback that is executed only once all requests have
12632 * succeeded or failed
12633 * @return {Function} a function that, when invoked, immediately aborts all
12634 * outstanding requests
12635 */
12636
12637
12638var mediaSegmentRequest = function mediaSegmentRequest(_ref9) {
12639 var xhr = _ref9.xhr,
12640 xhrOptions = _ref9.xhrOptions,
12641 decryptionWorker = _ref9.decryptionWorker,
12642 segment = _ref9.segment,
12643 abortFn = _ref9.abortFn,
12644 progressFn = _ref9.progressFn,
12645 trackInfoFn = _ref9.trackInfoFn,
12646 timingInfoFn = _ref9.timingInfoFn,
12647 videoSegmentTimingInfoFn = _ref9.videoSegmentTimingInfoFn,
12648 audioSegmentTimingInfoFn = _ref9.audioSegmentTimingInfoFn,
12649 id3Fn = _ref9.id3Fn,
12650 captionsFn = _ref9.captionsFn,
12651 isEndOfTimeline = _ref9.isEndOfTimeline,
12652 endedTimelineFn = _ref9.endedTimelineFn,
12653 dataFn = _ref9.dataFn,
12654 doneFn = _ref9.doneFn,
12655 handlePartialData = _ref9.handlePartialData;
12656 var activeXhrs = [];
12657 var finishProcessingFn = waitForCompletion({
12658 activeXhrs: activeXhrs,
12659 decryptionWorker: decryptionWorker,
12660 trackInfoFn: trackInfoFn,
12661 timingInfoFn: timingInfoFn,
12662 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
12663 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
12664 id3Fn: id3Fn,
12665 captionsFn: captionsFn,
12666 isEndOfTimeline: isEndOfTimeline,
12667 endedTimelineFn: endedTimelineFn,
12668 dataFn: dataFn,
12669 doneFn: doneFn
12670 }); // optionally, request the decryption key
12671
12672 if (segment.key && !segment.key.bytes) {
12673 var keyRequestOptions = videojs__default['default'].mergeOptions(xhrOptions, {
12674 uri: segment.key.resolvedUri,
12675 responseType: 'arraybuffer'
12676 });
12677 var keyRequestCallback = handleKeyResponse(segment, finishProcessingFn);
12678 var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
12679 activeXhrs.push(keyXhr);
12680 } // optionally, request the associated media init segment
12681
12682
12683 if (segment.map && !segment.map.bytes) {
12684 var initSegmentOptions = videojs__default['default'].mergeOptions(xhrOptions, {
12685 uri: segment.map.resolvedUri,
12686 responseType: 'arraybuffer',
12687 headers: segmentXhrHeaders(segment.map)
12688 });
12689 var initSegmentRequestCallback = handleInitSegmentResponse({
12690 segment: segment,
12691 finishProcessingFn: finishProcessingFn
12692 });
12693 var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
12694 activeXhrs.push(initSegmentXhr);
12695 }
12696
12697 var segmentRequestOptions = videojs__default['default'].mergeOptions(xhrOptions, {
12698 uri: segment.resolvedUri,
12699 responseType: 'arraybuffer',
12700 headers: segmentXhrHeaders(segment)
12701 });
12702
12703 if (handlePartialData) {
12704 // setting to text is required for partial responses
12705 // conversion to ArrayBuffer happens later
12706 segmentRequestOptions.responseType = 'text';
12707
12708 segmentRequestOptions.beforeSend = function (xhrObject) {
12709 // XHR binary charset opt by Marcus Granado 2006 [http://mgran.blogspot.com]
12710 // makes the browser pass through the "text" unparsed
12711 xhrObject.overrideMimeType('text/plain; charset=x-user-defined');
12712 };
12713 }
12714
12715 var segmentRequestCallback = handleSegmentResponse({
12716 segment: segment,
12717 finishProcessingFn: finishProcessingFn,
12718 responseType: segmentRequestOptions.responseType
12719 });
12720 var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
12721 segmentXhr.addEventListener('progress', handleProgress({
12722 segment: segment,
12723 progressFn: progressFn,
12724 trackInfoFn: trackInfoFn,
12725 timingInfoFn: timingInfoFn,
12726 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
12727 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
12728 id3Fn: id3Fn,
12729 captionsFn: captionsFn,
12730 isEndOfTimeline: isEndOfTimeline,
12731 endedTimelineFn: endedTimelineFn,
12732 dataFn: dataFn,
12733 handlePartialData: handlePartialData
12734 }));
12735 activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but should not make callbacks
12736 // multiple times, provide a shared state object
12737
12738 var loadendState = {};
12739 activeXhrs.forEach(function (activeXhr) {
12740 activeXhr.addEventListener('loadend', handleLoadEnd({
12741 loadendState: loadendState,
12742 abortFn: abortFn
12743 }));
12744 });
12745 return function () {
12746 return abortAll(activeXhrs);
12747 };
12748};
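/*
 * Illustrative usage sketch (every argument below is a hypothetical
 * placeholder): the return value of `mediaSegmentRequest` is a function
 * that aborts the whole batch of key/init/segment requests.
 *
 *   var abort = mediaSegmentRequest({
 *     xhr: videojs__default['default'].xhr,
 *     xhrOptions: { timeout: 45 * 1000 },
 *     decryptionWorker: myDecryptionWorker,
 *     segment: mySegment,
 *     abortFn: function () {},
 *     progressFn: function (event, seg) {},
 *     trackInfoFn: function (seg, info) {},
 *     timingInfoFn: function (seg, mediaType, timeType, time) {},
 *     videoSegmentTimingInfoFn: function (info) {},
 *     audioSegmentTimingInfoFn: function (info) {},
 *     id3Fn: function (seg, frames, dispatchType) {},
 *     captionsFn: function (seg, captions) {},
 *     isEndOfTimeline: false,
 *     endedTimelineFn: function () {},
 *     dataFn: function (seg, result) {},
 *     doneFn: function (err, seg, result) {},
 *     handlePartialData: false
 *   });
 *   // later, e.g. on a seek: abort();
 */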
12749
12750/**
12751 * @file - codecs.js - Handles tasks regarding codec strings, such as translating
12752 * codecs to codec strings, or translating codec strings into objects that can be examined.
12753 */
12754var logFn = logger('CodecUtils');
12755/**
12756 * Returns a set of codec strings parsed from the playlist or the default
12757 * codec strings if no codecs were specified in the playlist
12758 *
12759 * @param {Playlist} media the current media playlist
12760 * @return {Object} an object with the video and audio codecs
12761 */
12762
12763var getCodecs = function getCodecs(media) {
12764 // if the codecs were explicitly specified, use them instead of the
12765 // defaults
12766 var mediaAttributes = media.attributes || {};
12767
12768 if (mediaAttributes.CODECS) {
12769 return codecs_js.parseCodecs(mediaAttributes.CODECS);
12770 }
12771};
12772
12773var isMaat = function isMaat(master, media) {
12774 var mediaAttributes = media.attributes || {};
12775 return master && master.mediaGroups && master.mediaGroups.AUDIO && mediaAttributes.AUDIO && master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
12776};
12777var isMuxed = function isMuxed(master, media) {
12778 if (!isMaat(master, media)) {
12779 return true;
12780 }
12781
12782 var mediaAttributes = media.attributes || {};
12783 var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
12784
12785 for (var groupId in audioGroup) {
12786 // If an audio group has a URI (the case for HLS, as HLS will use external playlists),
12787 // or there are listed playlists (the case for DASH, as the manifest will have already
12788 // provided all of the details necessary to generate the audio playlist, as opposed to
12789 // HLS' externally requested playlists), then the content is demuxed.
12790 if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
12791 return true;
12792 }
12793 }
12794
12795 return false;
12796};
12797var unwrapCodecList = function unwrapCodecList(codecList) {
12798 var codecs = {};
12799 codecList.forEach(function (_ref) {
12800 var mediaType = _ref.mediaType,
12801 type = _ref.type,
12802 details = _ref.details;
12803 codecs[mediaType] = codecs[mediaType] || [];
12804 codecs[mediaType].push(codecs_js.translateLegacyCodec("" + type + details));
12805 });
12806 Object.keys(codecs).forEach(function (mediaType) {
12807 if (codecs[mediaType].length > 1) {
12808 logFn("multiple " + mediaType + " codecs found as attributes: " + codecs[mediaType].join(', ') + ". Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.");
12809 codecs[mediaType] = null;
12810 return;
12811 }
12812
12813 codecs[mediaType] = codecs[mediaType][0];
12814 });
12815 return codecs;
12816};
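/*
 * Illustrative input/output sketch for `unwrapCodecList` (values are
 * hypothetical): parsed codec entries are folded into one string per media
 * type, and a media type carrying multiple codecs is nulled out so the real
 * codecs can be probed from segments instead.
 *
 *   unwrapCodecList([
 *     { mediaType: 'video', type: 'avc1', details: '.64001f' },
 *     { mediaType: 'audio', type: 'mp4a', details: '.40.2' }
 *   ]);
 *   // => { video: 'avc1.64001f', audio: 'mp4a.40.2' }
 */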
12817var codecCount = function codecCount(codecObj) {
12818 var count = 0;
12819
12820 if (codecObj.audio) {
12821 count++;
12822 }
12823
12824 if (codecObj.video) {
12825 count++;
12826 }
12827
12828 return count;
12829};
12830/**
12831 * Calculates the codec strings for a working configuration of
12832 * SourceBuffers to play variant streams in a master playlist. If
12833 * there is no possible working configuration, an empty object will be
12834 * returned.
12835 *
12836 * @param {Object} master the m3u8 object for the master playlist
12837 * @param {Object} media the m3u8 object for the variant playlist
12838 * @return {Object} the codec strings.
12839 *
12840 * @private
12841 */
12842
12843var codecsForPlaylist = function codecsForPlaylist(master, media) {
12844 var mediaAttributes = media.attributes || {};
12845 var codecInfo = unwrapCodecList(getCodecs(media) || []); // HLS with multiple-audio tracks must always get an audio codec.
12846 // Put another way, there is no way to have a video-only multiple-audio HLS!
12847
12848 if (isMaat(master, media) && !codecInfo.audio) {
12849 if (!isMuxed(master, media)) {
12850 // It is possible for codecs to be specified on the audio media group playlist but
12851 // not on the rendition playlist. This is mostly the case for DASH, where audio and
12852 // video are always separate (and separately specified).
12853 var defaultCodecs = unwrapCodecList(codecs_js.codecsFromDefault(master, mediaAttributes.AUDIO) || []);
12854
12855 if (defaultCodecs.audio) {
12856 codecInfo.audio = defaultCodecs.audio;
12857 }
12858 }
12859 }
12860
12861 return codecInfo;
12862};
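/*
 * Illustrative usage (hypothetical manifest objects): given a parsed master
 * manifest and one of its variant playlists, the result carries whatever
 * codec strings could be resolved.
 *
 *   var codecs = codecsForPlaylist(masterManifest, masterManifest.playlists[0]);
 *   // e.g. { video: 'avc1.64001f', audio: 'mp4a.40.2' }, or an object with
 *   // a property missing when that codec could not be determined
 */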
12863
12864var logFn$1 = logger('PlaylistSelector');
12865
12866var representationToString = function representationToString(representation) {
12867 if (!representation || !representation.playlist) {
12868 return;
12869 }
12870
12871 var playlist = representation.playlist;
12872 return JSON.stringify({
12873 id: playlist.id,
12874 bandwidth: representation.bandwidth,
12875 width: representation.width,
12876 height: representation.height,
12877 codecs: playlist.attributes && playlist.attributes.CODECS || ''
12878 });
12879}; // Utilities
12880
12881/**
12882 * Returns the CSS value for the specified property on an element
12883 * using `getComputedStyle`. Firefox has a long-standing issue where
12884 * getComputedStyle() may return null when running in an iframe with
12885 * `display: none`.
12886 *
12887 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
12888 * @param {HTMLElement} el the HTMLElement to work on
12889 * @param {string} property the property to get the style for
12890 */
12891
12892
12893var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
12894 if (!el) {
12895 return '';
12896 }
12897
12898 var result = window__default['default'].getComputedStyle(el);
12899
12900 if (!result) {
12901 return '';
12902 }
12903
12904 return result[property];
12905};
12906/**
12907 * Reusable stable sort function
12908 *
12909 * @param {Playlists} array
12910 * @param {Function} sortFn Different comparators
12911 * @function stableSort
12912 */
12913
12914
12915var stableSort = function stableSort(array, sortFn) {
12916 var newArray = array.slice();
12917 array.sort(function (left, right) {
12918 var cmp = sortFn(left, right);
12919
12920 if (cmp === 0) {
12921 return newArray.indexOf(left) - newArray.indexOf(right);
12922 }
12923
12924 return cmp;
12925 });
12926};
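/*
 * Illustrative sketch: ties keep their original relative order because the
 * comparator falls back to each element's index in the pre-sort copy.
 *
 *   var reps = [
 *     { id: 'a', bandwidth: 1e6 },
 *     { id: 'b', bandwidth: 1e6 },
 *     { id: 'c', bandwidth: 5e5 }
 *   ];
 *   stableSort(reps, function (l, r) { return l.bandwidth - r.bandwidth; });
 *   // reps (sorted in place) => ids 'c', 'a', 'b' -- 'a' still precedes 'b'
 */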
12927/**
12928 * A comparator function to sort two playlist objects by bandwidth.
12929 *
12930 * @param {Object} left a media playlist object
12931 * @param {Object} right a media playlist object
12932 * @return {number} Greater than zero if the bandwidth attribute of
12933 * left is greater than the corresponding attribute of right. Less
12934 * than zero if the bandwidth of right is greater than left and
12935 * exactly zero if the two are equal.
12936 */
12937
12938
12939var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
12940 var leftBandwidth;
12941 var rightBandwidth;
12942
12943 if (left.attributes.BANDWIDTH) {
12944 leftBandwidth = left.attributes.BANDWIDTH;
12945 }
12946
12947 leftBandwidth = leftBandwidth || window__default['default'].Number.MAX_VALUE;
12948
12949 if (right.attributes.BANDWIDTH) {
12950 rightBandwidth = right.attributes.BANDWIDTH;
12951 }
12952
12953 rightBandwidth = rightBandwidth || window__default['default'].Number.MAX_VALUE;
12954 return leftBandwidth - rightBandwidth;
12955};
12956/**
12957 * A comparator function to sort two playlist objects by resolution (width).
12958 *
12959 * @param {Object} left a media playlist object
12960 * @param {Object} right a media playlist object
12961 * @return {number} Greater than zero if the resolution.width attribute of
12962 * left is greater than the corresponding attribute of right. Less
12963 * than zero if the resolution.width of right is greater than left and
12964 * exactly zero if the two are equal.
12965 */
12966
12967var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
12968 var leftWidth;
12969 var rightWidth;
12970
12971 if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
12972 leftWidth = left.attributes.RESOLUTION.width;
12973 }
12974
12975 leftWidth = leftWidth || window__default['default'].Number.MAX_VALUE;
12976
12977 if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
12978 rightWidth = right.attributes.RESOLUTION.width;
12979 }
12980
12981 rightWidth = rightWidth || window__default['default'].Number.MAX_VALUE; // NOTE - Fall back to bandwidth sort as appropriate in cases where multiple renditions
12982 // have the same media dimensions/resolution
12983
12984 if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
12985 return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
12986 }
12987
12988 return leftWidth - rightWidth;
12989};
12990/**
12991 * Chooses the appropriate media playlist based on bandwidth and player size
12992 *
12993 * @param {Object} master
12994 * Object representation of the master manifest
12995 * @param {number} playerBandwidth
12996 * Current calculated bandwidth of the player
12997 * @param {number} playerWidth
12998 * Current width of the player element (should account for the device pixel ratio)
12999 * @param {number} playerHeight
13000 * Current height of the player element (should account for the device pixel ratio)
13001 * @param {boolean} limitRenditionByPlayerDimensions
13002 * True if the player width and height should be used during the selection, false otherwise
13003 * @return {Playlist} the highest bitrate playlist less than the
13004 * currently detected bandwidth, accounting for some amount of
13005 * bandwidth variance
13006 */
13007
13008var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions) {
13009 // If we end up getting called before `master` is available, exit early
13010 if (!master) {
13011 return;
13012 }
13013
13014 var options = {
13015 bandwidth: playerBandwidth,
13016 width: playerWidth,
13017 height: playerHeight,
13018 limitRenditionByPlayerDimensions: limitRenditionByPlayerDimensions
13019 }; // convert the playlists to an intermediary representation to make comparisons easier
13020
13021 var sortedPlaylistReps = master.playlists.map(function (playlist) {
13022 var bandwidth;
13023 var width = playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
13024 var height = playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
13025 bandwidth = playlist.attributes.BANDWIDTH;
13026 bandwidth = bandwidth || window__default['default'].Number.MAX_VALUE;
13027 return {
13028 bandwidth: bandwidth,
13029 width: width,
13030 height: height,
13031 playlist: playlist
13032 };
13033 });
13034 stableSort(sortedPlaylistReps, function (left, right) {
13035 return left.bandwidth - right.bandwidth;
13036 }); // filter out any playlists that have been excluded due to
13037 // incompatible configurations
13038
13039 sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
13040 return !Playlist.isIncompatible(rep.playlist);
13041 }); // filter out any playlists that have been disabled manually through the representations
13042 // api or blacklisted temporarily due to playback errors.
13043
13044 var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
13045 return Playlist.isEnabled(rep.playlist);
13046 });
13047
13048 if (!enabledPlaylistReps.length) {
13049 // if there are no enabled playlists, then they have all been blacklisted or disabled
13050 // by the user through the representations api. In this case, ignore blacklisting and
13051 // fall back to what the user wants by using playlists the user has not disabled.
13052 enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
13053 return !Playlist.isDisabled(rep.playlist);
13054 });
13055 } // filter out any variant that has greater effective bitrate
13056 // than the current estimated bandwidth
13057
13058
13059 var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
13060 return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
13061 });
13062 var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth
13063 // and then take the very first element
13064
13065 var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
13066 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
13067 })[0]; // if we're not going to limit renditions by player size, make an early decision.
13068
13069 if (limitRenditionByPlayerDimensions === false) {
13070 var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
13071
13072 if (_chosenRep && _chosenRep.playlist) {
13073 var type = 'sortedPlaylistReps';
13074
13075 if (bandwidthBestRep) {
13076 type = 'bandwidthBestRep';
13077 }
13078
13079 if (enabledPlaylistReps[0]) {
13080 type = 'enabledPlaylistReps';
13081 }
13082
13083 logFn$1("choosing " + representationToString(_chosenRep) + " using " + type + " with options", options);
13084 return _chosenRep.playlist;
13085 }
13086
13087 logFn$1('could not choose a playlist with options', options);
13088 return null;
13089 } // filter out playlists without resolution information
13090
13091
13092 var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
13093 return rep.width && rep.height;
13094 }); // sort variants by resolution
13095
13096 stableSort(haveResolution, function (left, right) {
13097 return left.width - right.width;
13098 }); // if we have the exact resolution as the player use it
13099
13100 var resolutionBestRepList = haveResolution.filter(function (rep) {
13101 return rep.width === playerWidth && rep.height === playerHeight;
13102 });
13103 highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that has the exact resolution
13104
13105 var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
13106 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
13107 })[0];
13108 var resolutionPlusOneList;
13109 var resolutionPlusOneSmallest;
13110 var resolutionPlusOneRep; // find the smallest variant that is larger than the player
13111 // if there is no exact resolution match
13112
13113 if (!resolutionBestRep) {
13114 resolutionPlusOneList = haveResolution.filter(function (rep) {
13115 return rep.width > playerWidth || rep.height > playerHeight;
13116 }); // find all the variants that have the same smallest resolution
13117
13118 resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
13119 return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
13120 }); // ensure that we also pick the highest bandwidth variant that
13121 // is just-larger-than the video player
13122
13123 highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
13124 resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
13125 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
13126 })[0];
13127 } // fallback chain of variants
13128
13129
13130 var chosenRep = resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
13131
13132 if (chosenRep && chosenRep.playlist) {
13133 var _type = 'sortedPlaylistReps';
13134
13135 if (resolutionPlusOneRep) {
13136 _type = 'resolutionPlusOneRep';
13137 } else if (resolutionBestRep) {
13138 _type = 'resolutionBestRep';
13139 } else if (bandwidthBestRep) {
13140 _type = 'bandwidthBestRep';
13141 } else if (enabledPlaylistReps[0]) {
13142 _type = 'enabledPlaylistReps';
13143 }
13144
13145 logFn$1("choosing " + representationToString(chosenRep) + " using " + _type + " with options", options);
13146 return chosenRep.playlist;
13147 }
13148
13149 logFn$1('could not choose a playlist with options', options);
13150 return null;
13151}; // Playlist Selectors
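/*
 * Illustrative usage (hypothetical values): choose a rendition for a 2 Mbps
 * bandwidth estimate and a 1280x720 player, letting player dimensions limit
 * the selection.
 *
 *   var chosen = simpleSelector(masterManifest, 2e6, 1280, 720, true);
 *   // `chosen` is a playlist object from masterManifest.playlists, or null
 */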
13152
13153/**
13154 * Chooses the appropriate media playlist based on the most recent
13155 * bandwidth estimate and the player size.
13156 *
13157 * Expects to be called within the context of an instance of VhsHandler
13158 *
13159 * @return {Playlist} the highest bitrate playlist less than the
13160 * currently detected bandwidth, accounting for some amount of
13161 * bandwidth variance
13162 */
13163
13164var lastBandwidthSelector = function lastBandwidthSelector() {
13165 var pixelRatio = this.useDevicePixelRatio ? window__default['default'].devicePixelRatio || 1 : 1;
13166 return simpleSelector(this.playlists.master, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions);
13167};
13168/**
13169 * Chooses the appropriate media playlist based on an
13170 * exponential-weighted moving average of the bandwidth after
13171 * filtering for player size.
13172 *
13173 * Expects to be called within the context of an instance of VhsHandler
13174 *
13175 * @param {number} decay - a number between 0 and 1. Higher values of
13176 * this parameter will cause previous bandwidth estimates to lose
13177 * significance more quickly.
13178 * @return {Function} a function which can be invoked to create a new
13179 * playlist selector function.
13180 * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
13181 */
13182
13183var movingAverageBandwidthSelector = function movingAverageBandwidthSelector(decay) {
13184 var average = -1;
13185
13186 if (decay < 0 || decay > 1) {
13187 throw new Error('Moving average bandwidth decay must be between 0 and 1.');
13188 }
13189
13190 return function () {
13191 var pixelRatio = this.useDevicePixelRatio ? window__default['default'].devicePixelRatio || 1 : 1;
13192
13193 if (average < 0) {
13194 average = this.systemBandwidth;
13195 }
13196
13197 average = decay * this.systemBandwidth + (1 - decay) * average;
13198 return simpleSelector(this.playlists.master, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions);
13199 };
13200};
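/*
 * Illustrative arithmetic for the decay above (hypothetical numbers): with
 * decay = 0.5, a previous average of 4e6, and a new systemBandwidth sample
 * of 8e6, the updated average is 0.5 * 8e6 + (1 - 0.5) * 4e6 = 6e6.
 *
 *   var selector = movingAverageBandwidthSelector(0.5);
 *   // `selector` is then invoked with a VhsHandler as `this`, e.g. after
 *   // being installed as the handler's selectPlaylist function
 */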
13201/**
13202 * Chooses the appropriate media playlist based on the potential to rebuffer
13203 *
13204 * @param {Object} settings
13205 * Object of information required to use this selector
13206 * @param {Object} settings.master
13207 * Object representation of the master manifest
13208 * @param {number} settings.currentTime
13209 * The current time of the player
13210 * @param {number} settings.bandwidth
13211 * Current measured bandwidth
13212 * @param {number} settings.duration
13213 * Duration of the media
13214 * @param {number} settings.segmentDuration
13215 * Segment duration to be used in round trip time calculations
13216 * @param {number} settings.timeUntilRebuffer
13217 * Time left in seconds until the player has to rebuffer
13218 * @param {number} settings.currentTimeline
13219 * The current timeline segments are being loaded from
13220 * @param {SyncController} settings.syncController
13221 * SyncController for determining if we have a sync point for a given playlist
13222 * @return {Object|null}
13223 * {Object} return.playlist
13224 * The highest bandwidth playlist with the least amount of rebuffering
13225 * {Number} return.rebufferingImpact
13226 * The amount of time in seconds switching to this playlist will rebuffer. A
13227 * negative value means that switching will cause zero rebuffering.
13228 */
13229
13230var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
13231 var master = settings.master,
13232 currentTime = settings.currentTime,
13233 bandwidth = settings.bandwidth,
13234 duration = settings.duration,
13235 segmentDuration = settings.segmentDuration,
13236 timeUntilRebuffer = settings.timeUntilRebuffer,
13237 currentTimeline = settings.currentTimeline,
13238 syncController = settings.syncController; // filter out any playlists that have been excluded due to
13239 // incompatible configurations
13240
13241 var compatiblePlaylists = master.playlists.filter(function (playlist) {
13242 return !Playlist.isIncompatible(playlist);
13243 }); // filter out any playlists that have been disabled manually through the representations
13244 // api or blacklisted temporarily due to playback errors.
13245
13246 var enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);
13247
13248 if (!enabledPlaylists.length) {
13249 // if there are no enabled playlists, then they have all been blacklisted or disabled
13250 // by the user through the representations api. In this case, ignore blacklisting and
13251 // fall back to what the user wants by using playlists the user has not disabled.
13252 enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
13253 return !Playlist.isDisabled(playlist);
13254 });
13255 }
13256
13257 var bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));
13258 var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
13259 var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime); // If there is no sync point for this playlist, switching to it will require a
13260 // sync request first. This will double the request time
13261
13262 var numRequests = syncPoint ? 1 : 2;
13263 var requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
13264 var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
13265 return {
13266 playlist: playlist,
13267 rebufferingImpact: rebufferingImpact
13268 };
13269 });
13270 var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
13271 return estimate.rebufferingImpact <= 0;
13272 }); // Sort by bandwidth DESC
13273
13274 stableSort(noRebufferingPlaylists, function (a, b) {
13275 return comparePlaylistBandwidth(b.playlist, a.playlist);
13276 });
13277
13278 if (noRebufferingPlaylists.length) {
13279 return noRebufferingPlaylists[0];
13280 }
13281
13282 stableSort(rebufferingEstimates, function (a, b) {
13283 return a.rebufferingImpact - b.rebufferingImpact;
13284 });
13285 return rebufferingEstimates[0] || null;
13286};
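/*
 * Illustrative arithmetic (hypothetical numbers): if a segment request is
 * estimated at 1.5s and the playlist has no sync point (numRequests = 2),
 * then with 4s of buffer left rebufferingImpact = 1.5 * 2 - 4 = -1, i.e.
 * switching to that playlist should not cause rebuffering.
 *
 *   var requestTimeEstimate = 1.5; // seconds, per request
 *   var numRequests = 2;           // sync request + segment request
 *   var timeUntilRebuffer = 4;     // seconds of buffer remaining
 *   var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
 *   // => -1
 */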
13287/**
13288 * Chooses the appropriate media playlist, which in this case is the lowest bitrate
13289 * one with video. If no renditions with video exist, return the lowest audio rendition.
13290 *
13291 * Expects to be called within the context of an instance of VhsHandler
13292 *
13293 * @return {Object|null}
13294 * {Object} return.playlist
13295 * The lowest bitrate playlist that contains a video codec. If no such rendition
13296 * exists pick the lowest audio rendition.
13297 */
13298
13299var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
13300 var _this = this;
13301
13302 // filter out any playlists that have been excluded due to
13303 // incompatible configurations or playback errors
13304 var playlists = this.playlists.master.playlists.filter(Playlist.isEnabled); // Sort ascending by bitrate
13305
13306 stableSort(playlists, function (a, b) {
13307 return comparePlaylistBandwidth(a, b);
13308 }); // Parse and assume that playlists with no video codec have no video
13309 // (this is not necessarily true, although it is generally true).
13310 //
13311 // If an entire manifest has no valid videos everything will get filtered
13312 // out.
13313
13314 var playlistsWithVideo = playlists.filter(function (playlist) {
13315 return !!codecsForPlaylist(_this.playlists.master, playlist).video;
13316 });
13317 return playlistsWithVideo[0] || null;
13318};
13319
13320/**
13321 * @file text-tracks.js
13322 */
13323/**
13324 * Create captions text tracks on video.js if they do not exist
13325 *
13326 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
13327 * @param {Object} tech the video.js tech
13328 * @param {Object} captionStream the caption stream to create
13329 * @private
13330 */
13331
13332var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStream) {
13333 if (!inbandTextTracks[captionStream]) {
13334 tech.trigger({
13335 type: 'usage',
13336 name: 'vhs-608'
13337 });
13338 tech.trigger({
13339 type: 'usage',
13340 name: 'hls-608'
13341 });
13342 var track = tech.textTracks().getTrackById(captionStream);
13343
13344 if (track) {
13345 // Reuse an existing track with a CC# id because this was
13346 // very likely created by videojs-contrib-hls from information
13347 // in the m3u8 for us to use
13348 inbandTextTracks[captionStream] = track;
13349 } else {
13350 // Otherwise, create a track with the default `CC#` label and
13351 // without a language
13352 inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
13353 kind: 'captions',
13354 id: captionStream,
13355 label: captionStream
13356 }, false).track;
13357 }
13358 }
13359};
13360/**
13361 * Add caption text track data to a source handler given an array of captions
13362 *
13363 * @param {Object}
13364 * @param {Object} inbandTextTracks the inband text tracks
13365 * @param {number} timestampOffset the timestamp offset of the source buffer
13366 * @param {Array} captionArray an array of caption data
13367 * @private
13368 */
13369
13370var addCaptionData = function addCaptionData(_ref) {
13371 var inbandTextTracks = _ref.inbandTextTracks,
13372 captionArray = _ref.captionArray,
13373 timestampOffset = _ref.timestampOffset;
13374
13375 if (!captionArray) {
13376 return;
13377 }
13378
13379 var Cue = window__default['default'].WebKitDataCue || window__default['default'].VTTCue;
13380 captionArray.forEach(function (caption) {
13381 var track = caption.stream;
13382 inbandTextTracks[track].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
13383 });
13384};
13385/**
13386 * Define properties on a cue for backwards compatibility,
13387 * but warn the user that the way that they are using it
13388 * is deprecated and will be removed at a later date.
13389 *
13390 * @param {Cue} cue the cue to add the properties on
13391 * @private
13392 */
13393
13394var deprecateOldCue = function deprecateOldCue(cue) {
13395 Object.defineProperties(cue.frame, {
13396 id: {
13397 get: function get() {
13398 videojs__default['default'].log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
13399 return cue.value.key;
13400 }
13401 },
13402 value: {
13403 get: function get() {
13404 videojs__default['default'].log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
13405 return cue.value.data;
13406 }
13407 },
13408 privateData: {
13409 get: function get() {
13410 videojs__default['default'].log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
13411 return cue.value.data;
13412 }
13413 }
13414 });
13415};
13416/**
13417 * Add metadata text track data to a source handler given an array of metadata
13418 *
13419 * @param {Object}
13420 * @param {Object} inbandTextTracks the inband text tracks
13421 * @param {Array} metadataArray an array of meta data
13422 * @param {number} timestampOffset the timestamp offset of the source buffer
13423 * @param {number} videoDuration the duration of the video
13424 * @private
13425 */
13426
13427
13428var addMetadata = function addMetadata(_ref2) {
13429 var inbandTextTracks = _ref2.inbandTextTracks,
13430 metadataArray = _ref2.metadataArray,
13431 timestampOffset = _ref2.timestampOffset,
13432 videoDuration = _ref2.videoDuration;
13433
13434 if (!metadataArray) {
13435 return;
13436 }
13437
13438 var Cue = window__default['default'].WebKitDataCue || window__default['default'].VTTCue;
13439 var metadataTrack = inbandTextTracks.metadataTrack_;
13440
13441 if (!metadataTrack) {
13442 return;
13443 }
13444
13445 metadataArray.forEach(function (metadata) {
13446 var time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,
13447 // ignore this bit of metadata.
13448 // This likely occurs when you have a non-timed ID3 tag like TIT2,
13449 // which is the "Title/Songname/Content description" frame
13450
13451 if (typeof time !== 'number' || window__default['default'].isNaN(time) || time < 0 || !(time < Infinity)) {
13452 return;
13453 }
13454
13455 metadata.frames.forEach(function (frame) {
13456 var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
13457 cue.frame = frame;
13458 cue.value = frame;
13459 deprecateOldCue(cue);
13460 metadataTrack.addCue(cue);
13461 });
13462 });
13463
13464 if (!metadataTrack.cues || !metadataTrack.cues.length) {
13465 return;
13466 } // Updating the metadata cues so that
13467 // the endTime of each cue is the startTime of the next cue
13468 // the endTime of last cue is the duration of the video
13469
13470
13471 var cues = metadataTrack.cues;
13472 var cuesArray = []; // Create a copy of the TextTrackCueList...
13473 // ...disregarding cues with a falsey value
13474
13475 for (var i = 0; i < cues.length; i++) {
13476 if (cues[i]) {
13477 cuesArray.push(cues[i]);
13478 }
13479 } // Group cues by their startTime value
13480
13481
13482 var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
13483 var timeSlot = obj[cue.startTime] || [];
13484 timeSlot.push(cue);
13485 obj[cue.startTime] = timeSlot;
13486 return obj;
13487 }, {}); // Sort startTimes by ascending order
13488
13489 var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
13490 return Number(a) - Number(b);
13491 }); // Map each cue group's endTime to the next group's startTime
13492
13493 sortedStartTimes.forEach(function (startTime, idx) {
13494 var cueGroup = cuesGroupedByStartTime[startTime];
13495 var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration; // Map each cue's endTime to the next group's startTime
13496
13497 cueGroup.forEach(function (cue) {
13498 cue.endTime = nextTime;
13499 });
13500 });
13501};
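// Example (illustrative sketch; the metadata values, offset, and duration below
// are assumptions, not taken from the library): after appending, each cue's
// endTime is stretched to the startTime of the next cue group, and the last
// cue runs to the video duration.
//
//   addMetadata({
//     inbandTextTracks: inbandTextTracks,
//     metadataArray: [
//       { cueTime: 2, frames: [{ id: 'TXXX', value: 'segment-one' }] },
//       { cueTime: 7, frames: [{ id: 'TXXX', value: 'segment-two' }] }
//     ],
//     timestampOffset: 0,
//     videoDuration: 60
//   });
//   // assuming the track held no other cues, the cue created at 2 is
//   // normalized to [2, 7] and the cue created at 7 becomes [7, 60]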
13502/**
13503 * Create metadata text track on video.js if it does not exist
13504 *
13505 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
13506 * @param {string} dispatchType the inband metadata track dispatch type
13507 * @param {Object} tech the video.js tech
13508 * @private
13509 */
13510
13511var createMetadataTrackIfNotExists = function createMetadataTrackIfNotExists(inbandTextTracks, dispatchType, tech) {
13512 if (inbandTextTracks.metadataTrack_) {
13513 return;
13514 }
13515
13516 inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
13517 kind: 'metadata',
13518 label: 'Timed Metadata'
13519 }, false).track;
13520 inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
13521};
13522/**
13523 * Remove cues from a track on video.js.
13524 *
13525 * @param {Double} start start of where we should remove the cue
13526 * @param {Double} end end of where we should remove the cue
13527 * @param {Object} track the text track to remove the cues from
13528 * @private
13529 */
13530
13531var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
13532 var i;
13533 var cue;
13534
13535 if (!track) {
13536 return;
13537 }
13538
13539 if (!track.cues) {
13540 return;
13541 }
13542
13543 i = track.cues.length;
13544
13545 while (i--) {
13546 cue = track.cues[i]; // Remove any cue within the provided start and end time
13547
13548 if (cue.startTime >= start && cue.endTime <= end) {
13549 track.removeCue(cue);
13550 }
13551 }
13552};
13553/**
13554 * Remove duplicate cues from a track on video.js (a cue is considered a
13555 * duplicate if it has the same time interval and text as another)
13556 *
13557 * @param {Object} track the text track to remove the duplicate cues from
13558 * @private
13559 */
13560
13561var removeDuplicateCuesFromTrack = function removeDuplicateCuesFromTrack(track) {
13562 var cues = track.cues;
13563
13564 if (!cues) {
13565 return;
13566 }
13567
13568 for (var i = 0; i < cues.length; i++) {
13569 var duplicates = [];
13570 var occurrences = 0;
13571
13572 for (var j = 0; j < cues.length; j++) {
13573 if (cues[i].startTime === cues[j].startTime && cues[i].endTime === cues[j].endTime && cues[i].text === cues[j].text) {
13574 occurrences++;
13575
13576 if (occurrences > 1) {
13577 duplicates.push(cues[j]);
13578 }
13579 }
13580 }
13581
13582 if (duplicates.length) {
13583 duplicates.forEach(function (dupe) {
13584 return track.removeCue(dupe);
13585 });
13586 }
13587 }
13588};
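// Example (illustrative sketch; `track` stands for any TextTrack-like object
// with `cues` and `removeCue`, an assumption for demonstration): if
// re-appending a segment produced two cues with the same interval and text,
// only the first occurrence survives.
//
//   // track.cues: [{0, 5, 'a'}, {0, 5, 'a'}, {5, 10, 'b'}]
//   removeDuplicateCuesFromTrack(track);
//   // track.cues: [{0, 5, 'a'}, {5, 10, 'b'}]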
13589
13590/**
13591 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
13592 * front of current time.
13593 *
13594 * @param {Array} buffer
13595 * The current buffer of gop information
13596 * @param {number} currentTime
13597 * The current time
13598 * @param {Double} mapping
13599 * Offset to map display time to stream presentation time
13600 * @return {Array}
13601 * List of gops considered safe to append over
13602 */
13603
13604var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
13605 if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
13606 return [];
13607 } // pts value for current time + 3 seconds to give a bit more wiggle room
13608
13609
13610 var currentTimePts = Math.ceil((currentTime - mapping + 3) * clock.ONE_SECOND_IN_TS);
13611 var i;
13612
13613 for (i = 0; i < buffer.length; i++) {
13614 if (buffer[i].pts > currentTimePts) {
13615 break;
13616 }
13617 }
13618
13619 return buffer.slice(i);
13620};
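// Worked example (illustrative sketch, assuming the 90kHz MPEG-TS clock, i.e.
// clock.ONE_SECOND_IN_TS === 90000):
//
//   var buffer = [{ pts: 90000 }, { pts: 450000 }, { pts: 900000 }];
//   gopsSafeToAlignWith(buffer, 1, 0);
//   // currentTimePts = Math.ceil((1 - 0 + 3) * 90000) = 360000, so only gops
//   // with pts > 360000 are returned: [{ pts: 450000 }, { pts: 900000 }]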
13621/**
13622 * Appends gop information (timing and byteLength) received by the transmuxer for the
13623 * gops appended in the last call to appendBuffer
13624 *
13625 * @param {Array} buffer
13626 * The current buffer of gop information
13627 * @param {Array} gops
13628 * List of new gop information
13629 * @param {boolean} replace
13630 * If true, replace the buffer with the new gop information. If false, append the
13631 * new gop information to the buffer in the right location of time.
13632 * @return {Array}
13633 * Updated list of gop information
13634 */
13635
13636var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
13637 if (!gops.length) {
13638 return buffer;
13639 }
13640
13641 if (replace) {
13642 // If we are in safe append mode, then completely overwrite the gop buffer
13643 // with the most recent appended data. This will make sure that when appending
13644 // future segments, we only try to align with gops that are both ahead of current
13645 // time and in the last segment appended.
13646 return gops.slice();
13647 }
13648
13649 var start = gops[0].pts;
13650 var i = 0;
13651
13652 for (i; i < buffer.length; i++) {
13653 if (buffer[i].pts >= start) {
13654 break;
13655 }
13656 }
13657
13658 return buffer.slice(0, i).concat(gops);
13659};
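// Example (illustrative sketch; the pts values are assumptions): in append
// mode the new gops replace everything in the buffer from their first pts
// onwards, while replace mode overwrites the buffer entirely.
//
//   var buffer = [{ pts: 0 }, { pts: 90000 }, { pts: 180000 }];
//   updateGopBuffer(buffer, [{ pts: 90000 }, { pts: 270000 }], false);
//   // => [{ pts: 0 }, { pts: 90000 }, { pts: 270000 }]
//   updateGopBuffer(buffer, [{ pts: 90000 }, { pts: 270000 }], true);
//   // => [{ pts: 90000 }, { pts: 270000 }] (safe append mode overwrites)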
13660/**
13661 * Removes gop information in buffer that overlaps with provided start and end
13662 *
13663 * @param {Array} buffer
13664 * The current buffer of gop information
13665 * @param {Double} start
13666 * position to start the remove at
13667 * @param {Double} end
13668 * position to end the remove at
13669 * @param {Double} mapping
13670 * Offset to map display time to stream presentation time
13671 */
13672
13673var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
13674 var startPts = Math.ceil((start - mapping) * clock.ONE_SECOND_IN_TS);
13675 var endPts = Math.ceil((end - mapping) * clock.ONE_SECOND_IN_TS);
13676 var updatedBuffer = buffer.slice();
13677 var i = buffer.length;
13678
13679 while (i--) {
13680 if (buffer[i].pts <= endPts) {
13681 break;
13682 }
13683 }
13684
13685 if (i === -1) {
13686 // no removal because end of remove range is before start of buffer
13687 return updatedBuffer;
13688 }
13689
13690 var j = i + 1;
13691
13692 while (j--) {
13693 if (buffer[j].pts <= startPts) {
13694 break;
13695 }
13696 } // clamp remove range start to 0 index
13697
13698
13699 j = Math.max(j, 0);
13700 updatedBuffer.splice(j, i - j + 1);
13701 return updatedBuffer;
13702};
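// Worked example (illustrative sketch, again assuming a 90kHz clock and
// mapping === 0): removing display times [1, 2] drops the gops whose pts fall
// within [90000, 180000].
//
//   var buffer = [{ pts: 0 }, { pts: 90000 }, { pts: 180000 }, { pts: 270000 }];
//   removeGopBuffer(buffer, 1, 2, 0);
//   // => [{ pts: 0 }, { pts: 270000 }] (the gop at the start boundary is
//   // removed as well)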
13703
13704var shallowEqual = function shallowEqual(a, b) {
13705 // if both are undefined
13706 // or one or the other is undefined
13707 // they are not equal
13708 if (!a && !b || !a && b || a && !b) {
13709 return false;
13710 } // they are the same object and thus, equal
13711
13712
13713 if (a === b) {
13714 return true;
13715 } // sort keys so we can make sure they have
13716 // all the same keys later.
13717
13718
13719 var akeys = Object.keys(a).sort();
13720 var bkeys = Object.keys(b).sort(); // different number of keys, not equal
13721
13722 if (akeys.length !== bkeys.length) {
13723 return false;
13724 }
13725
13726 for (var i = 0; i < akeys.length; i++) {
13727 var key = akeys[i]; // different sorted keys, not equal
13728
13729 if (key !== bkeys[i]) {
13730 return false;
13731 } // different values, not equal
13732
13733
13734 if (a[key] !== b[key]) {
13735 return false;
13736 }
13737 }
13738
13739 return true;
13740};
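// Examples (illustrative sketch): key order does not matter, but values are
// compared with strict equality and missing objects are never equal.
//
//   shallowEqual({ a: 1, b: 2 }, { b: 2, a: 1 }); // => true
//   shallowEqual({ a: 1 }, { a: '1' });           // => false (strict compare)
//   shallowEqual(undefined, undefined);           // => false (both missing)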
13741
13742var CHECK_BUFFER_DELAY = 500;
13743
13744var finite = function finite(num) {
13745 return typeof num === 'number' && isFinite(num);
13746}; // With most content hovering around 30fps, if a segment has a duration less than a half
13747// frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
13748// not accurately reflect the rest of the content.
13749
13750
13751var MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
13752var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, trackInfo) {
13753 // Although these checks could most likely cover non-'main' types as well, limiting
13754 // them to the 'main' loader narrows the scope of our checks for now.
13755 if (loaderType !== 'main' || !startingMedia || !trackInfo) {
13756 return null;
13757 }
13758
13759 if (!trackInfo.hasAudio && !trackInfo.hasVideo) {
13760 return 'Neither audio nor video found in segment.';
13761 }
13762
13763 if (startingMedia.hasVideo && !trackInfo.hasVideo) {
13764 return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
13765 }
13766
13767 if (!startingMedia.hasVideo && trackInfo.hasVideo) {
13768 return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
13769 }
13770
13771 return null;
13772};
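// Example (illustrative sketch; the track-info objects are assumptions): only
// the 'main' loader is checked, and a switch from video to audio-only content
// is flagged.
//
//   illegalMediaSwitch(
//     'main',
//     { hasAudio: true, hasVideo: true },
//     { hasAudio: true, hasVideo: false }
//   );
//   // => 'Only audio found in segment when we expected video. ...'
//   illegalMediaSwitch('audio', { hasVideo: true }, { hasVideo: false });
//   // => null ('audio' loaders are not checked)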
13773/**
13774 * Calculates a time value that is safe to remove from the back buffer without interrupting
13775 * playback.
13776 *
13777 * @param {TimeRange} seekable
13778 * The current seekable range
13779 * @param {number} currentTime
13780 * The current time of the player
13781 * @param {number} targetDuration
13782 * The target duration of the current playlist
13783 * @return {number}
13784 * Time that is safe to remove from the back buffer without interrupting playback
13785 */
13786
13787var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable, currentTime, targetDuration) {
13788 // 30 seconds before the playhead provides a safe default for trimming.
13789 //
13790 // Choosing a reasonable default is particularly important for high bitrate content and
13791 // VOD videos/live streams with large windows, as the buffer may end up overfilled and
13792 // throw an APPEND_BUFFER_ERR.
13793 var trimTime = currentTime - Config.BACK_BUFFER_LENGTH;
13794
13795 if (seekable.length) {
13796 // Some live playlists may have a shorter window of content than the full allowed back
13797 // buffer. For these playlists, don't save content that's no longer within the window.
13798 trimTime = Math.max(trimTime, seekable.start(0));
13799 } // Don't remove within target duration of the current time to avoid the possibility of
13800 // removing the GOP currently being played, as removing it can cause playback stalls.
13801
13802
13803 var maxTrimTime = currentTime - targetDuration;
13804 return Math.min(maxTrimTime, trimTime);
13805};
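// Worked example (illustrative sketch, assuming Config.BACK_BUFFER_LENGTH is
// 30 seconds): with a live window starting at 50, a playhead at 70, and a
// target duration of 10:
//
//   safeBackBufferTrimTime(videojs.createTimeRanges([[50, 120]]), 70, 10);
//   // trimTime = max(70 - 30, 50) = 50; maxTrimTime = 70 - 10 = 60
//   // => min(60, 50) = 50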
13806
13807var segmentInfoString = function segmentInfoString(segmentInfo) {
13808 var _segmentInfo$segment = segmentInfo.segment,
13809 start = _segmentInfo$segment.start,
13810 end = _segmentInfo$segment.end,
13811 _segmentInfo$playlist = segmentInfo.playlist,
13812 seq = _segmentInfo$playlist.mediaSequence,
13813 id = _segmentInfo$playlist.id,
13814 _segmentInfo$playlist2 = _segmentInfo$playlist.segments,
13815 segments = _segmentInfo$playlist2 === void 0 ? [] : _segmentInfo$playlist2,
13816 index = segmentInfo.mediaIndex,
13817 timeline = segmentInfo.timeline;
13818 return ["appending [" + index + "] of [" + seq + ", " + (seq + segments.length) + "] from playlist [" + id + "]", "[" + start + " => " + end + "] in timeline [" + timeline + "]"].join(' ');
13819};
13820
13821var timingInfoPropertyForMedia = function timingInfoPropertyForMedia(mediaType) {
13822 return mediaType + "TimingInfo";
13823};
13824/**
13825 * Returns the timestamp offset to use for the segment.
13826 *
13827 * @param {number} segmentTimeline
13828 * The timeline of the segment
13829 * @param {number} currentTimeline
13830 * The timeline currently being followed by the loader
13831 * @param {number} startOfSegment
13832 * The estimated segment start
13833 * @param {TimeRange[]} buffered
13834 * The loader's buffer
13835 * @param {boolean} overrideCheck
13836 * If true, no checks are made to see if the timestamp offset value should be set,
13837 * but sets it directly to a value.
13838 *
13839 * @return {number|null}
13840 * Either a number representing a new timestamp offset, or null if the segment is
13841 * part of the same timeline
13842 */
13843
13844
13845var timestampOffsetForSegment = function timestampOffsetForSegment(_ref) {
13846 var segmentTimeline = _ref.segmentTimeline,
13847 currentTimeline = _ref.currentTimeline,
13848 startOfSegment = _ref.startOfSegment,
13849 buffered = _ref.buffered,
13850 overrideCheck = _ref.overrideCheck;
13851
13852 // Check to see if we are crossing a discontinuity to see if we need to set the
13853 // timestamp offset on the transmuxer and source buffer.
13854 //
13855 // Previously, we changed the timestampOffset if the start of this segment was less than
13856 // the currently set timestampOffset, but this isn't desirable as it can produce bad
13857 // behavior, especially around long running live streams.
13858 if (!overrideCheck && segmentTimeline === currentTimeline) {
13859 return null;
13860 } // When changing renditions, it's possible to request a segment on an older timeline. For
13861 // instance, given two renditions with the following:
13862 //
13863 // #EXTINF:10
13864 // segment1
13865 // #EXT-X-DISCONTINUITY
13866 // #EXTINF:10
13867 // segment2
13868 // #EXTINF:10
13869 // segment3
13870 //
13871 // And the current player state:
13872 //
13873 // current time: 8
13874 // buffer: 0 => 20
13875 //
13876 // The next segment on the current rendition would be segment3, filling the buffer from
13877 // 20s onwards. However, if a rendition switch happens after segment2 was requested,
13878 // then the next segment to be requested will be segment1 from the new rendition in
13879 // order to fill time 8 and onwards. Using the buffered end would result in repeated
13880 // content (since it would position segment1 of the new rendition starting at 20s). This
13881 // case can be identified when the new segment's timeline is a prior value. Instead of
13882 // using the buffered end, the startOfSegment can be used, which, hopefully, will be
13883 // more accurate to the actual start time of the segment.
13884
13885
13886 if (segmentTimeline < currentTimeline) {
13887 return startOfSegment;
13888 } // segmentInfo.startOfSegment used to be used as the timestamp offset, however, that
13889 // value uses the end of the last segment if it is available. While this value
13890 // should often be correct, it's better to rely on the buffered end, as the new
13891 // content post discontinuity should line up with the buffered end as if it were
13892 // time 0 for the new content.
13893
13894
13895 return buffered.length ? buffered.end(buffered.length - 1) : startOfSegment;
13896};
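// Examples (illustrative sketch; the buffered range and times are
// assumptions):
//
//   // same timeline, no override check: no new offset needed
//   timestampOffsetForSegment({ segmentTimeline: 1, currentTimeline: 1,
//     startOfSegment: 20, buffered: videojs.createTimeRanges([[0, 20]]) });
//   // => null
//
//   // crossing forward into a new timeline: use the buffered end
//   timestampOffsetForSegment({ segmentTimeline: 1, currentTimeline: 0,
//     startOfSegment: 20, buffered: videojs.createTimeRanges([[0, 20]]) });
//   // => 20
//
//   // switching back to an earlier timeline: trust startOfSegment instead
//   timestampOffsetForSegment({ segmentTimeline: 0, currentTimeline: 1,
//     startOfSegment: 8, buffered: videojs.createTimeRanges([[0, 20]]) });
//   // => 8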
13897/**
13898 * Returns whether or not the loader should wait for a timeline change from the timeline
13899 * change controller before processing the segment.
13900 *
13901 * Primary timing in VHS goes by video. This is different from most media players, as
13902 * audio is more often used as the primary timing source. For the foreseeable future, VHS
13903 * will continue to use video as the primary timing source, due to the current logic and
13904 * expectations built around it.
13905 *
13906 * Since the timing follows video, in order to maintain sync, the video loader is
13907 * responsible for setting both audio and video source buffer timestamp offsets.
13908 *
13909 * Setting different values for audio and video source buffers could lead to
13910 * desyncing. The following examples demonstrate some of the situations where this
13911 * distinction is important. Note that all of these cases involve demuxed content. When
13912 * content is muxed, the audio and video are packaged together, therefore syncing
13913 * separate media playlists is not an issue.
13914 *
13915 * CASE 1: Audio prepares to load a new timeline before video:
13916 *
13917 * Timeline:       0                 1
13918 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
13919 * Audio Loader:                     ^
13920 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
13921 * Video Loader              ^
13922 *
13923 * In the above example, the audio loader is preparing to load the 6th segment, the first
13924 * after a discontinuity, while the video loader is still loading the 5th segment, before
13925 * the discontinuity.
13926 *
13927 * If the audio loader goes ahead and loads and appends the 6th segment before the video
13928 * loader crosses the discontinuity, then when appended, the 6th audio segment will use
13929 * the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
13930 * the audio loader must provide the audioAppendStart value to trim the content in the
13931 * transmuxer, and that value relies on the audio timestamp offset. Since the audio
13932 * timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
13933 * segment until that value is provided.
13934 *
13935 * CASE 2: Video prepares to load a new timeline before audio:
13936 *
13937 * Timeline:       0                 1
13938 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
13939 * Audio Loader:             ^
13940 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
13941 * Video Loader                      ^
13942 *
13943 * In the above example, the video loader is preparing to load the 6th segment, the first
13944 * after a discontinuity, while the audio loader is still loading the 5th segment, before
13945 * the discontinuity.
13946 *
13947 * If the video loader goes ahead and loads and appends the 6th segment, then once the
13948 * segment is loaded and processed, both the video and audio timestamp offsets will be
13949 * set, since video is used as the primary timing source. This is to ensure content lines
13950 * up appropriately, as any modifications to the video timing are reflected by audio when
13951 * the video loader sets the audio and video timestamp offsets to the same value. However,
13952 * setting the timestamp offset for audio before audio has had a chance to change
13953 * timelines will likely lead to desyncing, as the audio loader will append segment 5 with
13954 * a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
13955 *
13956 * CASE 3: When seeking, audio prepares to load a new timeline before video
13957 *
13958 * Timeline:       0                 1
13959 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
13960 * Audio Loader:         ^
13961 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
13962 * Video Loader            ^
13963 *
13964 * In the above example, both audio and video loaders are loading segments from timeline
13965 * 0, but imagine that the seek originated from timeline 1.
13966 *
13967 * When seeking to a new timeline, the timestamp offset will be set based on the expected
13968 * segment start of the loaded video segment. In order to maintain sync, the audio loader
13969 * must wait for the video loader to load its segment and update both the audio and video
13970 * timestamp offsets before it may load and append its own segment. This is the case
13971 * whether the seek results in a mismatched segment request (e.g., the audio loader
13972 * chooses to load segment 3 and the video loader chooses to load segment 4) or the
13973 * loaders choose to load the same segment index from each playlist, as the segments may
13974 * not be aligned perfectly, even for matching segment indexes.
13975 *
13976 * @param {Object} timelineChangeController
13977 * @param {number} currentTimeline
13978 * The timeline currently being followed by the loader
13979 * @param {number} segmentTimeline
13980 * The timeline of the segment being loaded
13981 * @param {('main'|'audio')} loaderType
13982 * The loader type
13983 * @param {boolean} audioDisabled
13984 * Whether the audio is disabled for the loader. This should only be true when the
13985 * loader may have muxed audio in its segment, but should not append it, e.g., for
13986 * the main loader when an alternate audio playlist is active.
13987 *
13988 * @return {boolean}
13989 * Whether the loader should wait for a timeline change from the timeline change
13990 * controller before processing the segment
13991 */
13992
13993var shouldWaitForTimelineChange = function shouldWaitForTimelineChange(_ref2) {
13994 var timelineChangeController = _ref2.timelineChangeController,
13995 currentTimeline = _ref2.currentTimeline,
13996 segmentTimeline = _ref2.segmentTimeline,
13997 loaderType = _ref2.loaderType,
13998 audioDisabled = _ref2.audioDisabled;
13999
14000 if (currentTimeline === segmentTimeline) {
14001 return false;
14002 }
14003
14004 if (loaderType === 'audio') {
14005 var lastMainTimelineChange = timelineChangeController.lastTimelineChange({
14006 type: 'main'
14007 }); // Audio loader should wait if:
14008 //
14009 // * main hasn't had a timeline change yet (thus has not loaded its first segment)
14010 // * main hasn't yet changed to the timeline audio is looking to load
14011
14012 return !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;
14013 } // The main loader only needs to wait for timeline changes if there's demuxed audio.
14014 // Otherwise, there's nothing to wait for, since audio would be muxed into the main
14015 // loader's segments (or the content is audio/video only and handled by the main
14016 // loader).
14017
14018
14019 if (loaderType === 'main' && audioDisabled) {
14020 var pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
14021 type: 'audio'
14022 }); // Main loader should wait for the audio loader if audio is not pending a timeline
14023 // change to the current timeline.
14024 //
14025 // Since the main loader is responsible for setting the timestamp offset for both
14026 // audio and video, the main loader must wait for audio to be about to change to its
14027 // timeline before setting the offset, otherwise, if audio is behind in loading,
14028 // segments from the previous timeline would be adjusted by the new timestamp offset.
14029 //
14030 // This requirement means that video will not cross a timeline until the audio is
14031 // about to cross to it, so that way audio and video will always cross the timeline
14032 // together.
14033 //
14034 // In addition to normal timeline changes, these rules also apply to the start of a
14035 // stream (going from a non-existent timeline, -1, to timeline 0). It's important
14036 // that these rules apply to the first timeline change because if they did not, it's
14037 // possible that the main loader will cross two timelines before the audio loader has
14038 // crossed one. Logic may be implemented to handle the startup as a special case, but
14039 // it's easier to simply treat all timeline changes the same.
14040
14041 if (pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline) {
14042 return false;
14043 }
14044
14045 return true;
14046 }
14047
14048 return false;
14049};
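// Example (illustrative sketch; the controller state is an assumption): an
// audio loader that wants to cross a discontinuity into timeline 1 waits
// until the main loader has already made that change.
//
//   shouldWaitForTimelineChange({
//     timelineChangeController: timelineChangeController,
//     currentTimeline: 0,
//     segmentTimeline: 1,
//     loaderType: 'audio'
//   });
//   // => true until timelineChangeController.lastTimelineChange({ type: 'main' })
//   //    reports { from: 0, to: 1 }, then false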
14050var mediaDuration = function mediaDuration(audioTimingInfo, videoTimingInfo) {
14051 var audioDuration = audioTimingInfo && typeof audioTimingInfo.start === 'number' && typeof audioTimingInfo.end === 'number' ? audioTimingInfo.end - audioTimingInfo.start : 0;
14052 var videoDuration = videoTimingInfo && typeof videoTimingInfo.start === 'number' && typeof videoTimingInfo.end === 'number' ? videoTimingInfo.end - videoTimingInfo.start : 0;
14053 return Math.max(audioDuration, videoDuration);
14054};
14055var segmentTooLong = function segmentTooLong(_ref3) {
14056 var segmentDuration = _ref3.segmentDuration,
14057 maxDuration = _ref3.maxDuration;
14058
14059 // 0 duration segments are most likely due to metadata only segments or a lack of
14060 // information.
14061 if (!segmentDuration) {
14062 return false;
14063 } // For HLS:
14064 //
14065 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1
14066 // The EXTINF duration of each Media Segment in the Playlist
14067 // file, when rounded to the nearest integer, MUST be less than or equal
14068 // to the target duration; longer segments can trigger playback stalls
14069 // or other errors.
14070 //
14071 // For DASH, the mpd-parser uses the largest reported segment duration as the target
14072 // duration. Although that reported duration is occasionally approximate (i.e., not
14073 // exact), a strict check may report that a segment is too long more often in DASH.
14074
14075
14076 return Math.round(segmentDuration) > maxDuration + TIME_FUDGE_FACTOR;
14077};
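// Worked example (illustrative sketch; TIME_FUDGE_FACTOR is the small rounding
// allowance defined earlier in this file): the duration is rounded before the
// comparison, per the HLS spec's "rounded to the nearest integer" language.
//
//   segmentTooLong({ segmentDuration: 10.5, maxDuration: 10 });
//   // Math.round(10.5) = 11 > 10 + TIME_FUDGE_FACTOR => true
//   segmentTooLong({ segmentDuration: 10.4, maxDuration: 10 });
//   // Math.round(10.4) = 10, within the allowance => false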
14078var getTroublesomeSegmentDurationMessage = function getTroublesomeSegmentDurationMessage(segmentInfo, sourceType) {
14079 // Right now we aren't following DASH's timing model exactly, so only perform
14080 // this check for HLS content.
14081 if (sourceType !== 'hls') {
14082 return null;
14083 }
14084
14085 var segmentDuration = mediaDuration(segmentInfo.audioTimingInfo, segmentInfo.videoTimingInfo); // Don't report if we lack information.
14086 //
14087 // If the segment has a duration of 0 it is either a lack of information or a
14088 // metadata only segment and shouldn't be reported here.
14089
14090 if (!segmentDuration) {
14091 return null;
14092 }
14093
14094 var targetDuration = segmentInfo.playlist.targetDuration;
14095 var isSegmentWayTooLong = segmentTooLong({
14096 segmentDuration: segmentDuration,
14097 maxDuration: targetDuration * 2
14098 });
14099 var isSegmentSlightlyTooLong = segmentTooLong({
14100 segmentDuration: segmentDuration,
14101 maxDuration: targetDuration
14102 });
14103 var segmentTooLongMessage = "Segment with index " + segmentInfo.mediaIndex + " " + ("from playlist " + segmentInfo.playlist.id + " ") + ("has a duration of " + segmentDuration + " ") + ("when the reported duration is " + segmentInfo.duration + " ") + ("and the target duration is " + targetDuration + ". ") + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1';
14104
14105 if (isSegmentWayTooLong || isSegmentSlightlyTooLong) {
14106 return {
14107 severity: isSegmentWayTooLong ? 'warn' : 'info',
14108 message: segmentTooLongMessage
14109 };
14110 }
14111
14112 return null;
14113};
14114/**
14115 * An object that manages segment loading and appending.
14116 *
14117 * @class SegmentLoader
14118 * @param {Object} settings required and optional settings for the loader
14119 * @extends videojs.EventTarget
14120 */
14121
14122var SegmentLoader = /*#__PURE__*/function (_videojs$EventTarget) {
14123 _inheritsLoose__default['default'](SegmentLoader, _videojs$EventTarget);
14124
14125 function SegmentLoader(settings, options) {
14126 var _this;
14127
14128 _this = _videojs$EventTarget.call(this) || this; // check pre-conditions
14129
14130 if (!settings) {
14131 throw new TypeError('Initialization settings are required');
14132 }
14133
14134 if (typeof settings.currentTime !== 'function') {
14135 throw new TypeError('No currentTime getter specified');
14136 }
14137
14138 if (!settings.mediaSource) {
14139 throw new TypeError('No MediaSource specified');
14140 } // public properties
14141
14142
14143 _this.bandwidth = settings.bandwidth;
14144 _this.throughput = {
14145 rate: 0,
14146 count: 0
14147 };
14148 _this.roundTrip = NaN;
14149
14150 _this.resetStats_();
14151
14152 _this.mediaIndex = null; // private settings
14153
14154 _this.hasPlayed_ = settings.hasPlayed;
14155 _this.currentTime_ = settings.currentTime;
14156 _this.seekable_ = settings.seekable;
14157 _this.seeking_ = settings.seeking;
14158 _this.duration_ = settings.duration;
14159 _this.mediaSource_ = settings.mediaSource;
14160 _this.vhs_ = settings.vhs;
14161 _this.loaderType_ = settings.loaderType;
14162 _this.currentMediaInfo_ = void 0;
14163 _this.startingMediaInfo_ = void 0;
14164 _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
14165 _this.goalBufferLength_ = settings.goalBufferLength;
14166 _this.sourceType_ = settings.sourceType;
14167 _this.sourceUpdater_ = settings.sourceUpdater;
14168 _this.inbandTextTracks_ = settings.inbandTextTracks;
14169 _this.state_ = 'INIT';
14170 _this.handlePartialData_ = settings.handlePartialData;
14171 _this.timelineChangeController_ = settings.timelineChangeController;
14172 _this.shouldSaveSegmentTimingInfo_ = true;
14173 _this.parse708captions_ = settings.parse708captions; // private instance variables
14174
14175 _this.checkBufferTimeout_ = null;
14176 _this.error_ = void 0;
14177 _this.currentTimeline_ = -1;
14178 _this.pendingSegment_ = null;
14179 _this.xhrOptions_ = null;
14180 _this.pendingSegments_ = [];
14181 _this.audioDisabled_ = false;
14182 _this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controller
14183
14184 _this.gopBuffer_ = [];
14185 _this.timeMapping_ = 0;
14186 _this.safeAppend_ = videojs__default['default'].browser.IE_VERSION >= 11;
14187 _this.appendInitSegment_ = {
14188 audio: true,
14189 video: true
14190 };
14191 _this.playlistOfLastInitSegment_ = {
14192 audio: null,
14193 video: null
14194 };
14195 _this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough
14196 // information yet to start the loading process (e.g., if the audio loader wants to
14197 // load a segment from the next timeline but the main loader hasn't yet crossed that
14198 // timeline), then the load call will be added to the queue until it is ready to be
14199 // processed.
14200
14201 _this.loadQueue_ = [];
14202 _this.metadataQueue_ = {
14203 id3: [],
14204 caption: []
14205 }; // Fragmented mp4 playback
14206
14207 _this.activeInitSegmentId_ = null;
14208 _this.initSegments_ = {}; // HLSe playback
14209
14210 _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
14211 _this.keyCache_ = {};
14212 _this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings
14213 // between a time in the display time and a segment index within
14214 // a playlist
14215
14216 _this.syncController_ = settings.syncController;
14217 _this.syncPoint_ = {
14218 segmentIndex: 0,
14219 time: 0
14220 };
14221 _this.transmuxer_ = _this.createTransmuxer_();
14222
14223 _this.triggerSyncInfoUpdate_ = function () {
14224 return _this.trigger('syncinfoupdate');
14225 };
14226
14227 _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);
14228
14229 _this.mediaSource_.addEventListener('sourceopen', function () {
14230 if (!_this.isEndOfStream_()) {
14231 _this.ended_ = false;
14232 }
14233 }); // ...for determining the fetch location
14234
14235
14236 _this.fetchAtBuffer_ = false;
14237 _this.logger_ = logger("SegmentLoader[" + _this.loaderType_ + "]");
14238 Object.defineProperty(_assertThisInitialized__default['default'](_this), 'state', {
14239 get: function get() {
14240 return this.state_;
14241 },
14242 set: function set(newState) {
14243 if (newState !== this.state_) {
14244 this.logger_(this.state_ + " -> " + newState);
14245 this.state_ = newState;
14246 this.trigger('statechange');
14247 }
14248 }
14249 });
14250
14251 _this.sourceUpdater_.on('ready', function () {
14252 if (_this.hasEnoughInfoToAppend_()) {
14253 _this.processCallQueue_();
14254 }
14255 }); // Only the main loader needs to listen for pending timeline changes, as the main
14256 // loader should wait for audio to be ready to change its timeline so that both main
14257 // and audio timelines change together. For more details, see the
14258 // shouldWaitForTimelineChange function.
14259
14260
14261 if (_this.loaderType_ === 'main') {
14262 _this.timelineChangeController_.on('pendingtimelinechange', function () {
14263 if (_this.hasEnoughInfoToAppend_()) {
14264 _this.processCallQueue_();
14265 }
14266 });
14267 } // The main loader only listens on pending timeline changes, but the audio loader,
14268 // since its loads follow main, needs to listen on timeline changes. For more details,
14269 // see the shouldWaitForTimelineChange function.
14270
14271
14272 if (_this.loaderType_ === 'audio') {
14273 _this.timelineChangeController_.on('timelinechange', function () {
14274 if (_this.hasEnoughInfoToLoad_()) {
14275 _this.processLoadQueue_();
14276 }
14277
14278 if (_this.hasEnoughInfoToAppend_()) {
14279 _this.processCallQueue_();
14280 }
14281 });
14282 }
14283
14284 return _this;
14285 }
14286
14287 var _proto = SegmentLoader.prototype;
14288
14289 _proto.createTransmuxer_ = function createTransmuxer_() {
14290 return segmentTransmuxer.createTransmuxer({
14291 remux: false,
14292 alignGopsAtEnd: this.safeAppend_,
14293 keepOriginalTimestamps: true,
14294 handlePartialData: this.handlePartialData_,
14295 parse708captions: this.parse708captions_
14296 });
14297 }
14298 /**
14299 * reset all of our media stats
14300 *
14301 * @private
14302 */
14303 ;
14304
14305 _proto.resetStats_ = function resetStats_() {
14306 this.mediaBytesTransferred = 0;
14307 this.mediaRequests = 0;
14308 this.mediaRequestsAborted = 0;
14309 this.mediaRequestsTimedout = 0;
14310 this.mediaRequestsErrored = 0;
14311 this.mediaTransferDuration = 0;
14312 this.mediaSecondsLoaded = 0;
14313 }
14314 /**
14315 * dispose of the SegmentLoader and reset to the default state
14316 */
14317 ;
14318
14319 _proto.dispose = function dispose() {
14320 this.trigger('dispose');
14321 this.state = 'DISPOSED';
14322 this.pause();
14323 this.abort_();
14324
14325 if (this.transmuxer_) {
14326 this.transmuxer_.terminate();
14327 }
14328
14329 this.resetStats_();
14330
14331 if (this.checkBufferTimeout_) {
14332 window__default['default'].clearTimeout(this.checkBufferTimeout_);
14333 }
14334
14335 if (this.syncController_ && this.triggerSyncInfoUpdate_) {
14336 this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
14337 }
14338
14339 this.off();
14340 };
14341
14342 _proto.setAudio = function setAudio(enable) {
14343 this.audioDisabled_ = !enable;
14344
14345 if (enable) {
14346 this.appendInitSegment_.audio = true;
14347 } else {
14348 // remove current track audio if it gets disabled
14349 this.sourceUpdater_.removeAudio(0, this.duration_());
14350 }
14351 }
14352 /**
14353 * abort anything that is currently going on with the SegmentLoader
14354 * and reset to a default state
14355 */
14356 ;
14357
14358 _proto.abort = function abort() {
14359 if (this.state !== 'WAITING') {
14360 if (this.pendingSegment_) {
14361 this.pendingSegment_ = null;
14362 }
14363
14364 return;
14365 }
14366
14367 this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY
14368 // since we are no longer "waiting" on any requests. XHR callback is not always run
14369 // when the request is aborted. This will prevent the loader from being stuck in the
14370 // WAITING state indefinitely.
14371
14372 this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the
14373 // next segment
14374
14375 if (!this.paused()) {
14376 this.monitorBuffer_();
14377 }
14378 }
14379 /**
14380 * abort all pending xhr requests and null any pending segments
14381 *
14382 * @private
14383 */
14384 ;
14385
14386 _proto.abort_ = function abort_() {
14387 if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
14388 this.pendingSegment_.abortRequests();
14389 } // clear out the segment being processed
14390
14391
14392 this.pendingSegment_ = null;
14393 this.callQueue_ = [];
14394 this.loadQueue_ = [];
14395 this.metadataQueue_.id3 = [];
14396 this.metadataQueue_.caption = [];
14397 this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
14398 };
14399
14400 _proto.checkForAbort_ = function checkForAbort_(requestId) {
14401 // If the state is APPENDING, then aborts will not modify the state, meaning the first
14402 // callback that happens should reset the state to READY so that loading can continue.
14403 if (this.state === 'APPENDING' && !this.pendingSegment_) {
14404 this.state = 'READY';
14405 return true;
14406 }
14407
14408 if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
14409 return true;
14410 }
14411
14412 return false;
14413 }
14414 /**
14415 * set an error on the segment loader and null out any pending segments
14416 *
14417 * @param {Error} error the error to set on the SegmentLoader
14418 * @return {Error} the error that was set or that is currently set
14419 */
14420 ;
14421
14422 _proto.error = function error(_error) {
14423 if (typeof _error !== 'undefined') {
14424 this.logger_('error occurred:', _error);
14425 this.error_ = _error;
14426 }
14427
14428 this.pendingSegment_ = null;
14429 return this.error_;
14430 };
14431
14432 _proto.endOfStream = function endOfStream() {
14433 this.ended_ = true;
14434
14435 if (this.transmuxer_) {
14436 // need to clear out any cached data to prepare for the new segment
14437 segmentTransmuxer.reset(this.transmuxer_);
14438 }
14439
14440 this.gopBuffer_.length = 0;
14441 this.pause();
14442 this.trigger('ended');
14443 }
14444 /**
14445 * Indicates which time ranges are buffered
14446 *
14447 * @return {TimeRange}
14448 * TimeRange object representing the current buffered ranges
14449 */
14450 ;
14451
14452 _proto.buffered_ = function buffered_() {
14453 if (!this.sourceUpdater_ || !this.startingMediaInfo_) {
14454 return videojs__default['default'].createTimeRanges();
14455 }
14456
14457 if (this.loaderType_ === 'main') {
14458 var _this$startingMediaIn = this.startingMediaInfo_,
14459 hasAudio = _this$startingMediaIn.hasAudio,
14460 hasVideo = _this$startingMediaIn.hasVideo,
14461 isMuxed = _this$startingMediaIn.isMuxed;
14462
14463 if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
14464 return this.sourceUpdater_.buffered();
14465 }
14466
14467 if (hasVideo) {
14468 return this.sourceUpdater_.videoBuffered();
14469 }
14470 } // One case that can be ignored for now is audio only with alt audio,
14471 // as we don't yet have proper support for that.
14472
14473
14474 return this.sourceUpdater_.audioBuffered();
14475 }
14476 /**
14477 * Gets and sets init segment for the provided map
14478 *
14479 * @param {Object} map
14480 * The map object representing the init segment to get or set
14481 * @param {boolean=} set
14482 * If true, the init segment for the provided map should be saved
14483 * @return {Object}
14484 * map object for desired init segment
14485 */
14486 ;
14487
14488 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
14489 if (set === void 0) {
14490 set = false;
14491 }
14492
14493 if (!map) {
14494 return null;
14495 }
14496
14497 var id = initSegmentId(map);
14498 var storedMap = this.initSegments_[id];
14499
14500 if (set && !storedMap && map.bytes) {
14501 this.initSegments_[id] = storedMap = {
14502 resolvedUri: map.resolvedUri,
14503 byterange: map.byterange,
14504 bytes: map.bytes,
14505 tracks: map.tracks,
14506 timescales: map.timescales
14507 };
14508 }
14509
14510 return storedMap || map;
14511 }
14512 /**
14513 * Gets and sets key for the provided key
14514 *
14515 * @param {Object} key
14516 * The key object representing the key to get or set
14517 * @param {boolean=} set
14518 * If true, the key for the provided key should be saved
14519 * @return {Object}
14520 * Key object for desired key
14521 */
14522 ;
14523
14524 _proto.segmentKey = function segmentKey(key, set) {
14525 if (set === void 0) {
14526 set = false;
14527 }
14528
14529 if (!key) {
14530 return null;
14531 }
14532
14533 var id = segmentKeyId(key);
14534 var storedKey = this.keyCache_[id]; // TODO: We should use the HTTP Expires header to invalidate our cache per
14535 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
14536
14537 if (this.cacheEncryptionKeys_ && set && !storedKey && key.bytes) {
14538 this.keyCache_[id] = storedKey = {
14539 resolvedUri: key.resolvedUri,
14540 bytes: key.bytes
14541 };
14542 }
14543
14544 var result = {
14545 resolvedUri: (storedKey || key).resolvedUri
14546 };
14547
14548 if (storedKey) {
14549 result.bytes = storedKey.bytes;
14550 }
14551
14552 return result;
14553 }
14554 /**
14555 * Returns true if all configuration required for loading is present, otherwise false.
14556 *
14557 * @return {boolean} True if all the configuration is ready for loading
14558 * @private
14559 */
14560 ;
14561
14562 _proto.couldBeginLoading_ = function couldBeginLoading_() {
14563 return this.playlist_ && !this.paused();
14564 }
14565 /**
14566 * load a playlist and start to fill the buffer
14567 */
14568 ;
14569
14570 _proto.load = function load() {
14571 // un-pause
14572 this.monitorBuffer_(); // if we don't have a playlist yet, keep waiting for one to be
14573 // specified
14574
14575 if (!this.playlist_) {
14576 return;
14577 } // if all the configuration is ready, initialize and begin loading
14578
14579
14580 if (this.state === 'INIT' && this.couldBeginLoading_()) {
14581 return this.init_();
14582 } // if we're in the middle of processing a segment already, don't
14583 // kick off an additional segment request
14584
14585
14586 if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
14587 return;
14588 }
14589
14590 this.state = 'READY';
14591 }
14592 /**
14593 * Once all the starting parameters have been specified, begin
14594 * operation. This method should only be invoked from the INIT
14595 * state.
14596 *
14597 * @private
14598 */
14599 ;
14600
14601 _proto.init_ = function init_() {
14602 this.state = 'READY'; // if this is the audio segment loader, and it hasn't been inited before, then any old
14603 // audio data from the muxed content should be removed
14604
14605 this.resetEverything();
14606 return this.monitorBuffer_();
14607 }
14608 /**
14609 * set a playlist on the segment loader
14610 *
14611 * @param {PlaylistLoader} media the playlist to set on the segment loader
14612 */
14613 ;
14614
14615 _proto.playlist = function playlist(newPlaylist, options) {
14616 if (options === void 0) {
14617 options = {};
14618 }
14619
14620 if (!newPlaylist) {
14621 return;
14622 }
14623
14624 var oldPlaylist = this.playlist_;
14625 var segmentInfo = this.pendingSegment_;
14626 this.playlist_ = newPlaylist;
14627 this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist
14628 // is always our zero-time so force a sync update each time the playlist
14629 // is refreshed from the server
14630 //
14631 // Use the INIT state to determine if playback has started, as the playlist sync info
14632 // should be fixed once requests begin (as sync points are generated based on sync
14633 // info), but not before then.
14634
14635 if (this.state === 'INIT') {
14636 newPlaylist.syncInfo = {
14637 mediaSequence: newPlaylist.mediaSequence,
14638 time: 0
14639 }; // Setting the date time mapping means mapping the program date time (if available)
14640 // to time 0 on the player's timeline. The playlist's syncInfo serves a similar
14641 // purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
14642 // be updated as the playlist is refreshed before the loader starts loading, the
14643 // program date time mapping needs to be updated as well.
14644 //
14645 // This mapping is only done for the main loader because a program date time should
14646 // map equivalently between playlists.
14647
14648 if (this.loaderType_ === 'main') {
14649 this.syncController_.setDateTimeMappingForStart(newPlaylist);
14650 }
14651 }
14652
14653 var oldId = null;
14654
14655 if (oldPlaylist) {
14656 if (oldPlaylist.id) {
14657 oldId = oldPlaylist.id;
14658 } else if (oldPlaylist.uri) {
14659 oldId = oldPlaylist.uri;
14660 }
14661 }
14662
14663 this.logger_("playlist update [" + oldId + " => " + (newPlaylist.id || newPlaylist.uri) + "]"); // in VOD, this is always a rendition switch (or we updated our syncInfo above)
14664 // in LIVE, we always want to update with new playlists (including refreshes)
14665
14666 this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start
14667 // buffering now
14668
14669 if (this.state === 'INIT' && this.couldBeginLoading_()) {
14670 return this.init_();
14671 }
14672
14673 if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
14674 if (this.mediaIndex !== null || this.handlePartialData_) {
14675 // we must "resync" the segment loader when we switch renditions and
14676 // the segment loader is already synced to the previous rendition
14677 //
14678 // or if we're handling partial data, we need to ensure the transmuxer is cleared
14679 // out before we start adding more data
14680 this.resyncLoader();
14681 }
14682
14683 this.currentMediaInfo_ = void 0;
14684 this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being defined
14685
14686 return;
14687 } // we reloaded the same playlist so we are in a live scenario
14688 // and we will likely need to adjust the mediaIndex
14689
14690
14691 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
14692 this.logger_("live window shift [" + mediaSequenceDiff + "]"); // update the mediaIndex on the SegmentLoader
14693 // this is important because we can abort a request and this value must be
14694 // equal to the last appended mediaIndex
14695
14696 if (this.mediaIndex !== null) {
14697 this.mediaIndex -= mediaSequenceDiff;
14698 } // update the mediaIndex on the SegmentInfo object
14699 // this is important because we will update this.mediaIndex with this value
14700 // in `handleAppendsDone_` after the segment has been successfully appended
14701
14702
14703 if (segmentInfo) {
14704 segmentInfo.mediaIndex -= mediaSequenceDiff; // we need to update the referenced segment so that timing information is
14705 // saved for the new playlist's segment, however, if the segment fell off the
14706 // playlist, we can leave the old reference and just lose the timing info
14707
14708 if (segmentInfo.mediaIndex >= 0) {
14709 segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
14710 }
14711 }
14712
14713 this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
14714 }
14715 /**
14716 * Prevent the loader from fetching additional segments. If there
14717 * is a segment request outstanding, it will finish processing
14718 * before the loader halts. A segment loader can be unpaused by
14719 * calling load().
14720 */
14721 ;
14722
14723 _proto.pause = function pause() {
14724 if (this.checkBufferTimeout_) {
14725 window__default['default'].clearTimeout(this.checkBufferTimeout_);
14726 this.checkBufferTimeout_ = null;
14727 }
14728 }
14729 /**
14730 * Returns whether the segment loader is fetching additional
14731 * segments when given the opportunity. This property can be
14732 * modified through calls to pause() and load().
14733 */
14734 ;
14735
14736 _proto.paused = function paused() {
14737 return this.checkBufferTimeout_ === null;
14738 }
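// Usage sketch (illustrative; `loader` stands for any constructed
// SegmentLoader): pause() only clears the buffer-check timer, so paused() is
// simply "no timer scheduled", and load() re-arms it.
//
//   loader.pause();
//   loader.paused(); // => true
//   loader.load();
//   loader.paused(); // => false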
14739 /**
14740 * Delete all the buffered data and reset the SegmentLoader
14741 *
14742 * @param {Function} [done] an optional callback to be executed when the remove
14743 * operation is complete
14744 */
14745 ;
14746
14747 _proto.resetEverything = function resetEverything(done) {
14748 this.ended_ = false;
14749 this.appendInitSegment_ = {
14750 audio: true,
14751 video: true
14752 };
14753 this.resetLoader(); // remove from 0, the earliest point, to Infinity, to signify removal of everything.
14754 // VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
14755 // we then clamp the value to duration if necessary.
14756
14757 this.remove(0, Infinity, done); // clears fmp4 captions
14758
14759 if (this.transmuxer_) {
14760 this.transmuxer_.postMessage({
14761 action: 'clearAllMp4Captions'
14762 });
14763 }
14764 }
14765 /**
14766 * Force the SegmentLoader to resync and start loading around the currentTime instead
14767 * of starting at the end of the buffer
14768 *
14769 * Useful for fast quality changes
14770 */
14771 ;
14772
14773 _proto.resetLoader = function resetLoader() {
14774 this.fetchAtBuffer_ = false;
14775 this.resyncLoader();
14776 }
14777 /**
14778 * Force the SegmentLoader to restart synchronization and make a conservative guess
14779 * before returning to the simple walk-forward method
14780 */
14781 ;
14782
14783 _proto.resyncLoader = function resyncLoader() {
14784 if (this.transmuxer_) {
14785 // need to clear out any cached data to prepare for the new segment
14786 segmentTransmuxer.reset(this.transmuxer_);
14787 }
14788
14789 this.mediaIndex = null;
14790 this.syncPoint_ = null;
14791 this.isPendingTimestampOffset_ = false;
14792 this.callQueue_ = [];
14793 this.loadQueue_ = [];
14794 this.metadataQueue_.id3 = [];
14795 this.metadataQueue_.caption = [];
14796 this.abort();
14797
14798 if (this.transmuxer_) {
14799 this.transmuxer_.postMessage({
14800 action: 'clearParsedMp4Captions'
14801 });
14802 }
14803 }
14804 /**
14805 * Remove any data in the source buffer between start and end times
14806 *
14807 * @param {number} start - the start time of the region to remove from the buffer
14808 * @param {number} end - the end time of the region to remove from the buffer
14809 * @param {Function} [done] - an optional callback to be executed when the remove
14810 * operation is complete
14811 */
14812 ;
14813
14814 _proto.remove = function remove(start, end, done) {
14815 if (done === void 0) {
14816 done = function done() {};
14817 }
14818
14819 // clamp end to duration if we need to remove everything.
14820 // This is due to a browser bug that causes issues if we remove to Infinity.
14821 // videojs/videojs-contrib-hls#1225
14822 if (end === Infinity) {
14823 end = this.duration_();
14824 }
14825
14826 if (!this.sourceUpdater_ || !this.startingMediaInfo_) {
14827 this.logger_('skipping remove because no source updater or starting media info'); // nothing to remove if we haven't processed any media
14828
14829 return;
14830 } // set it to one to complete this function's removes
14831
14832
14833 var removesRemaining = 1;
14834
14835 var removeFinished = function removeFinished() {
14836 removesRemaining--;
14837
14838 if (removesRemaining === 0) {
14839 done();
14840 }
14841 };
14842
14843 if (!this.audioDisabled_) {
14844 removesRemaining++;
14845 this.sourceUpdater_.removeAudio(start, end, removeFinished);
14846 } // While it would be better to only remove video if the main loader has video, this
14847 // should be safe with audio only as removeVideo will call back even if there's no
14848 // video buffer.
14849 //
14850 // In theory we can check to see if there's video before calling the remove, but in
14851 // the event that we're switching between renditions and from video to audio only
14852 // (when we add support for that), we may need to clear the video contents despite
14853 // what the new media will contain.
14854
14855
14856 if (this.loaderType_ === 'main') {
14857 this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
14858 removesRemaining++;
14859 this.sourceUpdater_.removeVideo(start, end, removeFinished);
14860 } // remove any captions and ID3 tags
14861
14862
14863 for (var track in this.inbandTextTracks_) {
14864 removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
14865 }
14866
14867 removeCuesFromTrack(start, end, this.segmentMetadataTrack_); // finished this function's removes
14868
14869 removeFinished();
14870 }
14871 /**
14872 * (re-)schedule monitorBufferTick_ to run as soon as possible
14873 *
14874 * @private
14875 */
14876 ;
14877
14878 _proto.monitorBuffer_ = function monitorBuffer_() {
14879 if (this.checkBufferTimeout_) {
14880 window__default['default'].clearTimeout(this.checkBufferTimeout_);
14881 }
14882
14883 this.checkBufferTimeout_ = window__default['default'].setTimeout(this.monitorBufferTick_.bind(this), 1);
14884 }
14885 /**
14886 * As long as the SegmentLoader is in the READY state, periodically
14887 * invoke fillBuffer_().
14888 *
14889 * @private
14890 */
14891 ;
14892
14893 _proto.monitorBufferTick_ = function monitorBufferTick_() {
14894 if (this.state === 'READY') {
14895 this.fillBuffer_();
14896 }
14897
14898 if (this.checkBufferTimeout_) {
14899 window__default['default'].clearTimeout(this.checkBufferTimeout_);
14900 }
14901
14902 this.checkBufferTimeout_ = window__default['default'].setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
14903 }
14904 /**
14905 * fill the buffer with segments unless the sourceBuffers are
14906 * currently updating
14907 *
14908 * Note: this function should only ever be called by monitorBuffer_
14909 * and never directly
14910 *
14911 * @private
14912 */
14913 ;
14914
14915 _proto.fillBuffer_ = function fillBuffer_() {
14916 // TODO since the source buffer maintains a queue, and we shouldn't call this function
14917 // except when we're ready for the next segment, this check can most likely be removed
14918 if (this.sourceUpdater_.updating()) {
14919 return;
14920 }
14921
14922 if (!this.syncPoint_) {
14923 this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
14924 }
14925
14926 var buffered = this.buffered_(); // see if we need to begin loading immediately
14927
14928 var segmentInfo = this.checkBuffer_(buffered, this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_);
14929
14930 if (!segmentInfo) {
14931 return;
14932 }
14933
14934 segmentInfo.timestampOffset = timestampOffsetForSegment({
14935 segmentTimeline: segmentInfo.timeline,
14936 currentTimeline: this.currentTimeline_,
14937 startOfSegment: segmentInfo.startOfSegment,
14938 buffered: buffered,
14939 overrideCheck: this.isPendingTimestampOffset_
14940 });
14941 this.isPendingTimestampOffset_ = false;
14942
14943 if (typeof segmentInfo.timestampOffset === 'number') {
14944 this.timelineChangeController_.pendingTimelineChange({
14945 type: this.loaderType_,
14946 from: this.currentTimeline_,
14947 to: segmentInfo.timeline
14948 });
14949 }
14950
14951 this.loadSegment_(segmentInfo);
14952 }
14953 /**
14954 * Determines if we should call endOfStream on the media source based
14955 * on the state of the buffer or if the appended segment was the final
14956 * segment in the playlist.
14957 *
14958 * @param {number} [mediaIndex] the media index of segment we last appended
14959 * @param {Object} [playlist] a media playlist object
14960 * @return {boolean} do we need to call endOfStream on the MediaSource
14961 */
14962 ;
14963
14964 _proto.isEndOfStream_ = function isEndOfStream_(mediaIndex, playlist) {
14965 if (mediaIndex === void 0) {
14966 mediaIndex = this.mediaIndex;
14967 }
14968
14969 if (playlist === void 0) {
14970 playlist = this.playlist_;
14971 }
14972
14973 if (!playlist || !this.mediaSource_) {
14974 return false;
14975 } // mediaIndex is zero based but length is 1 based
14976
14977
14978 var appendedLastSegment = mediaIndex + 1 === playlist.segments.length; // if we've buffered to the end of the video, we need to call endOfStream
14979 // so that MediaSources can trigger the `ended` event when it runs out of
14980 // buffered data instead of waiting for more data
14981
14982 return playlist.endList && this.mediaSource_.readyState === 'open' && appendedLastSegment;
14983 }
14984 /**
14985 * Determines what segment request should be made, given current playback
14986 * state.
14987 *
14988 * @param {TimeRanges} buffered - the state of the buffer
14989 * @param {Object} playlist - the playlist object to fetch segments from
14990 * @param {number} mediaIndex - the previous mediaIndex fetched or null
14991 * @param {boolean} hasPlayed - a flag indicating whether we have played or not
14992 * @param {number} currentTime - the playback position in seconds
14993 * @param {Object} syncPoint - a segment info object that describes the current sync point
14994 * @return {Object} a segment request object that describes the segment to load
14995 */
14996 ;
14997
14998 _proto.checkBuffer_ = function checkBuffer_(buffered, playlist, currentMediaIndex, hasPlayed, currentTime, syncPoint) {
14999 var lastBufferedEnd = 0;
15000
15001 if (buffered.length) {
15002 lastBufferedEnd = buffered.end(buffered.length - 1);
15003 }
15004
15005 var bufferedTime = Math.max(0, lastBufferedEnd - currentTime);
15006
15007 if (!playlist.segments.length) {
15008 return null;
15009 } // if there is plenty of content buffered, and the video has
15010 // been played before, relax for a while
15011
15012
15013 if (bufferedTime >= this.goalBufferLength_()) {
15014 return null;
15015 } // if the video has not yet played once, and we already have
15016 // one segment downloaded do nothing
15017
15018
15019 if (!hasPlayed && bufferedTime >= 1) {
15020 return null;
15021 }
15022
15023 var nextMediaIndex = null;
15024 var startOfSegment;
15025 var isSyncRequest = false; // When the syncPoint is null, there is no way of determining a good
15026 // conservative segment index to fetch from
15027 // The best thing to do here is to get that kind of sync-point data by
15028 // making a request
15029
15030 if (syncPoint === null) {
15031 nextMediaIndex = this.getSyncSegmentCandidate_(playlist);
15032 isSyncRequest = true;
15033 } else if (currentMediaIndex !== null) {
15034 // Under normal playback conditions fetching is a simple walk forward
15035 var segment = playlist.segments[currentMediaIndex];
15036
15037 if (segment && segment.end) {
15038 startOfSegment = segment.end;
15039 } else {
15040 startOfSegment = lastBufferedEnd;
15041 }
15042
15043 nextMediaIndex = currentMediaIndex + 1; // There is a sync-point but the lack of a mediaIndex indicates that
15044 // we need to make a good conservative guess about which segment to
15045 // fetch
15046 } else if (this.fetchAtBuffer_) {
15047 // Find the segment containing the end of the buffer
15048 var mediaSourceInfo = Playlist.getMediaInfoForTime(playlist, lastBufferedEnd, syncPoint.segmentIndex, syncPoint.time);
15049 nextMediaIndex = mediaSourceInfo.mediaIndex;
15050 startOfSegment = mediaSourceInfo.startTime;
15051 } else {
15052 // Find the segment containing currentTime
15053 var _mediaSourceInfo = Playlist.getMediaInfoForTime(playlist, currentTime, syncPoint.segmentIndex, syncPoint.time);
15054
15055 nextMediaIndex = _mediaSourceInfo.mediaIndex;
15056 startOfSegment = _mediaSourceInfo.startTime;
15057 }
15058
15059 var segmentInfo = this.generateSegmentInfo_(playlist, nextMediaIndex, startOfSegment, isSyncRequest);
15060
15061 if (!segmentInfo) {
15062 return;
15063 } // if this is the last segment in the playlist
15064 // we are not seeking and end of stream has already been called
15065 // do not re-request
15066
15067
15068 if (this.mediaSource_ && this.playlist_ && segmentInfo.mediaIndex === this.playlist_.segments.length - 1 && this.mediaSource_.readyState === 'ended' && !this.seeking_()) {
15069 return;
15070 }
15071
15072 this.logger_("checkBuffer_ returning " + segmentInfo.uri, {
15073 segmentInfo: segmentInfo,
15074 playlist: playlist,
15075 currentMediaIndex: currentMediaIndex,
15076 nextMediaIndex: nextMediaIndex,
15077 startOfSegment: startOfSegment,
15078 isSyncRequest: isSyncRequest
15079 });
15080 return segmentInfo;
15081 }
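// Editor's note: a minimal illustrative sketch (not part of the library) of
// the buffer gate implemented by checkBuffer_ above; the numbers are
// hypothetical:
//
//   var lastBufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
//   var bufferedTime = Math.max(0, lastBufferedEnd - currentTime);
//   // with goalBufferLength_() === 30, currentTime === 10 and the buffer
//   // ending at 25, bufferedTime === 15 < 30, so another segment is chosen;
//   // once the buffer extends past 40, checkBuffer_ returns null and waits.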
15082 /**
15083 * The segment loader has no recourse except to fetch a segment in the
15084 * current playlist and use the internal timestamps in that segment to
15085 * generate a syncPoint. This function returns a good candidate index
15086 * for that process.
15087 *
15088 * @param {Object} playlist - the playlist object to look for a sync segment candidate in
15089 * @return {number} An index of a segment from the playlist to load
15090 */
15091 ;
15092
15093 _proto.getSyncSegmentCandidate_ = function getSyncSegmentCandidate_(playlist) {
15094 var _this2 = this;
15095
15096 if (this.currentTimeline_ === -1) {
15097 return 0;
15098 }
15099
15100 var segmentIndexArray = playlist.segments.map(function (s, i) {
15101 return {
15102 timeline: s.timeline,
15103 segmentIndex: i
15104 };
15105 }).filter(function (s) {
15106 return s.timeline === _this2.currentTimeline_;
15107 });
15108
15109 if (segmentIndexArray.length) {
15110 return segmentIndexArray[Math.min(segmentIndexArray.length - 1, 1)].segmentIndex;
15111 }
15112
15113 return Math.max(playlist.segments.length - 1, 0);
15114 };
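// Editor's note: an illustrative walk-through (not part of the library) of
// getSyncSegmentCandidate_ above. With currentTimeline_ === 1 and segment
// timelines [0, 0, 1, 1, 1], the filtered candidates carry segmentIndex
// values [2, 3, 4]; Math.min(length - 1, 1) selects position 1 of that
// list, i.e. media index 3, the second segment of the current timeline,
// so the sync probe lands inside the timeline rather than on its edge.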
15115
15116 _proto.generateSegmentInfo_ = function generateSegmentInfo_(playlist, mediaIndex, startOfSegment, isSyncRequest) {
15117 if (mediaIndex < 0 || mediaIndex >= playlist.segments.length) {
15118 return null;
15119 }
15120
15121 var segment = playlist.segments[mediaIndex];
15122 var audioBuffered = this.sourceUpdater_.audioBuffered();
15123 var videoBuffered = this.sourceUpdater_.videoBuffered();
15124 var audioAppendStart;
15125 var gopsToAlignWith;
15126
15127 if (audioBuffered.length) {
15128 // since the transmuxer is using the actual timing values, but the buffer is
15129 // adjusted by the timestamp offset, we must adjust the value here
15130 audioAppendStart = audioBuffered.end(audioBuffered.length - 1) - this.sourceUpdater_.audioTimestampOffset();
15131 }
15132
15133 if (videoBuffered.length) {
15134 gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_, // since the transmuxer is using the actual timing values, but the time is
15135 // adjusted by the timestamp offset, we must adjust the value here
15136 this.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);
15137 }
15138
15139 return {
15140 requestId: 'segment-loader-' + Math.random(),
15141 // resolve the segment URL relative to the playlist
15142 uri: segment.resolvedUri,
15143 // the segment's mediaIndex at the time it was requested
15144 mediaIndex: mediaIndex,
15145 // whether or not to update the SegmentLoader's state with this
15146 // segment's mediaIndex
15147 isSyncRequest: isSyncRequest,
15148 startOfSegment: startOfSegment,
15149 // the segment's playlist
15150 playlist: playlist,
15151 // unencrypted bytes of the segment
15152 bytes: null,
15153 // when a key is defined for this segment, the encrypted bytes
15154 encryptedBytes: null,
15155 // The target timestampOffset for this segment when we append it
15156 // to the source buffer
15157 timestampOffset: null,
15158 // The timeline that the segment is in
15159 timeline: segment.timeline,
15160 // The expected duration of the segment in seconds
15161 duration: segment.duration,
15162 // retain the segment in case the playlist updates while doing an async process
15163 segment: segment,
15164 byteLength: 0,
15165 transmuxer: this.transmuxer_,
15166 audioAppendStart: audioAppendStart,
15167 gopsToAlignWith: gopsToAlignWith
15168 };
15169 }
15170 /**
15171 * Determines if the network has enough bandwidth to complete the current segment
15172 * request in a timely manner. If not, the request will be aborted early and bandwidth
15173 * updated to trigger a playlist switch.
15174 *
15175 * @param {Object} stats
15176 * Object containing stats about the request timing and size
15177 * @private
15178 */
15179 ;
15180
15181 _proto.earlyAbortWhenNeeded_ = function earlyAbortWhenNeeded_(stats) {
15182 if (this.vhs_.tech_.paused() || // Don't abort if the current playlist is on the lowestEnabledRendition
15183 // TODO: Replace using timeout with a boolean indicating whether this playlist is
15184 // the lowestEnabledRendition.
15185 !this.xhrOptions_.timeout || // Don't abort if we have no bandwidth information to estimate segment sizes
15186 !this.playlist_.attributes.BANDWIDTH) {
15187 return;
15188 } // Wait at least 1 second since the first byte of data has been received before
15189 // using the calculated bandwidth from the progress event to allow the bitrate
15190 // to stabilize
15191
15192
15193 if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
15194 return;
15195 }
15196
15197 var currentTime = this.currentTime_();
15198 var measuredBandwidth = stats.bandwidth;
15199 var segmentDuration = this.pendingSegment_.duration;
15200 var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
15201 // if we are only left with less than 1 second when the request completes.
15202 // A negative timeUntilRebuffer indicates we are already rebuffering
15203
15204 var timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download
15205 // is larger than the estimated time until the player runs out of forward buffer
15206
15207 if (requestTimeRemaining <= timeUntilRebuffer$1) {
15208 return;
15209 }
15210
15211 var switchCandidate = minRebufferMaxBandwidthSelector({
15212 master: this.vhs_.playlists.master,
15213 currentTime: currentTime,
15214 bandwidth: measuredBandwidth,
15215 duration: this.duration_(),
15216 segmentDuration: segmentDuration,
15217 timeUntilRebuffer: timeUntilRebuffer$1,
15218 currentTimeline: this.currentTimeline_,
15219 syncController: this.syncController_
15220 });
15221
15222 if (!switchCandidate) {
15223 return;
15224 }
15225
15226 var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;
15227 var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
15228 var minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the
15229 // potential round trip time of the new request so that we are not too aggressive
15230 // with switching to a playlist that might save us a fraction of a second.
15231
15232 if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {
15233 minimumTimeSaving = 1;
15234 }
15235
15236 if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
15237 return;
15238 } // set the bandwidth to that of the desired playlist being sure to scale by
15239 // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
15240 // don't trigger a bandwidthupdate as the bandwidth is artificial
15241
15242
15243 this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
15244 this.trigger('earlyabort');
15245 };
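// Editor's note: a hypothetical worked example (not part of the library) of
// the early-abort arithmetic above. Suppose requestTimeRemaining === 8s
// while the forward buffer only covers timeUntilRebuffer === 3s (already
// reduced by the 1s safety margin). Then rebufferingImpact === 8 - 3 === 5s;
// if the switch candidate's estimated rebufferingImpact is 2s,
// timeSavedBySwitching === 3s, which clears minimumTimeSaving (0.5s, or 1s
// when already rebuffering), so the request is aborted and 'earlyabort'
// fires.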
15246
15247 _proto.handleAbort_ = function handleAbort_() {
15248 this.mediaRequestsAborted += 1;
15249 }
15250 /**
15251 * XHR `progress` event handler
15252 *
15253 * @param {Event} event
15254 * The XHR `progress` event
15255 * @param {Object} simpleSegment
15256 * A simplified segment object copy
15257 * @private
15258 */
15259 ;
15260
15261 _proto.handleProgress_ = function handleProgress_(event, simpleSegment) {
15262 this.earlyAbortWhenNeeded_(simpleSegment.stats);
15263
15264 if (this.checkForAbort_(simpleSegment.requestId)) {
15265 return;
15266 }
15267
15268 this.trigger('progress');
15269 };
15270
15271 _proto.handleTrackInfo_ = function handleTrackInfo_(simpleSegment, trackInfo) {
15272 this.earlyAbortWhenNeeded_(simpleSegment.stats);
15273
15274 if (this.checkForAbort_(simpleSegment.requestId)) {
15275 return;
15276 }
15277
15278 if (this.checkForIllegalMediaSwitch(trackInfo)) {
15279 return;
15280 }
15281
15282 trackInfo = trackInfo || {}; // When we have track info, determine what media types this loader is dealing with.
15283 // Guard against cases where we're not getting track info at all until we are
15284 // certain that all streams will provide it.
15285
15286 if (!shallowEqual(this.currentMediaInfo_, trackInfo)) {
15287 this.appendInitSegment_ = {
15288 audio: true,
15289 video: true
15290 };
15291 this.startingMediaInfo_ = trackInfo;
15292 this.currentMediaInfo_ = trackInfo;
15293 this.logger_('trackinfo update', trackInfo);
15294 this.trigger('trackinfo');
15295 } // trackinfo may cause an abort if the trackinfo
15296 // causes a codec change to an unsupported codec.
15297
15298
15299 if (this.checkForAbort_(simpleSegment.requestId)) {
15300 return;
15301 } // set trackinfo on the pending segment so that
15302 // it can append.
15303
15304
15305 this.pendingSegment_.trackInfo = trackInfo; // check if any calls were waiting on the track info
15306
15307 if (this.hasEnoughInfoToAppend_()) {
15308 this.processCallQueue_();
15309 }
15310 };
15311
15312 _proto.handleTimingInfo_ = function handleTimingInfo_(simpleSegment, mediaType, timeType, time) {
15313 this.earlyAbortWhenNeeded_(simpleSegment.stats);
15314
15315 if (this.checkForAbort_(simpleSegment.requestId)) {
15316 return;
15317 }
15318
15319 var segmentInfo = this.pendingSegment_;
15320 var timingInfoProperty = timingInfoPropertyForMedia(mediaType);
15321 segmentInfo[timingInfoProperty] = segmentInfo[timingInfoProperty] || {};
15322 segmentInfo[timingInfoProperty][timeType] = time;
15323 this.logger_("timinginfo: " + mediaType + " - " + timeType + " - " + time); // check if any calls were waiting on the timing info
15324
15325 if (this.hasEnoughInfoToAppend_()) {
15326 this.processCallQueue_();
15327 }
15328 };
15329
15330 _proto.handleCaptions_ = function handleCaptions_(simpleSegment, captionData) {
15331 var _this3 = this;
15332
15333 this.earlyAbortWhenNeeded_(simpleSegment.stats);
15334
15335 if (this.checkForAbort_(simpleSegment.requestId)) {
15336 return;
15337 } // This could only happen with fmp4 segments, but
15338 // should still not happen in general
15339
15340
15341 if (captionData.length === 0) {
15342 this.logger_('SegmentLoader received no captions from a caption event');
15343 return;
15344 }
15345
15346 var segmentInfo = this.pendingSegment_; // Wait until we have some video data so that caption timing
15347 // can be adjusted by the timestamp offset
15348
15349 if (!segmentInfo.hasAppendedData_) {
15350 this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));
15351 return;
15352 }
15353
15354 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();
15355 var captionTracks = {}; // get total start/end and captions for each track/stream
15356
15357 captionData.forEach(function (caption) {
15358 // caption.stream is actually a track name...
15359 // set to the existing values in tracks or default values
15360 captionTracks[caption.stream] = captionTracks[caption.stream] || {
15361 // Infinity, as any other value will be less than this
15362 startTime: Infinity,
15363 captions: [],
15364 // 0, as any other value will be more than this
15365 endTime: 0
15366 };
15367 var captionTrack = captionTracks[caption.stream];
15368 captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);
15369 captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);
15370 captionTrack.captions.push(caption);
15371 });
15372 Object.keys(captionTracks).forEach(function (trackName) {
15373 var _captionTracks$trackN = captionTracks[trackName],
15374 startTime = _captionTracks$trackN.startTime,
15375 endTime = _captionTracks$trackN.endTime,
15376 captions = _captionTracks$trackN.captions;
15377 var inbandTextTracks = _this3.inbandTextTracks_;
15378
15379 _this3.logger_("adding cues from " + startTime + " -> " + endTime + " for " + trackName);
15380
15381 createCaptionsTrackIfNotExists(inbandTextTracks, _this3.vhs_.tech_, trackName); // clear out any cues that start and end at the same time period for the same track.
15382 // We do this because a rendition change that also changes the timescale for captions
15383 // will result in captions being re-parsed for certain segments. If we add them again
15384 // without clearing we will have two of the same captions visible.
15385
15386 removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);
15387 addCaptionData({
15388 captionArray: captions,
15389 inbandTextTracks: inbandTextTracks,
15390 timestampOffset: timestampOffset
15391 });
15392 }); // Reset stored captions since we added parsed
15393 // captions to a text track at this point
15394
15395 if (this.transmuxer_) {
15396 this.transmuxer_.postMessage({
15397 action: 'clearParsedMp4Captions'
15398 });
15399 }
15400 };
15401
15402 _proto.handleId3_ = function handleId3_(simpleSegment, id3Frames, dispatchType) {
15403 this.earlyAbortWhenNeeded_(simpleSegment.stats);
15404
15405 if (this.checkForAbort_(simpleSegment.requestId)) {
15406 return;
15407 }
15408
15409 var segmentInfo = this.pendingSegment_; // we need to have appended data in order for the timestamp offset to be set
15410
15411 if (!segmentInfo.hasAppendedData_) {
15412 this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));
15413 return;
15414 }
15415
15416 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset(); // There's potentially an issue where we could double add metadata if there's a muxed
15417 // audio/video source with a metadata track, and an alt audio with a metadata track.
15418 // However, this probably won't happen, and if it does it can be handled then.
15419
15420 createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.vhs_.tech_);
15421 addMetadata({
15422 inbandTextTracks: this.inbandTextTracks_,
15423 metadataArray: id3Frames,
15424 timestampOffset: timestampOffset,
15425 videoDuration: this.duration_()
15426 });
15427 };
15428
15429 _proto.processMetadataQueue_ = function processMetadataQueue_() {
15430 this.metadataQueue_.id3.forEach(function (fn) {
15431 return fn();
15432 });
15433 this.metadataQueue_.caption.forEach(function (fn) {
15434 return fn();
15435 });
15436 this.metadataQueue_.id3 = [];
15437 this.metadataQueue_.caption = [];
15438 };
15439
15440 _proto.processCallQueue_ = function processCallQueue_() {
15441 var callQueue = this.callQueue_; // Clear out the queue before the queued functions are run, since some of the
15442 // functions may check the length of the call queue and default to pushing themselves
15443 // back onto the queue.
15444
15445 this.callQueue_ = [];
15446 callQueue.forEach(function (fun) {
15447 return fun();
15448 });
15449 };
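// Editor's note: a minimal sketch (not part of the library) of the
// clear-before-run pattern shared by processCallQueue_ and processLoadQueue_.
// Swapping the queue out first means a callback that re-queues itself lands
// on the fresh queue instead of being re-run within the same pass:
//
//   var queue = [];
//   var processQueue = function () {
//     var pending = queue;
//     queue = [];
//     pending.forEach(function (fn) { fn(); });
//   };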
15450
15451 _proto.processLoadQueue_ = function processLoadQueue_() {
15452 var loadQueue = this.loadQueue_; // Clear out the queue before the queued functions are run, since some of the
15453 // functions may check the length of the load queue and default to pushing themselves
15454 // back onto the queue.
15455
15456 this.loadQueue_ = [];
15457 loadQueue.forEach(function (fun) {
15458 return fun();
15459 });
15460 }
15461 /**
15462 * Determines whether the loader has enough info to load the next segment.
15463 *
15464 * @return {boolean}
15465 * Whether or not the loader has enough info to load the next segment
15466 */
15467 ;
15468
15469 _proto.hasEnoughInfoToLoad_ = function hasEnoughInfoToLoad_() {
15470 // Since primary timing goes by video, only the audio loader potentially needs to wait
15471 // to load.
15472 if (this.loaderType_ !== 'audio') {
15473 return true;
15474 }
15475
15476 var segmentInfo = this.pendingSegment_; // A fill buffer must have already run to establish a pending segment before there's
15477 // enough info to load.
15478
15479 if (!segmentInfo) {
15480 return false;
15481 } // The first segment can and should be loaded immediately so that source buffers are
15482 // created together (before appending). Source buffer creation uses the presence of
15483 // audio and video data to determine whether to create audio/video source buffers, and
15484 // uses processed (transmuxed or parsed) media to determine the types required.
15485
15486
15487 if (!this.currentMediaInfo_) {
15488 return true;
15489 }
15490
15491 if ( // Technically, instead of waiting to load a segment on timeline changes, a segment
15492 // can be requested and downloaded and only wait before it is transmuxed or parsed.
15493 // But in practice, there are a few reasons why it is better to wait until a loader
15494 // is ready to append that segment before requesting and downloading:
15495 //
15496 // 1. Because audio and main loaders cross discontinuities together, if this loader
15497 // is waiting for the other to catch up, then instead of requesting another
15498 // segment and using up more bandwidth, by not yet loading, more bandwidth is
15499 // allotted to the loader currently behind.
15500 // 2. media-segment-request doesn't have to have logic to consider whether a segment
15501 // is ready to be processed or not, isolating the queueing behavior to the loader.
15502 // 3. The audio loader bases some of its segment properties on timing information
15503 // provided by the main loader, meaning that, if the logic for waiting on
15504 // processing was in media-segment-request, then it would also need to know how
15505 // to re-generate the segment information after the main loader caught up.
15506 shouldWaitForTimelineChange({
15507 timelineChangeController: this.timelineChangeController_,
15508 currentTimeline: this.currentTimeline_,
15509 segmentTimeline: segmentInfo.timeline,
15510 loaderType: this.loaderType_,
15511 audioDisabled: this.audioDisabled_
15512 })) {
15513 return false;
15514 }
15515
15516 return true;
15517 };
15518
15519 _proto.hasEnoughInfoToAppend_ = function hasEnoughInfoToAppend_() {
15520 if (!this.sourceUpdater_.ready()) {
15521 return false;
15522 }
15523
15524 var segmentInfo = this.pendingSegment_; // no segment to append any data for or
15525 // we do not have information on this specific
15526 // segment yet
15527
15528 if (!segmentInfo || !segmentInfo.trackInfo) {
15529 return false;
15530 }
15531
15532 if (!this.handlePartialData_) {
15533 var _this$currentMediaInf = this.currentMediaInfo_,
15534 hasAudio = _this$currentMediaInf.hasAudio,
15535 hasVideo = _this$currentMediaInf.hasVideo,
15536 isMuxed = _this$currentMediaInf.isMuxed;
15537
15538 if (hasVideo && !segmentInfo.videoTimingInfo) {
15539 return false;
15540 } // muxed content only relies on video timing information for now.
15541
15542
15543 if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {
15544 return false;
15545 }
15546 }
15547
15548 if (shouldWaitForTimelineChange({
15549 timelineChangeController: this.timelineChangeController_,
15550 currentTimeline: this.currentTimeline_,
15551 segmentTimeline: segmentInfo.timeline,
15552 loaderType: this.loaderType_,
15553 audioDisabled: this.audioDisabled_
15554 })) {
15555 return false;
15556 }
15557
15558 return true;
15559 };
15560
15561 _proto.handleData_ = function handleData_(simpleSegment, result) {
15562 this.earlyAbortWhenNeeded_(simpleSegment.stats);
15563
15564 if (this.checkForAbort_(simpleSegment.requestId)) {
15565 return;
15566 } // If there's anything in the call queue, then this data came later and should be
15567 // executed after the calls currently queued.
15568
15569
15570 if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {
15571 this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));
15572 return;
15573 }
15574
15575 var segmentInfo = this.pendingSegment_; // update the time mapping so we can translate from display time to media time
15576
15577 this.setTimeMapping_(segmentInfo.timeline); // for tracking overall stats
15578
15579 this.updateMediaSecondsLoaded_(segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort
15580 // logic may change behavior depending on the state, and changing state too early may
15581 // inflate our estimates of bandwidth. In the future this should be re-examined to
15582 // note more granular states.
15583 // don't process and append data if the mediaSource is closed
15584
15585 if (this.mediaSource_.readyState === 'closed') {
15586 return;
15587 } // if this request included an initialization segment, save that data
15588 // to the initSegment cache
15589
15590
15591 if (simpleSegment.map) {
15592 simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // move over init segment properties to media request
15593
15594 segmentInfo.segment.map = simpleSegment.map;
15595 } // if this request included a segment key, save that data in the cache
15596
15597
15598 if (simpleSegment.key) {
15599 this.segmentKey(simpleSegment.key, true);
15600 }
15601
15602 segmentInfo.isFmp4 = simpleSegment.isFmp4;
15603 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
15604
15605 if (segmentInfo.isFmp4) {
15606 this.trigger('fmp4');
15607 segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;
15608 } else {
15609 var useVideoTimingInfo = this.loaderType_ === 'main' && this.currentMediaInfo_.hasVideo;
15610 var firstVideoFrameTimeForData;
15611
15612 if (useVideoTimingInfo) {
15613 firstVideoFrameTimeForData = this.handlePartialData_ ? result.videoFramePtsTime : segmentInfo.videoTimingInfo.start;
15614 } // Segment loader knows more about segment timing than the transmuxer (in certain
15615 // aspects), so make any changes required for a more accurate start time.
15616 // Don't set the end time yet, as the segment may not be finished processing.
15617
15618
15619 segmentInfo.timingInfo.start = this.trueSegmentStart_({
15620 currentStart: segmentInfo.timingInfo.start,
15621 playlist: segmentInfo.playlist,
15622 mediaIndex: segmentInfo.mediaIndex,
15623 currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),
15624 useVideoTimingInfo: useVideoTimingInfo,
15625 firstVideoFrameTimeForData: firstVideoFrameTimeForData,
15626 videoTimingInfo: segmentInfo.videoTimingInfo,
15627 audioTimingInfo: segmentInfo.audioTimingInfo
15628 });
15629 } // Init segments for audio and video only need to be appended in certain cases. Now
15630 // that data is about to be appended, we can check the final cases to determine
15631 // whether we should append an init segment.
15632
15633
15634 this.updateAppendInitSegmentStatus(segmentInfo, result.type); // Timestamp offset should be updated once we get new data and have its timing info,
15635 // as we use the start of the segment to offset the best guess (playlist provided)
15636 // timestamp offset.
15637
15638 this.updateSourceBufferTimestampOffset_(segmentInfo); // Save some state so that in the future anything waiting on first append (and/or
15639 // timestamp offset(s)) can process immediately. While the extra state isn't optimal,
15640 // we need some notion of whether the timestamp offset or other relevant information
15641 // has had a chance to be set.
15642
15643 segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.
15644
15645 this.processMetadataQueue_();
15646 this.appendData_(segmentInfo, result);
15647 };
15648
15649 _proto.updateAppendInitSegmentStatus = function updateAppendInitSegmentStatus(segmentInfo, type) {
15650 // alt audio doesn't manage timestamp offset
15651 if (this.loaderType_ === 'main' && typeof segmentInfo.timestampOffset === 'number' && // in the case that we're handling partial data, we don't want to append an init
15652 // segment for each chunk
15653 !segmentInfo.changedTimestampOffset) {
15654 // if the timestamp offset changed, the timeline may have changed, so we have to re-
15655 // append init segments
15656 this.appendInitSegment_ = {
15657 audio: true,
15658 video: true
15659 };
15660 }
15661
15662 if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {
15663 // make sure we append init segment on playlist changes, in case the media config
15664 // changed
15665 this.appendInitSegment_[type] = true;
15666 }
15667 };
15668
15669 _proto.getInitSegmentAndUpdateState_ = function getInitSegmentAndUpdateState_(_ref4) {
15670 var type = _ref4.type,
15671 initSegment = _ref4.initSegment,
15672 map = _ref4.map,
15673 playlist = _ref4.playlist;
15674
15675 // "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section
15676 // (Section 3) required to parse the applicable Media Segments. It applies to every
15677 // Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag
15678 // or until the end of the playlist."
15679 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5
15680 if (map) {
15681 var id = initSegmentId(map);
15682
15683 if (this.activeInitSegmentId_ === id) {
15684 // don't need to re-append the init segment if the ID matches
15685 return null;
15686 } // a map-specified init segment takes priority over any transmuxed (or otherwise
15687 // obtained) init segment
15688 //
15689 // this also caches the init segment for later use
15690
15691
15692 initSegment = this.initSegmentForMap(map, true).bytes;
15693 this.activeInitSegmentId_ = id;
15694 } // We used to always prepend init segments for video, however, that shouldn't be
15695 // necessary. Instead, we should only append on changes, similar to what we've always
15696 // done for audio. This is more important (though may not be that important) for
15697 // frame-by-frame appending for LHLS, simply because of the increased quantity of
15698 // appends.
15699
15700
15701 if (initSegment && this.appendInitSegment_[type]) {
15702 // Make sure we track the playlist that we last used for the init segment, so that
15703 // we can re-append the init segment in the event that we get data from a new
15704 // playlist. Discontinuities and track changes are handled in other sections.
15705 this.playlistOfLastInitSegment_[type] = playlist; // we should only be appending the next init segment if we detect a change, or if
15706 // the segment has a map
15707
15708 this.appendInitSegment_[type] = map ? true : false; // we need to clear out the fmp4 active init segment id, since
15709 // we are appending the muxer init segment
15710
15711 this.activeInitSegmentId_ = null;
15712 return initSegment;
15713 }
15714
15715 return null;
15716 };
15717
15718 _proto.appendToSourceBuffer_ = function appendToSourceBuffer_(_ref5) {
15719 var _this4 = this;
15720
15721 var segmentInfo = _ref5.segmentInfo,
15722 type = _ref5.type,
15723 initSegment = _ref5.initSegment,
15724 data = _ref5.data;
15725 var segments = [data];
15726 var byteLength = data.byteLength;
15727
15728 if (initSegment) {
15729 // if the media initialization segment is changing, append it before the content
15730 // segment
15731 segments.unshift(initSegment);
15732 byteLength += initSegment.byteLength;
15733 } // Technically we should be OK appending the init segment separately, however, we
15734 // haven't yet tested that, and prepending is how we have always done things.
15735
15736
15737 var bytes = concatSegments({
15738 bytes: byteLength,
15739 segments: segments
15740 });
15741 this.sourceUpdater_.appendBuffer({
15742 segmentInfo: segmentInfo,
15743 type: type,
15744 bytes: bytes
15745 }, function (error) {
15746 if (error) {
15747 _this4.error(type + " append of " + bytes.length + "b failed for segment #" + segmentInfo.mediaIndex + " in playlist " + segmentInfo.playlist.id); // If an append errors, we can't recover.
15748 // (see https://w3c.github.io/media-source/#sourcebuffer-append-error).
15749 // Trigger a special error so that it can be handled separately from normal,
15750 // recoverable errors.
15751
15752
15753 _this4.trigger('appenderror');
15754 }
15755 });
15756 };
15757
15758 _proto.handleSegmentTimingInfo_ = function handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {
15759 if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
15760 return;
15761 }
15762
15763 var segment = this.pendingSegment_.segment;
15764 var timingInfoProperty = type + "TimingInfo";
15765
15766 if (!segment[timingInfoProperty]) {
15767 segment[timingInfoProperty] = {};
15768 }
15769
15770 segment[timingInfoProperty].transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;
15771 segment[timingInfoProperty].transmuxedPresentationStart = segmentTimingInfo.start.presentation;
15772 segment[timingInfoProperty].transmuxedDecodeStart = segmentTimingInfo.start.decode;
15773 segment[timingInfoProperty].transmuxedPresentationEnd = segmentTimingInfo.end.presentation;
15774 segment[timingInfoProperty].transmuxedDecodeEnd = segmentTimingInfo.end.decode; // mainly used as a reference for debugging
15775
15776 segment[timingInfoProperty].baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;
15777 };
15778
15779 _proto.appendData_ = function appendData_(segmentInfo, result) {
15780 var type = result.type,
15781 data = result.data;
15782
15783 if (!data || !data.byteLength) {
15784 return;
15785 }
15786
15787 if (type === 'audio' && this.audioDisabled_) {
15788 return;
15789 }
15790
15791 var initSegment = this.getInitSegmentAndUpdateState_({
15792 type: type,
15793 initSegment: result.initSegment,
15794 playlist: segmentInfo.playlist,
15795 map: segmentInfo.isFmp4 ? segmentInfo.segment.map : null
15796 });
15797 this.appendToSourceBuffer_({
15798 segmentInfo: segmentInfo,
15799 type: type,
15800 initSegment: initSegment,
15801 data: data
15802 });
15803 }
15804 /**
15805 * load a specific segment from a request into the buffer
15806 *
15807 * @private
15808 */
15809 ;
15810
15811 _proto.loadSegment_ = function loadSegment_(segmentInfo) {
15812 var _this5 = this;
15813
15814 this.state = 'WAITING';
15815 this.pendingSegment_ = segmentInfo;
15816 this.trimBackBuffer_(segmentInfo);
15817
15818 if (typeof segmentInfo.timestampOffset === 'number') {
15819 if (this.transmuxer_) {
15820 this.transmuxer_.postMessage({
15821 action: 'clearAllMp4Captions'
15822 });
15823 }
15824 }
15825
15826 if (!this.hasEnoughInfoToLoad_()) {
15827 this.loadQueue_.push(function () {
15828 var buffered = _this5.buffered_();
15829
15830 if (typeof segmentInfo.timestampOffset === 'number') {
15831 // The timestamp offset needs to be regenerated, as the buffer most likely
15832 // changed since the function was added to the queue. This is expected, as the
15833 // load is usually pending the main loader appending new segments.
15834 //
15835 // Note also that the overrideCheck property is set to true. This is because
15836 // isPendingTimestampOffset is set back to false after the first set of the
15837 // timestamp offset (before it was added to the queue). But the presence of
15838 // timestamp offset as a property of segmentInfo serves as enough evidence that
15839 // it should be regenerated.
15840 segmentInfo.timestampOffset = timestampOffsetForSegment({
15841 segmentTimeline: segmentInfo.timeline,
15842 currentTimeline: _this5.currentTimeline_,
15843 startOfSegment: segmentInfo.startOfSegment,
15844 buffered: buffered,
15845 overrideCheck: true
15846 });
15847 }
15848
15849 delete segmentInfo.audioAppendStart;
15850
15851 var audioBuffered = _this5.sourceUpdater_.audioBuffered();
15852
15853 if (audioBuffered.length) {
15854 // Because the audio timestamp offset may have been changed by the main loader,
15855 // the audioAppendStart should be regenerated.
15856 //
15857 // Since the transmuxer is using the actual timing values, but the buffer is
15858 // adjusted by the timestamp offset, the value must be adjusted.
15859 segmentInfo.audioAppendStart = audioBuffered.end(audioBuffered.length - 1) - _this5.sourceUpdater_.audioTimestampOffset();
15860 }
15861
15862 _this5.updateTransmuxerAndRequestSegment_(segmentInfo);
15863 });
15864 return;
15865 }
15866
15867 this.updateTransmuxerAndRequestSegment_(segmentInfo);
15868 };
15869
15870 _proto.updateTransmuxerAndRequestSegment_ = function updateTransmuxerAndRequestSegment_(segmentInfo) {
15871 var _this6 = this;
15872
15873 // We'll update the source buffer's timestamp offset once we have transmuxed data, but
15874 // the transmuxer still needs to be updated before then.
15875 //
15876 // Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp
15877 // offset must be passed to the transmuxer for stream correcting adjustments.
15878 if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {
15879 this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was cleared
15880
15881 segmentInfo.gopsToAlignWith = [];
15882 this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start fresh
15883
15884 this.transmuxer_.postMessage({
15885 action: 'reset'
15886 });
15887 this.transmuxer_.postMessage({
15888 action: 'setTimestampOffset',
15889 timestampOffset: segmentInfo.timestampOffset
15890 });
15891 }
15892
15893 var simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);
15894 var isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist);
15895 var isWalkingForward = this.mediaIndex !== null;
15896 var isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && // currentTimeline starts at -1, so we shouldn't end the timeline when switching to 0,
15897 // the first timeline
15898 segmentInfo.timeline > 0;
15899 var isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;
15900 segmentInfo.abortRequests = mediaSegmentRequest({
15901 xhr: this.vhs_.xhr,
15902 xhrOptions: this.xhrOptions_,
15903 decryptionWorker: this.decrypter_,
15904 segment: simpleSegment,
15905 handlePartialData: this.handlePartialData_,
15906 abortFn: this.handleAbort_.bind(this),
15907 progressFn: this.handleProgress_.bind(this),
15908 trackInfoFn: this.handleTrackInfo_.bind(this),
15909 timingInfoFn: this.handleTimingInfo_.bind(this),
15910 videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),
15911 audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),
15912 captionsFn: this.handleCaptions_.bind(this),
15913 isEndOfTimeline: isEndOfTimeline,
15914 endedTimelineFn: function endedTimelineFn() {
15915 _this6.logger_('received endedtimeline callback');
15916 },
15917 id3Fn: this.handleId3_.bind(this),
15918 dataFn: this.handleData_.bind(this),
15919 doneFn: this.segmentRequestFinished_.bind(this)
15920 });
15921 }
15922 /**
15923 * trim the back buffer so that we don't have too much data
15924 * in the source buffer
15925 *
15926 * @private
15927 *
15928 * @param {Object} segmentInfo - the current segment
15929 */
15930 ;
15931
15932 _proto.trimBackBuffer_ = function trimBackBuffer_(segmentInfo) {
15933 var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10); // Chrome has a hard limit of 150MB of
15934 // buffer and a very conservative "garbage collector"
15935 // We manually clear out the old buffer to ensure
15936 // we don't trigger the QuotaExceeded error
15937 // on the source buffer during subsequent appends
15938
15939 if (removeToTime > 0) {
15940 this.remove(0, removeToTime);
15941 }
15942 }
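// Editor's note: illustrative numbers (not part of the library) for the
// trim above, assuming safeBackBufferTrimTime keeps roughly 30 seconds
// behind the playhead, clamped to at least one targetDuration back and not
// before the seekable start. With currentTime_() === 120 that resolves to
// about 90, so remove(0, 90) frees everything older than 90 seconds before
// the next append.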
15943 /**
15944 * creates a simplified copy of the segment object with just the
15945 * information necessary to perform the XHR and decryption
15946 *
15947 * @private
15948 *
15949 * @param {Object} segmentInfo - the current segment
15950 * @return {Object} a simplified segment object copy
15951 */
15952 ;
15953
15954 _proto.createSimplifiedSegmentObj_ = function createSimplifiedSegmentObj_(segmentInfo) {
15955 var segment = segmentInfo.segment;
15956 var simpleSegment = {
15957 resolvedUri: segment.resolvedUri,
15958 byterange: segment.byterange,
15959 requestId: segmentInfo.requestId,
15960 transmuxer: segmentInfo.transmuxer,
15961 audioAppendStart: segmentInfo.audioAppendStart,
15962 gopsToAlignWith: segmentInfo.gopsToAlignWith
15963 };
15964 var previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];
15965
15966 if (previousSegment && previousSegment.timeline === segment.timeline) {
15967 // The baseStartTime of a segment is used to handle rollover when probing the TS
15968 // segment to retrieve timing information. Since the probe only looks at the media's
15969 // times (e.g., PTS and DTS values of the segment), and doesn't consider the
15970 // player's time (e.g., player.currentTime()), baseStartTime should reflect the
15971 // media time as well. transmuxedDecodeEnd represents the end time of a segment, in
15972 // seconds of media time, so should be used here. The previous segment is used since
15973 // the end of the previous segment should represent the beginning of the current
15974 // segment, so long as they are on the same timeline.
15975 if (previousSegment.videoTimingInfo) {
15976 simpleSegment.baseStartTime = previousSegment.videoTimingInfo.transmuxedDecodeEnd;
15977 } else if (previousSegment.audioTimingInfo) {
15978 simpleSegment.baseStartTime = previousSegment.audioTimingInfo.transmuxedDecodeEnd;
15979 }
15980 }
15981
15982 if (segment.key) {
15983 // if the media sequence is greater than 2^32, the IV will be incorrect
15984 // assuming 10s segments, that would be about 1300 years
15985 var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
15986 simpleSegment.key = this.segmentKey(segment.key);
15987 simpleSegment.key.iv = iv;
15988 }
15989
15990 if (segment.map) {
15991 simpleSegment.map = this.initSegmentForMap(segment.map);
15992 }
15993
15994 return simpleSegment;
15995 };
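// Editor's note: an illustrative sketch (not part of the library) of the
// default AES-128 IV construction above. When the key tag omits an IV, HLS
// uses the segment's media sequence number, carried here in the lowest
// 32-bit word of a 16-byte value:
//
//   // playlist.mediaSequence 7042 + mediaIndex 3 => sequence number 7045
//   var iv = new Uint32Array([0, 0, 0, 7045]);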
15996
15997 _proto.saveTransferStats_ = function saveTransferStats_(stats) {
15998 // every request counts as a media request even if it has been aborted
15999 // or canceled due to a timeout
16000 this.mediaRequests += 1;
16001
16002 if (stats) {
16003 this.mediaBytesTransferred += stats.bytesReceived;
16004 this.mediaTransferDuration += stats.roundTripTime;
16005 }
16006 };
16007
16008 _proto.saveBandwidthRelatedStats_ = function saveBandwidthRelatedStats_(duration, stats) {
16009 // byteLength will be used for throughput, and should be based on bytes received,
16010 // which we only know at the end of the request and should reflect total bytes
16011 // downloaded rather than just bytes processed from components of the segment
16012 this.pendingSegment_.byteLength = stats.bytesReceived;
16013
16014 if (duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
16015 this.logger_("Ignoring segment's bandwidth because its duration of " + duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
16016 return;
16017 }
16018
16019 this.bandwidth = stats.bandwidth;
16020 this.roundTrip = stats.roundTripTime;
16021 };
16022
16023 _proto.handleTimeout_ = function handleTimeout_() {
16024 // although the VTT segment loader bandwidth isn't really used, it's good to
16025 // maintain functionality between segment loaders
16026 this.mediaRequestsTimedout += 1;
16027 this.bandwidth = 1;
16028 this.roundTrip = NaN;
16029 this.trigger('bandwidthupdate');
16030 }
16031 /**
16032 * Handle the callback from the segmentRequest function and set the
16033 * associated SegmentLoader state and errors if necessary
16034 *
16035 * @private
16036 */
16037 ;
16038
16039 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
16040 // TODO handle special cases, e.g., muxed audio/video but only audio in the segment
16041 // check the call queue directly since this function doesn't need to deal with any
16042 // data, and can continue even if the source buffers are not set up and we didn't get
16043 // any data from the segment
16044 if (this.callQueue_.length) {
16045 this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
16046 return;
16047 }
16048
16049 this.saveTransferStats_(simpleSegment.stats); // The request was aborted and the SegmentLoader has already been reset
16050
16051 if (!this.pendingSegment_) {
16052 return;
16053 } // the request was aborted and the SegmentLoader has already started
16054 // another request. This can happen when the timeout for an aborted
16055 // request triggers due to a limitation in the XHR library.
16056 // Do not count this as any sort of request or we risk double-counting.
16057
16058
16059 if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
16060 return;
16061 } // an error occurred from the active pendingSegment_ so reset everything
16062
16063
16064 if (error) {
16065 this.pendingSegment_ = null;
16066 this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be done
16067
16068 if (error.code === REQUEST_ERRORS.ABORTED) {
16069 return;
16070 }
16071
16072 this.pause(); // the error is really just that at least one of the requests timed-out
16073 // set the bandwidth to a very low value and trigger an ABR switch to
16074 // take emergency action
16075
16076 if (error.code === REQUEST_ERRORS.TIMEOUT) {
16077 this.handleTimeout_();
16078 return;
16079 } // if control-flow has arrived here, then the error is real.
16080 // Emit an error event to blacklist the current playlist.
16081
16082
16083 this.mediaRequestsErrored += 1;
16084 this.error(error);
16085 this.trigger('error');
16086 return;
16087 }
16088
16089 var segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request
16090 // generated for ABR purposes
16091
16092 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
16093 segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
16094
16095 if (result.gopInfo) {
16096 this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);
16097 } // Although we may have already started appending on progress, we shouldn't switch the
16098 // state away from loading until we are officially done loading the segment data.
16099
16100
16101 this.state = 'APPENDING'; // used for testing
16102
16103 this.trigger('appending');
16104 this.waitForAppendsToComplete_(segmentInfo);
16105 };
16106
16107 _proto.setTimeMapping_ = function setTimeMapping_(timeline) {
16108 var timelineMapping = this.syncController_.mappingForTimeline(timeline);
16109
16110 if (timelineMapping !== null) {
16111 this.timeMapping_ = timelineMapping;
16112 }
16113 };
16114
16115 _proto.updateMediaSecondsLoaded_ = function updateMediaSecondsLoaded_(segment) {
16116 if (typeof segment.start === 'number' && typeof segment.end === 'number') {
16117 this.mediaSecondsLoaded += segment.end - segment.start;
16118 } else {
16119 this.mediaSecondsLoaded += segment.duration;
16120 }
16121 };
16122
16123 _proto.shouldUpdateTransmuxerTimestampOffset_ = function shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {
16124 if (timestampOffset === null) {
16125 return false;
16126 } // note that we're potentially using the same timestamp offset for both video and
16127 // audio
16128
16129
16130 if (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
16131 return true;
16132 }
16133
16134 if (!this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
16135 return true;
16136 }
16137
16138 return false;
16139 };
16140
16141 _proto.trueSegmentStart_ = function trueSegmentStart_(_ref6) {
16142 var currentStart = _ref6.currentStart,
16143 playlist = _ref6.playlist,
16144 mediaIndex = _ref6.mediaIndex,
16145 firstVideoFrameTimeForData = _ref6.firstVideoFrameTimeForData,
16146 currentVideoTimestampOffset = _ref6.currentVideoTimestampOffset,
16147 useVideoTimingInfo = _ref6.useVideoTimingInfo,
16148 videoTimingInfo = _ref6.videoTimingInfo,
16149 audioTimingInfo = _ref6.audioTimingInfo;
16150
16151 if (typeof currentStart !== 'undefined') {
16152 // if start was set once, keep using it
16153 return currentStart;
16154 }
16155
16156 if (!useVideoTimingInfo) {
16157 return audioTimingInfo.start;
16158 }
16159
16160 var previousSegment = playlist.segments[mediaIndex - 1]; // The start of a segment should be the start of the first full frame contained
16161 // within that segment. Since the transmuxer maintains a cache of incomplete data
16162 // from the last frame seen, the start time may reflect a frame that starts
16163 // in the previous segment. Check for that case and ensure the start time is
16164 // accurate for the segment.
16165
16166 if (mediaIndex === 0 || !previousSegment || typeof previousSegment.start === 'undefined' || previousSegment.end !== firstVideoFrameTimeForData + currentVideoTimestampOffset) {
16167 return firstVideoFrameTimeForData;
16168 }
16169
16170 return videoTimingInfo.start;
16171 };
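// Editor's note (editorial restatement, not part of the library): once a
// start has been set, trueSegmentStart_ reuses it; audio-only content
// trusts audioTimingInfo.start; otherwise the first video frame's time is
// used, unless the previous segment's recorded end lines up exactly with
// that frame time (adjusted by the current video timestamp offset), in
// which case the frame is leftover transmuxer cache from the prior segment
// and videoTimingInfo.start is used instead.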
16172
16173 _proto.waitForAppendsToComplete_ = function waitForAppendsToComplete_(segmentInfo) {
16174 if (!this.currentMediaInfo_) {
16175 this.error({
16176 message: 'No starting media returned, likely due to an unsupported media format.',
16177 blacklistDuration: Infinity
16178 });
16179 this.trigger('error');
16180 return;
16181 } // Although transmuxing is done, appends may not yet be finished. Throw a marker
16182 // on each queue this loader is responsible for to ensure that the appends are
16183 // complete.
16184
16185
16186 var _this$currentMediaInf2 = this.currentMediaInfo_,
16187 hasAudio = _this$currentMediaInf2.hasAudio,
16188 hasVideo = _this$currentMediaInf2.hasVideo,
16189 isMuxed = _this$currentMediaInf2.isMuxed;
16190 var waitForVideo = this.loaderType_ === 'main' && hasVideo; // TODO: does this break partial support for muxed content?
16191
16192 var waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;
16193 segmentInfo.waitingOnAppends = 0; // segments with no data
16194
16195 if (!segmentInfo.hasAppendedData_) {
16196 if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {
16197 // When there's no audio or video data in the segment, there's no audio or video
16198 // timing information.
16199 //
16200 // If there's no audio or video timing information, then the timestamp offset
16201 // can't be adjusted to the appropriate value for the transmuxer and source
16202 // buffers.
16203 //
16204 // Therefore, the next segment should be used to set the timestamp offset.
16205 this.isPendingTimestampOffset_ = true;
16206 } // override settings for metadata only segments
16207
16208
16209 segmentInfo.timingInfo = {
16210 start: 0
16211 };
16212 segmentInfo.waitingOnAppends++;
16213
16214 if (!this.isPendingTimestampOffset_) {
16215 // update the timestampoffset
16216 this.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have
16217 // no video/audio data.
16218
16219 this.processMetadataQueue_();
16220 } // append is "done" instantly with no data.
16221
16222
16223 this.checkAppendsDone_(segmentInfo);
16224 return;
16225 } // Since source updater could call back synchronously, do the increments first.
16226
16227
16228 if (waitForVideo) {
16229 segmentInfo.waitingOnAppends++;
16230 }
16231
16232 if (waitForAudio) {
16233 segmentInfo.waitingOnAppends++;
16234 }
16235
16236 if (waitForVideo) {
16237 this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
16238 }
16239
16240 if (waitForAudio) {
16241 this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
16242 }
16243 };
16244
16245 _proto.checkAppendsDone_ = function checkAppendsDone_(segmentInfo) {
16246 if (this.checkForAbort_(segmentInfo.requestId)) {
16247 return;
16248 }
16249
16250 segmentInfo.waitingOnAppends--;
16251
16252 if (segmentInfo.waitingOnAppends === 0) {
16253 this.handleAppendsDone_();
16254 }
16255 };
16256
16257 _proto.checkForIllegalMediaSwitch = function checkForIllegalMediaSwitch(trackInfo) {
16258 var illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.currentMediaInfo_, trackInfo);
16259
16260 if (illegalMediaSwitchError) {
16261 this.error({
16262 message: illegalMediaSwitchError,
16263 blacklistDuration: Infinity
16264 });
16265 this.trigger('error');
16266 return true;
16267 }
16268
16269 return false;
16270 };
16271
16272 _proto.updateSourceBufferTimestampOffset_ = function updateSourceBufferTimestampOffset_(segmentInfo) {
16273 if (segmentInfo.timestampOffset === null || // we don't yet have the start for whatever media type (video or audio) has
16274 // priority, timing-wise, so we must wait
16275 typeof segmentInfo.timingInfo.start !== 'number' || // already updated the timestamp offset for this segment
16276 segmentInfo.changedTimestampOffset || // the alt audio loader should not be responsible for setting the timestamp offset
16277 this.loaderType_ !== 'main') {
16278 return;
16279 }
16280
16281 var didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that
16282 // the timing info here comes from video. In the event that the audio is longer than
16283 // the video, this will trim the start of the audio.
16284 // This also trims any offset from 0 at the beginning of the media
16285
16286 segmentInfo.timestampOffset -= segmentInfo.timingInfo.start; // In the event that there are partial segment downloads, each will try to update the
16287 // timestamp offset. Retaining this bit of state prevents us from updating in the
16288 // future (within the same segment), however, there may be a better way to handle it.
16289
16290 segmentInfo.changedTimestampOffset = true;
16291
16292 if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
16293 this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);
16294 didChange = true;
16295 }
16296
16297 if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
16298 this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);
16299 didChange = true;
16300 }
16301
16302 if (didChange) {
16303 this.trigger('timestampoffset');
16304 }
16305 };
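// Editor's note: a worked example (not part of the library) of the offset
// adjustment above. If the playlist-based guess placed
// segmentInfo.timestampOffset at 20 (where the segment should begin on the
// player timeline) and the transmuxed data reports timingInfo.start of
// 100.02 (e.g. after a discontinuity), then 20 - 100.02 === -80.02, so a
// sample stamped 100.02 inside the segment is appended at player time 20.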
16306
16307 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_(segmentInfo) {
16308 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
16309 var useVideoTimingInfo = this.loaderType_ === 'main' && this.currentMediaInfo_.hasVideo;
16310 var prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;
16311
16312 if (!prioritizedTimingInfo) {
16313 return;
16314 }
16315
16316 segmentInfo.timingInfo.end = typeof prioritizedTimingInfo.end === 'number' ? // End time may not exist in a case where we aren't parsing the full segment (one
16317 // current example is the case of fmp4), so use the rough duration to calculate an
16318 // end time.
16319 prioritizedTimingInfo.end : prioritizedTimingInfo.start + segmentInfo.duration;
16320 }
16321 /**
16322 * callback to run when appendBuffer is finished. detects if we are
16323 * in a good state to do things with the data we got, or if we need
16324 * to wait for more
16325 *
16326 * @private
16327 */
16328 ;
16329
16330 _proto.handleAppendsDone_ = function handleAppendsDone_() {
16331 // appendsdone can cause an abort
16332 if (this.pendingSegment_) {
16333 this.trigger('appendsdone');
16334 }
16335
16336 if (!this.pendingSegment_) {
16337 this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in
16338 // all appending cases?
16339
16340 if (!this.paused()) {
16341 this.monitorBuffer_();
16342 }
16343
16344 return;
16345 }
16346
16347 var segmentInfo = this.pendingSegment_; // Now that the end of the segment has been reached, we can set the end time. It's
16348 // best to wait until all appends are done so we're sure that the primary media is
16349 // finished (and we have its end time).
16350
16351 this.updateTimingInfoEnd_(segmentInfo);
16352
16353 if (this.shouldSaveSegmentTimingInfo_) {
16354 // Timeline mappings should only be saved for the main loader. This is for multiple
16355 // reasons:
16356 //
16357 // 1) Only one mapping is saved per timeline, meaning that if both the audio loader
16358 // and the main loader try to save the timeline mapping, whichever comes later
16359 // will overwrite the first. In theory this is OK, as the mappings should be the
16360 // same, however, it breaks for (2)
16361 // 2) In the event of a live stream, the initial live point will make for a somewhat
16362 // arbitrary mapping. If audio and video streams are not perfectly in-sync, then
16363 // the mapping will be off for one of the streams, dependent on which one was
16364 // first saved (see (1)).
16365 // 3) Primary timing goes by video in VHS, so the mapping should be video.
16366 //
16367 // Since the audio loader will wait for the main loader to load the first segment,
16368 // the main loader will save the first timeline mapping, and ensure that there won't
16369 // be a case where audio loads two segments without saving a mapping (thus leading
16370 // to missing segment timing info).
16371 this.syncController_.saveSegmentTimingInfo({
16372 segmentInfo: segmentInfo,
16373 shouldSaveTimelineMapping: this.loaderType_ === 'main'
16374 });
16375 }
16376
16377 this.logger_(segmentInfoString(segmentInfo));
16378 var segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);
16379
16380 if (segmentDurationMessage) {
16381 if (segmentDurationMessage.severity === 'warn') {
16382 videojs__default['default'].log.warn(segmentDurationMessage.message);
16383 } else {
16384 this.logger_(segmentDurationMessage.message);
16385 }
16386 }
16387
16388 this.recordThroughput_(segmentInfo);
16389 this.pendingSegment_ = null;
16390 this.state = 'READY'; // TODO minor, but for partial segment downloads, this can be done earlier to save
16391 // on bandwidth and download time
16392
16393 if (segmentInfo.isSyncRequest) {
16394 this.trigger('syncinfoupdate');
16395 return;
16396 }
16397
16398 this.addSegmentMetadataCue_(segmentInfo);
16399 this.fetchAtBuffer_ = true;
16400
16401 if (this.currentTimeline_ !== segmentInfo.timeline) {
16402 this.timelineChangeController_.lastTimelineChange({
16403 type: this.loaderType_,
16404 from: this.currentTimeline_,
16405 to: segmentInfo.timeline
16406 }); // If audio is not disabled, the main segment loader is responsible for updating
16407 // the audio timeline as well. If the content is video only, this won't have any
16408 // impact.
16409
16410 if (this.loaderType_ === 'main' && !this.audioDisabled_) {
16411 this.timelineChangeController_.lastTimelineChange({
16412 type: 'audio',
16413 from: this.currentTimeline_,
16414 to: segmentInfo.timeline
16415 });
16416 }
16417 }
16418
16419 this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before
    // the following conditional, otherwise it may consider this a bad "guess"
16421 // and attempt to resync when the post-update seekable window and live
16422 // point would mean that this was the perfect segment to fetch
16423
16424 this.trigger('syncinfoupdate');
16425 var segment = segmentInfo.segment; // If we previously appended a segment that ends more than 3 targetDurations before
    // the currentTime_, then our conservative guess was too conservative.
16427 // In that case, reset the loader state so that we try to use any information gained
16428 // from the previous request to create a new, more accurate, sync-point.
16429
16430 if (segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3) {
16431 this.resetEverything();
16432 return;
16433 }
16434
    var isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we're walking forward through the
    // segments; after a sync request we only have a conservative guess of our
    // position.
16437
16438 if (isWalkingForward) {
16439 this.trigger('bandwidthupdate');
16440 }
16441
16442 this.trigger('progress');
16443 this.mediaIndex = segmentInfo.mediaIndex; // any time an update finishes and the last segment is in the
16444 // buffer, end the stream. this ensures the "ended" event will
16445 // fire if playback reaches that point.
16446
16447 if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist)) {
16448 this.endOfStream();
16449 } // used for testing
16450
16451
16452 this.trigger('appended');
16453
16454 if (!this.paused()) {
16455 this.monitorBuffer_();
16456 }
16457 }
16458 /**
16459 * Records the current throughput of the decrypt, transmux, and append
   * portion of the segment pipeline. `throughput.rate` is the cumulative
16461 * moving average of the throughput. `throughput.count` is the number of
16462 * data points in the average.
16463 *
16464 * @private
16465 * @param {Object} segmentInfo the object returned by loadSegment
16466 */
16467 ;
16468
16469 _proto.recordThroughput_ = function recordThroughput_(segmentInfo) {
16470 if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
16471 this.logger_("Ignoring segment's throughput because its duration of " + segmentInfo.duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
16472 return;
16473 }
16474
16475 var rate = this.throughput.rate; // Add one to the time to ensure that we don't accidentally attempt to divide
16476 // by zero in the case where the throughput is ridiculously high
16477
16478 var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1; // Multiply by 8000 to convert from bytes/millisecond to bits/second
16479
16480 var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000); // This is just a cumulative moving average calculation:
16481 // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
16482
16483 this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
16484 }
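  // Illustrative sketch (not part of the library; sample values invented): the
  // cumulative moving average above behaves like so for two samples measuring
  // bytes over milliseconds:
  //
  //   var throughput = {rate: 0, count: 0};
  //   [{byteLength: 125000, milliseconds: 1000}, {byteLength: 375000, milliseconds: 1000}].forEach(function (sample) {
  //     var bitsPerSecond = Math.floor(sample.byteLength / sample.milliseconds * 8 * 1000);
  //     throughput.rate += (bitsPerSecond - throughput.rate) / ++throughput.count;
  //   });
  //   // throughput.rate === 2000000, the average of 1 Mbps and 3 Mbps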
16485 /**
16486 * Adds a cue to the segment-metadata track with some metadata information about the
16487 * segment
16488 *
16489 * @private
16490 * @param {Object} segmentInfo
16491 * the object returned by loadSegment
16492 * @method addSegmentMetadataCue_
16493 */
16494 ;
16495
16496 _proto.addSegmentMetadataCue_ = function addSegmentMetadataCue_(segmentInfo) {
16497 if (!this.segmentMetadataTrack_) {
16498 return;
16499 }
16500
16501 var segment = segmentInfo.segment;
16502 var start = segment.start;
16503 var end = segment.end; // Do not try adding the cue if the start and end times are invalid.
16504
16505 if (!finite(start) || !finite(end)) {
16506 return;
16507 }
16508
16509 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
16510 var Cue = window__default['default'].WebKitDataCue || window__default['default'].VTTCue;
16511 var value = {
16512 custom: segment.custom,
16513 dateTimeObject: segment.dateTimeObject,
16514 dateTimeString: segment.dateTimeString,
16515 bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
16516 resolution: segmentInfo.playlist.attributes.RESOLUTION,
16517 codecs: segmentInfo.playlist.attributes.CODECS,
16518 byteLength: segmentInfo.byteLength,
16519 uri: segmentInfo.uri,
16520 timeline: segmentInfo.timeline,
16521 playlist: segmentInfo.playlist.id,
16522 start: start,
16523 end: end
16524 };
16525 var data = JSON.stringify(value);
16526 var cue = new Cue(start, end, data); // Attach the metadata to the value property of the cue to keep consistency between
16527 // the differences of WebKitDataCue in safari and VTTCue in other browsers
16528
16529 cue.value = value;
16530 this.segmentMetadataTrack_.addCue(cue);
16531 };
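  // Illustrative note (not part of the library): each cue added above carries
  // the metadata on its value property, so a consumer of the segment-metadata
  // text track can read, for example:
  //
  //   track.activeCues[0].value.bandwidth; // BANDWIDTH of the segment's playlist
  //   track.activeCues[0].value.start;     // the segment's start time in seconds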
16532
16533 return SegmentLoader;
16534}(videojs__default['default'].EventTarget);
16535
16536function noop() {}
16537
16538var toTitleCase = function toTitleCase(string) {
16539 if (typeof string !== 'string') {
16540 return string;
16541 }
16542
16543 return string.replace(/./, function (w) {
16544 return w.toUpperCase();
16545 });
16546};
16547
16548var bufferTypes = ['video', 'audio'];
16549
16550var _updating = function updating(type, sourceUpdater) {
16551 var sourceBuffer = sourceUpdater[type + "Buffer"];
16552 return sourceBuffer && sourceBuffer.updating || sourceUpdater.queuePending[type];
16553};
16554
16555var nextQueueIndexOfType = function nextQueueIndexOfType(type, queue) {
16556 for (var i = 0; i < queue.length; i++) {
16557 var queueEntry = queue[i];
16558
16559 if (queueEntry.type === 'mediaSource') {
16560 // If the next entry is a media source entry (uses multiple source buffers), block
16561 // processing to allow it to go through first.
16562 return null;
16563 }
16564
16565 if (queueEntry.type === type) {
16566 return i;
16567 }
16568 }
16569
16570 return null;
16571};
16572
16573var shiftQueue = function shiftQueue(type, sourceUpdater) {
16574 if (sourceUpdater.queue.length === 0) {
16575 return;
16576 }
16577
16578 var queueIndex = 0;
16579 var queueEntry = sourceUpdater.queue[queueIndex];
16580
16581 if (queueEntry.type === 'mediaSource') {
16582 if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
16583 sourceUpdater.queue.shift();
16584 queueEntry.action(sourceUpdater);
16585
16586 if (queueEntry.doneFn) {
16587 queueEntry.doneFn();
16588 } // Only specific source buffer actions must wait for async updateend events. Media
16589 // Source actions process synchronously. Therefore, both audio and video source
16590 // buffers are now clear to process the next queue entries.
16591
16592
16593 shiftQueue('audio', sourceUpdater);
16594 shiftQueue('video', sourceUpdater);
16595 } // Media Source actions require both source buffers, so if the media source action
16596 // couldn't process yet (because one or both source buffers are busy), block other
16597 // queue actions until both are available and the media source action can process.
16598
16599
16600 return;
16601 }
16602
16603 if (type === 'mediaSource') {
16604 // If the queue was shifted by a media source action (this happens when pushing a
16605 // media source action onto the queue), then it wasn't from an updateend event from an
16606 // audio or video source buffer, so there's no change from previous state, and no
16607 // processing should be done.
16608 return;
16609 } // Media source queue entries don't need to consider whether the source updater is
16610 // started (i.e., source buffers are created) as they don't need the source buffers, but
16611 // source buffer queue entries do.
16612
16613
16614 if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || _updating(type, sourceUpdater)) {
16615 return;
16616 }
16617
16618 if (queueEntry.type !== type) {
16619 queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);
16620
16621 if (queueIndex === null) {
16622 // Either there's no queue entry that uses this source buffer type in the queue, or
16623 // there's a media source queue entry before the next entry of this type, in which
16624 // case wait for that action to process first.
16625 return;
16626 }
16627
16628 queueEntry = sourceUpdater.queue[queueIndex];
16629 }
16630
16631 sourceUpdater.queue.splice(queueIndex, 1);
16632 queueEntry.action(type, sourceUpdater);
16633
16634 if (!queueEntry.doneFn) {
16635 // synchronous operation, process next entry
16636 shiftQueue(type, sourceUpdater);
16637 return;
16638 } // asynchronous operation, so keep a record that this source buffer type is in use
16639
16640
16641 sourceUpdater.queuePending[type] = queueEntry;
16642};
16643
16644var cleanupBuffer = function cleanupBuffer(type, sourceUpdater) {
16645 var buffer = sourceUpdater[type + "Buffer"];
16646 var titleType = toTitleCase(type);
16647
16648 if (!buffer) {
16649 return;
16650 }
16651
16652 buffer.removeEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
16653 buffer.removeEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
16654 sourceUpdater.codecs[type] = null;
16655 sourceUpdater[type + "Buffer"] = null;
16656};
16657
16658var inSourceBuffers = function inSourceBuffers(mediaSource, sourceBuffer) {
16659 return mediaSource && sourceBuffer && Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
16660};
16661
16662var actions = {
16663 appendBuffer: function appendBuffer(bytes, segmentInfo) {
16664 return function (type, sourceUpdater) {
16665 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
16666 // or the media source does not contain this source buffer.
16667
16668 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
16669 return;
16670 }
16671
16672 sourceUpdater.logger_("Appending segment " + segmentInfo.mediaIndex + "'s " + bytes.length + " bytes to " + type + "Buffer");
16673 sourceBuffer.appendBuffer(bytes);
16674 };
16675 },
16676 remove: function remove(start, end) {
16677 return function (type, sourceUpdater) {
16678 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
16679 // or the media source does not contain this source buffer.
16680
16681 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
16682 return;
16683 }
16684
16685 sourceUpdater.logger_("Removing " + start + " to " + end + " from " + type + "Buffer");
16686 sourceBuffer.remove(start, end);
16687 };
16688 },
16689 timestampOffset: function timestampOffset(offset) {
16690 return function (type, sourceUpdater) {
16691 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
16692 // or the media source does not contain this source buffer.
16693
16694 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
16695 return;
16696 }
16697
16698 sourceUpdater.logger_("Setting " + type + "timestampOffset to " + offset);
16699 sourceBuffer.timestampOffset = offset;
16700 };
16701 },
16702 callback: function callback(_callback) {
16703 return function (type, sourceUpdater) {
16704 _callback();
16705 };
16706 },
16707 endOfStream: function endOfStream(error) {
16708 return function (sourceUpdater) {
16709 if (sourceUpdater.mediaSource.readyState !== 'open') {
16710 return;
16711 }
16712
16713 sourceUpdater.logger_("Calling mediaSource endOfStream(" + (error || '') + ")");
16714
16715 try {
16716 sourceUpdater.mediaSource.endOfStream(error);
16717 } catch (e) {
16718 videojs__default['default'].log.warn('Failed to call media source endOfStream', e);
16719 }
16720 };
16721 },
16722 duration: function duration(_duration) {
16723 return function (sourceUpdater) {
16724 sourceUpdater.logger_("Setting mediaSource duration to " + _duration);
16725
16726 try {
16727 sourceUpdater.mediaSource.duration = _duration;
16728 } catch (e) {
16729 videojs__default['default'].log.warn('Failed to set media source duration', e);
16730 }
16731 };
16732 },
16733 abort: function abort() {
16734 return function (type, sourceUpdater) {
16735 if (sourceUpdater.mediaSource.readyState !== 'open') {
16736 return;
16737 }
16738
16739 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
16740 // or the media source does not contain this source buffer.
16741
16742 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
16743 return;
16744 }
16745
16746 sourceUpdater.logger_("calling abort on " + type + "Buffer");
16747
16748 try {
16749 sourceBuffer.abort();
16750 } catch (e) {
16751 videojs__default['default'].log.warn("Failed to abort on " + type + "Buffer", e);
16752 }
16753 };
16754 },
16755 addSourceBuffer: function addSourceBuffer(type, codec) {
16756 return function (sourceUpdater) {
16757 var titleType = toTitleCase(type);
16758 var mime = codecs_js.getMimeForCodec(codec);
16759 sourceUpdater.logger_("Adding " + type + "Buffer with codec " + codec + " to mediaSource");
16760 var sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
16761 sourceBuffer.addEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
16762 sourceBuffer.addEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
16763 sourceUpdater.codecs[type] = codec;
16764 sourceUpdater[type + "Buffer"] = sourceBuffer;
16765 };
16766 },
16767 removeSourceBuffer: function removeSourceBuffer(type) {
16768 return function (sourceUpdater) {
16769 var sourceBuffer = sourceUpdater[type + "Buffer"];
16770 cleanupBuffer(type, sourceUpdater); // can't do anything if the media source / source buffer is null
16771 // or the media source does not contain this source buffer.
16772
16773 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
16774 return;
16775 }
16776
16777 sourceUpdater.logger_("Removing " + type + "Buffer with codec " + sourceUpdater.codecs[type] + " from mediaSource");
16778
16779 try {
16780 sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
16781 } catch (e) {
16782 videojs__default['default'].log.warn("Failed to removeSourceBuffer " + type + "Buffer", e);
16783 }
16784 };
16785 },
16786 changeType: function changeType(codec) {
16787 return function (type, sourceUpdater) {
16788 var sourceBuffer = sourceUpdater[type + "Buffer"];
16789 var mime = codecs_js.getMimeForCodec(codec); // can't do anything if the media source / source buffer is null
16790 // or the media source does not contain this source buffer.
16791
16792 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
16793 return;
16794 } // do not update codec if we don't need to.
16795
16796
16797 if (sourceUpdater.codecs[type] === codec) {
16798 return;
16799 }
16800
16801 sourceUpdater.logger_("changing " + type + "Buffer codec from " + sourceUpdater.codecs[type] + " to " + codec);
16802 sourceBuffer.changeType(mime);
16803 sourceUpdater.codecs[type] = codec;
16804 };
16805 }
16806};
16807
16808var pushQueue = function pushQueue(_ref) {
16809 var type = _ref.type,
16810 sourceUpdater = _ref.sourceUpdater,
16811 action = _ref.action,
16812 doneFn = _ref.doneFn,
16813 name = _ref.name;
16814 sourceUpdater.queue.push({
16815 type: type,
16816 action: action,
16817 doneFn: doneFn,
16818 name: name
16819 });
16820 shiftQueue(type, sourceUpdater);
16821};
16822
16823var onUpdateend = function onUpdateend(type, sourceUpdater) {
16824 return function (e) {
    // Although there should, in theory, be a pending action for any updateend received,
    // some actions may trigger updateend events that aren't clearly defined in
16827 // the w3c spec. For instance, setting the duration on the media source may trigger
16828 // updateend events on source buffers. This does not appear to be in the spec. As such,
16829 // if we encounter an updateend without a corresponding pending action from our queue
16830 // for that source buffer type, process the next action.
16831 if (sourceUpdater.queuePending[type]) {
16832 var doneFn = sourceUpdater.queuePending[type].doneFn;
16833 sourceUpdater.queuePending[type] = null;
16834
16835 if (doneFn) {
16836 // if there's an error, report it
16837 doneFn(sourceUpdater[type + "Error_"]);
16838 }
16839 }
16840
16841 shiftQueue(type, sourceUpdater);
16842 };
16843};
16844/**
16845 * A queue of callbacks to be serialized and applied when a
16846 * MediaSource and its associated SourceBuffers are not in the
16847 * updating state. It is used by the segment loader to update the
16848 * underlying SourceBuffers when new data is loaded, for instance.
16849 *
16850 * @class SourceUpdater
 * @param {MediaSource} mediaSource the MediaSource to create SourceBuffers on
16853 */
16854
16855
16856var SourceUpdater = /*#__PURE__*/function (_videojs$EventTarget) {
16857 _inheritsLoose__default['default'](SourceUpdater, _videojs$EventTarget);
16858
16859 function SourceUpdater(mediaSource) {
16860 var _this;
16861
16862 _this = _videojs$EventTarget.call(this) || this;
16863 _this.mediaSource = mediaSource;
16864
16865 _this.sourceopenListener_ = function () {
16866 return shiftQueue('mediaSource', _assertThisInitialized__default['default'](_this));
16867 };
16868
16869 _this.mediaSource.addEventListener('sourceopen', _this.sourceopenListener_);
16870
16871 _this.logger_ = logger('SourceUpdater'); // initial timestamp offset is 0
16872
16873 _this.audioTimestampOffset_ = 0;
16874 _this.videoTimestampOffset_ = 0;
16875 _this.queue = [];
16876 _this.queuePending = {
16877 audio: null,
16878 video: null
16879 };
16880 _this.delayedAudioAppendQueue_ = [];
16881 _this.videoAppendQueued_ = false;
16882 _this.codecs = {};
16883 _this.onVideoUpdateEnd_ = onUpdateend('video', _assertThisInitialized__default['default'](_this));
16884 _this.onAudioUpdateEnd_ = onUpdateend('audio', _assertThisInitialized__default['default'](_this));
16885
16886 _this.onVideoError_ = function (e) {
16887 // used for debugging
16888 _this.videoError_ = e;
16889 };
16890
16891 _this.onAudioError_ = function (e) {
16892 // used for debugging
16893 _this.audioError_ = e;
16894 };
16895
16896 _this.createdSourceBuffers_ = false;
16897 _this.initializedEme_ = false;
16898 _this.triggeredReady_ = false;
16899 return _this;
16900 }
16901
16902 var _proto = SourceUpdater.prototype;
16903
16904 _proto.initializedEme = function initializedEme() {
16905 this.initializedEme_ = true;
16906 this.triggerReady();
16907 };
16908
16909 _proto.hasCreatedSourceBuffers = function hasCreatedSourceBuffers() {
16910 // if false, likely waiting on one of the segment loaders to get enough data to create
16911 // source buffers
16912 return this.createdSourceBuffers_;
16913 };
16914
16915 _proto.hasInitializedAnyEme = function hasInitializedAnyEme() {
16916 return this.initializedEme_;
16917 };
16918
16919 _proto.ready = function ready() {
16920 return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
16921 };
16922
16923 _proto.createSourceBuffers = function createSourceBuffers(codecs) {
16924 if (this.hasCreatedSourceBuffers()) {
16925 // already created them before
16926 return;
    } // the initial addOrChangeSourceBuffers call will always
    // add two source buffers.
16929
16930
16931 this.addOrChangeSourceBuffers(codecs);
16932 this.createdSourceBuffers_ = true;
16933 this.trigger('createdsourcebuffers');
16934 this.triggerReady();
16935 };
16936
16937 _proto.triggerReady = function triggerReady() {
16938 // only allow ready to be triggered once, this prevents the case
16939 // where:
16940 // 1. we trigger createdsourcebuffers
    // 2. IE 11 synchronously initializes eme
16942 // 3. the synchronous initialization causes us to trigger ready
16943 // 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
16944 if (this.ready() && !this.triggeredReady_) {
16945 this.triggeredReady_ = true;
16946 this.trigger('ready');
16947 }
16948 }
16949 /**
16950 * Add a type of source buffer to the media source.
16951 *
16952 * @param {string} type
16953 * The type of source buffer to add.
16954 *
16955 * @param {string} codec
16956 * The codec to add the source buffer with.
16957 */
16958 ;
16959
16960 _proto.addSourceBuffer = function addSourceBuffer(type, codec) {
16961 pushQueue({
16962 type: 'mediaSource',
16963 sourceUpdater: this,
16964 action: actions.addSourceBuffer(type, codec),
16965 name: 'addSourceBuffer'
16966 });
16967 }
16968 /**
16969 * call abort on a source buffer.
16970 *
16971 * @param {string} type
16972 * The type of source buffer to call abort on.
16973 */
16974 ;
16975
16976 _proto.abort = function abort(type) {
16977 pushQueue({
16978 type: type,
16979 sourceUpdater: this,
16980 action: actions.abort(type),
16981 name: 'abort'
16982 });
16983 }
16984 /**
16985 * Call removeSourceBuffer and remove a specific type
16986 * of source buffer on the mediaSource.
16987 *
16988 * @param {string} type
16989 * The type of source buffer to remove.
16990 */
16991 ;
16992
16993 _proto.removeSourceBuffer = function removeSourceBuffer(type) {
16994 if (!this.canRemoveSourceBuffer()) {
16995 videojs__default['default'].log.error('removeSourceBuffer is not supported!');
16996 return;
16997 }
16998
16999 pushQueue({
17000 type: 'mediaSource',
17001 sourceUpdater: this,
17002 action: actions.removeSourceBuffer(type),
17003 name: 'removeSourceBuffer'
17004 });
17005 }
17006 /**
17007 * Whether or not the removeSourceBuffer function is supported
17008 * on the mediaSource.
17009 *
17010 * @return {boolean}
17011 * if removeSourceBuffer can be called.
17012 */
17013 ;
17014
17015 _proto.canRemoveSourceBuffer = function canRemoveSourceBuffer() {
17016 // IE reports that it supports removeSourceBuffer, but often throws
17017 // errors when attempting to use the function. So we report that it
17018 // does not support removeSourceBuffer. As of Firefox 83 removeSourceBuffer
17019 // throws errors, so we report that it does not support this as well.
17020 return !videojs__default['default'].browser.IE_VERSION && !videojs__default['default'].browser.IS_FIREFOX && window__default['default'].MediaSource && window__default['default'].MediaSource.prototype && typeof window__default['default'].MediaSource.prototype.removeSourceBuffer === 'function';
17021 }
17022 /**
17023 * Whether or not the changeType function is supported
17024 * on our SourceBuffers.
17025 *
17026 * @return {boolean}
17027 * if changeType can be called.
17028 */
17029 ;
17030
17031 SourceUpdater.canChangeType = function canChangeType() {
17032 return window__default['default'].SourceBuffer && window__default['default'].SourceBuffer.prototype && typeof window__default['default'].SourceBuffer.prototype.changeType === 'function';
17033 }
17034 /**
17035 * Whether or not the changeType function is supported
   * on our SourceBuffers. Delegates to the static SourceUpdater.canChangeType.
17037 *
17038 * @return {boolean}
17039 * if changeType can be called.
17040 */
17041 ;
17042
17043 _proto.canChangeType = function canChangeType() {
17044 return this.constructor.canChangeType();
17045 }
17046 /**
17047 * Call the changeType function on a source buffer, given the code and type.
17048 *
17049 * @param {string} type
17050 * The type of source buffer to call changeType on.
17051 *
17052 * @param {string} codec
17053 * The codec string to change type with on the source buffer.
17054 */
17055 ;
17056
17057 _proto.changeType = function changeType(type, codec) {
17058 if (!this.canChangeType()) {
17059 videojs__default['default'].log.error('changeType is not supported!');
17060 return;
17061 }
17062
17063 pushQueue({
17064 type: type,
17065 sourceUpdater: this,
17066 action: actions.changeType(codec),
17067 name: 'changeType'
17068 });
17069 }
17070 /**
17071 * Add source buffers with a codec or, if they are already created,
17072 * call changeType on source buffers using changeType.
17073 *
17074 * @param {Object} codecs
17075 * Codecs to switch to
17076 */
17077 ;
17078
17079 _proto.addOrChangeSourceBuffers = function addOrChangeSourceBuffers(codecs) {
17080 var _this2 = this;
17081
17082 if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
17083 throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
17084 }
17085
17086 Object.keys(codecs).forEach(function (type) {
17087 var codec = codecs[type];
17088
17089 if (!_this2.hasCreatedSourceBuffers()) {
17090 return _this2.addSourceBuffer(type, codec);
17091 }
17092
17093 if (_this2.canChangeType()) {
17094 _this2.changeType(type, codec);
17095 }
17096 });
17097 }
17098 /**
17099 * Queue an update to append an ArrayBuffer.
17100 *
   * @param {Object} options
   *        object containing the buffer `type` ('audio' or 'video'), the
   *        `bytes` to append, and an optional `segmentInfo`
   * @param {Function} doneFn the function to call when the append has finished
17103 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
17104 */
17105 ;
17106
17107 _proto.appendBuffer = function appendBuffer(options, doneFn) {
17108 var _this3 = this;
17109
17110 var segmentInfo = options.segmentInfo,
17111 type = options.type,
17112 bytes = options.bytes;
17113 this.processedAppend_ = true;
17114
17115 if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
17116 this.delayedAudioAppendQueue_.push([options, doneFn]);
17117 this.logger_("delayed audio append of " + bytes.length + " until video append");
17118 return;
17119 }
17120
17121 pushQueue({
17122 type: type,
17123 sourceUpdater: this,
17124 action: actions.appendBuffer(bytes, segmentInfo || {
17125 mediaIndex: -1
17126 }),
17127 doneFn: doneFn,
17128 name: 'appendBuffer'
17129 });
17130
17131 if (type === 'video') {
17132 this.videoAppendQueued_ = true;
17133
17134 if (!this.delayedAudioAppendQueue_.length) {
17135 return;
17136 }
17137
17138 var queue = this.delayedAudioAppendQueue_.slice();
17139 this.logger_("queuing delayed audio " + queue.length + " appendBuffers");
17140 this.delayedAudioAppendQueue_.length = 0;
17141 queue.forEach(function (que) {
17142 _this3.appendBuffer.apply(_this3, que);
17143 });
17144 }
17145 }
17146 /**
17147 * Get the audio buffer's buffered timerange.
17148 *
17149 * @return {TimeRange}
17150 * The audio buffer's buffered time range
17151 */
17152 ;
17153
17154 _proto.audioBuffered = function audioBuffered() {
17155 // no media source/source buffer or it isn't in the media sources
17156 // source buffer list
17157 if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
17158 return videojs__default['default'].createTimeRange();
17159 }
17160
17161 return this.audioBuffer.buffered ? this.audioBuffer.buffered : videojs__default['default'].createTimeRange();
17162 }
17163 /**
17164 * Get the video buffer's buffered timerange.
17165 *
17166 * @return {TimeRange}
17167 * The video buffer's buffered time range
17168 */
17169 ;
17170
17171 _proto.videoBuffered = function videoBuffered() {
17172 // no media source/source buffer or it isn't in the media sources
17173 // source buffer list
17174 if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
17175 return videojs__default['default'].createTimeRange();
17176 }
17177
17178 return this.videoBuffer.buffered ? this.videoBuffer.buffered : videojs__default['default'].createTimeRange();
17179 }
17180 /**
17181 * Get a combined video/audio buffer's buffered timerange.
17182 *
17183 * @return {TimeRange}
17184 * the combined time range
17185 */
17186 ;
17187
17188 _proto.buffered = function buffered() {
17189 var video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
17190 var audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;
17191
17192 if (audio && !video) {
17193 return this.audioBuffered();
17194 }
17195
17196 if (video && !audio) {
17197 return this.videoBuffered();
17198 }
17199
17200 return bufferIntersection(this.audioBuffered(), this.videoBuffered());
17201 }
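  // For example, with video buffered over [0, 20] and audio over [5, 30], the
  // combined range above is their intersection, [5, 20].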
17202 /**
17203 * Add a callback to the queue that will set duration on the mediaSource.
17204 *
17205 * @param {number} duration
17206 * The duration to set
17207 *
17208 * @param {Function} [doneFn]
17209 * function to run after duration has been set.
17210 */
17211 ;
17212
17213 _proto.setDuration = function setDuration(duration, doneFn) {
17214 if (doneFn === void 0) {
17215 doneFn = noop;
17216 }
17217
17218 // In order to set the duration on the media source, it's necessary to wait for all
17219 // source buffers to no longer be updating. "If the updating attribute equals true on
17220 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
17221 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
17222 pushQueue({
17223 type: 'mediaSource',
17224 sourceUpdater: this,
17225 action: actions.duration(duration),
17226 name: 'duration',
17227 doneFn: doneFn
17228 });
17229 }
17230 /**
17231 * Add a mediaSource endOfStream call to the queue
17232 *
17233 * @param {Error} [error]
17234 * Call endOfStream with an error
17235 *
17236 * @param {Function} [doneFn]
17237 * A function that should be called when the
17238 * endOfStream call has finished.
17239 */
17240 ;
17241
17242 _proto.endOfStream = function endOfStream(error, doneFn) {
17243 if (error === void 0) {
17244 error = null;
17245 }
17246
17247 if (doneFn === void 0) {
17248 doneFn = noop;
17249 }
17250
17251 if (typeof error !== 'string') {
17252 error = undefined;
17253 } // In order to set the duration on the media source, it's necessary to wait for all
17254 // source buffers to no longer be updating. "If the updating attribute equals true on
17255 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
17256 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
17257
17258
17259 pushQueue({
17260 type: 'mediaSource',
17261 sourceUpdater: this,
17262 action: actions.endOfStream(error),
17263 name: 'endOfStream',
17264 doneFn: doneFn
17265 });
17266 }
17267 /**
17268 * Queue an update to remove a time range from the buffer.
17269 *
17270 * @param {number} start where to start the removal
17271 * @param {number} end where to end the removal
17272 * @param {Function} [done=noop] optional callback to be executed when the remove
17273 * operation is complete
17274 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
17275 */
17276 ;
17277
17278 _proto.removeAudio = function removeAudio(start, end, done) {
17279 if (done === void 0) {
17280 done = noop;
17281 }
17282
17283 if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
17284 done();
17285 return;
17286 }
17287
17288 pushQueue({
17289 type: 'audio',
17290 sourceUpdater: this,
17291 action: actions.remove(start, end),
17292 doneFn: done,
17293 name: 'remove'
17294 });
17295 }
17296 /**
17297 * Queue an update to remove a time range from the buffer.
17298 *
17299 * @param {number} start where to start the removal
17300 * @param {number} end where to end the removal
17301 * @param {Function} [done=noop] optional callback to be executed when the remove
17302 * operation is complete
17303 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
17304 */
17305 ;
17306
17307 _proto.removeVideo = function removeVideo(start, end, done) {
17308 if (done === void 0) {
17309 done = noop;
17310 }
17311
17312 if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
17313 done();
17314 return;
17315 }
17316
17317 pushQueue({
17318 type: 'video',
17319 sourceUpdater: this,
17320 action: actions.remove(start, end),
17321 doneFn: done,
17322 name: 'remove'
17323 });
17324 }
17325 /**
17326 * Whether the underlying sourceBuffer is updating or not
17327 *
17328 * @return {boolean} the updating status of the SourceBuffer
17329 */
17330 ;
17331
17332 _proto.updating = function updating() {
17333 // the audio/video source buffer is updating
17334 if (_updating('audio', this) || _updating('video', this)) {
17335 return true;
17336 }
17337
17338 return false;
17339 }
17340 /**
17341 * Set/get the timestampoffset on the audio SourceBuffer
17342 *
17343 * @return {number} the timestamp offset
17344 */
17345 ;
17346
17347 _proto.audioTimestampOffset = function audioTimestampOffset(offset) {
17348 if (typeof offset !== 'undefined' && this.audioBuffer && // no point in updating if it's the same
17349 this.audioTimestampOffset_ !== offset) {
17350 pushQueue({
17351 type: 'audio',
17352 sourceUpdater: this,
17353 action: actions.timestampOffset(offset),
17354 name: 'timestampOffset'
17355 });
17356 this.audioTimestampOffset_ = offset;
17357 }
17358
17359 return this.audioTimestampOffset_;
17360 }
17361 /**
17362 * Set/get the timestampoffset on the video SourceBuffer
17363 *
17364 * @return {number} the timestamp offset
17365 */
17366 ;
17367
17368 _proto.videoTimestampOffset = function videoTimestampOffset(offset) {
17369 if (typeof offset !== 'undefined' && this.videoBuffer && // no point in updating if it's the same
    this.videoTimestampOffset_ !== offset) {
17371 pushQueue({
17372 type: 'video',
17373 sourceUpdater: this,
17374 action: actions.timestampOffset(offset),
17375 name: 'timestampOffset'
17376 });
17377 this.videoTimestampOffset_ = offset;
17378 }
17379
17380 return this.videoTimestampOffset_;
17381 }
17382 /**
17383 * Add a function to the queue that will be called
17384 * when it is its turn to run in the audio queue.
17385 *
17386 * @param {Function} callback
17387 * The callback to queue.
17388 */
17389 ;
17390
17391 _proto.audioQueueCallback = function audioQueueCallback(callback) {
17392 if (!this.audioBuffer) {
17393 return;
17394 }
17395
17396 pushQueue({
17397 type: 'audio',
17398 sourceUpdater: this,
17399 action: actions.callback(callback),
17400 name: 'callback'
17401 });
17402 }
17403 /**
17404 * Add a function to the queue that will be called
17405 * when it is its turn to run in the video queue.
17406 *
17407 * @param {Function} callback
17408 * The callback to queue.
17409 */
17410 ;
17411
17412 _proto.videoQueueCallback = function videoQueueCallback(callback) {
17413 if (!this.videoBuffer) {
17414 return;
17415 }
17416
17417 pushQueue({
17418 type: 'video',
17419 sourceUpdater: this,
17420 action: actions.callback(callback),
17421 name: 'callback'
17422 });
17423 }
17424 /**
17425 * dispose of the source updater and the underlying sourceBuffer
17426 */
17427 ;
17428
17429 _proto.dispose = function dispose() {
17430 var _this4 = this;
17431
17432 this.trigger('dispose');
17433 bufferTypes.forEach(function (type) {
17434 _this4.abort(type);
17435
17436 if (_this4.canRemoveSourceBuffer()) {
17437 _this4.removeSourceBuffer(type);
17438 } else {
17439 _this4[type + "QueueCallback"](function () {
17440 return cleanupBuffer(type, _this4);
17441 });
17442 }
17443 });
17444 this.videoAppendQueued_ = false;
17445 this.delayedAudioAppendQueue_.length = 0;
17446
17447 if (this.sourceopenListener_) {
17448 this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
17449 }
17450
17451 this.off();
17452 };
17453
17454 return SourceUpdater;
17455}(videojs__default['default'].EventTarget);
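// Minimal usage sketch (not part of the library; the element wiring, codec
// strings, and names like exampleAttachSourceUpdater/videoBytes are invented
// for illustration). All work funnels through the action queue, so calls made
// before 'sourceopen' fires are simply deferred:
var exampleAttachSourceUpdater = function exampleAttachSourceUpdater(video, videoBytes) {
  var mediaSource = new window__default['default'].MediaSource();
  var updater = new SourceUpdater(mediaSource);
  video.src = window__default['default'].URL.createObjectURL(mediaSource);
  updater.on('ready', function () {
    // queued until the video source buffer exists and is not updating
    updater.appendBuffer({
      type: 'video',
      bytes: videoBytes
    }, function () {
      return updater.logger_('example append finished');
    });
  });
  // no DRM in this sketch, so report EME as initialized right away; 'ready'
  // fires once both this and createSourceBuffers have happened
  updater.initializedEme();
  updater.createSourceBuffers({
    video: 'avc1.4d400d',
    audio: 'mp4a.40.2'
  });
  return updater;
};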
17456
17457var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
17458 return decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));
17459};
17460
17461var VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(function (char) {
17462 return char.charCodeAt(0);
17463}));
17464/**
17465 * An object that manages segment loading and appending.
17466 *
17467 * @class VTTSegmentLoader
17468 * @param {Object} options required and optional options
17469 * @extends videojs.EventTarget
17470 */
17471
17472var VTTSegmentLoader = /*#__PURE__*/function (_SegmentLoader) {
17473 _inheritsLoose__default['default'](VTTSegmentLoader, _SegmentLoader);
17474
17475 function VTTSegmentLoader(settings, options) {
17476 var _this;
17477
17478 if (options === void 0) {
17479 options = {};
17480 }
17481
17482 _this = _SegmentLoader.call(this, settings, options) || this; // VTT can't handle partial data
17483
17484 _this.handlePartialData_ = false; // SegmentLoader requires a MediaSource be specified or it will throw an error;
17485 // however, VTTSegmentLoader has no need of a media source, so delete the reference
17486
17487 _this.mediaSource_ = null;
17488 _this.subtitlesTrack_ = null;
17489 _this.loaderType_ = 'subtitle';
17490 _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks; // The VTT segment will have its own time mappings. Saving VTT segment timing info in
17491 // the sync controller leads to improper behavior.
17492
17493 _this.shouldSaveSegmentTimingInfo_ = false;
17494 return _this;
17495 }
17496
17497 var _proto = VTTSegmentLoader.prototype;
17498
17499 _proto.createTransmuxer_ = function createTransmuxer_() {
17500 // don't need to transmux any subtitles
17501 return null;
17502 }
17503 /**
17504 * Indicates which time ranges are buffered
17505 *
17506 * @return {TimeRange}
17507 * TimeRange object representing the current buffered ranges
17508 */
17509 ;
17510
17511 _proto.buffered_ = function buffered_() {
17512 if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
17513 return videojs__default['default'].createTimeRanges();
17514 }
17515
17516 var cues = this.subtitlesTrack_.cues;
17517 var start = cues[0].startTime;
17518 var end = cues[cues.length - 1].startTime;
17519 return videojs__default['default'].createTimeRanges([[start, end]]);
17520 }
17521 /**
17522 * Gets and sets init segment for the provided map
17523 *
17524 * @param {Object} map
17525 * The map object representing the init segment to get or set
17526 * @param {boolean=} set
17527 * If true, the init segment for the provided map should be saved
17528 * @return {Object}
17529 * map object for desired init segment
17530 */
17531 ;
17532
17533 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
17534 if (set === void 0) {
17535 set = false;
17536 }
17537
17538 if (!map) {
17539 return null;
17540 }
17541
17542 var id = initSegmentId(map);
17543 var storedMap = this.initSegments_[id];
17544
17545 if (set && !storedMap && map.bytes) {
17546 // append WebVTT line terminators to the media initialization segment if it exists
17547 // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
17548 // requires two or more WebVTT line terminators between the WebVTT header and the
17549 // rest of the file
17550 var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
17551 var combinedSegment = new Uint8Array(combinedByteLength);
17552 combinedSegment.set(map.bytes);
17553 combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
17554 this.initSegments_[id] = storedMap = {
17555 resolvedUri: map.resolvedUri,
17556 byterange: map.byterange,
17557 bytes: combinedSegment
17558 };
17559 }
17560
17561 return storedMap || map;
17562 }
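  // Illustrative note (not part of the library): given an init segment whose
  // bytes decode to "WEBVTT\nX-TIMESTAMP-MAP=...", the two appended terminators
  // guarantee the blank line required between the header and the cues that
  // follow it.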
17563 /**
17564 * Returns true if all configuration required for loading is present, otherwise false.
17565 *
   * @return {boolean} True if all the configuration is ready for loading
17567 * @private
17568 */
17569 ;
17570
17571 _proto.couldBeginLoading_ = function couldBeginLoading_() {
17572 return this.playlist_ && this.subtitlesTrack_ && !this.paused();
17573 }
17574 /**
17575 * Once all the starting parameters have been specified, begin
17576 * operation. This method should only be invoked from the INIT
17577 * state.
17578 *
17579 * @private
17580 */
17581 ;
17582
17583 _proto.init_ = function init_() {
17584 this.state = 'READY';
17585 this.resetEverything();
17586 return this.monitorBuffer_();
17587 }
17588 /**
17589 * Set a subtitle track on the segment loader to add subtitles to
17590 *
17591 * @param {TextTrack=} track
17592 * The text track to add loaded subtitles to
17593 * @return {TextTrack}
17594 * Returns the subtitles track
17595 */
17596 ;
17597
17598 _proto.track = function track(_track) {
17599 if (typeof _track === 'undefined') {
17600 return this.subtitlesTrack_;
17601 }
17602
17603 this.subtitlesTrack_ = _track; // if we were unpaused but waiting for a sourceUpdater, start
17604 // buffering now
17605
17606 if (this.state === 'INIT' && this.couldBeginLoading_()) {
17607 this.init_();
17608 }
17609
17610 return this.subtitlesTrack_;
17611 }
17612 /**
17613 * Remove any data in the source buffer between start and end times
17614 *
17615 * @param {number} start - the start time of the region to remove from the buffer
17616 * @param {number} end - the end time of the region to remove from the buffer
17617 */
17618 ;
17619
17620 _proto.remove = function remove(start, end) {
17621 removeCuesFromTrack(start, end, this.subtitlesTrack_);
17622 }
17623 /**
   * fill the buffer with segments unless the sourceBuffers are
17625 * currently updating
17626 *
17627 * Note: this function should only ever be called by monitorBuffer_
17628 * and never directly
17629 *
17630 * @private
17631 */
17632 ;
17633
17634 _proto.fillBuffer_ = function fillBuffer_() {
17635 var _this2 = this;
17636
17637 if (!this.syncPoint_) {
17638 this.syncPoint_ = this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
17639 } // see if we need to begin loading immediately
17640
17641
17642 var segmentInfo = this.checkBuffer_(this.buffered_(), this.playlist_, this.mediaIndex, this.hasPlayed_(), this.currentTime_(), this.syncPoint_);
17643 segmentInfo = this.skipEmptySegments_(segmentInfo);
17644
17645 if (!segmentInfo) {
17646 return;
17647 }
17648
17649 if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
17650 // We don't have the timestamp offset that we need to sync subtitles.
17651 // Rerun on a timestamp offset or user interaction.
17652 var checkTimestampOffset = function checkTimestampOffset() {
17653 _this2.state = 'READY';
17654
17655 if (!_this2.paused()) {
17656 // if not paused, queue a buffer check as soon as possible
17657 _this2.monitorBuffer_();
17658 }
17659 };
17660
17661 this.syncController_.one('timestampoffset', checkTimestampOffset);
17662 this.state = 'WAITING_ON_TIMELINE';
17663 return;
17664 }
17665
17666 this.loadSegment_(segmentInfo);
17667 }
17668 /**
17669 * Prevents the segment loader from requesting segments we know contain no subtitles
   * by walking forward until we find the next segment that is not yet known
   * to be empty.
17672 *
17673 * @param {Object} segmentInfo
17674 * a segment info object that describes the current segment
17675 * @return {Object}
17676 * a segment info object that describes the current segment
17677 */
17678 ;
17679
17680 _proto.skipEmptySegments_ = function skipEmptySegments_(segmentInfo) {
17681 while (segmentInfo && segmentInfo.segment.empty) {
17682 segmentInfo = this.generateSegmentInfo_(segmentInfo.playlist, segmentInfo.mediaIndex + 1, segmentInfo.startOfSegment + segmentInfo.duration, segmentInfo.isSyncRequest);
17683 }
17684
17685 return segmentInfo;
17686 };
17687
17688 _proto.stopForError = function stopForError(error) {
17689 this.error(error);
17690 this.state = 'READY';
17691 this.pause();
17692 this.trigger('error');
17693 }
17694 /**
   * append cues parsed from a completed segment request to the subtitles track
17696 *
17697 * @private
17698 */
17699 ;
17700
17701 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
17702 var _this3 = this;
17703
17704 if (!this.subtitlesTrack_) {
17705 this.state = 'READY';
17706 return;
17707 }
17708
17709 this.saveTransferStats_(simpleSegment.stats); // the request was aborted
17710
17711 if (!this.pendingSegment_) {
17712 this.state = 'READY';
17713 this.mediaRequestsAborted += 1;
17714 return;
17715 }
17716
17717 if (error) {
17718 if (error.code === REQUEST_ERRORS.TIMEOUT) {
17719 this.handleTimeout_();
17720 }
17721
17722 if (error.code === REQUEST_ERRORS.ABORTED) {
17723 this.mediaRequestsAborted += 1;
17724 } else {
17725 this.mediaRequestsErrored += 1;
17726 }
17727
17728 this.stopForError(error);
17729 return;
17730 }
17731
17732 var segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to
17733 // maintain functionality between segment loaders
17734
17735 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
17736 this.state = 'APPENDING'; // used for tests
17737
17738 this.trigger('appending');
17739 var segment = segmentInfo.segment;
17740
17741 if (segment.map) {
17742 segment.map.bytes = simpleSegment.map.bytes;
17743 }
17744
    segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded; otherwise, wait until it has finished loading
17746
17747 if (typeof window__default['default'].WebVTT !== 'function' && this.subtitlesTrack_ && this.subtitlesTrack_.tech_) {
17748 var loadHandler;
17749
17750 var errorHandler = function errorHandler() {
17751 _this3.subtitlesTrack_.tech_.off('vttjsloaded', loadHandler);
17752
17753 _this3.stopForError({
17754 message: 'Error loading vtt.js'
17755 });
17756
17757 return;
17758 };
17759
17760 loadHandler = function loadHandler() {
17761 _this3.subtitlesTrack_.tech_.off('vttjserror', errorHandler);
17762
17763 _this3.segmentRequestFinished_(error, simpleSegment, result);
17764 };
17765
17766 this.state = 'WAITING_ON_VTTJS';
17767 this.subtitlesTrack_.tech_.one('vttjsloaded', loadHandler);
17768 this.subtitlesTrack_.tech_.one('vttjserror', errorHandler);
17769 return;
17770 }
17771
17772 segment.requested = true;
17773
17774 try {
17775 this.parseVTTCues_(segmentInfo);
17776 } catch (e) {
17777 this.stopForError({
17778 message: e.message
17779 });
17780 return;
17781 }
17782
17783 this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);
17784
17785 if (segmentInfo.cues.length) {
17786 segmentInfo.timingInfo = {
17787 start: segmentInfo.cues[0].startTime,
17788 end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
17789 };
17790 } else {
17791 segmentInfo.timingInfo = {
17792 start: segmentInfo.startOfSegment,
17793 end: segmentInfo.startOfSegment + segmentInfo.duration
17794 };
17795 }
17796
17797 if (segmentInfo.isSyncRequest) {
17798 this.trigger('syncinfoupdate');
17799 this.pendingSegment_ = null;
17800 this.state = 'READY';
17801 return;
17802 }
17803
17804 segmentInfo.byteLength = segmentInfo.bytes.byteLength;
17805 this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to
17806 // the subtitle track
17807
17808 segmentInfo.cues.forEach(function (cue) {
17809 _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window__default['default'].VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
17810 }); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows
17811 // cues to have identical time-intervals, but if the text is also identical
17812 // we can safely assume it is a duplicate that can be removed (ex. when a cue
17813 // "overlaps" VTT segments)
17814
17815 removeDuplicateCuesFromTrack(this.subtitlesTrack_);
17816 this.handleAppendsDone_();
17817 };
17818
  _proto.handleData_ = function handleData_() {// noop, as we shouldn't be getting video/audio data or
  // captions, which are not supported here.
17821 };
17822
17823 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_() {// noop
17824 }
17825 /**
17826 * Uses the WebVTT parser to parse the segment response
17827 *
17828 * @param {Object} segmentInfo
17829 * a segment info object that describes the current segment
17830 * @private
17831 */
17832 ;
17833
17834 _proto.parseVTTCues_ = function parseVTTCues_(segmentInfo) {
17835 var decoder;
17836 var decodeBytesToString = false;
17837
17838 if (typeof window__default['default'].TextDecoder === 'function') {
17839 decoder = new window__default['default'].TextDecoder('utf8');
17840 } else {
17841 decoder = window__default['default'].WebVTT.StringDecoder();
17842 decodeBytesToString = true;
17843 }
17844
17845 var parser = new window__default['default'].WebVTT.Parser(window__default['default'], window__default['default'].vttjs, decoder);
17846 segmentInfo.cues = [];
17847 segmentInfo.timestampmap = {
17848 MPEGTS: 0,
17849 LOCAL: 0
17850 };
17851 parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
17852
17853 parser.ontimestampmap = function (map) {
17854 segmentInfo.timestampmap = map;
17855 };
17856
17857 parser.onparsingerror = function (error) {
17858 videojs__default['default'].log.warn('Error encountered when parsing cues: ' + error.message);
17859 };
17860
17861 if (segmentInfo.segment.map) {
17862 var mapData = segmentInfo.segment.map.bytes;
17863
17864 if (decodeBytesToString) {
17865 mapData = uint8ToUtf8(mapData);
17866 }
17867
17868 parser.parse(mapData);
17869 }
17870
17871 var segmentData = segmentInfo.bytes;
17872
17873 if (decodeBytesToString) {
17874 segmentData = uint8ToUtf8(segmentData);
17875 }
17876
17877 parser.parse(segmentData);
17878 parser.flush();
17879 }
17880 /**
17881 * Updates the start and end times of any cues parsed by the WebVTT parser using
17882 * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
17883 * from the SyncController
17884 *
17885 * @param {Object} segmentInfo
17886 * a segment info object that describes the current segment
17887 * @param {Object} mappingObj
17888 * object containing a mapping from TS to media time
17889 * @param {Object} playlist
17890 * the playlist object containing the segment
17891 * @private
17892 */
17893 ;
17894
17895 _proto.updateTimeMapping_ = function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
17896 var segment = segmentInfo.segment;
17897
17898 if (!mappingObj) {
17899 // If the sync controller does not have a mapping of TS to Media Time for the
17900 // timeline, then we don't have enough information to update the cue
17901 // start/end times
17902 return;
17903 }
17904
17905 if (!segmentInfo.cues.length) {
17906 // If there are no cues, we also do not have enough information to figure out
17907 // segment timing. Mark that the segment contains no cues so we don't re-request
17908 // an empty segment.
17909 segment.empty = true;
17910 return;
17911 }
17912
17913 var timestampmap = segmentInfo.timestampmap;
17914 var diff = timestampmap.MPEGTS / clock.ONE_SECOND_IN_TS - timestampmap.LOCAL + mappingObj.mapping;
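    // Worked example (illustrative numbers): with MPEGTS = 900000 (10 seconds
    // at the 90kHz clock), LOCAL = 0 and mappingObj.mapping = -10, diff is
    // 900000 / 90000 - 0 + (-10) = 0, so the cue times below are unchanged.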
17915 segmentInfo.cues.forEach(function (cue) {
17916 // First convert cue time to TS time using the timestamp-map provided within the vtt
17917 cue.startTime += diff;
17918 cue.endTime += diff;
17919 });
17920
17921 if (!playlist.syncInfo) {
17922 var firstStart = segmentInfo.cues[0].startTime;
17923 var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
17924 playlist.syncInfo = {
17925 mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
17926 time: Math.min(firstStart, lastStart - segment.duration)
17927 };
17928 }
17929 };
17930
17931 return VTTSegmentLoader;
17932}(SegmentLoader);
17933
17934/**
17935 * @file ad-cue-tags.js
17936 */
17937/**
17938 * Searches for an ad cue that overlaps with the given mediaTime
17939 *
17940 * @param {Object} track
17941 * the track to find the cue for
17942 *
17943 * @param {number} mediaTime
17944 * the time to find the cue at
17945 *
17946 * @return {Object|null}
17947 * the found cue or null
17948 */
17949
17950var findAdCue = function findAdCue(track, mediaTime) {
17951 var cues = track.cues;
17952
17953 for (var i = 0; i < cues.length; i++) {
17954 var cue = cues[i];
17955
17956 if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
17957 return cue;
17958 }
17959 }
17960
17961 return null;
17962};
17963var updateAdCues = function updateAdCues(media, track, offset) {
17964 if (offset === void 0) {
17965 offset = 0;
17966 }
17967
17968 if (!media.segments) {
17969 return;
17970 }
17971
17972 var mediaTime = offset;
17973 var cue;
17974
17975 for (var i = 0; i < media.segments.length; i++) {
17976 var segment = media.segments[i];
17977
17978 if (!cue) {
17979 // Since the cues will span for at least the segment duration, adding a fudge
17980 // factor of half segment duration will prevent duplicate cues from being
17981 // created when timing info is not exact (e.g. cue start time initialized
      // at 10.006677, but on the next call mediaTime is 10.003332)
17983 cue = findAdCue(track, mediaTime + segment.duration / 2);
17984 }
17985
17986 if (cue) {
17987 if ('cueIn' in segment) {
17988 // Found a CUE-IN so end the cue
17989 cue.endTime = mediaTime;
17990 cue.adEndTime = mediaTime;
17991 mediaTime += segment.duration;
17992 cue = null;
17993 continue;
17994 }
17995
17996 if (mediaTime < cue.endTime) {
17997 // Already processed this mediaTime for this cue
17998 mediaTime += segment.duration;
17999 continue;
18000 } // otherwise extend cue until a CUE-IN is found
18001
18002
18003 cue.endTime += segment.duration;
18004 } else {
18005 if ('cueOut' in segment) {
18006 cue = new window__default['default'].VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
18007 cue.adStartTime = mediaTime; // Assumes tag format to be
18008 // #EXT-X-CUE-OUT:30
18009
18010 cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
18011 track.addCue(cue);
18012 }
18013
18014 if ('cueOutCont' in segment) {
18015 // Entered into the middle of an ad cue
        // Assumes tag format to be
18017 // #EXT-X-CUE-OUT-CONT:10/30
18018 var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat),
18019 adOffset = _segment$cueOutCont$s[0],
18020 adTotal = _segment$cueOutCont$s[1];
18021
18022 cue = new window__default['default'].VTTCue(mediaTime, mediaTime + segment.duration, '');
18023 cue.adStartTime = mediaTime - adOffset;
18024 cue.adEndTime = cue.adStartTime + adTotal;
18025 track.addCue(cue);
18026 }
18027 }
18028
18029 mediaTime += segment.duration;
18030 }
18031};
18032
var syncPointStrategies = [// Strategy "VOD": Handle the VOD-case where the sync-point is *always*
18034// the equivalence display-time 0 === segment-index 0
18035{
18036 name: 'VOD',
18037 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
18038 if (duration !== Infinity) {
18039 var syncPoint = {
18040 time: 0,
18041 segmentIndex: 0
18042 };
18043 return syncPoint;
18044 }
18045
18046 return null;
18047 }
}, // Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
18049{
18050 name: 'ProgramDateTime',
18051 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
18052 if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
18053 return null;
18054 }
18055
18056 var segments = playlist.segments || [];
18057 var syncPoint = null;
18058 var lastDistance = null;
18059 currentTime = currentTime || 0;
18060
18061 for (var i = 0; i < segments.length; i++) {
18062 var segment = segments[i];
18063 var datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];
18064
18065 if (!datetimeMapping) {
18066 continue;
18067 }
18068
18069 if (segment.dateTimeObject) {
18070 var segmentTime = segment.dateTimeObject.getTime() / 1000;
18071 var segmentStart = segmentTime + datetimeMapping;
18072 var distance = Math.abs(currentTime - segmentStart); // Once the distance begins to increase, or if distance is 0, we have passed
18073 // currentTime and can stop looking for better candidates
18074
18075 if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
18076 break;
18077 }
18078
18079 lastDistance = distance;
18080 syncPoint = {
18081 time: segmentStart,
18082 segmentIndex: i
18083 };
18084 }
18085 }
18086
18087 return syncPoint;
18088 }
}, // Strategy "Segment": We have a known time mapping for a timeline and a
18090// segment in the current timeline with timing data
18091{
18092 name: 'Segment',
18093 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
18094 var segments = playlist.segments || [];
18095 var syncPoint = null;
18096 var lastDistance = null;
18097 currentTime = currentTime || 0;
18098
18099 for (var i = 0; i < segments.length; i++) {
18100 var segment = segments[i];
18101
18102 if (segment.timeline === currentTimeline && typeof segment.start !== 'undefined') {
18103 var distance = Math.abs(currentTime - segment.start); // Once the distance begins to increase, we have passed
18104 // currentTime and can stop looking for better candidates
18105
18106 if (lastDistance !== null && lastDistance < distance) {
18107 break;
18108 }
18109
18110 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
18111 lastDistance = distance;
18112 syncPoint = {
18113 time: segment.start,
18114 segmentIndex: i
18115 };
18116 }
18117 }
18118 }
18119
18120 return syncPoint;
18121 }
18122}, // Strategy "Discontinuity": We have a discontinuity with a known
18123// display-time
18124{
18125 name: 'Discontinuity',
18126 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
18127 var syncPoint = null;
18128 currentTime = currentTime || 0;
18129
18130 if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
18131 var lastDistance = null;
18132
18133 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
18134 var segmentIndex = playlist.discontinuityStarts[i];
18135 var discontinuity = playlist.discontinuitySequence + i + 1;
18136 var discontinuitySync = syncController.discontinuities[discontinuity];
18137
18138 if (discontinuitySync) {
18139 var distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed
18140 // currentTime and can stop looking for better candidates
18141
18142 if (lastDistance !== null && lastDistance < distance) {
18143 break;
18144 }
18145
18146 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
18147 lastDistance = distance;
18148 syncPoint = {
18149 time: discontinuitySync.time,
18150 segmentIndex: segmentIndex
18151 };
18152 }
18153 }
18154 }
18155 }
18156
18157 return syncPoint;
18158 }
18159}, // Strategy "Playlist": We have a playlist with a known mapping of
18160// segment index to display time
18161{
18162 name: 'Playlist',
18163 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
18164 if (playlist.syncInfo) {
18165 var syncPoint = {
18166 time: playlist.syncInfo.time,
18167 segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence
18168 };
18169 return syncPoint;
18170 }
18171
18172 return null;
18173 }
18174}];
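
// Illustrative sketch (not a library API): each strategy's run() returns a
// { time, segmentIndex } sync-point or null. For example, the "VOD" strategy
// above resolves immediately for any finite duration:
//
//   syncPointStrategies[0].run(syncController, playlist, 3600, 0, 0);
//   // => { time: 0, segmentIndex: 0 }
//   syncPointStrategies[0].run(syncController, playlist, Infinity, 0, 0);
//   // => null (live stream; later strategies must be consulted)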
18175
18176var SyncController = /*#__PURE__*/function (_videojs$EventTarget) {
18177 _inheritsLoose__default['default'](SyncController, _videojs$EventTarget);
18178
18179 function SyncController(options) {
18180 var _this;
18181
18182    _this = _videojs$EventTarget.call(this) || this; // ...for syncing across variants
18183
18184 _this.timelines = [];
18185 _this.discontinuities = [];
18186 _this.timelineToDatetimeMappings = {};
18187 _this.logger_ = logger('SyncController');
18188 return _this;
18189 }
18190 /**
18191 * Find a sync-point for the playlist specified
18192 *
18193 * A sync-point is defined as a known mapping from display-time to
18194 * a segment-index in the current playlist.
18195 *
18196 * @param {Playlist} playlist
18197 * The playlist that needs a sync-point
18198 * @param {number} duration
18199   * Duration of the MediaSource (Infinity if playing a live source)
18200 * @param {number} currentTimeline
18201 * The last timeline from which a segment was loaded
18202 * @return {Object}
18203 * A sync-point object
18204 */
18205
18206
18207 var _proto = SyncController.prototype;
18208
18209 _proto.getSyncPoint = function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
18210 var syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime);
18211
18212 if (!syncPoints.length) {
18213 // Signal that we need to attempt to get a sync-point manually
18214 // by fetching a segment in the playlist and constructing
18215 // a sync-point from that information
18216 return null;
18217 } // Now find the sync-point that is closest to the currentTime because
18218 // that should result in the most accurate guess about which segment
18219 // to fetch
18220
18221
18222 return this.selectSyncPoint_(syncPoints, {
18223 key: 'time',
18224 value: currentTime
18225 });
18226 }
18227 /**
18228 * Calculate the amount of time that has expired off the playlist during playback
18229 *
18230 * @param {Playlist} playlist
18231 * Playlist object to calculate expired from
18232 * @param {number} duration
18233   * Duration of the MediaSource (Infinity if playing a live source)
18234 * @return {number|null}
18235 * The amount of time that has expired off the playlist during playback. Null
18236 * if no sync-points for the playlist can be found.
18237 */
18238 ;
18239
18240 _proto.getExpiredTime = function getExpiredTime(playlist, duration) {
18241 if (!playlist || !playlist.segments) {
18242 return null;
18243 }
18244
18245 var syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0); // Without sync-points, there is not enough information to determine the expired time
18246
18247 if (!syncPoints.length) {
18248 return null;
18249 }
18250
18251 var syncPoint = this.selectSyncPoint_(syncPoints, {
18252 key: 'segmentIndex',
18253 value: 0
18254 }); // If the sync-point is beyond the start of the playlist, we want to subtract the
18255 // duration from index 0 to syncPoint.segmentIndex instead of adding.
18256
18257 if (syncPoint.segmentIndex > 0) {
18258 syncPoint.time *= -1;
18259 }
18260
18261 return Math.abs(syncPoint.time + sumDurations(playlist, syncPoint.segmentIndex, 0));
18262 }
18263 /**
18264 * Runs each sync-point strategy and returns a list of sync-points returned by the
18265 * strategies
18266 *
18267 * @private
18268 * @param {Playlist} playlist
18269 * The playlist that needs a sync-point
18270 * @param {number} duration
18271 * Duration of the MediaSource (Infinity if playing a live source)
18272 * @param {number} currentTimeline
18273 * The last timeline from which a segment was loaded
18274 * @return {Array}
18275 * A list of sync-point objects
18276 */
18277 ;
18278
18279 _proto.runStrategies_ = function runStrategies_(playlist, duration, currentTimeline, currentTime) {
18280    var syncPoints = []; // Try to find a sync-point by utilizing various strategies...
18281
18282 for (var i = 0; i < syncPointStrategies.length; i++) {
18283 var strategy = syncPointStrategies[i];
18284 var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);
18285
18286 if (syncPoint) {
18287 syncPoint.strategy = strategy.name;
18288 syncPoints.push({
18289 strategy: strategy.name,
18290 syncPoint: syncPoint
18291 });
18292 }
18293 }
18294
18295 return syncPoints;
18296 }
18297 /**
18298 * Selects the sync-point nearest the specified target
18299 *
18300 * @private
18301 * @param {Array} syncPoints
18302 * List of sync-points to select from
18303 * @param {Object} target
18304 * Object specifying the property and value we are targeting
18305 * @param {string} target.key
18306 * Specifies the property to target. Must be either 'time' or 'segmentIndex'
18307 * @param {number} target.value
18308 * The value to target for the specified key.
18309 * @return {Object}
18310 * The sync-point nearest the target
18311 */
18312 ;
18313
18314 _proto.selectSyncPoint_ = function selectSyncPoint_(syncPoints, target) {
18315 var bestSyncPoint = syncPoints[0].syncPoint;
18316 var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
18317 var bestStrategy = syncPoints[0].strategy;
18318
18319 for (var i = 1; i < syncPoints.length; i++) {
18320 var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
18321
18322 if (newDistance < bestDistance) {
18323 bestDistance = newDistance;
18324 bestSyncPoint = syncPoints[i].syncPoint;
18325 bestStrategy = syncPoints[i].strategy;
18326 }
18327 }
18328
18329 this.logger_("syncPoint for [" + target.key + ": " + target.value + "] chosen with strategy" + (" [" + bestStrategy + "]: [time:" + bestSyncPoint.time + ",") + (" segmentIndex:" + bestSyncPoint.segmentIndex + "]"));
18330 return bestSyncPoint;
18331 }
18332 /**
18333 * Save any meta-data present on the segments when segments leave
18334 * the live window to the playlist to allow for synchronization at the
18335 * playlist level later.
18336 *
18337 * @param {Playlist} oldPlaylist - The previous active playlist
18338 * @param {Playlist} newPlaylist - The updated and most current playlist
18339 */
18340 ;
18341
18342 _proto.saveExpiredSegmentInfo = function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
18343 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence; // When a segment expires from the playlist and it has a start time
18344    // save that information as a possible sync-point reference in the future
18345
18346 for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
18347 var lastRemovedSegment = oldPlaylist.segments[i];
18348
18349 if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
18350 newPlaylist.syncInfo = {
18351 mediaSequence: oldPlaylist.mediaSequence + i,
18352 time: lastRemovedSegment.start
18353 };
18354 this.logger_("playlist refresh sync: [time:" + newPlaylist.syncInfo.time + "," + (" mediaSequence: " + newPlaylist.syncInfo.mediaSequence + "]"));
18355 this.trigger('syncinfoupdate');
18356 break;
18357 }
18358 }
18359 }
18360 /**
18361   * Save the mapping from the playlist's ProgramDateTime to display time. This should only happen
18362 * before segments start to load.
18363 *
18364 * @param {Playlist} playlist - The currently active playlist
18365 */
18366 ;
18367
18368 _proto.setDateTimeMappingForStart = function setDateTimeMappingForStart(playlist) {
18369 // It's possible for the playlist to be updated before playback starts, meaning time
18370 // zero is not yet set. If, during these playlist refreshes, a discontinuity is
18371 // crossed, then the old time zero mapping (for the prior timeline) would be retained
18372 // unless the mappings are cleared.
18373 this.timelineToDatetimeMappings = {};
18374
18375 if (playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
18376 var firstSegment = playlist.segments[0];
18377 var playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
18378 this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
18379 }
18380 }
18381 /**
18382 * Calculates and saves timeline mappings, playlist sync info, and segment timing values
18383 * based on the latest timing information.
18384 *
18385 * @param {Object} options
18386 * Options object
18387 * @param {SegmentInfo} options.segmentInfo
18388 * The current active request information
18389 * @param {boolean} options.shouldSaveTimelineMapping
18390 * If there's a timeline change, determines if the timeline mapping should be
18391 * saved for timeline mapping and program date time mappings.
18392 */
18393 ;
18394
18395 _proto.saveSegmentTimingInfo = function saveSegmentTimingInfo(_ref) {
18396 var segmentInfo = _ref.segmentInfo,
18397 shouldSaveTimelineMapping = _ref.shouldSaveTimelineMapping;
18398 var didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);
18399 var segment = segmentInfo.segment;
18400
18401 if (didCalculateSegmentTimeMapping) {
18402 this.saveDiscontinuitySyncInfo_(segmentInfo); // If the playlist does not have sync information yet, record that information
18403 // now with segment timing information
18404
18405 if (!segmentInfo.playlist.syncInfo) {
18406 segmentInfo.playlist.syncInfo = {
18407 mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
18408 time: segment.start
18409 };
18410 }
18411 }
18412
18413 var dateTime = segment.dateTimeObject;
18414
18415 if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
18416 this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
18417 }
18418 };
18419
18420 _proto.timestampOffsetForTimeline = function timestampOffsetForTimeline(timeline) {
18421 if (typeof this.timelines[timeline] === 'undefined') {
18422 return null;
18423 }
18424
18425 return this.timelines[timeline].time;
18426 };
18427
18428 _proto.mappingForTimeline = function mappingForTimeline(timeline) {
18429 if (typeof this.timelines[timeline] === 'undefined') {
18430 return null;
18431 }
18432
18433 return this.timelines[timeline].mapping;
18434 }
18435 /**
18436 * Use the "media time" for a segment to generate a mapping to "display time" and
18437 * save that display time to the segment.
18438 *
18439 * @private
18440 * @param {SegmentInfo} segmentInfo
18441 * The current active request information
18442 * @param {Object} timingInfo
18443 * The start and end time of the current segment in "media time"
18444 * @param {boolean} shouldSaveTimelineMapping
18445 * If there's a timeline change, determines if the timeline mapping should be
18446 * saved in timelines.
18447 * @return {boolean}
18448 * Returns false if segment time mapping could not be calculated
18449 */
18450 ;
18451
18452 _proto.calculateSegmentTimeMapping_ = function calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
18453 var segment = segmentInfo.segment;
18454 var mappingObj = this.timelines[segmentInfo.timeline];
18455
18456 if (typeof segmentInfo.timestampOffset === 'number') {
18457 mappingObj = {
18458 time: segmentInfo.startOfSegment,
18459 mapping: segmentInfo.startOfSegment - timingInfo.start
18460 };
18461
18462 if (shouldSaveTimelineMapping) {
18463 this.timelines[segmentInfo.timeline] = mappingObj;
18464 this.trigger('timestampoffset');
18465 this.logger_("time mapping for timeline " + segmentInfo.timeline + ": " + ("[time: " + mappingObj.time + "] [mapping: " + mappingObj.mapping + "]"));
18466 }
18467
18468 segment.start = segmentInfo.startOfSegment;
18469 segment.end = timingInfo.end + mappingObj.mapping;
18470 } else if (mappingObj) {
18471 segment.start = timingInfo.start + mappingObj.mapping;
18472 segment.end = timingInfo.end + mappingObj.mapping;
18473 } else {
18474 return false;
18475 }
18476
18477 return true;
18478 }
18479 /**
18480   * Each time we have a discontinuity in the playlist, attempt to calculate the location
18481   * in display time of the start of the discontinuity and save that. We also save an accuracy
18482   * value so that we save values with the most accuracy (closest to 0).
18483 *
18484 * @private
18485 * @param {SegmentInfo} segmentInfo - The current active request information
18486 */
18487 ;
18488
18489 _proto.saveDiscontinuitySyncInfo_ = function saveDiscontinuitySyncInfo_(segmentInfo) {
18490 var playlist = segmentInfo.playlist;
18491    var segment = segmentInfo.segment; // If the current segment is a discontinuity then we know exactly where
18492    // the range starts and its accuracy is 0 (greater accuracy values
18493    // mean more approximation)
18494
18495 if (segment.discontinuity) {
18496 this.discontinuities[segment.timeline] = {
18497 time: segment.start,
18498 accuracy: 0
18499 };
18500 } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
18501 // Search for future discontinuities that we can provide better timing
18502 // information for and save that information for sync purposes
18503 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
18504 var segmentIndex = playlist.discontinuityStarts[i];
18505 var discontinuity = playlist.discontinuitySequence + i + 1;
18506 var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
18507 var accuracy = Math.abs(mediaIndexDiff);
18508
18509 if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
18510 var time = void 0;
18511
18512 if (mediaIndexDiff < 0) {
18513 time = segment.start - sumDurations(playlist, segmentInfo.mediaIndex, segmentIndex);
18514 } else {
18515 time = segment.end + sumDurations(playlist, segmentInfo.mediaIndex + 1, segmentIndex);
18516 }
18517
18518 this.discontinuities[discontinuity] = {
18519 time: time,
18520 accuracy: accuracy
18521 };
18522 }
18523 }
18524 }
18525 };
18526
18527 _proto.dispose = function dispose() {
18528 this.trigger('dispose');
18529 this.off();
18530 };
18531
18532 return SyncController;
18533}(videojs__default['default'].EventTarget);
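
// Minimal usage sketch (hypothetical values, illustration only):
//
//   var syncController = new SyncController();
//   // after timing info has been recorded via saveSegmentTimingInfo(), ask
//   // for the sync-point nearest the playhead:
//   var syncPoint = syncController.getSyncPoint(playlist, Infinity, 0, currentTime);
//   // => e.g. { time: 123.4, segmentIndex: 2, strategy: 'Segment' } or null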
18534
18535/**
18536 * The TimelineChangeController acts as a source for segment loaders to listen for and
18537 * keep track of latest and pending timeline changes. This is useful to ensure proper
18538 * sync, as each loader may need to make a consideration for what timeline the other
18539 * loader is on before making changes which could impact the other loader's media.
18540 *
18541 * @class TimelineChangeController
18542 * @extends videojs.EventTarget
18543 */
18544
18545var TimelineChangeController = /*#__PURE__*/function (_videojs$EventTarget) {
18546 _inheritsLoose__default['default'](TimelineChangeController, _videojs$EventTarget);
18547
18548 function TimelineChangeController() {
18549 var _this;
18550
18551 _this = _videojs$EventTarget.call(this) || this;
18552 _this.pendingTimelineChanges_ = {};
18553 _this.lastTimelineChanges_ = {};
18554 return _this;
18555 }
18556
18557 var _proto = TimelineChangeController.prototype;
18558
18559 _proto.clearPendingTimelineChange = function clearPendingTimelineChange(type) {
18560 this.pendingTimelineChanges_[type] = null;
18561 this.trigger('pendingtimelinechange');
18562 };
18563
18564 _proto.pendingTimelineChange = function pendingTimelineChange(_ref) {
18565 var type = _ref.type,
18566 from = _ref.from,
18567 to = _ref.to;
18568
18569 if (typeof from === 'number' && typeof to === 'number') {
18570 this.pendingTimelineChanges_[type] = {
18571 type: type,
18572 from: from,
18573 to: to
18574 };
18575 this.trigger('pendingtimelinechange');
18576 }
18577
18578 return this.pendingTimelineChanges_[type];
18579 };
18580
18581 _proto.lastTimelineChange = function lastTimelineChange(_ref2) {
18582 var type = _ref2.type,
18583 from = _ref2.from,
18584 to = _ref2.to;
18585
18586 if (typeof from === 'number' && typeof to === 'number') {
18587 this.lastTimelineChanges_[type] = {
18588 type: type,
18589 from: from,
18590 to: to
18591 };
18592 delete this.pendingTimelineChanges_[type];
18593 this.trigger('timelinechange');
18594 }
18595
18596 return this.lastTimelineChanges_[type];
18597 };
18598
18599 _proto.dispose = function dispose() {
18600 this.trigger('dispose');
18601 this.pendingTimelineChanges_ = {};
18602 this.lastTimelineChanges_ = {};
18603 this.off();
18604 };
18605
18606 return TimelineChangeController;
18607}(videojs__default['default'].EventTarget);
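
// Minimal usage sketch (illustration only): a loader records a pending change
// before appending across a timeline boundary, then promotes it once the
// append completes:
//
//   var timelineChangeController = new TimelineChangeController();
//   timelineChangeController.pendingTimelineChange({ type: 'main', from: 0, to: 1 });
//   // ...after the append on the new timeline finishes:
//   timelineChangeController.lastTimelineChange({ type: 'main', from: 0, to: 1 });
//   // fires 'timelinechange' and clears the pending entry for 'main'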
18608
18609/* rollup-plugin-worker-factory start for worker!/Users/gkatsevman/p/http-streaming-release/src/decrypter-worker.js */
18610var workerCode$1 = transform(getWorkerString(function () {
18611
18612 function _defineProperties(target, props) {
18613 for (var i = 0; i < props.length; i++) {
18614 var descriptor = props[i];
18615 descriptor.enumerable = descriptor.enumerable || false;
18616 descriptor.configurable = true;
18617 if ("value" in descriptor) descriptor.writable = true;
18618 Object.defineProperty(target, descriptor.key, descriptor);
18619 }
18620 }
18621
18622 function _createClass(Constructor, protoProps, staticProps) {
18623 if (protoProps) _defineProperties(Constructor.prototype, protoProps);
18624 if (staticProps) _defineProperties(Constructor, staticProps);
18625 return Constructor;
18626 }
18627
18628 var createClass = _createClass;
18629
18630 function _inheritsLoose(subClass, superClass) {
18631 subClass.prototype = Object.create(superClass.prototype);
18632 subClass.prototype.constructor = subClass;
18633 subClass.__proto__ = superClass;
18634 }
18635
18636 var inheritsLoose = _inheritsLoose;
18637 /**
18638 * @file stream.js
18639 */
18640
18641 /**
18642   * A lightweight readable stream implementation that handles event dispatching.
18643 *
18644 * @class Stream
18645 */
18646
18647 var Stream = /*#__PURE__*/function () {
18648 function Stream() {
18649 this.listeners = {};
18650 }
18651 /**
18652 * Add a listener for a specified event type.
18653 *
18654 * @param {string} type the event name
18655 * @param {Function} listener the callback to be invoked when an event of
18656 * the specified type occurs
18657 */
18658
18659
18660 var _proto = Stream.prototype;
18661
18662 _proto.on = function on(type, listener) {
18663 if (!this.listeners[type]) {
18664 this.listeners[type] = [];
18665 }
18666
18667 this.listeners[type].push(listener);
18668 }
18669 /**
18670 * Remove a listener for a specified event type.
18671 *
18672 * @param {string} type the event name
18673 * @param {Function} listener a function previously registered for this
18674 * type of event through `on`
18675 * @return {boolean} if we could turn it off or not
18676 */
18677 ;
18678
18679 _proto.off = function off(type, listener) {
18680 if (!this.listeners[type]) {
18681 return false;
18682 }
18683
18684 var index = this.listeners[type].indexOf(listener); // TODO: which is better?
18685 // In Video.js we slice listener functions
18686 // on trigger so that it does not mess up the order
18687 // while we loop through.
18688 //
18689 // Here we slice on off so that the loop in trigger
18690      // can continue using its old reference to loop without
18691 // messing up the order.
18692
18693 this.listeners[type] = this.listeners[type].slice(0);
18694 this.listeners[type].splice(index, 1);
18695 return index > -1;
18696 }
18697 /**
18698 * Trigger an event of the specified type on this stream. Any additional
18699 * arguments to this function are passed as parameters to event listeners.
18700 *
18701 * @param {string} type the event name
18702 */
18703 ;
18704
18705 _proto.trigger = function trigger(type) {
18706 var callbacks = this.listeners[type];
18707
18708 if (!callbacks) {
18709 return;
18710 } // Slicing the arguments on every invocation of this method
18711 // can add a significant amount of overhead. Avoid the
18712 // intermediate object creation for the common case of a
18713 // single callback argument
18714
18715
18716 if (arguments.length === 2) {
18717 var length = callbacks.length;
18718
18719 for (var i = 0; i < length; ++i) {
18720 callbacks[i].call(this, arguments[1]);
18721 }
18722 } else {
18723 var args = Array.prototype.slice.call(arguments, 1);
18724 var _length = callbacks.length;
18725
18726 for (var _i = 0; _i < _length; ++_i) {
18727 callbacks[_i].apply(this, args);
18728 }
18729 }
18730 }
18731 /**
18732 * Destroys the stream and cleans up.
18733 */
18734 ;
18735
18736 _proto.dispose = function dispose() {
18737 this.listeners = {};
18738 }
18739 /**
18740 * Forwards all `data` events on this stream to the destination stream. The
18741 * destination stream should provide a method `push` to receive the data
18742 * events as they arrive.
18743 *
18744 * @param {Stream} destination the stream that will receive all `data` events
18745 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
18746 */
18747 ;
18748
18749 _proto.pipe = function pipe(destination) {
18750 this.on('data', function (data) {
18751 destination.push(data);
18752 });
18753 };
18754
18755 return Stream;
18756 }();
18757 /*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
18758
18759 /**
18760 * Returns the subarray of a Uint8Array without PKCS#7 padding.
18761 *
18762 * @param padded {Uint8Array} unencrypted bytes that have been padded
18763 * @return {Uint8Array} the unpadded bytes
18764 * @see http://tools.ietf.org/html/rfc5652
18765 */
18766
18767
18768 function unpad(padded) {
18769 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
18770 }
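  // Worked example (illustrative): PKCS#7 stores the pad length in every pad
  // byte, so the final byte says how many bytes to drop. A block ending in
  // ...0x03 0x03 0x03 carries 3 bytes of padding, and unpad() returns
  // padded.subarray(0, padded.byteLength - 3).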
18771 /*! @name aes-decrypter @version 3.1.2 @license Apache-2.0 */
18772
18773 /**
18774 * @file aes.js
18775 *
18776 * This file contains an adaptation of the AES decryption algorithm
18777   * from the Stanford Javascript Cryptography Library. That work is
18778 * covered by the following copyright and permissions notice:
18779 *
18780 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
18781 * All rights reserved.
18782 *
18783 * Redistribution and use in source and binary forms, with or without
18784 * modification, are permitted provided that the following conditions are
18785 * met:
18786 *
18787 * 1. Redistributions of source code must retain the above copyright
18788 * notice, this list of conditions and the following disclaimer.
18789 *
18790 * 2. Redistributions in binary form must reproduce the above
18791 * copyright notice, this list of conditions and the following
18792 * disclaimer in the documentation and/or other materials provided
18793 * with the distribution.
18794 *
18795 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
18796 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18797 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
18798 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
18799 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
18800 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
18801 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
18802 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
18803 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
18804 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
18805 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
18806 *
18807 * The views and conclusions contained in the software and documentation
18808 * are those of the authors and should not be interpreted as representing
18809 * official policies, either expressed or implied, of the authors.
18810 */
18811
18812 /**
18813 * Expand the S-box tables.
18814 *
18815 * @private
18816 */
18817
18818
18819 var precompute = function precompute() {
18820 var tables = [[[], [], [], [], []], [[], [], [], [], []]];
18821 var encTable = tables[0];
18822 var decTable = tables[1];
18823 var sbox = encTable[4];
18824 var sboxInv = decTable[4];
18825 var i;
18826 var x;
18827 var xInv;
18828 var d = [];
18829 var th = [];
18830 var x2;
18831 var x4;
18832 var x8;
18833 var s;
18834 var tEnc;
18835 var tDec; // Compute double and third tables
18836
18837 for (i = 0; i < 256; i++) {
18838 th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
18839 }
18840
18841 for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
18842 // Compute sbox
18843 s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
18844 s = s >> 8 ^ s & 255 ^ 99;
18845 sbox[x] = s;
18846 sboxInv[s] = x; // Compute MixColumns
18847
18848 x8 = d[x4 = d[x2 = d[x]]];
18849 tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
18850 tEnc = d[s] * 0x101 ^ s * 0x1010100;
18851
18852 for (i = 0; i < 4; i++) {
18853 encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
18854 decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
18855 }
18856 } // Compactify. Considerable speedup on Firefox.
18857
18858
18859 for (i = 0; i < 5; i++) {
18860 encTable[i] = encTable[i].slice(0);
18861 decTable[i] = decTable[i].slice(0);
18862 }
18863
18864 return tables;
18865 };
18866
18867 var aesTables = null;
18868 /**
18869 * Schedule out an AES key for both encryption and decryption. This
18870 * is a low-level class. Use a cipher mode to do bulk encryption.
18871 *
18872 * @class AES
18873 * @param key {Array} The key as an array of 4, 6 or 8 words.
18874 */
18875
18876 var AES = /*#__PURE__*/function () {
18877 function AES(key) {
18878 /**
18879 * The expanded S-box and inverse S-box tables. These will be computed
18880 * on the client so that we don't have to send them down the wire.
18881 *
18882 * There are two tables, _tables[0] is for encryption and
18883 * _tables[1] is for decryption.
18884 *
18885 * The first 4 sub-tables are the expanded S-box with MixColumns. The
18886 * last (_tables[01][4]) is the S-box itself.
18887       * last (_tables[0][4] and _tables[1][4]) is the S-box itself.
18888 * @private
18889 */
18890 // if we have yet to precompute the S-box tables
18891 // do so now
18892 if (!aesTables) {
18893 aesTables = precompute();
18894 } // then make a copy of that object for use
18895
18896
18897 this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
18898 var i;
18899 var j;
18900 var tmp;
18901 var sbox = this._tables[0][4];
18902 var decTable = this._tables[1];
18903 var keyLen = key.length;
18904 var rcon = 1;
18905
18906 if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
18907 throw new Error('Invalid aes key size');
18908 }
18909
18910 var encKey = key.slice(0);
18911 var decKey = [];
18912 this._key = [encKey, decKey]; // schedule encryption keys
18913
18914 for (i = keyLen; i < 4 * keyLen + 28; i++) {
18915 tmp = encKey[i - 1]; // apply sbox
18916
18917 if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
18918 tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon
18919
18920 if (i % keyLen === 0) {
18921 tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
18922 rcon = rcon << 1 ^ (rcon >> 7) * 283;
18923 }
18924 }
18925
18926 encKey[i] = encKey[i - keyLen] ^ tmp;
18927 } // schedule decryption keys
18928
18929
18930 for (j = 0; i; j++, i--) {
18931 tmp = encKey[j & 3 ? i : i - 4];
18932
18933 if (i <= 4 || j < 4) {
18934 decKey[j] = tmp;
18935 } else {
18936 decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
18937 }
18938 }
18939 }
18940 /**
18941 * Decrypt 16 bytes, specified as four 32-bit words.
18942 *
18943 * @param {number} encrypted0 the first word to decrypt
18944 * @param {number} encrypted1 the second word to decrypt
18945 * @param {number} encrypted2 the third word to decrypt
18946 * @param {number} encrypted3 the fourth word to decrypt
18947 * @param {Int32Array} out the array to write the decrypted words
18948 * into
18949 * @param {number} offset the offset into the output array to start
18950 * writing results
18951 * @return {Array} The plaintext.
18952 */
18953
18954
18955 var _proto = AES.prototype;
18956
18957 _proto.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
18958 var key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data
18959
18960 var a = encrypted0 ^ key[0];
18961 var b = encrypted3 ^ key[1];
18962 var c = encrypted2 ^ key[2];
18963 var d = encrypted1 ^ key[3];
18964 var a2;
18965 var b2;
18966 var c2; // key.length === 2 ?
18967
18968 var nInnerRounds = key.length / 4 - 2;
18969 var i;
18970 var kIndex = 4;
18971 var table = this._tables[1]; // load up the tables
18972
18973 var table0 = table[0];
18974 var table1 = table[1];
18975 var table2 = table[2];
18976 var table3 = table[3];
18977 var sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.
18978
18979 for (i = 0; i < nInnerRounds; i++) {
18980 a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
18981 b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
18982 c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
18983 d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
18984 kIndex += 4;
18985 a = a2;
18986 b = b2;
18987 c = c2;
18988 } // Last round.
18989
18990
18991 for (i = 0; i < 4; i++) {
18992 out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
18993 a2 = a;
18994 a = b;
18995 b = c;
18996 c = d;
18997 d = a2;
18998 }
18999 };
19000
19001 return AES;
19002 }();
19003 /**
19004 * A wrapper around the Stream class to use setTimeout
19005   * and run stream "jobs" asynchronously
19006 *
19007 * @class AsyncStream
19008 * @extends Stream
19009 */
19010
19011
19012 var AsyncStream = /*#__PURE__*/function (_Stream) {
19013 inheritsLoose(AsyncStream, _Stream);
19014
19015 function AsyncStream() {
19016 var _this;
19017
19018 _this = _Stream.call(this, Stream) || this;
19019 _this.jobs = [];
19020 _this.delay = 1;
19021 _this.timeout_ = null;
19022 return _this;
19023 }
19024 /**
19025 * process an async job
19026 *
19027 * @private
19028 */
19029
19030
19031 var _proto = AsyncStream.prototype;
19032
19033 _proto.processJob_ = function processJob_() {
19034 this.jobs.shift()();
19035
19036 if (this.jobs.length) {
19037 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
19038 } else {
19039 this.timeout_ = null;
19040 }
19041 }
19042 /**
19043 * push a job into the stream
19044 *
19045 * @param {Function} job the job to push into the stream
19046 */
19047 ;
19048
19049 _proto.push = function push(job) {
19050 this.jobs.push(job);
19051
19052 if (!this.timeout_) {
19053 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
19054 }
19055 };
19056
19057 return AsyncStream;
19058 }(Stream);
19059 /**
19060 * Convert network-order (big-endian) bytes into their little-endian
19061 * representation.
19062 */
19063
19064
19065 var ntoh = function ntoh(word) {
19066 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
19067 };
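  // Worked example (illustrative): ntoh swaps the four bytes of a 32-bit word,
  // e.g. ntoh(0x12345678) === 0x78563412, converting the big-endian (network
  // order) words read from a segment into little-endian (javascript) order.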
19068 /**
19069 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
19070 *
19071 * @param {Uint8Array} encrypted the encrypted bytes
19072 * @param {Uint32Array} key the bytes of the decryption key
19073 * @param {Uint32Array} initVector the initialization vector (IV) to
19074 * use for the first round of CBC.
19075 * @return {Uint8Array} the decrypted bytes
19076 *
19077 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
19078 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
19079 * @see https://tools.ietf.org/html/rfc2315
19080 */
19081
19082
19083 var decrypt = function decrypt(encrypted, key, initVector) {
19084 // word-level access to the encrypted bytes
19085 var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
19086 var decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output
19087
19088 var decrypted = new Uint8Array(encrypted.byteLength);
19089 var decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
19090 // decrypted data
19091
19092 var init0;
19093 var init1;
19094 var init2;
19095 var init3;
19096 var encrypted0;
19097 var encrypted1;
19098 var encrypted2;
19099 var encrypted3; // iteration variable
19100
19101 var wordIx; // pull out the words of the IV to ensure we don't modify the
19102    // passed-in reference and for easier access
19103
19104 init0 = initVector[0];
19105 init1 = initVector[1];
19106 init2 = initVector[2];
19107 init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
19108 // to each decrypted block
19109
19110 for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
19111 // convert big-endian (network order) words into little-endian
19112 // (javascript order)
19113 encrypted0 = ntoh(encrypted32[wordIx]);
19114 encrypted1 = ntoh(encrypted32[wordIx + 1]);
19115 encrypted2 = ntoh(encrypted32[wordIx + 2]);
19116 encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block
19117
19118 decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
19119 // plaintext
19120
19121 decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
19122 decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
19123 decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
19124 decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round
19125
19126 init0 = encrypted0;
19127 init1 = encrypted1;
19128 init2 = encrypted2;
19129 init3 = encrypted3;
19130 }
19131
19132 return decrypted;
19133 };
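  // Usage sketch (illustrative, hypothetical variable names):
  //
  //   var plaintext = decrypt(encryptedBytes, keyWords, ivWords);
  //   // plaintext still carries its PKCS#7 padding; the Decrypter below
  //   // strips it with unpad() once every chunk has been processed.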
19134 /**
19135 * The `Decrypter` class that manages decryption of AES
19136 * data through `AsyncStream` objects and the `decrypt`
19137 * function
19138 *
19139 * @param {Uint8Array} encrypted the encrypted bytes
19140 * @param {Uint32Array} key the bytes of the decryption key
19141   * @param {Uint32Array} initVector the initialization vector (IV) to use
19142 * @param {Function} done the function to run when done
19143 * @class Decrypter
19144 */
19145
19146
19147 var Decrypter = /*#__PURE__*/function () {
19148 function Decrypter(encrypted, key, initVector, done) {
19149 var step = Decrypter.STEP;
19150 var encrypted32 = new Int32Array(encrypted.buffer);
19151 var decrypted = new Uint8Array(encrypted.byteLength);
19152 var i = 0;
19153 this.asyncStream_ = new AsyncStream(); // split up the encryption job and do the individual chunks asynchronously
19154
19155 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
19156
19157 for (i = step; i < encrypted32.length; i += step) {
19158 initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
19159 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
19160 } // invoke the done() callback when everything is finished
19161
19162
19163 this.asyncStream_.push(function () {
19164 // remove pkcs#7 padding from the decrypted bytes
19165 done(null, unpad(decrypted));
19166 });
19167 }
19168 /**
19169   * a getter for step, the maximum number of 32-bit words to process at one time
19170   *
19171   * @return {number} the value of step, 32000
19172 */
19173
19174
19175 var _proto = Decrypter.prototype;
19176 /**
19177 * @private
19178 */
19179
19180 _proto.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
19181 return function () {
19182 var bytes = decrypt(encrypted, key, initVector);
19183 decrypted.set(bytes, encrypted.byteOffset);
19184 };
19185 };
19186
19187 createClass(Decrypter, null, [{
19188 key: "STEP",
19189 get: function get() {
19190 // 4 * 8000;
19191 return 32000;
19192 }
19193 }]);
19194 return Decrypter;
19195 }();
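  // Usage sketch (illustrative, hypothetical variable names): the Decrypter
  // queues fixed-size chunks (Decrypter.STEP) on an AsyncStream so large
  // segments don't block the event loop:
  //
  //   new Decrypter(encryptedBytes, keyWords, ivWords, function (err, bytes) {
  //     // bytes is the unpadded plaintext; err is always null here
  //   });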
19196 /**
19197 * @file bin-utils.js
19198 */
19199
19200 /**
19201 * Creates an object for sending to a web worker modifying properties that are TypedArrays
19202   * into a new object with separate properties for the buffer, byteOffset, and byteLength.
19203 *
19204 * @param {Object} message
19205 * Object of properties and values to send to the web worker
19206 * @return {Object}
19207 * Modified message with TypedArray values expanded
19208 * @function createTransferableMessage
19209 */
19210
19211
19212 var createTransferableMessage = function createTransferableMessage(message) {
19213 var transferable = {};
19214 Object.keys(message).forEach(function (key) {
19215 var value = message[key];
19216
19217 if (ArrayBuffer.isView(value)) {
19218 transferable[key] = {
19219 bytes: value.buffer,
19220 byteOffset: value.byteOffset,
19221 byteLength: value.byteLength
19222 };
19223 } else {
19224 transferable[key] = value;
19225 }
19226 });
19227 return transferable;
19228 };
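  // Worked example (illustrative): a TypedArray property is flattened so its
  // underlying ArrayBuffer can be listed as transferable in postMessage:
  //
  //   createTransferableMessage({ source: 'a', decrypted: new Uint8Array(4) });
  //   // => { source: 'a',
  //   //      decrypted: { bytes: ArrayBuffer(4), byteOffset: 0, byteLength: 4 } }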
19229 /* global self */
19230
19231 /**
19232 * Our web worker interface so that things can talk to aes-decrypter
19233   * that will be running in a web worker. The scope is passed to this by
19234 * webworkify.
19235 */
19236
19237
19238 self.onmessage = function (event) {
19239 var data = event.data;
19240 var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
19241 var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
19242 var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
19243 /* eslint-disable no-new, handle-callback-err */
19244
19245 new Decrypter(encrypted, key, iv, function (err, bytes) {
19246 self.postMessage(createTransferableMessage({
19247 source: data.source,
19248 decrypted: bytes
19249 }), [bytes.buffer]);
19250 });
19251 /* eslint-enable */
19252 };
19253}));
19254var Decrypter = factory(workerCode$1);
19255/* rollup-plugin-worker-factory end for worker!/Users/gkatsevman/p/http-streaming-release/src/decrypter-worker.js */
19256
19257/**
19258 * Convert the properties of an HLS track into an audioTrackKind.
19259 *
19260 * @private
19261 */
19262
19263var audioTrackKind_ = function audioTrackKind_(properties) {
19264 var kind = properties.default ? 'main' : 'alternative';
19265
19266 if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
19267 kind = 'main-desc';
19268 }
19269
19270 return kind;
19271};
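
// Worked examples (illustrative):
//
//   audioTrackKind_({ default: true }); // => 'main'
//   audioTrackKind_({ default: false }); // => 'alternative'
//   audioTrackKind_({
//     default: true,
//     characteristics: 'public.accessibility.describes-video'
//   }); // => 'main-desc'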
19272/**
19273 * Pause provided segment loader and playlist loader if active
19274 *
19275 * @param {SegmentLoader} segmentLoader
19276 * SegmentLoader to pause
19277 * @param {Object} mediaType
19278 * Active media type
19279 * @function stopLoaders
19280 */
19281
19282
19283var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
19284 segmentLoader.abort();
19285 segmentLoader.pause();
19286
19287 if (mediaType && mediaType.activePlaylistLoader) {
19288 mediaType.activePlaylistLoader.pause();
19289 mediaType.activePlaylistLoader = null;
19290 }
19291};
19292/**
19293 * Start loading provided segment loader and playlist loader
19294 *
19295 * @param {PlaylistLoader} playlistLoader
19296 * PlaylistLoader to start loading
19297 * @param {Object} mediaType
19298 * Active media type
19299 * @function startLoaders
19300 */
19301
19302var startLoaders = function startLoaders(playlistLoader, mediaType) {
19303 // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
19304 // playlist loader
19305 mediaType.activePlaylistLoader = playlistLoader;
19306 playlistLoader.load();
19307};
19308/**
19309 * Returns a function to be called when the media group changes. It performs a
19310 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
19311 * change of group is merely a rendition switch of the same content at another encoding,
19312 * rather than a change of content, such as switching audio from English to Spanish.
19313 *
19314 * @param {string} type
19315 * MediaGroup type
19316 * @param {Object} settings
19317 * Object containing required information for media groups
19318 * @return {Function}
19319 * Handler for a non-destructive resync of SegmentLoader when the active media
19320 * group changes.
19321 * @function onGroupChanged
19322 */
19323
19324var onGroupChanged = function onGroupChanged(type, settings) {
19325 return function () {
19326 var _settings$segmentLoad = settings.segmentLoaders,
19327 segmentLoader = _settings$segmentLoad[type],
19328 mainSegmentLoader = _settings$segmentLoad.main,
19329 mediaType = settings.mediaTypes[type];
19330 var activeTrack = mediaType.activeTrack();
19331 var activeGroup = mediaType.activeGroup(activeTrack);
19332 var previousActiveLoader = mediaType.activePlaylistLoader;
19333 stopLoaders(segmentLoader, mediaType);
19334
19335 if (!activeGroup) {
19336 // there is no group active
19337 return;
19338 }
19339
19340 if (!activeGroup.playlistLoader) {
19341 if (previousActiveLoader) {
19342 // The previous group had a playlist loader but the new active group does not
19343 // this means we are switching from demuxed to muxed audio. In this case we want to
19344 // do a destructive reset of the main segment loader and not restart the audio
19345 // loaders.
19346 mainSegmentLoader.resetEverything();
19347 }
19348
19349 return;
19350 } // Non-destructive resync
19351
19352
19353 segmentLoader.resyncLoader();
19354 startLoaders(activeGroup.playlistLoader, mediaType);
19355 };
19356};
19357var onGroupChanging = function onGroupChanging(type, settings) {
19358 return function () {
19359 var segmentLoader = settings.segmentLoaders[type];
19360 segmentLoader.abort();
19361 segmentLoader.pause();
19362 };
19363};
19364/**
19365 * Returns a function to be called when the media track changes. It performs a
19366 * destructive reset of the SegmentLoader to ensure we start loading as close to
19367 * currentTime as possible.
19368 *
19369 * @param {string} type
19370 * MediaGroup type
19371 * @param {Object} settings
19372 * Object containing required information for media groups
19373 * @return {Function}
19374 * Handler for a destructive reset of SegmentLoader when the active media
19375 * track changes.
19376 * @function onTrackChanged
19377 */
19378
19379var onTrackChanged = function onTrackChanged(type, settings) {
19380 return function () {
19381 var _settings$segmentLoad2 = settings.segmentLoaders,
19382 segmentLoader = _settings$segmentLoad2[type],
19383 mainSegmentLoader = _settings$segmentLoad2.main,
19384 mediaType = settings.mediaTypes[type];
19385 var activeTrack = mediaType.activeTrack();
19386 var activeGroup = mediaType.activeGroup(activeTrack);
19387 var previousActiveLoader = mediaType.activePlaylistLoader;
19388 stopLoaders(segmentLoader, mediaType);
19389
19390 if (!activeGroup) {
19391 // there is no group active so we do not want to restart loaders
19392 return;
19393 }
19394
19395 if (type === 'AUDIO') {
19396 if (!activeGroup.playlistLoader) {
19397 // when switching from demuxed audio/video to muxed audio/video (noted by no
19398 // playlist loader for the audio group), we want to do a destructive reset of the
19399 // main segment loader and not restart the audio loaders
19400 mainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since
19401 // it should be stopped
19402
19403 mainSegmentLoader.resetEverything();
19404 return;
19405 } // although the segment loader is an audio segment loader, call the setAudio
19406 // function to ensure it is prepared to re-append the init segment (or handle other
19407 // config changes)
19408
19409
19410 segmentLoader.setAudio(true);
19411 mainSegmentLoader.setAudio(false);
19412 }
19413
19414 if (previousActiveLoader === activeGroup.playlistLoader) {
19415 // Nothing has actually changed. This can happen because track change events can fire
19416 // multiple times for a "single" change. One for enabling the new active track, and
19417 // one for disabling the track that was active
19418 startLoaders(activeGroup.playlistLoader, mediaType);
19419 return;
19420 }
19421
19422 if (segmentLoader.track) {
19423 // For WebVTT, set the new text track in the segmentloader
19424 segmentLoader.track(activeTrack);
19425 } // destructive reset
19426
19427
19428 segmentLoader.resetEverything();
19429 startLoaders(activeGroup.playlistLoader, mediaType);
19430 };
19431};
19432var onError = {
19433 /**
19434 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
19435 * an error.
19436 *
19437 * @param {string} type
19438 * MediaGroup type
19439 * @param {Object} settings
19440 * Object containing required information for media groups
19441 * @return {Function}
19442 * Error handler. Logs warning (or error if the playlist is blacklisted) to
19443 * console and switches back to default audio track.
19444 * @function onError.AUDIO
19445 */
19446 AUDIO: function AUDIO(type, settings) {
19447 return function () {
19448 var segmentLoader = settings.segmentLoaders[type],
19449 mediaType = settings.mediaTypes[type],
19450 blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;
19451 stopLoaders(segmentLoader, mediaType); // switch back to default audio track
19452
19453 var activeTrack = mediaType.activeTrack();
19454 var activeGroup = mediaType.activeGroup();
19455 var id = (activeGroup.filter(function (group) {
19456 return group.default;
19457 })[0] || activeGroup[0]).id;
19458 var defaultTrack = mediaType.tracks[id];
19459
19460 if (activeTrack === defaultTrack) {
19461 // Default track encountered an error. All we can do now is blacklist the current
19462 // rendition and hope another will switch audio groups
19463 blacklistCurrentPlaylist({
19464 message: 'Problem encountered loading the default audio track.'
19465 });
19466 return;
19467 }
19468
19469      videojs__default['default'].log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');
19470
19471 for (var trackId in mediaType.tracks) {
19472 mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
19473 }
19474
19475 mediaType.onTrackChanged();
19476 };
19477 },
19478
19479 /**
19480 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
19481 * an error.
19482 *
19483 * @param {string} type
19484 * MediaGroup type
19485 * @param {Object} settings
19486 * Object containing required information for media groups
19487 * @return {Function}
19488 * Error handler. Logs warning to console and disables the active subtitle track
19489 * @function onError.SUBTITLES
19490 */
19491 SUBTITLES: function SUBTITLES(type, settings) {
19492 return function () {
19493 var segmentLoader = settings.segmentLoaders[type],
19494 mediaType = settings.mediaTypes[type];
19495      videojs__default['default'].log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');
19496 stopLoaders(segmentLoader, mediaType);
19497 var track = mediaType.activeTrack();
19498
19499 if (track) {
19500 track.mode = 'disabled';
19501 }
19502
19503 mediaType.onTrackChanged();
19504 };
19505 }
19506};
19507var setupListeners = {
19508 /**
19509 * Setup event listeners for audio playlist loader
19510 *
19511 * @param {string} type
19512 * MediaGroup type
19513 * @param {PlaylistLoader|null} playlistLoader
19514 * PlaylistLoader to register listeners on
19515 * @param {Object} settings
19516 * Object containing required information for media groups
19517 * @function setupListeners.AUDIO
19518 */
19519 AUDIO: function AUDIO(type, playlistLoader, settings) {
19520 if (!playlistLoader) {
19521 // no playlist loader means audio will be muxed with the video
19522 return;
19523 }
19524
19525 var tech = settings.tech,
19526 requestOptions = settings.requestOptions,
19527 segmentLoader = settings.segmentLoaders[type];
19528 playlistLoader.on('loadedmetadata', function () {
19529 var media = playlistLoader.media();
19530 segmentLoader.playlist(media, requestOptions); // if the video is already playing, or if this isn't a live video and preload
19531 // permits, start downloading segments
19532
19533 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
19534 segmentLoader.load();
19535 }
19536 });
19537 playlistLoader.on('loadedplaylist', function () {
19538 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
19539
19540 if (!tech.paused()) {
19541 segmentLoader.load();
19542 }
19543 });
19544 playlistLoader.on('error', onError[type](type, settings));
19545 },
19546
19547 /**
19548 * Setup event listeners for subtitle playlist loader
19549 *
19550 * @param {string} type
19551 * MediaGroup type
19552 * @param {PlaylistLoader|null} playlistLoader
19553 * PlaylistLoader to register listeners on
19554 * @param {Object} settings
19555 * Object containing required information for media groups
19556 * @function setupListeners.SUBTITLES
19557 */
19558 SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
19559 var tech = settings.tech,
19560 requestOptions = settings.requestOptions,
19561 segmentLoader = settings.segmentLoaders[type],
19562 mediaType = settings.mediaTypes[type];
19563 playlistLoader.on('loadedmetadata', function () {
19564 var media = playlistLoader.media();
19565 segmentLoader.playlist(media, requestOptions);
19566 segmentLoader.track(mediaType.activeTrack()); // if the video is already playing, or if this isn't a live video and preload
19567 // permits, start downloading segments
19568
19569 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
19570 segmentLoader.load();
19571 }
19572 });
19573 playlistLoader.on('loadedplaylist', function () {
19574 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
19575
19576 if (!tech.paused()) {
19577 segmentLoader.load();
19578 }
19579 });
19580 playlistLoader.on('error', onError[type](type, settings));
19581 }
19582};
19583var initialize = {
19584 /**
19585 * Setup PlaylistLoaders and AudioTracks for the audio groups
19586 *
19587 * @param {string} type
19588 * MediaGroup type
19589 * @param {Object} settings
19590 * Object containing required information for media groups
19591 * @function initialize.AUDIO
19592 */
19593 'AUDIO': function AUDIO(type, settings) {
19594 var vhs = settings.vhs,
19595 sourceType = settings.sourceType,
19596 segmentLoader = settings.segmentLoaders[type],
19597 requestOptions = settings.requestOptions,
19598 _settings$master = settings.master,
19599 mediaGroups = _settings$master.mediaGroups,
19600 playlists = _settings$master.playlists,
19601 _settings$mediaTypes$ = settings.mediaTypes[type],
19602 groups = _settings$mediaTypes$.groups,
19603 tracks = _settings$mediaTypes$.tracks,
19604 masterPlaylistLoader = settings.masterPlaylistLoader; // force a default if we have none
19605
19606 if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
19607 mediaGroups[type] = {
19608 main: {
19609 default: {
19610 default: true
19611 }
19612 }
19613 };
19614 }
19615
19616 var _loop = function _loop(groupId) {
19617 if (!groups[groupId]) {
19618 groups[groupId] = [];
19619 } // List of playlists that have an AUDIO attribute value matching the current
19620 // group ID
19621
19622
19623 var groupPlaylists = playlists.filter(function (playlist) {
19624 return playlist.attributes[type] === groupId;
19625 });
19626
19627 var _loop2 = function _loop2(variantLabel) {
19628 var properties = mediaGroups[type][groupId][variantLabel]; // List of playlists for the current group ID that do not have a matching uri
19629 // with this alternate audio variant
19630
19631 var unmatchingPlaylists = groupPlaylists.filter(function (playlist) {
19632 return playlist.resolvedUri !== properties.resolvedUri;
19633 }); // If there are no playlists using this audio group other than ones
19634      // that match its uri, then the playlist is audio only. We delete the resolvedUri
19635 // property here to prevent a playlist loader from being created so that we don't have
19636 // both the main and audio segment loaders loading the same audio segments
19637 // from the same playlist.
19638
19639 if (!unmatchingPlaylists.length && groupPlaylists.length) {
19640 delete properties.resolvedUri;
19641 }
19642
19643 var playlistLoader = void 0; // if vhs-json was provided as the source, and the media playlist was resolved,
19644 // use the resolved media playlist object
19645
19646 if (sourceType === 'vhs-json' && properties.playlists) {
19647 playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);
19648 } else if (properties.resolvedUri) {
19649 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
19650 } else if (properties.playlists && sourceType === 'dash') {
19651 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
19652 } else {
19653 // no resolvedUri means the audio is muxed with the video when using this
19654 // audio track
19655 playlistLoader = null;
19656 }
19657
19658 properties = videojs__default['default'].mergeOptions({
19659 id: variantLabel,
19660 playlistLoader: playlistLoader
19661 }, properties);
19662 setupListeners[type](type, properties.playlistLoader, settings);
19663 groups[groupId].push(properties);
19664
19665 if (typeof tracks[variantLabel] === 'undefined') {
19666 var track = new videojs__default['default'].AudioTrack({
19667 id: variantLabel,
19668 kind: audioTrackKind_(properties),
19669 enabled: false,
19670 language: properties.language,
19671 default: properties.default,
19672 label: variantLabel
19673 });
19674 tracks[variantLabel] = track;
19675 }
19676 };
19677
19678 for (var variantLabel in mediaGroups[type][groupId]) {
19679 _loop2(variantLabel);
19680 }
19681 };
19682
19683 for (var groupId in mediaGroups[type]) {
19684 _loop(groupId);
19685 } // setup single error event handler for the segment loader
19686
19687
19688 segmentLoader.on('error', onError[type](type, settings));
19689 },
19690
19691 /**
19692 * Setup PlaylistLoaders and TextTracks for the subtitle groups
19693 *
19694 * @param {string} type
19695 * MediaGroup type
19696 * @param {Object} settings
19697 * Object containing required information for media groups
19698 * @function initialize.SUBTITLES
19699 */
19700 'SUBTITLES': function SUBTITLES(type, settings) {
19701 var tech = settings.tech,
19702 vhs = settings.vhs,
19703 sourceType = settings.sourceType,
19704 segmentLoader = settings.segmentLoaders[type],
19705 requestOptions = settings.requestOptions,
19706 mediaGroups = settings.master.mediaGroups,
19707 _settings$mediaTypes$2 = settings.mediaTypes[type],
19708 groups = _settings$mediaTypes$2.groups,
19709 tracks = _settings$mediaTypes$2.tracks,
19710 masterPlaylistLoader = settings.masterPlaylistLoader;
19711
19712 for (var groupId in mediaGroups[type]) {
19713 if (!groups[groupId]) {
19714 groups[groupId] = [];
19715 }
19716
19717 for (var variantLabel in mediaGroups[type][groupId]) {
19718 if (mediaGroups[type][groupId][variantLabel].forced) {
          // Subtitle playlists with the forced attribute are not selectable in Safari.
          // According to Apple's HLS Authoring Specification:
          // If content has forced subtitles and regular subtitles in a given language,
          // the regular subtitles track in that language MUST contain both the forced
          // subtitles and the regular subtitles for that language.
          // Because of this requirement, and because Safari does not list forced subtitles,
          // forced subtitles are skipped here to maintain a consistent experience across
          // all platforms.
19727 continue;
19728 }
19729
19730 var properties = mediaGroups[type][groupId][variantLabel];
19731 var playlistLoader = void 0;
19732
19733 if (sourceType === 'hls') {
19734 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
19735 } else if (sourceType === 'dash') {
19736 var playlists = properties.playlists.filter(function (p) {
19737 return p.excludeUntil !== Infinity;
19738 });
19739
19740 if (!playlists.length) {
19741 return;
19742 }
19743
19744 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
19745 } else if (sourceType === 'vhs-json') {
19746 playlistLoader = new PlaylistLoader( // if the vhs-json object included the media playlist, use the media playlist
19747 // as provided, otherwise use the resolved URI to load the playlist
19748 properties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);
19749 }
19750
19751 properties = videojs__default['default'].mergeOptions({
19752 id: variantLabel,
19753 playlistLoader: playlistLoader
19754 }, properties);
19755 setupListeners[type](type, properties.playlistLoader, settings);
19756 groups[groupId].push(properties);
19757
19758 if (typeof tracks[variantLabel] === 'undefined') {
19759 var track = tech.addRemoteTextTrack({
19760 id: variantLabel,
19761 kind: 'subtitles',
19762 default: properties.default && properties.autoselect,
19763 language: properties.language,
19764 label: variantLabel
19765 }, false).track;
19766 tracks[variantLabel] = track;
19767 }
19768 }
19769 } // setup single error event handler for the segment loader
19770
19771
19772 segmentLoader.on('error', onError[type](type, settings));
19773 },
19774
19775 /**
19776 * Setup TextTracks for the closed-caption groups
19777 *
19778 * @param {String} type
19779 * MediaGroup type
19780 * @param {Object} settings
19781 * Object containing required information for media groups
19782 * @function initialize['CLOSED-CAPTIONS']
19783 */
19784 'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
19785 var tech = settings.tech,
19786 mediaGroups = settings.master.mediaGroups,
19787 _settings$mediaTypes$3 = settings.mediaTypes[type],
19788 groups = _settings$mediaTypes$3.groups,
19789 tracks = _settings$mediaTypes$3.tracks;
19790
19791 for (var groupId in mediaGroups[type]) {
19792 if (!groups[groupId]) {
19793 groups[groupId] = [];
19794 }
19795
19796 for (var variantLabel in mediaGroups[type][groupId]) {
        var properties = mediaGroups[type][groupId][variantLabel]; // We only support CEA-608 captions for now, so ignore anything that
        // doesn't use a CCx INSTREAM-ID
19799
19800 if (!properties.instreamId.match(/CC\d/)) {
19801 continue;
19802 } // No PlaylistLoader is required for Closed-Captions because the captions are
19803 // embedded within the video stream
19804
19805
19806 groups[groupId].push(videojs__default['default'].mergeOptions({
19807 id: variantLabel
19808 }, properties));
19809
19810 if (typeof tracks[variantLabel] === 'undefined') {
19811 var track = tech.addRemoteTextTrack({
19812 id: properties.instreamId,
19813 kind: 'captions',
19814 default: properties.default && properties.autoselect,
19815 language: properties.language,
19816 label: variantLabel
19817 }, false).track;
19818 tracks[variantLabel] = track;
19819 }
19820 }
19821 }
19822 }
19823};
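
// Illustrative sketch (not part of the library): a minimal, hypothetical
// master.mediaGroups shape that the initialize functions above consume. The
// group IDs ('audio-hi', 'subs', 'cc') and the variant label are invented
// purely for illustration.
var exampleMediaGroups = {
  AUDIO: {
    'audio-hi': {
      English: {
        language: 'en',
        default: true,
        autoselect: true,
        // present for demuxed alternate audio; when every playlist in the
        // group matches this URI, initialize.AUDIO deletes it so no playlist
        // loader is created
        resolvedUri: 'https://example.com/audio-en.m3u8'
      }
    }
  },
  SUBTITLES: {
    subs: {
      English: {
        language: 'en',
        default: false,
        autoselect: false,
        forced: false, // forced variants are skipped by initialize.SUBTITLES
        resolvedUri: 'https://example.com/subs-en.m3u8'
      }
    }
  },
  'CLOSED-CAPTIONS': {
    cc: {
      English: {
        language: 'en',
        instreamId: 'CC1' // must match /CC\d/ to be initialized above
      }
    }
  }
};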
19824/**
19825 * Returns a function used to get the active group of the provided type
19826 *
19827 * @param {string} type
19828 * MediaGroup type
19829 * @param {Object} settings
19830 * Object containing required information for media groups
19831 * @return {Function}
19832 * Function that returns the active media group for the provided type. Takes an
19833 * optional parameter {TextTrack} track. If no track is provided, a list of all
19834 * variants in the group, otherwise the variant corresponding to the provided
19835 * track is returned.
19836 * @function activeGroup
19837 */
19838
19839var activeGroup = function activeGroup(type, settings) {
19840 return function (track) {
19841 var masterPlaylistLoader = settings.masterPlaylistLoader,
19842 groups = settings.mediaTypes[type].groups;
19843 var media = masterPlaylistLoader.media();
19844
19845 if (!media) {
19846 return null;
19847 }
19848
19849 var variants = null;
19850
19851 if (media.attributes[type]) {
19852 variants = groups[media.attributes[type]];
19853 }
19854
19855 variants = variants || groups.main;
19856
19857 if (typeof track === 'undefined') {
19858 return variants;
19859 }
19860
19861 if (track === null) {
19862 // An active track was specified so a corresponding group is expected. track === null
19863 // means no track is currently active so there is no corresponding group
19864 return null;
19865 }
19866
19867 return variants.filter(function (props) {
19868 return props.id === track.id;
19869 })[0] || null;
19870 };
19871};
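
// Sketch of how the getter returned by activeGroup behaves; the stubbed
// settings object below is hypothetical and stands in for the real media
// group settings assembled in setupMediaGroups further down.
function exampleActiveGroupUsage() {
  var settings = {
    masterPlaylistLoader: {
      media: function () {
        return { attributes: { AUDIO: 'audio-hi' } };
      }
    },
    mediaTypes: {
      AUDIO: { groups: { 'audio-hi': [{ id: 'English' }] } }
    }
  };
  var getAudioGroup = activeGroup('AUDIO', settings);

  getAudioGroup(); // -> [{ id: 'English' }] (all variants in the active group)
  getAudioGroup(null); // -> null (null means no track is currently active)
  return getAudioGroup({ id: 'English' }); // -> { id: 'English' }
}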
19872var activeTrack = {
19873 /**
19874 * Returns a function used to get the active track of type provided
19875 *
19876 * @param {string} type
19877 * MediaGroup type
19878 * @param {Object} settings
19879 * Object containing required information for media groups
19880 * @return {Function}
19881 * Function that returns the active media track for the provided type. Returns
19882 * null if no track is active
19883 * @function activeTrack.AUDIO
19884 */
19885 AUDIO: function AUDIO(type, settings) {
19886 return function () {
19887 var tracks = settings.mediaTypes[type].tracks;
19888
19889 for (var id in tracks) {
19890 if (tracks[id].enabled) {
19891 return tracks[id];
19892 }
19893 }
19894
19895 return null;
19896 };
19897 },
19898
19899 /**
19900 * Returns a function used to get the active track of type provided
19901 *
19902 * @param {string} type
19903 * MediaGroup type
19904 * @param {Object} settings
19905 * Object containing required information for media groups
19906 * @return {Function}
19907 * Function that returns the active media track for the provided type. Returns
19908 * null if no track is active
19909 * @function activeTrack.SUBTITLES
19910 */
19911 SUBTITLES: function SUBTITLES(type, settings) {
19912 return function () {
19913 var tracks = settings.mediaTypes[type].tracks;
19914
19915 for (var id in tracks) {
19916 if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {
19917 return tracks[id];
19918 }
19919 }
19920
19921 return null;
19922 };
19923 }
19924};
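
// Sketch: the two activeTrack getters above differ only in what "active"
// means. An AudioTrack is active when `enabled` is true; a TextTrack is
// active when its mode is 'showing' or 'hidden' (hidden tracks still receive
// cues). The tracks objects below are hypothetical stand-ins.
function exampleActiveTrackUsage() {
  var settings = {
    mediaTypes: {
      AUDIO: { tracks: { en: { id: 'en', enabled: true } } },
      SUBTITLES: { tracks: { en: { id: 'en', mode: 'showing' } } }
    }
  };
  return {
    audio: activeTrack.AUDIO('AUDIO', settings)(), // -> { id: 'en', enabled: true }
    text: activeTrack.SUBTITLES('SUBTITLES', settings)() // -> { id: 'en', mode: 'showing' }
  };
}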
19925/**
19926 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
19927 * Closed-Captions) specified in the master manifest.
19928 *
19929 * @param {Object} settings
19930 * Object containing required information for setting up the media groups
19931 * @param {Tech} settings.tech
19932 * The tech of the player
19933 * @param {Object} settings.requestOptions
19934 * XHR request options used by the segment loaders
19935 * @param {PlaylistLoader} settings.masterPlaylistLoader
19936 * PlaylistLoader for the master source
19937 * @param {VhsHandler} settings.vhs
19938 * VHS SourceHandler
19939 * @param {Object} settings.master
19940 * The parsed master manifest
19941 * @param {Object} settings.mediaTypes
19942 * Object to store the loaders, tracks, and utility methods for each media type
19943 * @param {Function} settings.blacklistCurrentPlaylist
19944 * Blacklists the current rendition and forces a rendition switch.
19945 * @function setupMediaGroups
19946 */
19947
19948var setupMediaGroups = function setupMediaGroups(settings) {
19949 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
19950 initialize[type](type, settings);
19951 });
19952 var mediaTypes = settings.mediaTypes,
19953 masterPlaylistLoader = settings.masterPlaylistLoader,
19954 tech = settings.tech,
19955 vhs = settings.vhs; // setup active group and track getters and change event handlers
19956
19957 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
19958 mediaTypes[type].activeGroup = activeGroup(type, settings);
19959 mediaTypes[type].activeTrack = activeTrack[type](type, settings);
19960 mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
19961 mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
19962 mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
19963 }); // DO NOT enable the default subtitle or caption track.
19964 // DO enable the default audio track
19965
19966 var audioGroup = mediaTypes.AUDIO.activeGroup();
19967
19968 if (audioGroup) {
19969 var groupId = (audioGroup.filter(function (group) {
19970 return group.default;
19971 })[0] || audioGroup[0]).id;
19972 mediaTypes.AUDIO.tracks[groupId].enabled = true;
19973 mediaTypes.AUDIO.onTrackChanged();
19974 }
19975
19976 masterPlaylistLoader.on('mediachange', function () {
19977 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
19978 return mediaTypes[type].onGroupChanged();
19979 });
19980 });
19981 masterPlaylistLoader.on('mediachanging', function () {
19982 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
19983 return mediaTypes[type].onGroupChanging();
19984 });
19985 }); // custom audio track change event handler for usage event
19986
19987 var onAudioTrackChanged = function onAudioTrackChanged() {
19988 mediaTypes.AUDIO.onTrackChanged();
19989 tech.trigger({
19990 type: 'usage',
19991 name: 'vhs-audio-change'
19992 });
19993 tech.trigger({
19994 type: 'usage',
19995 name: 'hls-audio-change'
19996 });
19997 };
19998
19999 tech.audioTracks().addEventListener('change', onAudioTrackChanged);
20000 tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
20001 vhs.on('dispose', function () {
20002 tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
20003 tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
20004 }); // clear existing audio tracks and add the ones we just created
20005
20006 tech.clearTracks('audio');
20007
20008 for (var id in mediaTypes.AUDIO.tracks) {
20009 tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
20010 }
20011};
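
// Sketch of the default-audio selection performed in setupMediaGroups above:
// prefer the variant flagged `default`, otherwise fall back to the first
// variant in the group. The group array in the usage note is hypothetical.
function examplePickDefaultAudioId(audioGroup) {
  return (audioGroup.filter(function (variant) {
    return variant.default;
  })[0] || audioGroup[0]).id;
}
// examplePickDefaultAudioId([{ id: 'alt' }, { id: 'main', default: true }]) -> 'main'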
20012/**
20013 * Creates skeleton object used to store the loaders, tracks, and utility methods for each
20014 * media type
20015 *
20016 * @return {Object}
20017 * Object to store the loaders, tracks, and utility methods for each media type
20018 * @function createMediaTypes
20019 */
20020
20021var createMediaTypes = function createMediaTypes() {
20022 var mediaTypes = {};
20023 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
20024 mediaTypes[type] = {
20025 groups: {},
20026 tracks: {},
20027 activePlaylistLoader: null,
20028 activeGroup: noop,
20029 activeTrack: noop,
20030 onGroupChanged: noop,
20031 onTrackChanged: noop
20032 };
20033 });
20034 return mediaTypes;
20035};
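
// Sketch of the skeleton createMediaTypes returns for each type, before
// setupMediaGroups swaps the noop placeholders for real implementations.
function exampleMediaTypesSkeleton() {
  // -> { groups: {}, tracks: {}, activePlaylistLoader: null,
  //      activeGroup: noop, activeTrack: noop,
  //      onGroupChanged: noop, onTrackChanged: noop }
  return createMediaTypes().AUDIO;
}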
20036
20037var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;
20038var Vhs; // SegmentLoader stats that need to have each loader's
20039// values summed to calculate the final value
20040
20041var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred'];
20042
20043var sumLoaderStat = function sumLoaderStat(stat) {
20044 return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
20045};
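
// Sketch: sumLoaderStat is bound per stat name in the constructor below,
// producing zero-argument getters that sum the main and audio loaders'
// counters. The receiver here is a hypothetical stand-in for a
// MasterPlaylistController instance.
function exampleSumLoaderStat() {
  var fakeController = {
    mainSegmentLoader_: { mediaRequests: 2 },
    audioSegmentLoader_: { mediaRequests: 3 }
  };
  var mediaRequests_ = sumLoaderStat.bind(fakeController, 'mediaRequests');
  return mediaRequests_(); // -> 5
}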
20046
20047var shouldSwitchToMedia = function shouldSwitchToMedia(_ref) {
20048 var currentPlaylist = _ref.currentPlaylist,
20049 nextPlaylist = _ref.nextPlaylist,
20050 forwardBuffer = _ref.forwardBuffer,
20051 bufferLowWaterLine = _ref.bufferLowWaterLine,
20052 bufferHighWaterLine = _ref.bufferHighWaterLine,
20053 duration = _ref.duration,
20054 experimentalBufferBasedABR = _ref.experimentalBufferBasedABR,
20055 log = _ref.log;
20056
20057 // we have no other playlist to switch to
20058 if (!nextPlaylist) {
20059 videojs__default['default'].log.warn('We received no playlist to switch to. Please check your stream.');
20060 return false;
20061 }
20062
  var sharedLogLine = "allowing switch " + (currentPlaylist && currentPlaylist.id || 'null') + " -> " + nextPlaylist.id; // If the playlist is live, then we don't want to take the low water line into
  // account. This is because in LIVE, the player plays 3 segments from the end of the
  // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration available
  // in those segments, a viewer will never experience a rendition upswitch.
20067
20068 if (!currentPlaylist || !currentPlaylist.endList) {
20069 log(sharedLogLine + " as current playlist " + (!currentPlaylist ? 'is not set' : 'is live'));
20070 return true;
  } // no need to switch if the playlist is the same
20072
20073
20074 if (nextPlaylist.id === currentPlaylist.id) {
20075 return false;
20076 }
20077
20078 var maxBufferLowWaterLine = experimentalBufferBasedABR ? Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE; // For the same reason as LIVE, we ignore the low water line when the VOD
20079 // duration is below the max potential low water line
20080
20081 if (duration < maxBufferLowWaterLine) {
20082 log(sharedLogLine + " as duration < max low water line (" + duration + " < " + maxBufferLowWaterLine + ")");
20083 return true;
20084 }
20085
20086 var nextBandwidth = nextPlaylist.attributes.BANDWIDTH;
20087 var currBandwidth = currentPlaylist.attributes.BANDWIDTH; // when switching down, if our buffer is lower than the high water line,
20088 // we can switch down
20089
20090 if (nextBandwidth < currBandwidth && (!experimentalBufferBasedABR || forwardBuffer < bufferHighWaterLine)) {
20091 var logLine = sharedLogLine + " as next bandwidth < current bandwidth (" + nextBandwidth + " < " + currBandwidth + ")";
20092
20093 if (experimentalBufferBasedABR) {
20094 logLine += " and forwardBuffer < bufferHighWaterLine (" + forwardBuffer + " < " + bufferHighWaterLine + ")";
20095 }
20096
20097 log(logLine);
20098 return true;
20099 } // and if our buffer is higher than the low water line,
20100 // we can switch up
20101
20102
20103 if ((!experimentalBufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {
20104 var _logLine = sharedLogLine + " as forwardBuffer >= bufferLowWaterLine (" + forwardBuffer + " >= " + bufferLowWaterLine + ")";
20105
20106 if (experimentalBufferBasedABR) {
20107 _logLine += " and next bandwidth > current bandwidth (" + nextBandwidth + " > " + currBandwidth + ")";
20108 }
20109
20110 log(_logLine);
20111 return true;
20112 }
20113
20114 log("not " + sharedLogLine + " as no switching criteria met");
20115 return false;
20116};
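
// Worked sketch of shouldSwitchToMedia with made-up numbers, assuming the
// 600s duration exceeds Config.MAX_BUFFER_LOW_WATER_LINE. Under the default
// (non-buffer-based) ABR, a down-switch ignores the high water line, so this
// returns true because next bandwidth (1e6) < current bandwidth (4e6).
function exampleShouldSwitchDown() {
  return shouldSwitchToMedia({
    currentPlaylist: { id: '1-a', endList: true, attributes: { BANDWIDTH: 4e6 } },
    nextPlaylist: { id: '0-b', attributes: { BANDWIDTH: 1e6 } },
    forwardBuffer: 5,
    bufferLowWaterLine: 16,
    bufferHighWaterLine: 30,
    duration: 600,
    experimentalBufferBasedABR: false,
    log: function () {}
  });
}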
20117/**
 * The master playlist controller controls all interactions
 * between playlists and segment loaders. At this time this mainly
 * involves a master playlist and a series of audio playlists
 * if they are available.
20122 *
20123 * @class MasterPlaylistController
20124 * @extends videojs.EventTarget
20125 */
20126
20127
20128var MasterPlaylistController = /*#__PURE__*/function (_videojs$EventTarget) {
20129 _inheritsLoose__default['default'](MasterPlaylistController, _videojs$EventTarget);
20130
20131 function MasterPlaylistController(options) {
20132 var _this;
20133
20134 _this = _videojs$EventTarget.call(this) || this;
20135 var src = options.src,
20136 handleManifestRedirects = options.handleManifestRedirects,
20137 withCredentials = options.withCredentials,
20138 tech = options.tech,
20139 bandwidth = options.bandwidth,
20140 externVhs = options.externVhs,
20141 useCueTags = options.useCueTags,
20142 blacklistDuration = options.blacklistDuration,
20143 enableLowInitialPlaylist = options.enableLowInitialPlaylist,
20144 sourceType = options.sourceType,
20145 cacheEncryptionKeys = options.cacheEncryptionKeys,
20146 handlePartialData = options.handlePartialData,
20147 experimentalBufferBasedABR = options.experimentalBufferBasedABR;
20148
20149 if (!src) {
20150 throw new Error('A non-empty playlist URL or JSON manifest string is required');
20151 }
20152
20153 Vhs = externVhs;
20154 _this.experimentalBufferBasedABR = Boolean(experimentalBufferBasedABR);
20155 _this.withCredentials = withCredentials;
20156 _this.tech_ = tech;
20157 _this.vhs_ = tech.vhs;
20158 _this.sourceType_ = sourceType;
20159 _this.useCueTags_ = useCueTags;
20160 _this.blacklistDuration = blacklistDuration;
20161 _this.enableLowInitialPlaylist = enableLowInitialPlaylist;
20162
20163 if (_this.useCueTags_) {
20164 _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
20165 _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
20166 }
20167
20168 _this.requestOptions_ = {
20169 withCredentials: withCredentials,
20170 handleManifestRedirects: handleManifestRedirects,
20171 timeout: null
20172 };
20173
20174 _this.on('error', _this.pauseLoading);
20175
20176 _this.mediaTypes_ = createMediaTypes();
20177 _this.mediaSource = new window__default['default'].MediaSource();
20178 _this.handleDurationChange_ = _this.handleDurationChange_.bind(_assertThisInitialized__default['default'](_this));
20179 _this.handleSourceOpen_ = _this.handleSourceOpen_.bind(_assertThisInitialized__default['default'](_this));
20180 _this.handleSourceEnded_ = _this.handleSourceEnded_.bind(_assertThisInitialized__default['default'](_this));
20181
20182 _this.mediaSource.addEventListener('durationchange', _this.handleDurationChange_); // load the media source into the player
20183
20184
20185 _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_);
20186
20187 _this.mediaSource.addEventListener('sourceended', _this.handleSourceEnded_); // we don't have to handle sourceclose since dispose will handle termination of
20188 // everything, and the MediaSource should not be detached without a proper disposal
20189
20190
20191 _this.seekable_ = videojs__default['default'].createTimeRanges();
20192 _this.hasPlayed_ = false;
20193 _this.syncController_ = new SyncController(options);
20194 _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
20195 kind: 'metadata',
20196 label: 'segment-metadata'
20197 }, false).track;
20198 _this.decrypter_ = new Decrypter();
20199 _this.sourceUpdater_ = new SourceUpdater(_this.mediaSource);
20200 _this.inbandTextTracks_ = {};
20201 _this.timelineChangeController_ = new TimelineChangeController();
20202 var segmentLoaderSettings = {
20203 vhs: _this.vhs_,
20204 parse708captions: options.parse708captions,
20205 mediaSource: _this.mediaSource,
20206 currentTime: _this.tech_.currentTime.bind(_this.tech_),
20207 seekable: function seekable() {
20208 return _this.seekable();
20209 },
20210 seeking: function seeking() {
20211 return _this.tech_.seeking();
20212 },
20213 duration: function duration() {
20214 return _this.duration();
20215 },
20216 hasPlayed: function hasPlayed() {
20217 return _this.hasPlayed_;
20218 },
20219 goalBufferLength: function goalBufferLength() {
20220 return _this.goalBufferLength();
20221 },
20222 bandwidth: bandwidth,
20223 syncController: _this.syncController_,
20224 decrypter: _this.decrypter_,
20225 sourceType: _this.sourceType_,
20226 inbandTextTracks: _this.inbandTextTracks_,
20227 cacheEncryptionKeys: cacheEncryptionKeys,
20228 handlePartialData: handlePartialData,
20229 sourceUpdater: _this.sourceUpdater_,
20230 timelineChangeController: _this.timelineChangeController_
20231 }; // The source type check not only determines whether a special DASH playlist loader
20232 // should be used, but also covers the case where the provided src is a vhs-json
20233 // manifest object (instead of a URL). In the case of vhs-json, the default
20234 // PlaylistLoader should be used.
20235
20236 _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, _this.vhs_, _this.requestOptions_) : new PlaylistLoader(src, _this.vhs_, _this.requestOptions_);
20237
20238 _this.setupMasterPlaylistLoaderListeners_(); // setup segment loaders
20239 // combined audio/video or just video when alternate audio track is selected
20240
20241
20242 _this.mainSegmentLoader_ = new SegmentLoader(videojs__default['default'].mergeOptions(segmentLoaderSettings, {
20243 segmentMetadataTrack: _this.segmentMetadataTrack_,
20244 loaderType: 'main'
20245 }), options); // alternate audio track
20246
20247 _this.audioSegmentLoader_ = new SegmentLoader(videojs__default['default'].mergeOptions(segmentLoaderSettings, {
20248 loaderType: 'audio'
20249 }), options);
20250 _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs__default['default'].mergeOptions(segmentLoaderSettings, {
20251 loaderType: 'vtt',
20252 featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks
20253 }), options);
20254
20255 _this.setupSegmentLoaderListeners_();
20256
20257 if (_this.experimentalBufferBasedABR) {
20258 _this.masterPlaylistLoader_.one('loadedplaylist', function () {
20259 return _this.startABRTimer_();
20260 });
20261
20262 _this.tech_.on('pause', function () {
20263 return _this.stopABRTimer_();
20264 });
20265
20266 _this.tech_.on('play', function () {
20267 return _this.startABRTimer_();
20268 });
20269 } // Create SegmentLoader stat-getters
20270 // mediaRequests_
20271 // mediaRequestsAborted_
20272 // mediaRequestsTimedout_
20273 // mediaRequestsErrored_
20274 // mediaTransferDuration_
20275 // mediaBytesTransferred_
20276
20277
20278 loaderStats.forEach(function (stat) {
20279 _this[stat + '_'] = sumLoaderStat.bind(_assertThisInitialized__default['default'](_this), stat);
20280 });
20281 _this.logger_ = logger('MPC');
20282 _this.triggeredFmp4Usage = false;
20283
20284 if (_this.tech_.preload() === 'none') {
20285 _this.loadOnPlay_ = function () {
20286 _this.loadOnPlay_ = null;
20287
20288 _this.masterPlaylistLoader_.load();
20289 };
20290
20291 _this.tech_.one('play', _this.loadOnPlay_);
20292 } else {
20293 _this.masterPlaylistLoader_.load();
20294 }
20295
20296 return _this;
20297 }
20298 /**
20299 * Run selectPlaylist and switch to the new playlist if we should
20300 *
20301 * @private
20302 *
20303 */
20304
20305
20306 var _proto = MasterPlaylistController.prototype;
20307
20308 _proto.checkABR_ = function checkABR_() {
20309 var nextPlaylist = this.selectPlaylist();
20310
20311 if (this.shouldSwitchToMedia_(nextPlaylist)) {
20312 this.masterPlaylistLoader_.media(nextPlaylist);
20313 }
20314 }
20315 /**
20316 * Start a timer that periodically calls checkABR_
20317 *
20318 * @private
20319 */
20320 ;
20321
20322 _proto.startABRTimer_ = function startABRTimer_() {
20323 var _this2 = this;
20324
20325 this.stopABRTimer_();
20326 this.abrTimer_ = window__default['default'].setInterval(function () {
20327 return _this2.checkABR_();
20328 }, 250);
20329 }
20330 /**
20331 * Stop the timer that periodically calls checkABR_
20332 *
20333 * @private
20334 */
20335 ;
20336
20337 _proto.stopABRTimer_ = function stopABRTimer_() {
    // don't stop the ABR timer while the user is scrubbing, since scrubbing
    // fires 'pause' events. This getter will be added to Video.js in version 7.11.
20340 if (this.tech_.scrubbing && this.tech_.scrubbing()) {
20341 return;
20342 }
20343
20344 window__default['default'].clearInterval(this.abrTimer_);
20345 this.abrTimer_ = null;
20346 }
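
  // Sketch of the timer discipline used by startABRTimer_/stopABRTimer_ above:
  // always clear before setting, so repeated 'play' or 'loadedplaylist' events
  // can never stack multiple 250ms intervals. Generic pattern with a
  // hypothetical callback; the real methods live on the controller itself.
  function exampleRestartableInterval(callback) {
    var timerId = null;
    return {
      start: function () {
        this.stop(); // never let two intervals run at once

        timerId = window__default['default'].setInterval(callback, 250);
      },
      stop: function () {
        window__default['default'].clearInterval(timerId);
        timerId = null;
      }
    };
  }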
20347 /**
20348 * Register event handlers on the master playlist loader. A helper
20349 * function for construction time.
20350 *
20351 * @private
20352 */
20353 ;
20354
20355 _proto.setupMasterPlaylistLoaderListeners_ = function setupMasterPlaylistLoaderListeners_() {
20356 var _this3 = this;
20357
20358 this.masterPlaylistLoader_.on('loadedmetadata', function () {
20359 var media = _this3.masterPlaylistLoader_.media();
20360
20361 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
20362 // timeout the request.
20363
20364 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
20365 _this3.requestOptions_.timeout = 0;
20366 } else {
20367 _this3.requestOptions_.timeout = requestTimeout;
20368 } // if this isn't a live video and preload permits, start
20369 // downloading segments
20370
20371
20372 if (media.endList && _this3.tech_.preload() !== 'none') {
20373 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
20374
20375 _this3.mainSegmentLoader_.load();
20376 }
20377
20378 setupMediaGroups({
20379 sourceType: _this3.sourceType_,
20380 segmentLoaders: {
20381 AUDIO: _this3.audioSegmentLoader_,
20382 SUBTITLES: _this3.subtitleSegmentLoader_,
20383 main: _this3.mainSegmentLoader_
20384 },
20385 tech: _this3.tech_,
20386 requestOptions: _this3.requestOptions_,
20387 masterPlaylistLoader: _this3.masterPlaylistLoader_,
20388 vhs: _this3.vhs_,
20389 master: _this3.master(),
20390 mediaTypes: _this3.mediaTypes_,
20391 blacklistCurrentPlaylist: _this3.blacklistCurrentPlaylist.bind(_this3)
20392 });
20393
20394 _this3.triggerPresenceUsage_(_this3.master(), media);
20395
20396 _this3.setupFirstPlay();
20397
20398 if (!_this3.mediaTypes_.AUDIO.activePlaylistLoader || _this3.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
20399 _this3.trigger('selectedinitialmedia');
20400 } else {
20401 // We must wait for the active audio playlist loader to
20402 // finish setting up before triggering this event so the
20403 // representations API and EME setup is correct
20404 _this3.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
20405 _this3.trigger('selectedinitialmedia');
20406 });
20407 }
20408 });
20409 this.masterPlaylistLoader_.on('loadedplaylist', function () {
20410 if (_this3.loadOnPlay_) {
20411 _this3.tech_.off('play', _this3.loadOnPlay_);
20412 }
20413
20414 var updatedPlaylist = _this3.masterPlaylistLoader_.media();
20415
20416 if (!updatedPlaylist) {
20417 // exclude any variants that are not supported by the browser before selecting
20418 // an initial media as the playlist selectors do not consider browser support
20419 _this3.excludeUnsupportedVariants_();
20420
20421 var selectedMedia;
20422
20423 if (_this3.enableLowInitialPlaylist) {
20424 selectedMedia = _this3.selectInitialPlaylist();
20425 }
20426
20427 if (!selectedMedia) {
20428 selectedMedia = _this3.selectPlaylist();
20429 }
20430
20431 if (!selectedMedia || !_this3.shouldSwitchToMedia_(selectedMedia)) {
20432 return;
20433 }
20434
20435 _this3.initialMedia_ = selectedMedia;
20436
20437 _this3.masterPlaylistLoader_.media(_this3.initialMedia_); // Under the standard case where a source URL is provided, loadedplaylist will
20438 // fire again since the playlist will be requested. In the case of vhs-json
20439 // (where the manifest object is provided as the source), when the media
20440 // playlist's `segments` list is already available, a media playlist won't be
20441 // requested, and loadedplaylist won't fire again, so the playlist handler must be
20442 // called on its own here.
20443
20444
20445 var haveJsonSource = _this3.sourceType_ === 'vhs-json' && _this3.initialMedia_.segments;
20446
20447 if (!haveJsonSource) {
20448 return;
20449 }
20450
20451 updatedPlaylist = _this3.initialMedia_;
20452 }
20453
20454 _this3.handleUpdatedMediaPlaylist(updatedPlaylist);
20455 });
20456 this.masterPlaylistLoader_.on('error', function () {
20457 _this3.blacklistCurrentPlaylist(_this3.masterPlaylistLoader_.error);
20458 });
20459 this.masterPlaylistLoader_.on('mediachanging', function () {
20460 _this3.mainSegmentLoader_.abort();
20461
20462 _this3.mainSegmentLoader_.pause();
20463 });
20464 this.masterPlaylistLoader_.on('mediachange', function () {
20465 var media = _this3.masterPlaylistLoader_.media();
20466
20467 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
20468 // timeout the request.
20469
20470 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
20471 _this3.requestOptions_.timeout = 0;
20472 } else {
20473 _this3.requestOptions_.timeout = requestTimeout;
20474 } // TODO: Create a new event on the PlaylistLoader that signals
20475 // that the segments have changed in some way and use that to
20476 // update the SegmentLoader instead of doing it twice here and
20477 // on `loadedplaylist`
20478
20479
20480 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
20481
20482 _this3.mainSegmentLoader_.load();
20483
20484 _this3.tech_.trigger({
20485 type: 'mediachange',
20486 bubbles: true
20487 });
20488 });
20489 this.masterPlaylistLoader_.on('playlistunchanged', function () {
20490 var updatedPlaylist = _this3.masterPlaylistLoader_.media();
20491
20492 var playlistOutdated = _this3.stuckAtPlaylistEnd_(updatedPlaylist);
20493
20494 if (playlistOutdated) {
20495 // Playlist has stopped updating and we're stuck at its end. Try to
20496 // blacklist it and switch to another playlist in the hope that that
20497 // one is updating (and give the player a chance to re-adjust to the
20498 // safe live point).
20499 _this3.blacklistCurrentPlaylist({
20500 message: 'Playlist no longer updating.'
20501 }); // useful for monitoring QoS
20502
20503
20504 _this3.tech_.trigger('playliststuck');
20505 }
20506 });
20507 this.masterPlaylistLoader_.on('renditiondisabled', function () {
20508 _this3.tech_.trigger({
20509 type: 'usage',
20510 name: 'vhs-rendition-disabled'
20511 });
20512
20513 _this3.tech_.trigger({
20514 type: 'usage',
20515 name: 'hls-rendition-disabled'
20516 });
20517 });
20518 this.masterPlaylistLoader_.on('renditionenabled', function () {
20519 _this3.tech_.trigger({
20520 type: 'usage',
20521 name: 'vhs-rendition-enabled'
20522 });
20523
20524 _this3.tech_.trigger({
20525 type: 'usage',
20526 name: 'hls-rendition-enabled'
20527 });
20528 });
20529 }
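
  // Sketch of the request-timeout policy applied in the 'loadedmetadata' and
  // 'mediachange' handlers above: 1.5x the playlist target duration in
  // milliseconds, or 0 (no timeout) when already on the lowest enabled
  // rendition, since there is nothing lower to fall back to.
  function exampleRequestTimeout(media, isLowestEnabled) {
    return isLowestEnabled ? 0 : media.targetDuration * 1.5 * 1000;
  }
  // exampleRequestTimeout({ targetDuration: 6 }, false) -> 9000 (ms)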
20530 /**
20531 * Given an updated media playlist (whether it was loaded for the first time, or
20532 * refreshed for live playlists), update any relevant properties and state to reflect
20533 * changes in the media that should be accounted for (e.g., cues and duration).
20534 *
20535 * @param {Object} updatedPlaylist the updated media playlist object
20536 *
20537 * @private
20538 */
20539 ;
20540
20541 _proto.handleUpdatedMediaPlaylist = function handleUpdatedMediaPlaylist(updatedPlaylist) {
20542 if (this.useCueTags_) {
20543 this.updateAdCues_(updatedPlaylist);
20544 } // TODO: Create a new event on the PlaylistLoader that signals
20545 // that the segments have changed in some way and use that to
20546 // update the SegmentLoader instead of doing it twice here and
20547 // on `mediachange`
20548
20549
20550 this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);
20551 this.updateDuration(!updatedPlaylist.endList); // If the player isn't paused, ensure that the segment loader is running,
20552 // as it is possible that it was temporarily stopped while waiting for
20553 // a playlist (e.g., in case the playlist errored and we re-requested it).
20554
20555 if (!this.tech_.paused()) {
20556 this.mainSegmentLoader_.load();
20557
20558 if (this.audioSegmentLoader_) {
20559 this.audioSegmentLoader_.load();
20560 }
20561 }
20562 }
20563 /**
   * A helper function for triggering presence usage events once per source
20565 *
20566 * @private
20567 */
20568 ;
20569
20570 _proto.triggerPresenceUsage_ = function triggerPresenceUsage_(master, media) {
20571 var mediaGroups = master.mediaGroups || {};
20572 var defaultDemuxed = true;
20573 var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
20574
20575 for (var mediaGroup in mediaGroups.AUDIO) {
20576 for (var label in mediaGroups.AUDIO[mediaGroup]) {
20577 var properties = mediaGroups.AUDIO[mediaGroup][label];
20578
20579 if (!properties.uri) {
20580 defaultDemuxed = false;
20581 }
20582 }
20583 }
20584
20585 if (defaultDemuxed) {
20586 this.tech_.trigger({
20587 type: 'usage',
20588 name: 'vhs-demuxed'
20589 });
20590 this.tech_.trigger({
20591 type: 'usage',
20592 name: 'hls-demuxed'
20593 });
20594 }
20595
20596 if (Object.keys(mediaGroups.SUBTITLES).length) {
20597 this.tech_.trigger({
20598 type: 'usage',
20599 name: 'vhs-webvtt'
20600 });
20601 this.tech_.trigger({
20602 type: 'usage',
20603 name: 'hls-webvtt'
20604 });
20605 }
20606
20607 if (Vhs.Playlist.isAes(media)) {
20608 this.tech_.trigger({
20609 type: 'usage',
20610 name: 'vhs-aes'
20611 });
20612 this.tech_.trigger({
20613 type: 'usage',
20614 name: 'hls-aes'
20615 });
20616 }
20617
20618 if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
20619 this.tech_.trigger({
20620 type: 'usage',
20621 name: 'vhs-alternate-audio'
20622 });
20623 this.tech_.trigger({
20624 type: 'usage',
20625 name: 'hls-alternate-audio'
20626 });
20627 }
20628
20629 if (this.useCueTags_) {
20630 this.tech_.trigger({
20631 type: 'usage',
20632 name: 'vhs-playlist-cue-tags'
20633 });
20634 this.tech_.trigger({
20635 type: 'usage',
20636 name: 'hls-playlist-cue-tags'
20637 });
20638 }
20639 };
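
  // Sketch: triggerPresenceUsage_ reports demuxed audio only when every audio
  // variant in every AUDIO group carries its own uri (i.e. no variant is muxed
  // into a video playlist). A pure re-statement of the loop above, with a
  // hypothetical input shown in the usage note.
  function exampleIsFullyDemuxed(audioGroups) {
    var demuxed = true;

    for (var groupId in audioGroups) {
      for (var label in audioGroups[groupId]) {
        if (!audioGroups[groupId][label].uri) {
          demuxed = false;
        }
      }
    }

    return demuxed;
  }
  // exampleIsFullyDemuxed({ main: { en: { uri: 'audio-en.m3u8' } } }) -> true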
20640
20641 _proto.shouldSwitchToMedia_ = function shouldSwitchToMedia_(nextPlaylist) {
20642 var currentPlaylist = this.masterPlaylistLoader_.media();
20643 var buffered = this.tech_.buffered();
20644 var forwardBuffer = buffered.length ? buffered.end(buffered.length - 1) - this.tech_.currentTime() : 0;
20645 var bufferLowWaterLine = this.bufferLowWaterLine();
20646 var bufferHighWaterLine = this.bufferHighWaterLine();
20647 return shouldSwitchToMedia({
20648 currentPlaylist: currentPlaylist,
20649 nextPlaylist: nextPlaylist,
20650 forwardBuffer: forwardBuffer,
20651 bufferLowWaterLine: bufferLowWaterLine,
20652 bufferHighWaterLine: bufferHighWaterLine,
20653 duration: this.duration(),
20654 experimentalBufferBasedABR: this.experimentalBufferBasedABR,
20655 log: this.logger_
20656 });
20657 }
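
  // Sketch of the forwardBuffer computation in shouldSwitchToMedia_ above: the
  // distance from the playhead to the end of the last buffered range, or 0
  // when nothing is buffered. A TimeRanges-like stub stands in for
  // tech_.buffered() in the usage note.
  function exampleForwardBuffer(buffered, currentTime) {
    return buffered.length ? buffered.end(buffered.length - 1) - currentTime : 0;
  }
  // exampleForwardBuffer({ length: 1, end: function () { return 42; } }, 30) -> 12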
20658 /**
20659 * Register event handlers on the segment loaders. A helper function
20660 * for construction time.
20661 *
20662 * @private
20663 */
20664 ;
20665
20666 _proto.setupSegmentLoaderListeners_ = function setupSegmentLoaderListeners_() {
20667 var _this4 = this;
20668
20669 if (!this.experimentalBufferBasedABR) {
20670 this.mainSegmentLoader_.on('bandwidthupdate', function () {
20671 var nextPlaylist = _this4.selectPlaylist();
20672
20673 if (_this4.shouldSwitchToMedia_(nextPlaylist)) {
20674 _this4.masterPlaylistLoader_.media(nextPlaylist);
20675 }
20676
20677 _this4.tech_.trigger('bandwidthupdate');
20678 });
20679 this.mainSegmentLoader_.on('progress', function () {
20680 _this4.trigger('progress');
20681 });
20682 }
20683
20684 this.mainSegmentLoader_.on('error', function () {
20685 _this4.blacklistCurrentPlaylist(_this4.mainSegmentLoader_.error());
20686 });
20687 this.mainSegmentLoader_.on('appenderror', function () {
20688 _this4.error = _this4.mainSegmentLoader_.error_;
20689
20690 _this4.trigger('error');
20691 });
20692 this.mainSegmentLoader_.on('syncinfoupdate', function () {
20693 _this4.onSyncInfoUpdate_();
20694 });
20695 this.mainSegmentLoader_.on('timestampoffset', function () {
20696 _this4.tech_.trigger({
20697 type: 'usage',
20698 name: 'vhs-timestamp-offset'
20699 });
20700
20701 _this4.tech_.trigger({
20702 type: 'usage',
20703 name: 'hls-timestamp-offset'
20704 });
20705 });
20706 this.audioSegmentLoader_.on('syncinfoupdate', function () {
20707 _this4.onSyncInfoUpdate_();
20708 });
20709 this.audioSegmentLoader_.on('appenderror', function () {
20710 _this4.error = _this4.audioSegmentLoader_.error_;
20711
20712 _this4.trigger('error');
20713 });
20714 this.mainSegmentLoader_.on('ended', function () {
20715 _this4.logger_('main segment loader ended');
20716
20717 _this4.onEndOfStream();
20718 });
20719 this.mainSegmentLoader_.on('earlyabort', function (event) {
20720 // never try to early abort with the new ABR algorithm
20721 if (_this4.experimentalBufferBasedABR) {
20722 return;
20723 }
20724
20725 _this4.delegateLoaders_('all', ['abort']);
20726
20727 _this4.blacklistCurrentPlaylist({
20728 message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
20729 }, ABORT_EARLY_BLACKLIST_SECONDS);
20730 });
20731
20732 var updateCodecs = function updateCodecs() {
20733 if (!_this4.sourceUpdater_.hasCreatedSourceBuffers()) {
20734 return _this4.tryToCreateSourceBuffers_();
20735 }
20736
20737 var codecs = _this4.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
20738
20739
20740 if (!codecs) {
20741 return;
20742 }
20743
20744 _this4.sourceUpdater_.addOrChangeSourceBuffers(codecs);
20745 };
20746
20747 this.mainSegmentLoader_.on('trackinfo', updateCodecs);
20748 this.audioSegmentLoader_.on('trackinfo', updateCodecs);
20749 this.mainSegmentLoader_.on('fmp4', function () {
20750 if (!_this4.triggeredFmp4Usage) {
20751 _this4.tech_.trigger({
20752 type: 'usage',
20753 name: 'vhs-fmp4'
20754 });
20755
20756 _this4.tech_.trigger({
20757 type: 'usage',
20758 name: 'hls-fmp4'
20759 });
20760
20761 _this4.triggeredFmp4Usage = true;
20762 }
20763 });
20764 this.audioSegmentLoader_.on('fmp4', function () {
20765 if (!_this4.triggeredFmp4Usage) {
20766 _this4.tech_.trigger({
20767 type: 'usage',
20768 name: 'vhs-fmp4'
20769 });
20770
20771 _this4.tech_.trigger({
20772 type: 'usage',
20773 name: 'hls-fmp4'
20774 });
20775
20776 _this4.triggeredFmp4Usage = true;
20777 }
20778 });
20779 this.audioSegmentLoader_.on('ended', function () {
20780 _this4.logger_('audioSegmentLoader ended');
20781
20782 _this4.onEndOfStream();
20783 });
20784 };
20785
  _proto.mediaSecondsLoaded_ = function mediaSecondsLoaded_() {
    // total media seconds loaded across both the audio and main segment loaders
    return this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded;
  }
20789 /**
20790 * Call load on our SegmentLoaders
20791 */
20792 ;
20793
20794 _proto.load = function load() {
20795 this.mainSegmentLoader_.load();
20796
20797 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
20798 this.audioSegmentLoader_.load();
20799 }
20800
20801 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
20802 this.subtitleSegmentLoader_.load();
20803 }
20804 }
20805 /**
20806 * Re-tune playback quality level for the current player
20807 * conditions without performing destructive actions, like
20808 * removing already buffered content
20809 *
20810 * @private
20811 */
20812 ;
20813
20814 _proto.smoothQualityChange_ = function smoothQualityChange_(media) {
20815 if (media === void 0) {
20816 media = this.selectPlaylist();
20817 }
20818
20819 if (media === this.masterPlaylistLoader_.media()) {
20820 return;
20821 }
20822
20823 this.masterPlaylistLoader_.media(media);
20824 this.mainSegmentLoader_.resetLoader(); // don't need to reset audio as it is reset when media changes
20825 }
20826 /**
20827 * Re-tune playback quality level for the current player
20828 * conditions. This method will perform destructive actions like removing
20829 * already buffered content in order to readjust the currently active
20830 * playlist quickly. This is good for manual quality changes
20831 *
20832 * @private
20833 */
20834 ;
20835
20836 _proto.fastQualityChange_ = function fastQualityChange_(media) {
20837 var _this5 = this;
20838
20839 if (media === void 0) {
20840 media = this.selectPlaylist();
20841 }
20842
20843 if (media === this.masterPlaylistLoader_.media()) {
20844 this.logger_('skipping fastQualityChange because new media is same as old');
20845 return;
20846 }
20847
20848 this.masterPlaylistLoader_.media(media); // Delete all buffered data to allow an immediate quality switch, then seek to give
    // the browser a kick to remove any cached frames from the previous rendition (.04 seconds
20850 // ahead is roughly the minimum that will accomplish this across a variety of content
20851 // in IE and Edge, but seeking in place is sufficient on all other browsers)
20852 // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
20853 // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904
20854
20855 this.mainSegmentLoader_.resetEverything(function () {
20856 // Since this is not a typical seek, we avoid the seekTo method which can cause segments
20857 // from the previously enabled rendition to load before the new playlist has finished loading
20858 if (videojs__default['default'].browser.IE_VERSION || videojs__default['default'].browser.IS_EDGE) {
20859 _this5.tech_.setCurrentTime(_this5.tech_.currentTime() + 0.04);
20860 } else {
20861 _this5.tech_.setCurrentTime(_this5.tech_.currentTime());
20862 }
20863 }); // don't need to reset audio as it is reset when media changes
20864 }
20865 /**
20866 * Begin playback.
20867 */
20868 ;
20869
20870 _proto.play = function play() {
20871 if (this.setupFirstPlay()) {
20872 return;
20873 }
20874
20875 if (this.tech_.ended()) {
20876 this.tech_.setCurrentTime(0);
20877 }
20878
20879 if (this.hasPlayed_) {
20880 this.load();
20881 }
20882
20883 var seekable = this.tech_.seekable(); // if the viewer has paused and we fell out of the live window,
20884 // seek forward to the live point
20885
20886 if (this.tech_.duration() === Infinity) {
20887 if (this.tech_.currentTime() < seekable.start(0)) {
20888 return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
20889 }
20890 }
20891 }
20892 /**
20893 * Seek to the latest media position if this is a live video and the
20894 * player and video are loaded and initialized.
20895 */
20896 ;
20897
20898 _proto.setupFirstPlay = function setupFirstPlay() {
20899 var _this6 = this;
20900
20901 var media = this.masterPlaylistLoader_.media(); // Check that everything is ready to begin buffering for the first call to play
20902 // If 1) there is no active media
20903 // 2) the player is paused
20904 // 3) the first play has already been setup
20905 // then exit early
20906
20907 if (!media || this.tech_.paused() || this.hasPlayed_) {
20908 return false;
20909 } // when the video is a live stream
20910
20911
20912 if (!media.endList) {
20913 var seekable = this.seekable();
20914
20915 if (!seekable.length) {
20916 // without a seekable range, the player cannot seek to begin buffering at the live
20917 // point
20918 return false;
20919 }
20920
20921 if (videojs__default['default'].browser.IE_VERSION && this.tech_.readyState() === 0) {
20922 // IE11 throws an InvalidStateError if you try to set currentTime while the
20923 // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
20924 this.tech_.one('loadedmetadata', function () {
20925 _this6.trigger('firstplay');
20926
20927 _this6.tech_.setCurrentTime(seekable.end(0));
20928
20929 _this6.hasPlayed_ = true;
20930 });
20931 return false;
20932 } // trigger firstplay to inform the source handler to ignore the next seek event
20933
20934
20935 this.trigger('firstplay'); // seek to the live point
20936
20937 this.tech_.setCurrentTime(seekable.end(0));
20938 }
20939
20940 this.hasPlayed_ = true; // we can begin loading now that everything is ready
20941
20942 this.load();
20943 return true;
20944 }
20945 /**
20946 * handle the sourceopen event on the MediaSource
20947 *
20948 * @private
20949 */
20950 ;
20951
20952 _proto.handleSourceOpen_ = function handleSourceOpen_() {
20953 // Only attempt to create the source buffer if none already exist.
20954 // handleSourceOpen is also called when we are "re-opening" a source buffer
20955 // after `endOfStream` has been called (in response to a seek for instance)
20956 this.tryToCreateSourceBuffers_(); // if autoplay is enabled, begin playback. This is duplicative of
20957 // code in video.js but is required because play() must be invoked
20958 // *after* the media source has opened.
20959
20960 if (this.tech_.autoplay()) {
20961 var playPromise = this.tech_.play(); // Catch/silence error when a pause interrupts a play request
20962 // on browsers which return a promise
20963
20964 if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
20965 playPromise.then(null, function (e) {});
20966 }
20967 }
20968
20969 this.trigger('sourceopen');
20970 }
20971 /**
20972 * handle the sourceended event on the MediaSource
20973 *
20974 * @private
20975 */
20976 ;
20977
20978 _proto.handleSourceEnded_ = function handleSourceEnded_() {
20979 if (!this.inbandTextTracks_.metadataTrack_) {
20980 return;
20981 }
20982
20983 var cues = this.inbandTextTracks_.metadataTrack_.cues;
20984
20985 if (!cues || !cues.length) {
20986 return;
20987 }
20988
20989 var duration = this.duration();
20990 cues[cues.length - 1].endTime = isNaN(duration) || Math.abs(duration) === Infinity ? Number.MAX_VALUE : duration;
20991 }
20992 /**
20993 * handle the durationchange event on the MediaSource
20994 *
20995 * @private
20996 */
20997 ;
20998
20999 _proto.handleDurationChange_ = function handleDurationChange_() {
21000 this.tech_.trigger('durationchange');
21001 }
21002 /**
21003 * Calls endOfStream on the media source when all active stream types have called
21004 * endOfStream
21005 *
21006 * @param {string} streamType
21007 * Stream type of the segment loader that called endOfStream
21008 * @private
21009 */
21010 ;
21011
21012 _proto.onEndOfStream = function onEndOfStream() {
21013 var isEndOfStream = this.mainSegmentLoader_.ended_;
21014
21015 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
21016 // if the audio playlist loader exists, then alternate audio is active
21017 if (!this.mainSegmentLoader_.currentMediaInfo_ || this.mainSegmentLoader_.currentMediaInfo_.hasVideo) {
21018 // if we do not know if the main segment loader contains video yet or if we
21019 // definitively know the main segment loader contains video, then we need to wait
21020 // for both main and audio segment loaders to call endOfStream
21021 isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
21022 } else {
21023 // otherwise just rely on the audio loader
21024 isEndOfStream = this.audioSegmentLoader_.ended_;
21025 }
21026 }
21027
21028 if (!isEndOfStream) {
21029 return;
21030 }
21031
21032 this.stopABRTimer_();
21033 this.sourceUpdater_.endOfStream();
21034 }
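
  // Sketch of the onEndOfStream decision above as a pure function. With no
  // alternate audio, only the main loader matters; with alternate audio, both
  // loaders must end unless the main loader is known to be audio-only, in
  // which case only the audio loader's state matters. Parameter names are
  // invented for illustration.
  function exampleIsEndOfStream(mainEnded, audioEnded, usingAltAudio, mainMayHaveVideo) {
    if (!usingAltAudio) {
      return mainEnded;
    }

    return mainMayHaveVideo ? mainEnded && audioEnded : audioEnded;
  }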
21035 /**
21036 * Check if a playlist has stopped being updated
21037 *
21038 * @param {Object} playlist the media playlist object
21039 * @return {boolean} whether the playlist has stopped being updated or not
21040 */
21041 ;
21042
21043 _proto.stuckAtPlaylistEnd_ = function stuckAtPlaylistEnd_(playlist) {
21044 var seekable = this.seekable();
21045
21046 if (!seekable.length) {
21047 // playlist doesn't have enough information to determine whether we are stuck
21048 return false;
21049 }
21050
21051 var expired = this.syncController_.getExpiredTime(playlist, this.duration());
21052
21053 if (expired === null) {
21054 return false;
21055 } // does not use the safe live end to calculate playlist end, since we
21056 // don't want to say we are stuck while there is still content
21057
21058
21059 var absolutePlaylistEnd = Vhs.Playlist.playlistEnd(playlist, expired);
21060 var currentTime = this.tech_.currentTime();
21061 var buffered = this.tech_.buffered();
21062
21063 if (!buffered.length) {
21064 // return true if the playhead reached the absolute end of the playlist
21065 return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
21066 }
21067
21068 var bufferedEnd = buffered.end(buffered.length - 1); // return true if there is too little buffer left and buffer has reached absolute
21069 // end of playlist
21070
21071 return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
21072 }
21073 /**
21074 * Blacklists a playlist when an error occurs for a set amount of time
21075 * making it unavailable for selection by the rendition selection algorithm
21076 * and then forces a new playlist (rendition) selection.
21077 *
21078 * @param {Object=} error an optional error that may include the playlist
21079 * to blacklist
21080 * @param {number=} blacklistDuration an optional number of seconds to blacklist the
21081 * playlist
21082 */
21083 ;
21084
21085 _proto.blacklistCurrentPlaylist = function blacklistCurrentPlaylist(error, blacklistDuration) {
21086 if (error === void 0) {
21087 error = {};
21088 }
21089
21090 // If the `error` was generated by the playlist loader, it will contain
21091 // the playlist we were trying to load (but failed) and that should be
21092 // blacklisted instead of the currently selected playlist which is likely
21093 // out-of-date in this scenario
21094 var currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();
21095 blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration; // If there is no current playlist, then an error occurred while we were
21096 // trying to load the master OR while we were disposing of the tech
21097
21098 if (!currentPlaylist) {
21099 this.error = error;
21100
21101 if (this.mediaSource.readyState !== 'open') {
21102 this.trigger('error');
21103 } else {
21104 this.sourceUpdater_.endOfStream('network');
21105 }
21106
21107 return;
21108 }
21109
21110 var playlists = this.masterPlaylistLoader_.master.playlists;
21111 var enabledPlaylists = playlists.filter(isEnabled);
21112 var isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === currentPlaylist; // Don't blacklist the only playlist unless it was blacklisted
21113 // forever
21114
21115 if (playlists.length === 1 && blacklistDuration !== Infinity) {
21116 videojs__default['default'].log.warn("Problem encountered with playlist " + currentPlaylist.id + ". " + 'Trying again since it is the only playlist.');
21117 this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delay
21118
21119 return this.masterPlaylistLoader_.load(isFinalRendition);
21120 }
21121
21122 if (isFinalRendition) {
21123 // Since we're on the final non-blacklisted playlist, and we're about to blacklist
21124 // it, instead of erring the player or retrying this playlist, clear out the current
21125 // blacklist. This allows other playlists to be attempted in case any have been
21126 // fixed.
21127 var reincluded = false;
21128 playlists.forEach(function (playlist) {
21129 // skip current playlist which is about to be blacklisted
21130 if (playlist === currentPlaylist) {
21131 return;
21132 }
21133
21134 var excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.
21135
21136 if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {
21137 reincluded = true;
21138 delete playlist.excludeUntil;
21139 }
21140 });
21141
21142 if (reincluded) {
21143 videojs__default['default'].log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are simply retrying a previous
21144 // playlist. This is needed for users relying on the retryplaylist event to catch a
21145 // case where the player might be stuck and looping through "dead" playlists.
21146
21147 this.tech_.trigger('retryplaylist');
21148 }
21149 } // Blacklist this playlist
21150
21151
21152 currentPlaylist.excludeUntil = Date.now() + blacklistDuration * 1000;
21153 this.tech_.trigger('blacklistplaylist');
21154 this.tech_.trigger({
21155 type: 'usage',
21156 name: 'vhs-rendition-blacklisted'
21157 });
21158 this.tech_.trigger({
21159 type: 'usage',
21160 name: 'hls-rendition-blacklisted'
    }); // TODO: should we select a new playlist if this blacklist wasn't for the currentPlaylist?
    // Would be something like media().id !== currentPlaylist.id and we would need something
    // like `pendingMedia` in playlist loaders to check against that too. That would prevent us
    // from loading a new playlist on every blacklist.
21165 // Select a new playlist
21166
21167 var nextPlaylist = this.selectPlaylist();
21168
21169 if (!nextPlaylist) {
21170 this.error = 'Playback cannot continue. No available working or supported playlists.';
21171 this.trigger('error');
21172 return;
21173 }
21174
21175 var logFn = error.internal ? this.logger_ : videojs__default['default'].log.warn;
21176 var errorMessage = error.message ? ' ' + error.message : '';
21177 logFn((error.internal ? 'Internal problem' : 'Problem') + " encountered with playlist " + currentPlaylist.id + "." + (errorMessage + " Switching to playlist " + nextPlaylist.id + ".")); // if audio group changed reset audio loaders
21178
21179 if (nextPlaylist.attributes.AUDIO !== currentPlaylist.attributes.AUDIO) {
21180 this.delegateLoaders_('audio', ['abort', 'pause']);
21181 } // if subtitle group changed reset subtitle loaders
21182
21183
21184 if (nextPlaylist.attributes.SUBTITLES !== currentPlaylist.attributes.SUBTITLES) {
21185 this.delegateLoaders_('subtitle', ['abort', 'pause']);
21186 }
21187
21188 this.delegateLoaders_('main', ['abort', 'pause']);
21189 var delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;
21190 var shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay if it's a final rendition or if the last refresh is sooner than half targetDuration
21191
21192 return this.masterPlaylistLoader_.media(nextPlaylist, isFinalRendition || shouldDelay);
21193 }
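
  // Sketch of the exclusion bookkeeping in blacklistCurrentPlaylist above,
  // collapsed onto a single hypothetical playlist object for brevity (the
  // controller sets excludeUntil on the current playlist and computes the
  // delay from the playlist it is switching to). excludeUntil is an absolute
  // wall-clock deadline; the refresh is delayed when the last request was
  // within half a target duration, with a 5s fallback.
  function exampleExclusion(playlist, blacklistDurationSeconds, now) {
    playlist.excludeUntil = now + blacklistDurationSeconds * 1000;
    var delayDuration = playlist.targetDuration / 2 * 1000 || 5 * 1000;
    return typeof playlist.lastRequest === 'number' && now - playlist.lastRequest <= delayDuration;
  }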
21194 /**
21195 * Pause all segment/playlist loaders
21196 */
21197 ;
21198
21199 _proto.pauseLoading = function pauseLoading() {
21200 this.delegateLoaders_('all', ['abort', 'pause']);
21201 this.stopABRTimer_();
21202 }
21203 /**
21204 * Call a set of functions in order on playlist loaders, segment loaders,
21205 * or both types of loaders.
21206 *
21207 * @param {string} filter
21208 * Filter loaders that should call fnNames using a string. Can be:
21209 * * all - run on all loaders
21210 * * audio - run on all audio loaders
21211 * * subtitle - run on all subtitle loaders
21212 * * main - run on the main/master loaders
21213 *
   * @param {string[]} fnNames
   *        An array of function names to call (each is invoked only if it
   *        exists on the loader)
21216 */
21217 ;
21218
21219 _proto.delegateLoaders_ = function delegateLoaders_(filter, fnNames) {
21220 var _this7 = this;
21221
21222 var loaders = [];
21223 var dontFilterPlaylist = filter === 'all';
21224
21225 if (dontFilterPlaylist || filter === 'main') {
21226 loaders.push(this.masterPlaylistLoader_);
21227 }
21228
21229 var mediaTypes = [];
21230
21231 if (dontFilterPlaylist || filter === 'audio') {
21232 mediaTypes.push('AUDIO');
21233 }
21234
21235 if (dontFilterPlaylist || filter === 'subtitle') {
21236 mediaTypes.push('CLOSED-CAPTIONS');
21237 mediaTypes.push('SUBTITLES');
21238 }
21239
21240 mediaTypes.forEach(function (mediaType) {
21241 var loader = _this7.mediaTypes_[mediaType] && _this7.mediaTypes_[mediaType].activePlaylistLoader;
21242
21243 if (loader) {
21244 loaders.push(loader);
21245 }
21246 });
21247 ['main', 'audio', 'subtitle'].forEach(function (name) {
21248 var loader = _this7[name + "SegmentLoader_"];
21249
21250 if (loader && (filter === name || filter === 'all')) {
21251 loaders.push(loader);
21252 }
21253 });
21254 loaders.forEach(function (loader) {
21255 return fnNames.forEach(function (fnName) {
21256 if (typeof loader[fnName] === 'function') {
21257 loader[fnName]();
21258 }
21259 });
21260 });
21261 }
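
  // Usage sketch for delegateLoaders_ above, mirroring the calls made
  // elsewhere in this file:
  //
  //   this.delegateLoaders_('all', ['abort', 'pause']);      // every loader
  //   this.delegateLoaders_('main', ['abort', 'pause']);     // master playlist + main segment loader
  //   this.delegateLoaders_('audio', ['abort', 'pause']);    // active AUDIO playlist loader + audio segment loader
  //   this.delegateLoaders_('subtitle', ['abort', 'pause']); // SUBTITLES and CLOSED-CAPTIONS loaders + subtitle segment loader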
21262 /**
21263 * set the current time on all segment loaders
21264 *
   * @param {number} currentTime the current time to set
   * @return {number} the current time
21267 */
21268 ;
21269
21270 _proto.setCurrentTime = function setCurrentTime(currentTime) {
21271 var buffered = findRange(this.tech_.buffered(), currentTime);
21272
21273 if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
21274 // return immediately if the metadata is not ready yet
21275 return 0;
    } // it's clearly an edge case, but don't throw an error if asked to
    // seek within an empty playlist
21278
21279
21280 if (!this.masterPlaylistLoader_.media().segments) {
21281 return 0;
21282 } // if the seek location is already buffered, continue buffering as usual
21283
21284
21285 if (buffered && buffered.length) {
21286 return currentTime;
21287 } // cancel outstanding requests so we begin buffering at the new
21288 // location
21289
21290
21291 this.mainSegmentLoader_.resetEverything();
21292 this.mainSegmentLoader_.abort();
21293
21294 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
21295 this.audioSegmentLoader_.resetEverything();
21296 this.audioSegmentLoader_.abort();
21297 }
21298
21299 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
21300 this.subtitleSegmentLoader_.resetEverything();
21301 this.subtitleSegmentLoader_.abort();
21302 } // start segment loader loading in case they are paused
21303
21304
21305 this.load();
21306 }
21307 /**
21308 * get the current duration
21309 *
21310 * @return {number} the duration
21311 */
21312 ;
21313
21314 _proto.duration = function duration() {
21315 if (!this.masterPlaylistLoader_) {
21316 return 0;
21317 }
21318
21319 var media = this.masterPlaylistLoader_.media();
21320
21321 if (!media) {
21322 // no playlists loaded yet, so can't determine a duration
21323 return 0;
21324 } // Don't rely on the media source for duration in the case of a live playlist since
21325 // setting the native MediaSource's duration to infinity ends up with consequences to
21326 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
21327 //
21328 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
21329 // however, few browsers have support for setLiveSeekableRange()
21330 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
21331 //
21332 // Until a time when the duration of the media source can be set to infinity, and a
21333 // seekable range specified across browsers, just return Infinity.
21334
21335
21336 if (!media.endList) {
21337 return Infinity;
21338 } // Since this is a VOD video, it is safe to rely on the media source's duration (if
21339 // available). If it's not available, fall back to a playlist-calculated estimate.
21340
21341
21342 if (this.mediaSource) {
21343 return this.mediaSource.duration;
21344 }
21345
21346 return Vhs.Playlist.duration(media);
21347 }
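// Editor's summary sketch (illustrative only) of the branches above:
//
//   live playlist (no endList)          -> Infinity
//   VOD with a MediaSource attached     -> this.mediaSource.duration
//   VOD before the MediaSource exists   -> Vhs.Playlist.duration(media),
//                                          a playlist-calculated estimate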
21348 /**
21349 * check the seekable range
21350 *
21351 * @return {TimeRange} the seekable range
21352 */
21353 ;
21354
21355 _proto.seekable = function seekable() {
21356 return this.seekable_;
21357 };
21358
21359 _proto.onSyncInfoUpdate_ = function onSyncInfoUpdate_() {
21360 var audioSeekable;
21361
21362 if (!this.masterPlaylistLoader_) {
21363 return;
21364 }
21365
21366 var media = this.masterPlaylistLoader_.media();
21367
21368 if (!media) {
21369 return;
21370 }
21371
21372 var expired = this.syncController_.getExpiredTime(media, this.duration());
21373
21374 if (expired === null) {
21375 // not enough information to update seekable
21376 return;
21377 }
21378
21379 var suggestedPresentationDelay = this.masterPlaylistLoader_.master.suggestedPresentationDelay;
21380 var mainSeekable = Vhs.Playlist.seekable(media, expired, suggestedPresentationDelay);
21381
21382 if (mainSeekable.length === 0) {
21383 return;
21384 }
21385
21386 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
21387 media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
21388 expired = this.syncController_.getExpiredTime(media, this.duration());
21389
21390 if (expired === null) {
21391 return;
21392 }
21393
21394 audioSeekable = Vhs.Playlist.seekable(media, expired, suggestedPresentationDelay);
21395
21396 if (audioSeekable.length === 0) {
21397 return;
21398 }
21399 }
21400
21401 var oldEnd;
21402 var oldStart;
21403
21404 if (this.seekable_ && this.seekable_.length) {
21405 oldEnd = this.seekable_.end(0);
21406 oldStart = this.seekable_.start(0);
21407 }
21408
21409 if (!audioSeekable) {
21410 // seekable has been calculated based on the buffered video data, so it
21411 // can be returned directly
21412 this.seekable_ = mainSeekable;
21413 } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
21414 // seekables are pretty far off, rely on main
21415 this.seekable_ = mainSeekable;
21416 } else {
21417 this.seekable_ = videojs__default['default'].createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
21418 } // seekable is the same as last time
21419
21420
21421 if (this.seekable_ && this.seekable_.length) {
21422 if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
21423 return;
21424 }
21425 }
21426
21427 this.logger_("seekable updated [" + printableRange(this.seekable_) + "]");
21428 this.tech_.trigger('seekablechanged');
21429 }
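// Editor's worked example (hypothetical numbers) of the reconciliation above:
//
//   mainSeekable = [10, 70], audioSeekable = [12, 65]
//     -> the ranges overlap, so seekable_ becomes their
//        intersection: [12, 65]
//
//   mainSeekable = [10, 70], audioSeekable = [80, 120]
//     -> audioSeekable.start(0) > mainSeekable.end(0), the ranges are
//        disjoint, so seekable_ falls back to mainSeekable: [10, 70]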
21430 /**
21431 * Update the player duration
21432 */
21433 ;
21434
21435 _proto.updateDuration = function updateDuration(isLive) {
21436 if (this.updateDuration_) {
21437 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
21438 this.updateDuration_ = null;
21439 }
21440
21441 if (this.mediaSource.readyState !== 'open') {
21442 this.updateDuration_ = this.updateDuration.bind(this, isLive);
21443 this.mediaSource.addEventListener('sourceopen', this.updateDuration_);
21444 return;
21445 }
21446
21447 if (isLive) {
21448 var seekable = this.seekable();
21449
21450 if (!seekable.length) {
21451 return;
21452 } // Even in the case of a live playlist, the native MediaSource's duration should not
21453 // be set to Infinity (even though this would be expected for a live playlist), since
21454 // setting the native MediaSource's duration to infinity ends up with consequences to
21455 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
21456 //
21457 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
21458 // however, few browsers have support for setLiveSeekableRange()
21459 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
21460 //
21461 // Until a time when the duration of the media source can be set to infinity, and a
21462 // seekable range specified across browsers, the duration should be greater than or
21463 // equal to the last possible seekable value.
21464 // MediaSource duration starts as NaN
21465 // It is possible (and probable) that this case will never be reached for many
21466 // sources, since the MediaSource reports duration as the highest value without
21467 // accounting for timestamp offset. For example, if the timestamp offset is -100 and
21468 // we buffered times 0 to 100 with real times of 100 to 200, even though current
21469 // time will be between 0 and 100, the native media source may report the duration
21470 // as 200. However, since we report duration separate from the media source (as
21471 // Infinity), and as long as the native media source duration value is greater than
21472 // our reported seekable range, seeks will work as expected. The large number as
21473 // duration for live is actually a strategy used by some players to work around the
21474 // issue of live seekable ranges cited above.
21475
21476
21477 if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {
21478 this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));
21479 }
21480
21481 return;
21482 }
21483
21484 var buffered = this.tech_.buffered();
21485 var duration = Vhs.Playlist.duration(this.masterPlaylistLoader_.media());
21486
21487 if (buffered.length > 0) {
21488 duration = Math.max(duration, buffered.end(buffered.length - 1));
21489 }
21490
21491 if (this.mediaSource.duration !== duration) {
21492 this.sourceUpdater_.setDuration(duration);
21493 }
21494 }
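// Editor's summary sketch (illustrative only) of the rule above:
//
//   live: only grow the native MediaSource duration so that it stays >= the
//         last seekable end; Infinity is reported to the player separately
//   VOD:  duration = max(playlist duration, buffered.end(last)), pushed to
//         the source updater only when it actually changed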
21495 /**
21496 * dispose of the MasterPlaylistController and everything
21497 * that it controls
21498 */
21499 ;
21500
21501 _proto.dispose = function dispose() {
21502 var _this8 = this;
21503
21504 this.trigger('dispose');
21505 this.decrypter_.terminate();
21506 this.masterPlaylistLoader_.dispose();
21507 this.mainSegmentLoader_.dispose();
21508
21509 if (this.loadOnPlay_) {
21510 this.tech_.off('play', this.loadOnPlay_);
21511 }
21512
21513 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
21514 var groups = _this8.mediaTypes_[type].groups;
21515
21516 for (var id in groups) {
21517 groups[id].forEach(function (group) {
21518 if (group.playlistLoader) {
21519 group.playlistLoader.dispose();
21520 }
21521 });
21522 }
21523 });
21524 this.audioSegmentLoader_.dispose();
21525 this.subtitleSegmentLoader_.dispose();
21526 this.sourceUpdater_.dispose();
21527 this.timelineChangeController_.dispose();
21528 this.stopABRTimer_();
21529
21530 if (this.updateDuration_) {
21531 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
21532 }
21533
21534 this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_);
21535
21536 this.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);
21537 this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);
21538 this.off();
21539 }
21540 /**
21541 * return the master playlist object if we have one
21542 *
21543 * @return {Object} the master playlist object that we parsed
21544 */
21545 ;
21546
21547 _proto.master = function master() {
21548 return this.masterPlaylistLoader_.master;
21549 }
21550 /**
21551 * return the currently selected playlist
21552 *
21553 * @return {Object} the currently selected playlist object that we parsed
21554 */
21555 ;
21556
21557 _proto.media = function media() {
21558 // playlist loader will not return media if it has not been fully loaded
21559 return this.masterPlaylistLoader_.media() || this.initialMedia_;
21560 };
21561
21562 _proto.areMediaTypesKnown_ = function areMediaTypesKnown_() {
21563 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader; // one or both loaders have not loaded sufficiently to get codecs
21564
21565 if (!this.mainSegmentLoader_.currentMediaInfo_ || usingAudioLoader && !this.audioSegmentLoader_.currentMediaInfo_) {
21566 return false;
21567 }
21568
21569 return true;
21570 };
21571
21572 _proto.getCodecsOrExclude_ = function getCodecsOrExclude_() {
21573 var _this9 = this;
21574
21575 var media = {
21576 main: this.mainSegmentLoader_.currentMediaInfo_ || {},
21577 audio: this.audioSegmentLoader_.currentMediaInfo_ || {}
21578 }; // set "main" media equal to video
21579
21580 media.video = media.main;
21581 var playlistCodecs = codecsForPlaylist(this.master(), this.media());
21582 var codecs = {};
21583 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
21584
21585 if (media.main.hasVideo) {
21586 codecs.video = playlistCodecs.video || media.main.videoCodec || codecs_js.DEFAULT_VIDEO_CODEC;
21587 }
21588
21589 if (media.main.isMuxed) {
21590 codecs.video += "," + (playlistCodecs.audio || media.main.audioCodec || codecs_js.DEFAULT_AUDIO_CODEC);
21591 }
21592
21593 if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {
21594 codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || codecs_js.DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function below
21595
21596 media.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;
21597 } // no codecs, no playback.
21598
21599
21600 if (!codecs.audio && !codecs.video) {
21601 this.blacklistCurrentPlaylist({
21602 playlist: this.media(),
21603 message: 'Could not determine codecs for playlist.',
21604 blacklistDuration: Infinity
21605 });
21606 return;
21607 } // fmp4 relies on browser support, while ts relies on muxer support
21608
21609
21610 var supportFunction = function supportFunction(isFmp4, codec) {
21611 return isFmp4 ? codecs_js.browserSupportsCodec(codec) : codecs_js.muxerSupportsCodec(codec);
21612 };
21613
21614 var unsupportedCodecs = {};
21615 var unsupportedAudio;
21616 ['video', 'audio'].forEach(function (type) {
21617 if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {
21618 var supporter = media[type].isFmp4 ? 'browser' : 'muxer';
21619 unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];
21620 unsupportedCodecs[supporter].push(codecs[type]);
21621
21622 if (type === 'audio') {
21623 unsupportedAudio = supporter;
21624 }
21625 }
21626 });
21627
21628 if (usingAudioLoader && unsupportedAudio && this.media().attributes.AUDIO) {
21629 var audioGroup = this.media().attributes.AUDIO;
21630 this.master().playlists.forEach(function (variant) {
21631 var variantAudioGroup = variant.attributes && variant.attributes.AUDIO;
21632
21633 if (variantAudioGroup === audioGroup && variant !== _this9.media()) {
21634 variant.excludeUntil = Infinity;
21635 }
21636 });
21637 this.logger_("excluding audio group " + audioGroup + " as " + unsupportedAudio + " does not support codec(s): \"" + codecs.audio + "\"");
21638 } // if we have any unsupported codecs, blacklist this playlist.
21639
21640
21641 if (Object.keys(unsupportedCodecs).length) {
21642 var message = Object.keys(unsupportedCodecs).reduce(function (acc, supporter) {
21643 if (acc) {
21644 acc += ', ';
21645 }
21646
21647 acc += supporter + " does not support codec(s): \"" + unsupportedCodecs[supporter].join(',') + "\"";
21648 return acc;
21649 }, '') + '.';
21650 this.blacklistCurrentPlaylist({
21651 playlist: this.media(),
21652 internal: true,
21653 message: message,
21654 blacklistDuration: Infinity
21655 });
21656 return;
21657 } // check if codec switching is happening
21658
21659
21660 if (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {
21661 var switchMessages = [];
21662 ['video', 'audio'].forEach(function (type) {
21663 var newCodec = (codecs_js.parseCodecs(_this9.sourceUpdater_.codecs[type] || '')[0] || {}).type;
21664 var oldCodec = (codecs_js.parseCodecs(codecs[type] || '')[0] || {}).type;
21665
21666 if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {
21667 switchMessages.push("\"" + _this9.sourceUpdater_.codecs[type] + "\" -> \"" + codecs[type] + "\"");
21668 }
21669 });
21670
21671 if (switchMessages.length) {
21672 this.blacklistCurrentPlaylist({
21673 playlist: this.media(),
21674 message: "Codec switching not supported: " + switchMessages.join(', ') + ".",
21675 blacklistDuration: Infinity,
21676 internal: true
21677 });
21678 return;
21679 }
21680 } // TODO: when using the muxer shouldn't we just return
21681 // the codecs that the muxer outputs?
21682
21683
21684 return codecs;
21685 }
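// Editor's illustration (codec strings are examples only) of the support
// checks above: fmp4 content is tested against the browser's MediaSource,
// ts content against the mux.js transmuxer.
//
//   supportFunction(true, 'avc1.4d400d')  -> browserSupportsCodec('avc1.4d400d')
//   supportFunction(false, 'avc1.4d400d') -> muxerSupportsCodec('avc1.4d400d')
//
// e.g. an fmp4 rendition using 'hvc1' would be excluded with "browser does
// not support codec(s)" on browsers without HEVC MSE support, while a ts
// rendition with a codec the transmuxer cannot handle is excluded with
// "muxer does not support codec(s)".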
21686 /**
21687 * Create source buffers and exclude any incompatible renditions.
21688 *
21689 * @private
21690 */
21691 ;
21692
21693 _proto.tryToCreateSourceBuffers_ = function tryToCreateSourceBuffers_() {
21694 // media source is not ready yet or sourceBuffers are already
21695 // created.
21696 if (this.mediaSource.readyState !== 'open' || this.sourceUpdater_.hasCreatedSourceBuffers()) {
21697 return;
21698 }
21699
21700 if (!this.areMediaTypesKnown_()) {
21701 return;
21702 }
21703
21704 var codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
21705
21706 if (!codecs) {
21707 return;
21708 }
21709
21710 this.sourceUpdater_.createSourceBuffers(codecs);
21711 var codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
21712 this.excludeIncompatibleVariants_(codecString);
21713 }
21714 /**
21715 * Excludes playlists with codecs that are unsupported by the muxer and browser.
21716 */
21717 ;
21718
21719 _proto.excludeUnsupportedVariants_ = function excludeUnsupportedVariants_() {
21720 var _this10 = this;
21721
21722 var playlists = this.master().playlists;
21723 var ids = []; // TODO: why don't we have a property to loop through all
21724 // playlists? Why did we ever mix indexes and keys?
21725
21726 Object.keys(playlists).forEach(function (key) {
21727 var variant = playlists[key]; // check if we already processed this playlist.
21728
21729 if (ids.indexOf(variant.id) !== -1) {
21730 return;
21731 }
21732
21733 ids.push(variant.id);
21734 var codecs = codecsForPlaylist(_this10.master(), variant); // master() must be invoked to get the parsed master playlist object
21735 var unsupported = [];
21736
21737 if (codecs.audio && !codecs_js.muxerSupportsCodec(codecs.audio) && !codecs_js.browserSupportsCodec(codecs.audio)) {
21738 unsupported.push("audio codec " + codecs.audio);
21739 }
21740
21741 if (codecs.video && !codecs_js.muxerSupportsCodec(codecs.video) && !codecs_js.browserSupportsCodec(codecs.video)) {
21742 unsupported.push("video codec " + codecs.video);
21743 }
21744
21745 if (codecs.text && codecs.text === 'stpp.ttml.im1t') {
21746 unsupported.push("text codec " + codecs.text);
21747 }
21748
21749 if (unsupported.length) {
21750 variant.excludeUntil = Infinity;
21751
21752 _this10.logger_("excluding " + variant.id + " for unsupported: " + unsupported.join(', '));
21753 }
21754 });
21755 }
21756 /**
21757 * Blacklist playlists that are known to be codec or
21758 * stream-incompatible with the SourceBuffer configuration. For
21759 * instance, Media Source Extensions would cause the video element to
21760 * stall waiting for video data if you switched from a variant with
21761 * video and audio to an audio-only one.
21762 *
21763 * @param {Object} media a media playlist compatible with the current
21764 * set of SourceBuffers. Variants in the current master playlist that
21765 * do not appear to have compatible codec or stream configurations
21766 * will be excluded from the default playlist selection algorithm
21767 * indefinitely.
21768 * @private
21769 */
21770 ;
21771
21772 _proto.excludeIncompatibleVariants_ = function excludeIncompatibleVariants_(codecString) {
21773 var _this11 = this;
21774
21775 var ids = [];
21776 var playlists = this.master().playlists;
21777 var codecs = unwrapCodecList(codecs_js.parseCodecs(codecString));
21778 var codecCount_ = codecCount(codecs);
21779 var videoDetails = codecs.video && codecs_js.parseCodecs(codecs.video)[0] || null;
21780 var audioDetails = codecs.audio && codecs_js.parseCodecs(codecs.audio)[0] || null;
21781 Object.keys(playlists).forEach(function (key) {
21782 var variant = playlists[key]; // check if we already processed this playlist.
21783 // or if it is already excluded forever.
21784
21785 if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {
21786 return;
21787 }
21788
21789 ids.push(variant.id);
21790 var blacklistReasons = []; // get codecs from the playlist for this variant
21791
21792 var variantCodecs = codecsForPlaylist(_this11.masterPlaylistLoader_.master, variant);
21793 var variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this
21794 // variant is incompatible. Wait for mux.js to probe
21795
21796 if (!variantCodecs.audio && !variantCodecs.video) {
21797 return;
21798 } // TODO: we can support this by removing the
21799 // old media source and creating a new one, but it will take some work.
21800 // The number of streams cannot change
21801
21802
21803 if (variantCodecCount !== codecCount_) {
21804 blacklistReasons.push("codec count \"" + variantCodecCount + "\" !== \"" + codecCount_ + "\"");
21805 } // only exclude playlists by codec change, if codecs cannot switch
21806 // during playback.
21807
21808
21809 if (!_this11.sourceUpdater_.canChangeType()) {
21810 var variantVideoDetails = variantCodecs.video && codecs_js.parseCodecs(variantCodecs.video)[0] || null;
21811 var variantAudioDetails = variantCodecs.audio && codecs_js.parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot change
21812
21813 if (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {
21814 blacklistReasons.push("video codec \"" + variantVideoDetails.type + "\" !== \"" + videoDetails.type + "\"");
21815 } // the audio codec cannot change
21816
21817
21818 if (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {
21819 blacklistReasons.push("audio codec \"" + variantAudioDetails.type + "\" !== \"" + audioDetails.type + "\"");
21820 }
21821 }
21822
21823 if (blacklistReasons.length) {
21824 variant.excludeUntil = Infinity;
21825
21826 _this11.logger_("blacklisting " + variant.id + ": " + blacklistReasons.join(' && '));
21827 }
21828 });
21829 };
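// Editor's example (hypothetical variants) of the rules above: with current
// codecs "avc1.4d400d,mp4a.40.2" (codec count 2), an audio-only variant
// ("mp4a.40.2", count 1) is excluded for a codec count mismatch; and when
// changeType() is unavailable, an "hvc1"-based variant is excluded because
// its video codec type differs from "avc1".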
21830
21831 _proto.updateAdCues_ = function updateAdCues_(media) {
21832 var offset = 0;
21833 var seekable = this.seekable();
21834
21835 if (seekable.length) {
21836 offset = seekable.start(0);
21837 }
21838
21839 updateAdCues(media, this.cueTagsTrack_, offset);
21840 }
21841 /**
21842 * Calculates the desired forward buffer length based on current time
21843 *
21844 * @return {number} Desired forward buffer length in seconds
21845 */
21846 ;
21847
21848 _proto.goalBufferLength = function goalBufferLength() {
21849 var currentTime = this.tech_.currentTime();
21850 var initial = Config.GOAL_BUFFER_LENGTH;
21851 var rate = Config.GOAL_BUFFER_LENGTH_RATE;
21852 var max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
21853 return Math.min(initial + currentTime * rate, max);
21854 }
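// Editor's worked example, assuming the default Config values
// GOAL_BUFFER_LENGTH = 30, GOAL_BUFFER_LENGTH_RATE = 1 and
// MAX_GOAL_BUFFER_LENGTH = 60:
//
//   currentTime = 0  -> min(30 + 0 * 1, 60)  = 30 seconds
//   currentTime = 20 -> min(30 + 20 * 1, 60) = 50 seconds
//   currentTime = 90 -> min(30 + 90 * 1, 60) = 60 seconds (capped at the max)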
21855 /**
21856 * Calculates the desired buffer low water line based on current time
21857 *
21858 * @return {number} Desired buffer low water line in seconds
21859 */
21860 ;
21861
21862 _proto.bufferLowWaterLine = function bufferLowWaterLine() {
21863 var currentTime = this.tech_.currentTime();
21864 var initial = Config.BUFFER_LOW_WATER_LINE;
21865 var rate = Config.BUFFER_LOW_WATER_LINE_RATE;
21866 var max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
21867 var newMax = Math.max(initial, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE);
21868 return Math.min(initial + currentTime * rate, this.experimentalBufferBasedABR ? newMax : max);
21869 };
21870
21871 _proto.bufferHighWaterLine = function bufferHighWaterLine() {
21872 return Config.BUFFER_HIGH_WATER_LINE;
21873 };
21874
21875 return MasterPlaylistController;
21876}(videojs__default['default'].EventTarget);
21877
21878/**
21879 * Returns a function that acts as the Enable/disable playlist function.
21880 *
21881 * @param {PlaylistLoader} loader - The master playlist loader
21882 * @param {string} playlistID - id of the playlist
21883 * @param {Function} changePlaylistFn - A function to be called after a
21884 * playlist's enabled-state has been changed. Will NOT be called if a
21885 * playlist's enabled-state is unchanged
21886 * @param {boolean=} enable - Value to set the playlist enabled-state to
21887 * or if undefined returns the current enabled-state for the playlist
21888 * @return {Function} Function for setting/getting enabled
21889 */
21890
21891var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
21892 return function (enable) {
21893 var playlist = loader.master.playlists[playlistID];
21894 var incompatible = isIncompatible(playlist);
21895 var currentlyEnabled = isEnabled(playlist);
21896
21897 if (typeof enable === 'undefined') {
21898 return currentlyEnabled;
21899 }
21900
21901 if (enable) {
21902 delete playlist.disabled;
21903 } else {
21904 playlist.disabled = true;
21905 }
21906
21907 if (enable !== currentlyEnabled && !incompatible) {
21908 // Ensure the outside world knows about our changes
21909 changePlaylistFn();
21910
21911 if (enable) {
21912 loader.trigger('renditionenabled');
21913 } else {
21914 loader.trigger('renditiondisabled');
21915 }
21916 }
21917
21918 return enable;
21919 };
21920};
21921/**
21922 * The representation object encapsulates the publicly visible information
21923 * in a media playlist along with a setter/getter-type function (enabled)
21924 * for changing the enabled-state of a particular playlist entry
21925 *
21926 * @class Representation
21927 */
21928
21929
21930var Representation = function Representation(vhsHandler, playlist, id) {
21931 var mpc = vhsHandler.masterPlaylistController_,
21932 smoothQualityChange = vhsHandler.options_.smoothQualityChange; // Get a reference to a bound version of the quality change function
21933
21934 var changeType = smoothQualityChange ? 'smooth' : 'fast';
21935 var qualityChangeFunction = mpc[changeType + "QualityChange_"].bind(mpc); // some playlist attributes are optional
21936
21937 if (playlist.attributes.RESOLUTION) {
21938 var resolution = playlist.attributes.RESOLUTION;
21939 this.width = resolution.width;
21940 this.height = resolution.height;
21941 }
21942
21943 this.bandwidth = playlist.attributes.BANDWIDTH;
21944 this.codecs = codecsForPlaylist(mpc.master(), playlist);
21945 this.playlist = playlist; // The id is the media playlist's id as it appears
21946 // within the master playlist
21947
21948 this.id = id; // Partially-apply the enableFunction to create a playlist-
21949 // specific variant
21950
21951 this.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);
21952};
21953/**
21954 * A mixin function that adds the `representations` api to an instance
21955 * of the VhsHandler class
21956 *
21957 * @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
21958 * representation API into
21959 */
21960
21961
21962var renditionSelectionMixin = function renditionSelectionMixin(vhsHandler) {
21963 var playlists = vhsHandler.playlists; // Add a single API-specific function to the VhsHandler instance
21964
21965 vhsHandler.representations = function () {
21966 if (!playlists || !playlists.master || !playlists.master.playlists) {
21967 return [];
21968 }
21969
21970 return playlists.master.playlists.filter(function (media) {
21971 return !isIncompatible(media);
21972 }).map(function (e, i) {
21973 return new Representation(vhsHandler, e, e.id);
21974 });
21975 };
21976};
21977
21978/**
21979 * @file playback-watcher.js
21980 *
21981 * Playback starts, and now my watch begins. It shall not end until my death. I shall
21982 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
21983 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
21984 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
21985 * my life and honor to the Playback Watch, for this Player and all the Players to come.
21986 */
21987
21988var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
21989/**
21990 * Returns whether or not the current time should be considered close to buffered content,
21991 * taking into consideration whether there's enough buffered content for proper playback.
21992 *
21993 * @param {Object} options
21994 * Options object
21995 * @param {TimeRange} options.buffered
21996 * Current buffer
21997 * @param {number} options.targetDuration
21998 * The active playlist's target duration
21999 * @param {number} options.currentTime
22000 * The current time of the player
22001 * @return {boolean}
22002 * Whether the current time should be considered close to the buffer
22003 */
22004
22005var closeToBufferedContent = function closeToBufferedContent(_ref) {
22006 var buffered = _ref.buffered,
22007 targetDuration = _ref.targetDuration,
22008 currentTime = _ref.currentTime;
22009
22010 if (!buffered.length) {
22011 return false;
22012 } // At least two to three segments' worth of content should be buffered before
22013 // the buffer is considered full enough to take any action.
22014
22015
22016 if (buffered.end(0) - buffered.start(0) < targetDuration * 2) {
22017 return false;
22018 } // It's possible that, on seek, a remove hasn't completed and the buffered range is
22019 // somewhere past the current time. In that event, don't consider the buffered content
22020 // close.
22021
22022
22023 if (currentTime > buffered.start(0)) {
22024 return false;
22025 } // Since target duration generally represents the max (or close to max) duration of a
22026 // segment, if the buffer is within a segment of the current time, the gap probably
22027 // won't be closed, and current time should be considered close to buffered content.
22028
22029
22030 return buffered.start(0) - currentTime < targetDuration;
22031};
22032/**
22033 * @class PlaybackWatcher
22034 */
22035
22036var PlaybackWatcher = /*#__PURE__*/function () {
22037 /**
22038 * Represents a PlaybackWatcher object.
22039 *
22040 * @class
22041 * @param {Object} options an object that includes the tech and settings
22042 */
22043 function PlaybackWatcher(options) {
22044 var _this = this;
22045
22046 this.masterPlaylistController_ = options.masterPlaylistController;
22047 this.tech_ = options.tech;
22048 this.seekable = options.seekable;
22049 this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
22050 this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
22051 this.media = options.media;
22052 this.consecutiveUpdates = 0;
22053 this.lastRecordedTime = null;
22054 this.timer_ = null;
22055 this.checkCurrentTimeTimeout_ = null;
22056 this.logger_ = logger('PlaybackWatcher');
22057 this.logger_('initialize');
22058
22059 var canPlayHandler = function canPlayHandler() {
22060 return _this.monitorCurrentTime_();
22061 };
22062
22063 var waitingHandler = function waitingHandler() {
22064 return _this.techWaiting_();
22065 };
22066
22067 var cancelTimerHandler = function cancelTimerHandler() {
22068 return _this.cancelTimer_();
22069 };
22070
22071 var fixesBadSeeksHandler = function fixesBadSeeksHandler() {
22072 return _this.fixesBadSeeks_();
22073 };
22074
22075 var mpc = this.masterPlaylistController_;
22076 var loaderTypes = ['main', 'subtitle', 'audio'];
22077 var loaderChecks = {};
22078 loaderTypes.forEach(function (type) {
22079 loaderChecks[type] = {
22080 reset: function reset() {
22081 return _this.resetSegmentDownloads_(type);
22082 },
22083 updateend: function updateend() {
22084 return _this.checkSegmentDownloads_(type);
22085 }
22086 };
22087 mpc[type + "SegmentLoader_"].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer
22088 // isn't changing, we want to reset. We cannot assume that the new rendition
22089 // will also be stalled until after new appends.
22090
22091 mpc[type + "SegmentLoader_"].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.
22092 // This prevents one-segment playlists (single vtt or single segment content)
22093 // from being detected as stalling, since the buffer will not change in those
22094 // cases: it already spans the entire video duration.
22095
22096 _this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
22097 });
22098 this.tech_.on('seekablechanged', fixesBadSeeksHandler);
22099 this.tech_.on('waiting', waitingHandler);
22100 this.tech_.on(timerCancelEvents, cancelTimerHandler);
22101 this.tech_.on('canplay', canPlayHandler); // Define the dispose function to clean up our events
22102
22103 this.dispose = function () {
22104 _this.logger_('dispose');
22105
22106 _this.tech_.off('seekablechanged', fixesBadSeeksHandler);
22107
22108 _this.tech_.off('waiting', waitingHandler);
22109
22110 _this.tech_.off(timerCancelEvents, cancelTimerHandler);
22111
22112 _this.tech_.off('canplay', canPlayHandler);
22113
22114 loaderTypes.forEach(function (type) {
22115 mpc[type + "SegmentLoader_"].off('appendsdone', loaderChecks[type].updateend);
22116 mpc[type + "SegmentLoader_"].off('playlistupdate', loaderChecks[type].reset);
22117
22118 _this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
22119 });
22120
22121 if (_this.checkCurrentTimeTimeout_) {
22122 window__default['default'].clearTimeout(_this.checkCurrentTimeTimeout_);
22123 }
22124
22125 _this.cancelTimer_();
22126 };
22127 }
22128 /**
22129 * Periodically check current time to see if playback stopped
22130 *
22131 * @private
22132 */
22133
22134
22135 var _proto = PlaybackWatcher.prototype;
22136
22137 _proto.monitorCurrentTime_ = function monitorCurrentTime_() {
22138 this.checkCurrentTime_();
22139
22140 if (this.checkCurrentTimeTimeout_) {
22141 window__default['default'].clearTimeout(this.checkCurrentTimeTimeout_);
22142 } // candidate intervals: 42ms (24fps), 250ms (what WebKit uses), 15ms (what Firefox uses); we poll every 250ms below
22143
22144
22145 this.checkCurrentTimeTimeout_ = window__default['default'].setTimeout(this.monitorCurrentTime_.bind(this), 250);
22146 }
22147 /**
22148 * Reset stalled download stats for a specific type of loader
22149 *
22150 * @param {string} type
22151 * The segment loader type to check.
22152 *
22153 * @listens SegmentLoader#playlistupdate
22154 * @listens Tech#seeking
22155 * @listens Tech#seeked
22156 */
22157 ;
22158
22159 _proto.resetSegmentDownloads_ = function resetSegmentDownloads_(type) {
22160 var loader = this.masterPlaylistController_[type + "SegmentLoader_"];
22161
22162 if (this[type + "StalledDownloads_"] > 0) {
22163 this.logger_("resetting possible stalled download count for " + type + " loader");
22164 }
22165
22166 this[type + "StalledDownloads_"] = 0;
22167 this[type + "Buffered_"] = loader.buffered_();
22168 }
22169 /**
22170 * Checks on every segment `appendsdone` event to see
22171 * if segment appends are making progress. If they are not,
22172 * and we are still downloading bytes, we blacklist the playlist.
22173 *
22174 * @param {string} type
22175 * The segment loader type to check.
22176 *
22177 * @listens SegmentLoader#appendsdone
22178 */
22179 ;
22180
22181 _proto.checkSegmentDownloads_ = function checkSegmentDownloads_(type) {
22182 var mpc = this.masterPlaylistController_;
22183 var loader = mpc[type + "SegmentLoader_"];
22184 var buffered = loader.buffered_();
22185 var isBufferedDifferent = isRangeDifferent(this[type + "Buffered_"], buffered);
22186 this[type + "Buffered_"] = buffered; // if another watcher is going to fix the issue or
22187 // the buffered value for this loader changed
22188 // appends are working
22189
22190 if (isBufferedDifferent) {
22191 this.resetSegmentDownloads_(type);
22192 return;
22193 }
22194
22195 this[type + "StalledDownloads_"]++;
22196 this.logger_("found #" + this[type + "StalledDownloads_"] + " " + type + " appends that did not increase buffer (possible stalled download)", {
22197 playlistId: loader.playlist_ && loader.playlist_.id,
22198 buffered: timeRangesToArray(buffered)
22199 }); // after 10 possibly stalled appends with no reset, exclude
22200
22201 if (this[type + "StalledDownloads_"] < 10) {
22202 return;
22203 }
22204
22205 this.logger_(type + " loader stalled download exclusion");
22206 this.resetSegmentDownloads_(type);
22207 this.tech_.trigger({
22208 type: 'usage',
22209 name: "vhs-" + type + "-download-exclusion"
22210 });
22211
22212 if (type === 'subtitle') {
22213 return;
22214 } // TODO: should we exclude audio tracks rather than main tracks
22215 // when type is audio?
22216
22217
22218 mpc.blacklistCurrentPlaylist({
22219 message: "Excessive " + type + " segment downloading detected."
22220 }, Infinity);
22221 }
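// Editor's timeline sketch (illustrative only) of the counters above:
//
//   appendsdone #1..#9, buffer unchanged -> mainStalledDownloads_ = 1..9
//   appendsdone #10, buffer unchanged    -> 'vhs-main-download-exclusion'
//                                           usage event; the playlist is
//                                           blacklisted forever (subtitle
//                                           loaders only reset, never exclude)
//   any append that grows the buffer, or a seek/playlistupdate, resets to 0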
22222 /**
22223 * The purpose of this function is to emulate the "waiting" event on
22224 * browsers that do not emit it when they are waiting for more
22225 * data to continue playback
22226 *
22227 * @private
22228 */
22229 ;
22230
22231 _proto.checkCurrentTime_ = function checkCurrentTime_() {
22232 if (this.tech_.seeking() && this.fixesBadSeeks_()) {
22233 this.consecutiveUpdates = 0;
22234 this.lastRecordedTime = this.tech_.currentTime();
22235 return;
22236 }
22237
22238 if (this.tech_.paused() || this.tech_.seeking()) {
22239 return;
22240 }
22241
22242 var currentTime = this.tech_.currentTime();
22243 var buffered = this.tech_.buffered();
22244
22245 if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
22246 // If current time is at the end of the final buffered region, then any playback
22247 // stall is most likely caused by buffering in a low bandwidth environment. The tech
22248 // should fire a `waiting` event in this scenario, but browser and tech
22249 // inconsistencies mean it may not. Calling `techWaiting_` here allows us to
22250 // simulate responding to a native `waiting` event when the tech fails to emit one.
22251 return this.techWaiting_();
22252 }
22253
22254 if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
22255 this.consecutiveUpdates++;
22256 this.waiting_();
22257 } else if (currentTime === this.lastRecordedTime) {
22258 this.consecutiveUpdates++;
22259 } else {
22260 this.consecutiveUpdates = 0;
22261 this.lastRecordedTime = currentTime;
22262 }
22263 }
22264 /**
22265 * Cancels any pending timers and resets the 'timeupdate' mechanism
22266 * designed to detect that we are stalled
22267 *
22268 * @private
22269 */
22270 ;
22271
22272 _proto.cancelTimer_ = function cancelTimer_() {
22273 this.consecutiveUpdates = 0;
22274
22275 if (this.timer_) {
22276 this.logger_('cancelTimer_');
22277 clearTimeout(this.timer_);
22278 }
22279
22280 this.timer_ = null;
22281 }
22282 /**
22283 * Fixes situations where there's a bad seek
22284 *
22285 * @return {boolean} whether an action was taken to fix the seek
22286 * @private
22287 */
22288 ;
22289
22290 _proto.fixesBadSeeks_ = function fixesBadSeeks_() {
22291 var seeking = this.tech_.seeking();
22292
22293 if (!seeking) {
22294 return false;
22295 }
22296
22297 var seekable = this.seekable();
22298 var currentTime = this.tech_.currentTime();
22299 var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
22300 var seekTo;
22301
22302 if (isAfterSeekableRange) {
22303 var seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
22304
22305 seekTo = seekableEnd;
22306 }
22307
22308 if (this.beforeSeekableWindow_(seekable, currentTime)) {
22309 var seekableStart = seekable.start(0); // sync to the beginning of the live window
22310 // provide a buffer of .1 seconds to handle rounding/imprecise numbers
22311
22312 seekTo = seekableStart + ( // if the playlist is too short and the seekable range is an exact time (can
22313 // happen in live with a 3 segment playlist), then don't use a time delta
22314 seekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);
22315 }
22316
22317 if (typeof seekTo !== 'undefined') {
22318 this.logger_("Trying to seek outside of seekable at time " + currentTime + " with " + ("seekable range " + printableRange(seekable) + ". Seeking to ") + (seekTo + "."));
22319 this.tech_.setCurrentTime(seekTo);
22320 return true;
22321 }
22322
22323 var buffered = this.tech_.buffered();
22324
22325 if (closeToBufferedContent({
22326 buffered: buffered,
22327 targetDuration: this.media().targetDuration,
22328 currentTime: currentTime
22329 })) {
22330 seekTo = buffered.start(0) + SAFE_TIME_DELTA;
22331 this.logger_("Buffered region starts (" + buffered.start(0) + ") " + (" just beyond seek point (" + currentTime + "). Seeking to " + seekTo + "."));
22332 this.tech_.setCurrentTime(seekTo);
22333 return true;
22334 }
22335
22336 return false;
22337 }
22338 /**
22339 * Handler for situations when we determine the player is waiting.
22340 *
22341 * @private
22342 */
22343 ;
22344
22345 _proto.waiting_ = function waiting_() {
22346 if (this.techWaiting_()) {
22347 return;
22348 } // All tech waiting checks failed. Use last resort correction
22349
22350
22351 var currentTime = this.tech_.currentTime();
22352 var buffered = this.tech_.buffered();
22353 var currentRange = findRange(buffered, currentTime); // Sometimes the player can stall for unknown reasons within a contiguous buffered
22354 // region with no indication that anything is amiss (seen in Firefox). Seeking to
22355 // currentTime is usually enough to kickstart the player. This checks that the player
22356 // is currently within a buffered region before attempting a corrective seek.
22357 // Chrome does not appear to continue `timeupdate` events after a `waiting` event
22358 // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
22359 // make sure there is ~3 seconds of forward buffer before taking any corrective action
22360 // to avoid triggering an `unknownwaiting` event when the network is slow.
22361
22362 if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
22363 this.cancelTimer_();
22364 this.tech_.setCurrentTime(currentTime);
22365 this.logger_("Stopped at " + currentTime + " while inside a buffered region " + ("[" + currentRange.start(0) + " -> " + currentRange.end(0) + "]. Attempting to resume ") + 'playback by seeking to the current time.'); // unknown waiting corrections may be useful for monitoring QoS
22366
22367 this.tech_.trigger({
22368 type: 'usage',
22369 name: 'vhs-unknown-waiting'
22370 });
22371 this.tech_.trigger({
22372 type: 'usage',
22373 name: 'hls-unknown-waiting'
22374 });
22375 return;
22376 }
22377 }
22378 /**
22379 * Handler for situations when the tech fires a `waiting` event
22380 *
22381 * @return {boolean}
22382 * True if an action (or none) was needed to correct the waiting. False if no
22383 * checks passed
22384 * @private
22385 */
22386 ;
22387
22388 _proto.techWaiting_ = function techWaiting_() {
22389 var seekable = this.seekable();
22390 var currentTime = this.tech_.currentTime();
22391
22392 if (this.tech_.seeking() && this.fixesBadSeeks_()) {
22393 // Tech is seeking or bad seek fixed, no action needed
22394 return true;
22395 }
22396
22397 if (this.tech_.seeking() || this.timer_ !== null) {
22398 // Tech is seeking or already waiting on another action, no action needed
22399 return true;
22400 }
22401
22402 if (this.beforeSeekableWindow_(seekable, currentTime)) {
22403 var livePoint = seekable.end(seekable.length - 1);
22404 this.logger_("Fell out of live window at time " + currentTime + ". Seeking to " + ("live point (seekable end) " + livePoint));
22405 this.cancelTimer_();
22406 this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoS
22407
22408 this.tech_.trigger({
22409 type: 'usage',
22410 name: 'vhs-live-resync'
22411 });
22412 this.tech_.trigger({
22413 type: 'usage',
22414 name: 'hls-live-resync'
22415 });
22416 return true;
22417 }
22418
22419 var sourceUpdater = this.tech_.vhs.masterPlaylistController_.sourceUpdater_;
22420 var buffered = this.tech_.buffered();
22421 var videoUnderflow = this.videoUnderflow_({
22422 audioBuffered: sourceUpdater.audioBuffered(),
22423 videoBuffered: sourceUpdater.videoBuffered(),
22424 currentTime: currentTime
22425 });
22426
22427 if (videoUnderflow) {
22428 // Even though the video underflowed and was stuck in a gap, the audio overplayed
22429 // the gap, leading currentTime into a buffered range. Seeking to currentTime
22430 // allows the video to catch up to the audio position without losing any audio
22431 // (only suffering ~3 seconds of frozen video and a pause in audio playback).
22432 this.cancelTimer_();
22433 this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoS
22434
22435 this.tech_.trigger({
22436 type: 'usage',
22437 name: 'vhs-video-underflow'
22438 });
22439 this.tech_.trigger({
22440 type: 'usage',
22441 name: 'hls-video-underflow'
22442 });
22443 return true;
22444 }
22445
22446 var nextRange = findNextRange(buffered, currentTime); // check for gap
22447
22448 if (nextRange.length > 0) {
22449 var difference = nextRange.start(0) - currentTime;
22450 this.logger_("Stopped at " + currentTime + ", setting timer for " + difference + ", seeking " + ("to " + nextRange.start(0)));
22451 this.cancelTimer_();
22452 this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
22453 return true;
22454 } // All checks failed. Returning false to indicate failure to correct waiting
22455
22456
22457 return false;
22458 };
22459
22460 _proto.afterSeekableWindow_ = function afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow) {
22461 if (allowSeeksWithinUnsafeLiveWindow === void 0) {
22462 allowSeeksWithinUnsafeLiveWindow = false;
22463 }
22464
22465 if (!seekable.length) {
22466 // we can't make a solid case if there's no seekable, default to false
22467 return false;
22468 }
22469
22470 var allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
22471 var isLive = !playlist.endList;
22472
22473 if (isLive && allowSeeksWithinUnsafeLiveWindow) {
22474 allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
22475 }
22476
22477 if (currentTime > allowedEnd) {
22478 return true;
22479 }
22480
22481 return false;
22482 };
22483
22484 _proto.beforeSeekableWindow_ = function beforeSeekableWindow_(seekable, currentTime) {
22485 if (seekable.length && // can't fall before 0 and 0 seekable start identifies VOD stream
22486 seekable.start(0) > 0 && currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
22487 return true;
22488 }
22489
22490 return false;
22491 };
22492
22493 _proto.videoUnderflow_ = function videoUnderflow_(_ref2) {
22494 var videoBuffered = _ref2.videoBuffered,
22495 audioBuffered = _ref2.audioBuffered,
22496 currentTime = _ref2.currentTime;
22497
22498 // audio only content will not have video underflow :)
22499 if (!videoBuffered) {
22500 return;
22501 }
22502
22503 var gap; // find a gap in demuxed content.
22504
22505 if (videoBuffered.length && audioBuffered.length) {
22506 // in Chrome audio will continue to play for ~3s when we run out of video
22507 // so we have to check that the video buffer did have some buffer in the
22508 // past.
22509 var lastVideoRange = findRange(videoBuffered, currentTime - 3);
22510 var videoRange = findRange(videoBuffered, currentTime);
22511 var audioRange = findRange(audioBuffered, currentTime);
22512
22513 if (audioRange.length && !videoRange.length && lastVideoRange.length) {
22514 gap = {
22515 start: lastVideoRange.end(0),
22516 end: audioRange.end(0)
22517 };
22518 } // find a gap in muxed content.
22519
22520 } else {
22521 var nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are
22522 // stuck in a gap due to video underflow.
22523
22524 if (!nextRange.length) {
22525 gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
22526 }
22527 }
22528
22529 if (gap) {
22530 this.logger_("Encountered a gap in video from " + gap.start + " to " + gap.end + ". " + ("Seeking to current time " + currentTime));
22531 return true;
22532 }
22533
22534 return false;
22535 }
22536 /**
22537 * Timer callback. If playback still has not proceeded, then we seek
22538 * to the start of the next buffered region.
22539 *
22540 * @private
22541 */
22542 ;
22543
22544 _proto.skipTheGap_ = function skipTheGap_(scheduledCurrentTime) {
22545 var buffered = this.tech_.buffered();
22546 var currentTime = this.tech_.currentTime();
22547 var nextRange = findNextRange(buffered, currentTime);
22548 this.cancelTimer_();
22549
22550 if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
22551 return;
22552 }
22553
22554 this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0)); // only seek if we still have not played
22555
22556 this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);
22557 this.tech_.trigger({
22558 type: 'usage',
22559 name: 'vhs-gap-skip'
22560 });
22561 this.tech_.trigger({
22562 type: 'usage',
22563 name: 'hls-gap-skip'
22564 });
22565 };
22566
22567 _proto.gapFromVideoUnderflow_ = function gapFromVideoUnderflow_(buffered, currentTime) {
22568 // At least in Chrome, if there is a gap in the video buffer, the audio will continue
22569 // playing for ~3 seconds after the video gap starts. This is done to account for
22570 // video buffer underflow/underrun (note that this is not done when there is audio
22571 // buffer underflow/underrun -- in that case the video will stop as soon as it
22572 // encounters the gap, as audio stalls are more noticeable/jarring to a user than
22573 // video stalls). The player's time will reflect the playthrough of audio, so the
22574 // time will appear as if we are in a buffered region, even if we are stuck in a
22575 // "gap."
22576 //
22577 // Example:
22578 // video buffer: 0 => 10.1, 10.2 => 20
22579 // audio buffer: 0 => 20
22580 // overall buffer: 0 => 10.1, 10.2 => 20
22581 // current time: 13
22582 //
22583 // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
22584 // however, the audio continued playing until it reached ~3 seconds past the gap
22585 // (13 seconds), at which point it stops as well. Since current time is past the
22586 // gap, findNextRange will return no ranges.
22587 //
22588 // To check for this issue, we see if there is a gap that starts somewhere within
22589 // a 3 second range (3 seconds +/- 1 second) back from our current time.
22590 var gaps = findGaps(buffered);
22591
22592 for (var i = 0; i < gaps.length; i++) {
22593 var start = gaps.start(i);
22594 var end = gaps.end(i); // the gap starts no more than 4 seconds back (and more than 2)
22595
22596 if (currentTime - start < 4 && currentTime - start > 2) {
22597 return {
22598 start: start,
22599 end: end
22600 };
22601 }
22602 }
22603
22604 return null;
22605 };
22606
22607 return PlaybackWatcher;
22608}();
22609
22610var defaultOptions = {
22611 errorInterval: 30,
22612 getSource: function getSource(next) {
22613 var tech = this.tech({
22614 IWillNotUseThisInPlugins: true
22615 });
22616 var sourceObj = tech.currentSource_ || this.currentSource();
22617 return next(sourceObj);
22618 }
22619};
22620/**
22621 * Main entry point for the plugin
22622 *
22623 * @param {Player} player a reference to a videojs Player instance
22624 * @param {Object} [options] an object with plugin options
22625 * @private
22626 */
22627
22628var initPlugin = function initPlugin(player, options) {
22629 var lastCalled = 0;
22630 var seekTo = 0;
22631 var localOptions = videojs__default['default'].mergeOptions(defaultOptions, options);
22632 player.ready(function () {
22633 player.trigger({
22634 type: 'usage',
22635 name: 'vhs-error-reload-initialized'
22636 });
22637 player.trigger({
22638 type: 'usage',
22639 name: 'hls-error-reload-initialized'
22640 });
22641 });
22642 /**
22643 * Player modifications to perform that must wait until `loadedmetadata`
22644 * has been triggered
22645 *
22646 * @private
22647 */
22648
22649 var loadedMetadataHandler = function loadedMetadataHandler() {
22650 if (seekTo) {
22651 player.currentTime(seekTo);
22652 }
22653 };
22654 /**
22655 * Set the source on the player element, play, and seek if necessary
22656 *
22657 * @param {Object} sourceObj An object specifying the source url and mime-type to play
22658 * @private
22659 */
22660
22661
22662 var setSource = function setSource(sourceObj) {
22663 if (sourceObj === null || sourceObj === undefined) {
22664 return;
22665 }
22666
22667 seekTo = player.duration() !== Infinity && player.currentTime() || 0;
22668 player.one('loadedmetadata', loadedMetadataHandler);
22669 player.src(sourceObj);
22670 player.trigger({
22671 type: 'usage',
22672 name: 'vhs-error-reload'
22673 });
22674 player.trigger({
22675 type: 'usage',
22676 name: 'hls-error-reload'
22677 });
22678 player.play();
22679 };
22680 /**
22681 * Attempt to get a source from either the built-in getSource function
22682 * or a custom function provided via the options
22683 *
22684 * @private
22685 */
22686
22687
22688 var errorHandler = function errorHandler() {
22689 // Do not attempt to reload the source if a source-reload occurred before
22690 // 'errorInterval' time has elapsed since the last source-reload
22691 if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
22692 player.trigger({
22693 type: 'usage',
22694 name: 'vhs-error-reload-canceled'
22695 });
22696 player.trigger({
22697 type: 'usage',
22698 name: 'hls-error-reload-canceled'
22699 });
22700 return;
22701 }
22702
22703 if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
22704 videojs__default['default'].log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
22705 return;
22706 }
22707
22708 lastCalled = Date.now();
22709 return localOptions.getSource.call(player, setSource);
22710 };
22711 /**
22712 * Unbind any event handlers that were bound by the plugin
22713 *
22714 * @private
22715 */
22716
22717
22718 var cleanupEvents = function cleanupEvents() {
22719 player.off('loadedmetadata', loadedMetadataHandler);
22720 player.off('error', errorHandler);
22721 player.off('dispose', cleanupEvents);
22722 };
22723 /**
22724 * Cleanup before re-initializing the plugin
22725 *
22726 * @param {Object} [newOptions] an object with plugin options
22727 * @private
22728 */
22729
22730
22731 var reinitPlugin = function reinitPlugin(newOptions) {
22732 cleanupEvents();
22733 initPlugin(player, newOptions);
22734 };
22735
22736 player.on('error', errorHandler);
22737 player.on('dispose', cleanupEvents); // Overwrite the plugin function so that we can correctly cleanup before
22738 // initializing the plugin
22739
22740 player.reloadSourceOnError = reinitPlugin;
22741};
22742/**
22743 * Reload the source when an error is detected as long as there
22744 * wasn't an error previously within the last 30 seconds
22745 *
22746 * @param {Object} [options] an object with plugin options
22747 */
22748
22749
22750var reloadSourceOnError = function reloadSourceOnError(options) {
22751 initPlugin(this, options);
22752};
22753
22754var version = "2.6.1";
22755
22756var version$1 = "5.9.1";
22757
22758var version$2 = "0.15.2";
22759
22760var version$3 = "4.5.2";
22761
22762var version$4 = "3.1.2";
22763
22764var Vhs$1 = {
22765 PlaylistLoader: PlaylistLoader,
22766 Playlist: Playlist,
22767 utils: utils,
22768 STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
22769 INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
22770 lastBandwidthSelector: lastBandwidthSelector,
22771 movingAverageBandwidthSelector: movingAverageBandwidthSelector,
22772 comparePlaylistBandwidth: comparePlaylistBandwidth,
22773 comparePlaylistResolution: comparePlaylistResolution,
22774 xhr: xhrFactory()
22775}; // Define getter/setters for config properties
22776
22777Object.keys(Config).forEach(function (prop) {
22778 Object.defineProperty(Vhs$1, prop, {
22779 get: function get() {
22780 videojs__default['default'].log.warn("using Vhs." + prop + " is UNSAFE; be sure you know what you are doing");
22781 return Config[prop];
22782 },
22783 set: function set(value) {
22784 videojs__default['default'].log.warn("using Vhs." + prop + " is UNSAFE; be sure you know what you are doing");
22785
22786 if (typeof value !== 'number' || value < 0) {
22787 videojs__default['default'].log.warn("value of Vhs." + prop + " must be greater than or equal to 0");
22788 return;
22789 }
22790
22791 Config[prop] = value;
22792 }
22793 });
22794});
22795var LOCAL_STORAGE_KEY = 'videojs-vhs';
22796/**
22797 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.
22798 *
22799 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
22800 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
22801 * @function handleVhsMediaChange
22802 */
22803
22804var handleVhsMediaChange = function handleVhsMediaChange(qualityLevels, playlistLoader) {
22805 var newPlaylist = playlistLoader.media();
22806 var selectedIndex = -1;
22807
22808 for (var i = 0; i < qualityLevels.length; i++) {
22809 if (qualityLevels[i].id === newPlaylist.id) {
22810 selectedIndex = i;
22811 break;
22812 }
22813 }
22814
22815 qualityLevels.selectedIndex_ = selectedIndex;
22816 qualityLevels.trigger({
22817 selectedIndex: selectedIndex,
22818 type: 'change'
22819 });
22820};
22821/**
22822 * Adds quality levels to list once playlist metadata is available
22823 *
22824 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
22825 * @param {Object} vhs Vhs object to listen to for media events.
22826 * @function handleVhsLoadedMetadata
22827 */
22828
22829
22830var handleVhsLoadedMetadata = function handleVhsLoadedMetadata(qualityLevels, vhs) {
22831 vhs.representations().forEach(function (rep) {
22832 qualityLevels.addQualityLevel(rep);
22833 });
22834 handleVhsMediaChange(qualityLevels, vhs.playlists);
22835}; // HLS is a source handler, not a tech. Make sure attempts to use it
22836// as one do not cause exceptions.
22837
22838
22839Vhs$1.canPlaySource = function () {
22840 return videojs__default['default'].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
22841};
22842
22843var emeKeySystems = function emeKeySystems(keySystemOptions, videoPlaylist, audioPlaylist) {
22844 if (!keySystemOptions) {
22845 return keySystemOptions;
22846 }
22847
22848 var codecs = {
22849 video: videoPlaylist && videoPlaylist.attributes && videoPlaylist.attributes.CODECS,
22850 audio: audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS
22851 };
22852
22853 if (!codecs.audio && codecs.video && codecs.video.split(',').length > 1) {
22854 codecs.video.split(',').forEach(function (codec) {
22855 codec = codec.trim();
22856
22857 if (codecs_js.isAudioCodec(codec)) {
22858 codecs.audio = codec;
22859 } else if (codecs_js.isVideoCodec(codec)) {
22860 codecs.video = codec;
22861 }
22862 });
22863 }
22864
22865 var videoContentType = codecs.video ? "video/mp4;codecs=\"" + codecs.video + "\"" : null;
22866 var audioContentType = codecs.audio ? "audio/mp4;codecs=\"" + codecs.audio + "\"" : null; // upsert the content types based on the selected playlist
22867
22868 var keySystemContentTypes = {};
22869
22870 for (var keySystem in keySystemOptions) {
22871 keySystemContentTypes[keySystem] = {
22872 audioContentType: audioContentType,
22873 videoContentType: videoContentType
22874 }; // Default to using the video playlist's PSSH even though they may be different, as
22875 // videojs-contrib-eme will only accept one in the options.
22876 //
22877 // This shouldn't be an issue for most cases as early initialization will handle all
22878 // unique PSSH values, and if they aren't, then encrypted events should have the
22879 // specific information needed for the unique license.
22880
22881 if (videoPlaylist.contentProtection && videoPlaylist.contentProtection[keySystem] && videoPlaylist.contentProtection[keySystem].pssh) {
22882 keySystemContentTypes[keySystem].pssh = videoPlaylist.contentProtection[keySystem].pssh;
22883 } // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
22884 // so we need to prevent overwriting the URL entirely
22885
22886
22887 if (typeof keySystemOptions[keySystem] === 'string') {
22888 keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
22889 }
22890 }
22891
22892 return videojs__default['default'].mergeOptions(keySystemOptions, keySystemContentTypes);
22893};
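// Shape sketch (editor's addition): given keySystems options and playlists
// whose attributes carry CODECS strings, emeKeySystems fills in per-key-system
// content types. The key system name, codecs, and URL below are illustrative.
//
//   emeKeySystems(
//     { 'com.widevine.alpha': { url: 'https://example.com/license' } },
//     { attributes: { CODECS: 'avc1.4d400d' } },
//     { attributes: { CODECS: 'mp4a.40.2' } }
//   );
//   // => {
//   //   'com.widevine.alpha': {
//   //     url: 'https://example.com/license',
//   //     videoContentType: 'video/mp4;codecs="avc1.4d400d"',
//   //     audioContentType: 'audio/mp4;codecs="mp4a.40.2"'
//   //   }
//   // }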
22894/**
22895 * @typedef {Object} KeySystems
22896 *
22897 * keySystems configuration for https://github.com/videojs/videojs-contrib-eme
22898 * Note: not all options are listed here.
22899 *
22900 * @property {Uint8Array} [pssh]
22901 * Protection System Specific Header
22902 */
22903
22904/**
22905 * Goes through all the playlists and collects an array of KeySystems options objects
22906 * containing each playlist's keySystems and their pssh values, if available.
22907 *
22908 * @param {Object[]} playlists
22909 * The playlists to look through
22910 * @param {string[]} keySystems
22911 * The keySystems to collect pssh values for
22912 *
22913 * @return {KeySystems[]}
22914 * An array of KeySystems objects containing available key systems and their
22915 * pssh values
22916 */
22917
22918
22919var getAllPsshKeySystemsOptions = function getAllPsshKeySystemsOptions(playlists, keySystems) {
22920 return playlists.reduce(function (keySystemsArr, playlist) {
22921 if (!playlist.contentProtection) {
22922 return keySystemsArr;
22923 }
22924
22925 var keySystemsOptions = keySystems.reduce(function (keySystemsObj, keySystem) {
22926 var keySystemOptions = playlist.contentProtection[keySystem];
22927
22928 if (keySystemOptions && keySystemOptions.pssh) {
22929 keySystemsObj[keySystem] = {
22930 pssh: keySystemOptions.pssh
22931 };
22932 }
22933
22934 return keySystemsObj;
22935 }, {});
22936
22937 if (Object.keys(keySystemsOptions).length) {
22938 keySystemsArr.push(keySystemsOptions);
22939 }
22940
22941 return keySystemsArr;
22942 }, []);
22943};
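// Shape sketch (editor's addition): playlists without contentProtection are
// skipped, and only key systems that carry a pssh make it into the result.
//
//   getAllPsshKeySystemsOptions(
//     [
//       { contentProtection: { 'com.widevine.alpha': { pssh: new Uint8Array([1, 2, 3]) } } },
//       { /* no contentProtection, skipped */ }
//     ],
//     ['com.widevine.alpha']
//   );
//   // => [{ 'com.widevine.alpha': { pssh: new Uint8Array([1, 2, 3]) } }]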
22944/**
22945 * Returns a promise that waits for the
22946 * [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
22947 *
22948 * Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
22949 * browsers.
22950 *
22951 * As per the above ticket, this is particularly important for Chrome, where, if
22952 * unencrypted content is appended before encrypted content and the key session has not
22953 * been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
22954 * during playback.
22955 *
22956 * @param {Object} player
22957 * The player instance
22958 * @param {Object[]} sourceKeySystems
22959 * The key systems options from the player source
22960 * @param {Object} [audioMedia]
22961 * The active audio media playlist (optional)
22962 * @param {Object[]} mainPlaylists
22963 * The playlists found on the master playlist object
22964 *
22965 * @return {Object}
22966 * Promise that resolves when the key session has been created
22967 */
22968
22969
22970var waitForKeySessionCreation = function waitForKeySessionCreation(_ref) {
22971 var player = _ref.player,
22972 sourceKeySystems = _ref.sourceKeySystems,
22973 audioMedia = _ref.audioMedia,
22974 mainPlaylists = _ref.mainPlaylists;
22975
22976 if (!player.eme.initializeMediaKeys) {
22977 return Promise.resolve();
22978 } // TODO should all audio PSSH values be initialized for DRM?
22979 //
22980 // All unique video rendition pssh values are initialized for DRM, but here only
22981 // the initial audio playlist license is initialized. In theory, an encrypted
22982 // event should be fired if the user switches to an alternative audio playlist
22983 // where a license is required, but this case hasn't yet been tested. In addition, there
22984 // may be many alternate audio playlists unlikely to be used (e.g., multiple different
22985 // languages).
22986
22987
22988 var playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;
22989 var keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
22990 var initializationFinishedPromises = [];
22991 var keySessionCreatedPromises = []; // Since PSSH values are interpreted as initData, EME will dedupe any duplicates. The
22992 // only place where it should not be deduped is for ms-prefixed APIs, but the early
22993 // return for IE11 above, and the existence of modern EME APIs in addition to
22994 // ms-prefixed APIs on Edge should prevent this from being a concern.
22995 // initializeMediaKeys also won't use the webkit-prefixed APIs.
22996
22997 keySystemsOptionsArr.forEach(function (keySystemsOptions) {
22998 keySessionCreatedPromises.push(new Promise(function (resolve, reject) {
22999 player.tech_.one('keysessioncreated', resolve);
23000 }));
23001 initializationFinishedPromises.push(new Promise(function (resolve, reject) {
23002 player.eme.initializeMediaKeys({
23003 keySystems: keySystemsOptions
23004 }, function (err) {
23005 if (err) {
23006 reject(err);
23007 return;
23008 }
23009
23010 resolve();
23011 });
23012 }));
23013 }); // The reasons Promise.race is chosen over Promise.any:
23014 //
23015 // * Promise.any is only available in Safari 14+.
23016 // * None of these promises are expected to reject. If they do reject, it might be
23017 // better here for the race to surface the rejection, rather than mask it by using
23018 // Promise.any.
23019
23020 return Promise.race([// If a session was previously created, these will all finish resolving without
23021 // creating a new session, otherwise it will take until the end of all license
23022 // requests, which is why the key session check is used (to make setup much faster).
23023 Promise.all(initializationFinishedPromises), // Once a single session is created, the browser knows DRM will be used.
23024 Promise.race(keySessionCreatedPromises)]);
23025};
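// Usage sketch (editor's addition, defined but never invoked): waiting for a
// key session before appending encrypted content. The key system name and
// license URL are illustrative, and the player is assumed to have the eme
// plugin initialized.
var exampleWaitForKeySessionCreation = function exampleWaitForKeySessionCreation(player, masterPlaylists) {
  return waitForKeySessionCreation({
    player: player,
    sourceKeySystems: {
      'com.widevine.alpha': 'https://example.com/license'
    },
    audioMedia: null,
    mainPlaylists: masterPlaylists
  }).then(function () {
    // resolved: either initialization finished or a key session was created,
    // so it is now safe to append encrypted segments
  });
};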
23026/**
23027 * If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
23028 * there are keySystems on the source, sets up source options to prepare the source for
23029 * eme.
23030 *
23031 * @param {Object} player
23032 * The player instance
23033 * @param {Object[]} sourceKeySystems
23034 * The key systems options from the player source
23035 * @param {Object} media
23036 * The active media playlist
23037 * @param {Object} [audioMedia]
23038 * The active audio media playlist (optional)
23039 *
23040 * @return {boolean}
23041 * Whether or not options were configured and EME is available
23042 */
23043
23044var setupEmeOptions = function setupEmeOptions(_ref2) {
23045 var player = _ref2.player,
23046 sourceKeySystems = _ref2.sourceKeySystems,
23047 media = _ref2.media,
23048 audioMedia = _ref2.audioMedia;
23049 var sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);
23050
23051 if (!sourceOptions) {
23052 return false;
23053 }
23054
23055 player.currentSource().keySystems = sourceOptions; // videojs-contrib-eme handles the rest of the setup, so if the plugin
23056 // is missing, warn and bail out.
23057
23058 if (sourceOptions && !player.eme) {
23059 videojs__default['default'].log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
23060 return false;
23061 }
23062
23063 return true;
23064};
23065
23066var getVhsLocalStorage = function getVhsLocalStorage() {
23067 if (!window__default['default'].localStorage) {
23068 return null;
23069 }
23070
23071 var storedObject = window__default['default'].localStorage.getItem(LOCAL_STORAGE_KEY);
23072
23073 if (!storedObject) {
23074 return null;
23075 }
23076
23077 try {
23078 return JSON.parse(storedObject);
23079 } catch (e) {
23080 // someone may have tampered with the value
23081 return null;
23082 }
23083};
23084
23085var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
23086 if (!window__default['default'].localStorage) {
23087 return false;
23088 }
23089
23090 var objectToStore = getVhsLocalStorage();
23091 objectToStore = objectToStore ? videojs__default['default'].mergeOptions(objectToStore, options) : options;
23092
23093 try {
23094 window__default['default'].localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
23095 } catch (e) {
23096 // Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
23097 // storage is set to 0).
23098 // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
23099 // No need to perform any operation.
23100 return false;
23101 }
23102
23103 return objectToStore;
23104};
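// Storage sketch (editor's addition): the object persisted under the
// 'videojs-vhs' localStorage key is flat JSON; bandwidth and throughput are
// the fields written by the 'bandwidthupdate' handler later in this file
// (the numbers below are illustrative).
//
//   window.localStorage.getItem('videojs-vhs');
//   // => '{"bandwidth":5000000,"throughput":3000000}'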
23105/**
23106 * Parses VHS-supported media types from data URIs. See
23107 * https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
23108 * for information on data URIs.
23109 *
23110 * @param {string} dataUri
23111 * The data URI
23112 *
23113 * @return {string|Object}
23114 * The parsed object/string, or the original string if no supported media type
23115 * was found
23116 */
23117
23118
23119var expandDataUri = function expandDataUri(dataUri) {
23120 if (dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0) {
23121 return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
23122 } // no known case for this data URI, return the string as-is
23123
23124
23125 return dataUri;
23126};
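// Example (editor's addition): a VHS JSON manifest inlined as a data URI is
// parsed into an object; any other string comes back untouched.
//
//   expandDataUri('data:application/vnd.videojs.vhs+json,{"mediaGroups":{}}');
//   // => { mediaGroups: {} }
//
//   expandDataUri('https://example.com/master.m3u8');
//   // => 'https://example.com/master.m3u8'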
23127/**
23128 * Whether the browser has built-in HLS support.
23129 */
23130
23131
23132Vhs$1.supportsNativeHls = function () {
23133 if (!document__default['default'] || !document__default['default'].createElement) {
23134 return false;
23135 }
23136
23137 var video = document__default['default'].createElement('video'); // native HLS is definitely not supported if HTML5 video isn't
23138
23139 if (!videojs__default['default'].getTech('Html5').isSupported()) {
23140 return false;
23141 } // HLS manifests can go by many MIME types
23142
23143
23144 var canPlay = [// Apple sanctioned
23145 'application/vnd.apple.mpegurl', // Apple sanctioned for backwards compatibility
23146 'audio/mpegurl', // Very common
23147 'audio/x-mpegurl', // Very common
23148 'application/x-mpegurl', // Included for completeness
23149 'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];
23150 return canPlay.some(function (canItPlay) {
23151 return /maybe|probably/i.test(video.canPlayType(canItPlay));
23152 });
23153}();
23154
23155Vhs$1.supportsNativeDash = function () {
23156 if (!document__default['default'] || !document__default['default'].createElement || !videojs__default['default'].getTech('Html5').isSupported()) {
23157 return false;
23158 }
23159
23160 return /maybe|probably/i.test(document__default['default'].createElement('video').canPlayType('application/dash+xml'));
23161}();
23162
23163Vhs$1.supportsTypeNatively = function (type) {
23164 if (type === 'hls') {
23165 return Vhs$1.supportsNativeHls;
23166 }
23167
23168 if (type === 'dash') {
23169 return Vhs$1.supportsNativeDash;
23170 }
23171
23172 return false;
23173};
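// Example (editor's addition): the two support flags above are computed once
// at module load, so these calls are cheap lookups.
//
//   Vhs$1.supportsTypeNatively('hls');  // true on Safari, typically false elsewhere
//   Vhs$1.supportsTypeNatively('dash'); // false in current major browsers
//   Vhs$1.supportsTypeNatively('mp4');  // false: only 'hls' and 'dash' are recognized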
23174/**
23175 * HLS is a source handler, not a tech. Make sure attempts to use it
23176 * as one do not cause exceptions.
23177 */
23178
23179
23180Vhs$1.isSupported = function () {
23181 return videojs__default['default'].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
23182};
23183
23184var Component = videojs__default['default'].getComponent('Component');
23185/**
23186 * The Vhs Handler object, where we orchestrate all of the parts
23187 * of HLS to interact with video.js
23188 *
23189 * @class VhsHandler
23190 * @extends videojs.Component
23191 * @param {Object} source the source object
23192 * @param {Tech} tech the parent tech object
23193 * @param {Object} options the handler's options, both optional and required
23194 */
23195
23196var VhsHandler = /*#__PURE__*/function (_Component) {
23197 _inheritsLoose__default['default'](VhsHandler, _Component);
23198
23199 function VhsHandler(source, tech, options) {
23200 var _this;
23201
23202 _this = _Component.call(this, tech, videojs__default['default'].mergeOptions(options.hls, options.vhs)) || this;
23203
23204 if (options.hls && Object.keys(options.hls).length) {
23205 videojs__default['default'].log.warn('Using hls options is deprecated. Use vhs instead.');
23206 }
23207
23208 _this.logger_ = logger('VhsHandler'); // tech.player() is deprecated, but set up a reference to HLS for
23209 // backwards-compatibility
23210
23211 if (tech.options_ && tech.options_.playerId) {
23212 var _player = videojs__default['default'](tech.options_.playerId);
23213
23214 if (!_player.hasOwnProperty('hls')) {
23215 Object.defineProperty(_player, 'hls', {
23216 get: function get() {
23217 videojs__default['default'].log.warn('player.hls is deprecated. Use player.tech().vhs instead.');
23218 tech.trigger({
23219 type: 'usage',
23220 name: 'hls-player-access'
23221 });
23222 return _assertThisInitialized__default['default'](_this);
23223 },
23224 configurable: true
23225 });
23226 }
23227
23228 if (!_player.hasOwnProperty('vhs')) {
23229 Object.defineProperty(_player, 'vhs', {
23230 get: function get() {
23231 videojs__default['default'].log.warn('player.vhs is deprecated. Use player.tech().vhs instead.');
23232 tech.trigger({
23233 type: 'usage',
23234 name: 'vhs-player-access'
23235 });
23236 return _assertThisInitialized__default['default'](_this);
23237 },
23238 configurable: true
23239 });
23240 }
23241
23242 if (!_player.hasOwnProperty('dash')) {
23243 Object.defineProperty(_player, 'dash', {
23244 get: function get() {
23245 videojs__default['default'].log.warn('player.dash is deprecated. Use player.tech().vhs instead.');
23246 return _assertThisInitialized__default['default'](_this);
23247 },
23248 configurable: true
23249 });
23250 }
23251
23252 _this.player_ = _player;
23253 }
23254
23255 _this.tech_ = tech;
23256 _this.source_ = source;
23257 _this.stats = {};
23258 _this.ignoreNextSeekingEvent_ = false;
23259
23260 _this.setOptions_();
23261
23262 if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
23263 tech.overrideNativeAudioTracks(true);
23264 tech.overrideNativeVideoTracks(true);
23265 } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
23266 // overriding native HLS only works if audio tracks have been emulated
23267 // error early if we're misconfigured
23268 throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
23269 } // listen for fullscreenchange events for this player so that we
23270 // can adjust our quality selection quickly
23271
23272
23273 _this.on(document__default['default'], ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
23274 var fullscreenElement = document__default['default'].fullscreenElement || document__default['default'].webkitFullscreenElement || document__default['default'].mozFullScreenElement || document__default['default'].msFullscreenElement;
23275
23276 if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
23277 _this.masterPlaylistController_.smoothQualityChange_();
23278 }
23279 });
23280
23281 _this.on(_this.tech_, 'seeking', function () {
23282 if (this.ignoreNextSeekingEvent_) {
23283 this.ignoreNextSeekingEvent_ = false;
23284 return;
23285 }
23286
23287 this.setCurrentTime(this.tech_.currentTime());
23288 });
23289
23290 _this.on(_this.tech_, 'error', function () {
23291 // verify that the error was real and we are loaded
23292 // enough to have mpc loaded.
23293 if (this.tech_.error() && this.masterPlaylistController_) {
23294 this.masterPlaylistController_.pauseLoading();
23295 }
23296 });
23297
23298 _this.on(_this.tech_, 'play', _this.play);
23299
23300 return _this;
23301 }
23302
23303 var _proto = VhsHandler.prototype;
23304
23305 _proto.setOptions_ = function setOptions_() {
23306 var _this2 = this;
23307
23308 // defaults
23309 this.options_.withCredentials = this.options_.withCredentials || false;
23310 this.options_.handleManifestRedirects = this.options_.handleManifestRedirects !== false;
23311 this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions !== false;
23312 this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
23313 this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
23314 this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
23315 this.options_.customTagParsers = this.options_.customTagParsers || [];
23316 this.options_.customTagMappers = this.options_.customTagMappers || [];
23317 this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;
23318 this.options_.handlePartialData = this.options_.handlePartialData || false;
23319
23320 if (typeof this.options_.blacklistDuration !== 'number') {
23321 this.options_.blacklistDuration = 5 * 60;
23322 }
23323
23324 if (typeof this.options_.bandwidth !== 'number') {
23325 if (this.options_.useBandwidthFromLocalStorage) {
23326 var storedObject = getVhsLocalStorage();
23327
23328 if (storedObject && storedObject.bandwidth) {
23329 this.options_.bandwidth = storedObject.bandwidth;
23330 this.tech_.trigger({
23331 type: 'usage',
23332 name: 'vhs-bandwidth-from-local-storage'
23333 });
23334 this.tech_.trigger({
23335 type: 'usage',
23336 name: 'hls-bandwidth-from-local-storage'
23337 });
23338 }
23339
23340 if (storedObject && storedObject.throughput) {
23341 this.options_.throughput = storedObject.throughput;
23342 this.tech_.trigger({
23343 type: 'usage',
23344 name: 'vhs-throughput-from-local-storage'
23345 });
23346 this.tech_.trigger({
23347 type: 'usage',
23348 name: 'hls-throughput-from-local-storage'
23349 });
23350 }
23351 }
23352 } // if bandwidth was not set by options or pulled from local storage, start playlist
23353 // selection at a reasonable bandwidth
23354
23355
23356 if (typeof this.options_.bandwidth !== 'number') {
23357 this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
23358 } // If the bandwidth number is unchanged from the initial setting
23359 // then this takes precedence over the enableLowInitialPlaylist option
23360
23361
23362 this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src
23363
23364 ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys', 'handlePartialData', 'playlistSelector', 'initialPlaylistSelector', 'experimentalBufferBasedABR', 'liveRangeSafeTimeDelta'].forEach(function (option) {
23365 if (typeof _this2.source_[option] !== 'undefined') {
23366 _this2.options_[option] = _this2.source_[option];
23367 }
23368 });
23369 this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
23370 this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
23371 }
23372 /**
23373 * called when player.src gets called, handle a new source
23374 *
23375 * @param {Object} src the source object to handle
23376 */
23377 ;
23378
23379 _proto.src = function src(_src, type) {
23380 var _this3 = this;
23381
23382 // do nothing if the src is falsy
23383 if (!_src) {
23384 return;
23385 }
23386
23387 this.setOptions_(); // add master playlist controller options
23388
23389 this.options_.src = expandDataUri(this.source_.src);
23390 this.options_.tech = this.tech_;
23391 this.options_.externVhs = Vhs$1;
23392 this.options_.sourceType = mediaTypes_js.simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech
23393
23394 this.options_.seekTo = function (time) {
23395 _this3.tech_.setCurrentTime(time);
23396 };
23397
23398 this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
23399 var playbackWatcherOptions = videojs__default['default'].mergeOptions({
23400 liveRangeSafeTimeDelta: SAFE_TIME_DELTA
23401 }, this.options_, {
23402 seekable: function seekable() {
23403 return _this3.seekable();
23404 },
23405 media: function media() {
23406 return _this3.masterPlaylistController_.media();
23407 },
23408 masterPlaylistController: this.masterPlaylistController_
23409 });
23410 this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
23411 this.masterPlaylistController_.on('error', function () {
23412 var player = videojs__default['default'].players[_this3.tech_.options_.playerId];
23413 var error = _this3.masterPlaylistController_.error;
23414
23415 if (typeof error === 'object' && !error.code) {
23416 error.code = 3;
23417 } else if (typeof error === 'string') {
23418 error = {
23419 message: error,
23420 code: 3
23421 };
23422 }
23423
23424 player.error(error);
23425 });
23426 var defaultSelector = this.options_.experimentalBufferBasedABR ? Vhs$1.movingAverageBandwidthSelector(0.55) : Vhs$1.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
23427 // compatibility with < v2
23428
23429 this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
23430 this.masterPlaylistController_.selectInitialPlaylist = Vhs$1.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2
23431
23432 this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
23433 this.mediaSource = this.masterPlaylistController_.mediaSource; // Proxy assignment of some properties to the master playlist
23434 // controller. Using a custom property for backwards compatibility
23435 // with < v2
23436
23437 Object.defineProperties(this, {
23438 selectPlaylist: {
23439 get: function get() {
23440 return this.masterPlaylistController_.selectPlaylist;
23441 },
23442 set: function set(selectPlaylist) {
23443 this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
23444 }
23445 },
23446 throughput: {
23447 get: function get() {
23448 return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
23449 },
23450 set: function set(throughput) {
23451 this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
23452 // for the cumulative average
23453
23454 this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
23455 }
23456 },
23457 bandwidth: {
23458 get: function get() {
23459 return this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
23460 },
23461 set: function set(bandwidth) {
23462 this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
23463 // `count` is set to zero so that the current value of `rate` isn't included
23464 // in the cumulative average
23465
23466 this.masterPlaylistController_.mainSegmentLoader_.throughput = {
23467 rate: 0,
23468 count: 0
23469 };
23470 }
23471 },
23472
23473 /**
23474 * `systemBandwidth` is a combination of two serial processes' bitrates. The first
23475 * is the network bitrate provided by `bandwidth` and the second is the bitrate of
23476 * the entire process after that - decryption, transmuxing, and appending - provided
23477 * by `throughput`.
23478 *
23479 * Since the two processes are serial, the overall system bandwidth is given by:
23480 * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
23481 */
23482 systemBandwidth: {
23483 get: function get() {
23484 var invBandwidth = 1 / (this.bandwidth || 1);
23485 var invThroughput;
23486
23487 if (this.throughput > 0) {
23488 invThroughput = 1 / this.throughput;
23489 } else {
23490 invThroughput = 0;
23491 }
23492
23493 var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
23494 return systemBitrate;
23495 },
23496 set: function set() {
23497 videojs__default['default'].log.error('The "systemBandwidth" property is read-only');
23498 }
23499 }
23500 });
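// Worked example (editor's addition) of the serial-bitrate formula above:
// with bandwidth = 2,000,000 b/s and throughput = 1,000,000 b/s,
// systemBandwidth = 1 / (1/2e6 + 1/1e6) = 1 / 0.0000015 ≈ 666,666 b/s.
// The combined rate is below both inputs because the stages run in series.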
23501
23502 if (this.options_.bandwidth) {
23503 this.bandwidth = this.options_.bandwidth;
23504 }
23505
23506 if (this.options_.throughput) {
23507 this.throughput = this.options_.throughput;
23508 }
23509
23510 Object.defineProperties(this.stats, {
23511 bandwidth: {
23512 get: function get() {
23513 return _this3.bandwidth || 0;
23514 },
23515 enumerable: true
23516 },
23517 mediaRequests: {
23518 get: function get() {
23519 return _this3.masterPlaylistController_.mediaRequests_() || 0;
23520 },
23521 enumerable: true
23522 },
23523 mediaRequestsAborted: {
23524 get: function get() {
23525 return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
23526 },
23527 enumerable: true
23528 },
23529 mediaRequestsTimedout: {
23530 get: function get() {
23531 return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
23532 },
23533 enumerable: true
23534 },
23535 mediaRequestsErrored: {
23536 get: function get() {
23537 return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
23538 },
23539 enumerable: true
23540 },
23541 mediaTransferDuration: {
23542 get: function get() {
23543 return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
23544 },
23545 enumerable: true
23546 },
23547 mediaBytesTransferred: {
23548 get: function get() {
23549 return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
23550 },
23551 enumerable: true
23552 },
23553 mediaSecondsLoaded: {
23554 get: function get() {
23555 return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
23556 },
23557 enumerable: true
23558 },
23559 buffered: {
23560 get: function get() {
23561 return timeRangesToArray(_this3.tech_.buffered());
23562 },
23563 enumerable: true
23564 },
23565 currentTime: {
23566 get: function get() {
23567 return _this3.tech_.currentTime();
23568 },
23569 enumerable: true
23570 },
23571 currentSource: {
23572 get: function get() {
23573 return _this3.tech_.currentSource_;
23574 },
23575 enumerable: true
23576 },
23577 currentTech: {
23578 get: function get() {
23579 return _this3.tech_.name_;
23580 },
23581 enumerable: true
23582 },
23583 duration: {
23584 get: function get() {
23585 return _this3.tech_.duration();
23586 },
23587 enumerable: true
23588 },
23589 master: {
23590 get: function get() {
23591 return _this3.playlists.master;
23592 },
23593 enumerable: true
23594 },
23595 playerDimensions: {
23596 get: function get() {
23597 return _this3.tech_.currentDimensions();
23598 },
23599 enumerable: true
23600 },
23601 seekable: {
23602 get: function get() {
23603 return timeRangesToArray(_this3.tech_.seekable());
23604 },
23605 enumerable: true
23606 },
23607 timestamp: {
23608 get: function get() {
23609 return Date.now();
23610 },
23611 enumerable: true
23612 },
23613 videoPlaybackQuality: {
23614 get: function get() {
23615 return _this3.tech_.getVideoPlaybackQuality();
23616 },
23617 enumerable: true
23618 }
23619 });
23620 this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
23621 this.tech_.on('bandwidthupdate', function () {
23622 if (_this3.options_.useBandwidthFromLocalStorage) {
23623 updateVhsLocalStorage({
23624 bandwidth: _this3.bandwidth,
23625 throughput: Math.round(_this3.throughput)
23626 });
23627 }
23628 });
23629 this.masterPlaylistController_.on('selectedinitialmedia', function () {
23630 // Add the manual rendition mix-in to VhsHandler
23631 renditionSelectionMixin(_this3);
23632 });
23633 this.masterPlaylistController_.sourceUpdater_.on('createdsourcebuffers', function () {
23634 _this3.setupEme_();
23635 }); // the bandwidth of the primary segment loader is our best
23636 // estimate of overall bandwidth
23637
23638 this.on(this.masterPlaylistController_, 'progress', function () {
23639 this.tech_.trigger('progress');
23640 }); // In the live case, we need to ignore the very first `seeking` event since
23641 // that will be the result of the seek-to-live behavior
23642
23643 this.on(this.masterPlaylistController_, 'firstplay', function () {
23644 this.ignoreNextSeekingEvent_ = true;
23645 });
23646 this.setupQualityLevels_(); // do nothing if the tech has been disposed already
23647 // this can occur if someone sets the src in player.ready(), for instance
23648
23649 if (!this.tech_.el()) {
23650 return;
23651 }
23652
23653 this.mediaSourceUrl_ = window__default['default'].URL.createObjectURL(this.masterPlaylistController_.mediaSource);
23654 this.tech_.src(this.mediaSourceUrl_);
23655 }
23656 /**
23657 * If necessary and EME is available, sets up EME options and waits for key session
23658 * creation.
23659 *
23660 * This function also updates the source updater so that it can be used, as for some
23661 * browsers, EME must be configured before content is appended (if appending unencrypted
23662 * content before encrypted content).
23663 */
23664 ;
23665
23666 _proto.setupEme_ = function setupEme_() {
23667 var _this4 = this;
23668
23669 var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
23670 var didSetupEmeOptions = setupEmeOptions({
23671 player: this.player_,
23672 sourceKeySystems: this.source_.keySystems,
23673 media: this.playlists.media(),
23674 audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
23675 }); // In IE11 this is too early to initialize media keys, and IE11 does not support
23676 // promises.
23677
23678 if (videojs__default['default'].browser.IE_VERSION === 11 || !didSetupEmeOptions) {
23679 // If EME options were not set up, we've done all we could to initialize EME.
23680 this.masterPlaylistController_.sourceUpdater_.initializedEme();
23681 return;
23682 }
23683
23684 this.logger_('waiting for EME key session creation');
23685 waitForKeySessionCreation({
23686 player: this.player_,
23687 sourceKeySystems: this.source_.keySystems,
23688 audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),
23689 mainPlaylists: this.playlists.master.playlists
23690 }).then(function () {
23691 _this4.logger_('created EME key session');
23692
23693 _this4.masterPlaylistController_.sourceUpdater_.initializedEme();
23694 }).catch(function (err) {
23695 _this4.logger_('error while creating EME key session', err);
23696
23697 _this4.player_.error({
23698 message: 'Failed to initialize media keys for EME',
23699 code: 3
23700 });
23701 });
23702 }
23703 /**
23704 * Initializes the quality levels and sets listeners to update them.
23705 *
23706 * @method setupQualityLevels_
23707 * @private
23708 */
23709 ;
23710
23711 _proto.setupQualityLevels_ = function setupQualityLevels_() {
23712 var _this5 = this;
23713
23714 var player = videojs__default['default'].players[this.tech_.options_.playerId]; // if there isn't a player or there isn't a qualityLevels plugin
23715 // or qualityLevels_ listeners have already been setup, do nothing.
23716
23717 if (!player || !player.qualityLevels || this.qualityLevels_) {
23718 return;
23719 }
23720
23721 this.qualityLevels_ = player.qualityLevels();
23722 this.masterPlaylistController_.on('selectedinitialmedia', function () {
23723 handleVhsLoadedMetadata(_this5.qualityLevels_, _this5);
23724 });
23725 this.playlists.on('mediachange', function () {
23726 handleVhsMediaChange(_this5.qualityLevels_, _this5.playlists);
23727 });
23728 }
23729 /**
23730 * return the version
23731 */
23732 ;
23733
23734 VhsHandler.version = function version$5() {
23735 return {
23736 '@videojs/http-streaming': version,
23737 'mux.js': version$1,
23738 'mpd-parser': version$2,
23739 'm3u8-parser': version$3,
23740 'aes-decrypter': version$4
23741 };
23742 }
23743 /**
23744 * return the version (an instance wrapper around the static method)
23745 */
23746 ;
23747
23748 _proto.version = function version() {
23749 return this.constructor.version();
23750 };
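// Example return shape (editor's addition), built from the version constants
// declared near the top of this bundle:
//
//   VhsHandler.version();
//   // => {
//   //   '@videojs/http-streaming': '2.6.1',
//   //   'mux.js': '5.9.1',
//   //   'mpd-parser': '0.15.2',
//   //   'm3u8-parser': '4.5.2',
//   //   'aes-decrypter': '3.1.2'
//   // }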
23751
23752 _proto.canChangeType = function canChangeType() {
23753 return SourceUpdater.canChangeType();
23754 }
23755 /**
23756 * Begin playing the video.
23757 */
23758 ;
23759
23760 _proto.play = function play() {
23761 this.masterPlaylistController_.play();
23762 }
23763 /**
23764 * a wrapper around the function in MasterPlaylistController
23765 */
23766 ;
23767
23768 _proto.setCurrentTime = function setCurrentTime(currentTime) {
23769 this.masterPlaylistController_.setCurrentTime(currentTime);
23770 }
23771 /**
23772 * a wrapper around the function in MasterPlaylistController
23773 */
23774 ;
23775
23776 _proto.duration = function duration() {
23777 return this.masterPlaylistController_.duration();
23778 }
23779 /**
23780 * a wrapper around the function in MasterPlaylistController
23781 */
23782 ;
23783
23784 _proto.seekable = function seekable() {
23785 return this.masterPlaylistController_.seekable();
23786 }
23787 /**
23788 * Abort all outstanding work and cleanup.
23789 */
23790 ;
23791
23792 _proto.dispose = function dispose() {
23793 if (this.playbackWatcher_) {
23794 this.playbackWatcher_.dispose();
23795 }
23796
23797 if (this.masterPlaylistController_) {
23798 this.masterPlaylistController_.dispose();
23799 }
23800
23801 if (this.qualityLevels_) {
23802 this.qualityLevels_.dispose();
23803 }
23804
23805 if (this.player_) {
23806 delete this.player_.vhs;
23807 delete this.player_.dash;
23808 delete this.player_.hls;
23809 }
23810
23811 if (this.tech_ && this.tech_.vhs) {
23812 delete this.tech_.vhs;
23813 } // don't check this.tech_.hls as it will log a deprecated warning
23814
23815
23816 if (this.tech_) {
23817 delete this.tech_.hls;
23818 }
23819
23820 if (this.mediaSourceUrl_ && window__default['default'].URL.revokeObjectURL) {
23821 window__default['default'].URL.revokeObjectURL(this.mediaSourceUrl_);
23822 this.mediaSourceUrl_ = null;
23823 }
23824
23825 _Component.prototype.dispose.call(this);
23826 };
23827
23828 _proto.convertToProgramTime = function convertToProgramTime(time, callback) {
23829 return getProgramTime({
23830 playlist: this.masterPlaylistController_.media(),
23831 time: time,
23832 callback: callback
23833 });
23834 } // the player must be playing before calling this
23835 ;
23836
23837 _proto.seekToProgramTime = function seekToProgramTime$1(programTime, callback, pauseAfterSeek, retryCount) {
23838 if (pauseAfterSeek === void 0) {
23839 pauseAfterSeek = true;
23840 }
23841
23842 if (retryCount === void 0) {
23843 retryCount = 2;
23844 }
23845
23846 return seekToProgramTime({
23847 programTime: programTime,
23848 playlist: this.masterPlaylistController_.media(),
23849 retryCount: retryCount,
23850 pauseAfterSeek: pauseAfterSeek,
23851 seekTo: this.options_.seekTo,
23852 tech: this.options_.tech,
23853 callback: callback
23854 });
23855 };
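// Usage sketch (editor's addition): seeking to a wall-clock program time once
// playback has begun. A Node-style (err, newTime) callback is assumed here,
// and the ISO timestamp is illustrative.
//
//   player.tech().vhs.seekToProgramTime('2021-04-01T00:00:30.000Z', function (err, newTime) {
//     if (err) {
//       return; // e.g. the requested time is outside the seekable window
//     }
//     // newTime is the player time that was actually seeked to
//   });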
23856
23857 return VhsHandler;
23858}(Component);
23859/**
23860 * The Source Handler object, which informs video.js what additional
23861 * MIME types are supported and sets up playback. It is registered
23862 * automatically to the appropriate tech based on the capabilities of
23863 * the browser it is running in. It is not necessary to use or modify
23864 * this object in normal usage.
23865 */
23866
23867
23868var VhsSourceHandler = {
23869 name: 'videojs-http-streaming',
23870 VERSION: version,
23871 canHandleSource: function canHandleSource(srcObj, options) {
23872 if (options === void 0) {
23873 options = {};
23874 }
23875
23876 var localOptions = videojs__default['default'].mergeOptions(videojs__default['default'].options, options);
23877 return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
23878 },
23879 handleSource: function handleSource(source, tech, options) {
23880 if (options === void 0) {
23881 options = {};
23882 }
23883
23884 var localOptions = videojs__default['default'].mergeOptions(videojs__default['default'].options, options);
23885 tech.vhs = new VhsHandler(source, tech, localOptions);
23886
23887 if (!tech.hasOwnProperty('hls')) {
23888 Object.defineProperty(tech, 'hls', {
23889 get: function get() {
23890 videojs__default['default'].log.warn('player.tech().hls is deprecated. Use player.tech().vhs instead.');
23891 return tech.vhs;
23892 },
23893 configurable: true
23894 });
23895 }
23896
23897 tech.vhs.xhr = xhrFactory();
23898 tech.vhs.src(source.src, source.type);
23899 return tech.vhs;
23900 },
23901 canPlayType: function canPlayType(type, options) {
23902 if (options === void 0) {
23903 options = {};
23904 }
23905
23906 var _videojs$mergeOptions = videojs__default['default'].mergeOptions(videojs__default['default'].options, options),
23907 _videojs$mergeOptions2 = _videojs$mergeOptions.vhs.overrideNative,
23908 overrideNative = _videojs$mergeOptions2 === void 0 ? !videojs__default['default'].browser.IS_ANY_SAFARI : _videojs$mergeOptions2;
23909
23910 var supportedType = mediaTypes_js.simpleTypeFromSourceType(type);
23911 var canUseMsePlayback = supportedType && (!Vhs$1.supportsTypeNatively(supportedType) || overrideNative);
23912 return canUseMsePlayback ? 'maybe' : '';
23913 }
23914};
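// Example (editor's addition): how the source handler answers canPlayType.
// Results depend on overrideNative and native support, so these are typical
// values for a Chrome-like browser with MSE available.
//
//   VhsSourceHandler.canPlayType('application/x-mpegURL'); // 'maybe'
//   VhsSourceHandler.canPlayType('application/dash+xml');  // 'maybe'
//   VhsSourceHandler.canPlayType('video/mp4');             // '' (not a VHS type)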
23915/**
23916 * Check to see if the native MediaSource object exists and supports
23917 * an MP4 container with both H.264 video and AAC-LC audio.
23918 *
23919 * @return {boolean} if native media sources are supported
23920 */
23921
23922var supportsNativeMediaSources = function supportsNativeMediaSources() {
23923 return codecs_js.browserSupportsCodec('avc1.4d400d,mp4a.40.2');
23924}; // register source handlers with the appropriate techs
23925
23926
23927if (supportsNativeMediaSources()) {
23928 videojs__default['default'].getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
23929}
23930
23931videojs__default['default'].VhsHandler = VhsHandler;
23932Object.defineProperty(videojs__default['default'], 'HlsHandler', {
23933 get: function get() {
23934 videojs__default['default'].log.warn('videojs.HlsHandler is deprecated. Use videojs.VhsHandler instead.');
23935 return VhsHandler;
23936 },
23937 configurable: true
23938});
23939videojs__default['default'].VhsSourceHandler = VhsSourceHandler;
23940Object.defineProperty(videojs__default['default'], 'HlsSourceHandler', {
23941 get: function get() {
23942 videojs__default['default'].log.warn('videojs.HlsSourceHandler is deprecated. ' + 'Use videojs.VhsSourceHandler instead.');
23943 return VhsSourceHandler;
23944 },
23945 configurable: true
23946});
23947videojs__default['default'].Vhs = Vhs$1;
23948Object.defineProperty(videojs__default['default'], 'Hls', {
23949 get: function get() {
23950 videojs__default['default'].log.warn('videojs.Hls is deprecated. Use videojs.Vhs instead.');
23951 return Vhs$1;
23952 },
23953 configurable: true
23954});
23955
23956if (!videojs__default['default'].use) {
23957 videojs__default['default'].registerComponent('Hls', Vhs$1);
23958 videojs__default['default'].registerComponent('Vhs', Vhs$1);
23959}
23960
23961videojs__default['default'].options.vhs = videojs__default['default'].options.vhs || {};
23962videojs__default['default'].options.hls = videojs__default['default'].options.hls || {};
23963
23964if (videojs__default['default'].registerPlugin) {
23965 videojs__default['default'].registerPlugin('reloadSourceOnError', reloadSourceOnError);
23966} else {
23967 videojs__default['default'].plugin('reloadSourceOnError', reloadSourceOnError);
23968}
23969
23970Object.defineProperty(exports, 'simpleTypeFromSourceType', {
23971 enumerable: true,
23972 get: function () {
23973 return mediaTypes_js.simpleTypeFromSourceType;
23974 }
23975});
23976exports.LOCAL_STORAGE_KEY = LOCAL_STORAGE_KEY;
23977exports.Vhs = Vhs$1;
23978exports.VhsHandler = VhsHandler;
23979exports.VhsSourceHandler = VhsSourceHandler;
23980exports.emeKeySystems = emeKeySystems;
23981exports.expandDataUri = expandDataUri;
23982exports.getAllPsshKeySystemsOptions = getAllPsshKeySystemsOptions;
23983exports.setupEmeOptions = setupEmeOptions;
23984exports.waitForKeySessionCreation = waitForKeySessionCreation;