/*! @name @videojs/http-streaming @version 2.16.0 @license Apache-2.0 */
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

var _assertThisInitialized = require('@babel/runtime/helpers/assertThisInitialized');
var _inheritsLoose = require('@babel/runtime/helpers/inheritsLoose');
var document = require('global/document');
var window$1 = require('global/window');
var _resolveUrl = require('@videojs/vhs-utils/cjs/resolve-url.js');
var videojs = require('video.js');
var _extends = require('@babel/runtime/helpers/extends');
var m3u8Parser = require('m3u8-parser');
var codecs_js = require('@videojs/vhs-utils/cjs/codecs.js');
var mediaTypes_js = require('@videojs/vhs-utils/cjs/media-types.js');
var byteHelpers = require('@videojs/vhs-utils/cjs/byte-helpers');
var mpdParser = require('mpd-parser');
var parseSidx = require('mux.js/lib/tools/parse-sidx');
var id3Helpers = require('@videojs/vhs-utils/cjs/id3-helpers');
var containers = require('@videojs/vhs-utils/cjs/containers');
var clock = require('mux.js/lib/utils/clock');
var _wrapNativeSuper = require('@babel/runtime/helpers/wrapNativeSuper');

function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }

var _assertThisInitialized__default = /*#__PURE__*/_interopDefaultLegacy(_assertThisInitialized);
var _inheritsLoose__default = /*#__PURE__*/_interopDefaultLegacy(_inheritsLoose);
var document__default = /*#__PURE__*/_interopDefaultLegacy(document);
var window__default = /*#__PURE__*/_interopDefaultLegacy(window$1);
var _resolveUrl__default = /*#__PURE__*/_interopDefaultLegacy(_resolveUrl);
var videojs__default = /*#__PURE__*/_interopDefaultLegacy(videojs);
var _extends__default = /*#__PURE__*/_interopDefaultLegacy(_extends);
var parseSidx__default = /*#__PURE__*/_interopDefaultLegacy(parseSidx);
var _wrapNativeSuper__default = /*#__PURE__*/_interopDefaultLegacy(_wrapNativeSuper);
/**
 * @file resolve-url.js - Handling how URLs are resolved and manipulated
 */
var resolveUrl = _resolveUrl__default["default"];
/**
 * Checks whether an xhr request was redirected and returns the correct url
 * depending on the `handleManifestRedirects` option
 *
 * @api private
 *
 * @param {string} url - the url being requested
 * @param {XMLHttpRequest} req - the xhr request result
 *
 * @return {string}
 */

var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
  // To understand how the responseURL below is set and generated:
  // - https://fetch.spec.whatwg.org/#concept-response-url
  // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
  if (handleManifestRedirect && req && req.responseURL && url !== req.responseURL) {
    return req.responseURL;
  }

  return url;
};
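// Illustrative usage (hypothetical values, not part of the library): when
// redirect handling is enabled and the request was redirected, the final
// responseURL wins over the originally requested url.
//
//   resolveManifestRedirect(true, 'https://a.example/main.m3u8', {
//     responseURL: 'https://b.example/main.m3u8'
//   });
//   // => 'https://b.example/main.m3u8'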

var logger = function logger(source) {
  if (videojs__default["default"].log.debug) {
    return videojs__default["default"].log.debug.bind(videojs__default["default"], 'VHS:', source + " >");
  }

  return function () {};
};

/**
 * ranges
 *
 * Utilities for working with TimeRanges.
 *
 */

var TIME_FUDGE_FACTOR = 1 / 30; // Comparisons between time values such as current time and the end of the buffered range
// can be misleading because of precision differences or when the current media has poorly
// aligned audio and video, which can cause values to be slightly off from what you would
// expect. This value is what we consider to be safe to use in such comparisons to account
// for these scenarios.

var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;

var filterRanges = function filterRanges(timeRanges, predicate) {
  var results = [];
  var i;

  if (timeRanges && timeRanges.length) {
    // Search for ranges that match the predicate
    for (i = 0; i < timeRanges.length; i++) {
      if (predicate(timeRanges.start(i), timeRanges.end(i))) {
        results.push([timeRanges.start(i), timeRanges.end(i)]);
      }
    }
  }

  return videojs__default["default"].createTimeRanges(results);
};
/**
 * Attempts to find the buffered TimeRange that contains the specified
 * time.
 *
 * @param {TimeRanges} buffered - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @return {TimeRanges} a new TimeRanges object
 */


var findRange = function findRange(buffered, time) {
  return filterRanges(buffered, function (start, end) {
    return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;
  });
};
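// Illustrative (hypothetical values): with [[0, 10], [15, 20]] buffered,
// t = 16 falls inside the second range.
//
//   var buffered = videojs__default["default"].createTimeRanges([[0, 10], [15, 20]]);
//   findRange(buffered, 16); // => a TimeRanges containing only [15, 20]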
/**
 * Returns the TimeRanges that begin later than the specified time.
 *
 * @param {TimeRanges} timeRanges - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @return {TimeRanges} a new TimeRanges object.
 */

var findNextRange = function findNextRange(timeRanges, time) {
  return filterRanges(timeRanges, function (start) {
    return start - TIME_FUDGE_FACTOR >= time;
  });
};
/**
 * Returns gaps within a list of TimeRanges
 *
 * @param {TimeRanges} buffered - the TimeRanges object
 * @return {TimeRanges} a TimeRanges object of gaps
 */

var findGaps = function findGaps(buffered) {
  if (buffered.length < 2) {
    return videojs__default["default"].createTimeRanges();
  }

  var ranges = [];

  for (var i = 1; i < buffered.length; i++) {
    var start = buffered.end(i - 1);
    var end = buffered.start(i);
    ranges.push([start, end]);
  }

  return videojs__default["default"].createTimeRanges(ranges);
};
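// Illustrative (hypothetical values): two buffered ranges produce one gap
// between them.
//
//   findGaps(videojs__default["default"].createTimeRanges([[0, 10], [15, 20]]));
//   // => a TimeRanges containing only [10, 15]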
/**
 * Calculate the intersection of two TimeRanges
 *
 * @param {TimeRanges} bufferA
 * @param {TimeRanges} bufferB
 * @return {TimeRanges} The intersection of `bufferA` with `bufferB`
 */

var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
  var start = null;
  var end = null;
  var arity = 0;
  var extents = [];
  var ranges = [];

  if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
    return videojs__default["default"].createTimeRange();
  } // Handle the case where we have both buffers and create an
  // intersection of the two


  var count = bufferA.length; // A) Gather up all start and end times

  while (count--) {
    extents.push({
      time: bufferA.start(count),
      type: 'start'
    });
    extents.push({
      time: bufferA.end(count),
      type: 'end'
    });
  }

  count = bufferB.length;

  while (count--) {
    extents.push({
      time: bufferB.start(count),
      type: 'start'
    });
    extents.push({
      time: bufferB.end(count),
      type: 'end'
    });
  } // B) Sort them by time


  extents.sort(function (a, b) {
    return a.time - b.time;
  }); // C) Go along one by one incrementing arity for start and decrementing
  // arity for ends

  for (count = 0; count < extents.length; count++) {
    if (extents[count].type === 'start') {
      arity++; // D) If arity is ever incremented to 2 we are entering an
      // overlapping range

      if (arity === 2) {
        start = extents[count].time;
      }
    } else if (extents[count].type === 'end') {
      arity--; // E) If arity is ever decremented to 1 we are leaving an
      // overlapping range

      if (arity === 1) {
        end = extents[count].time;
      }
    } // F) Record overlapping ranges


    if (start !== null && end !== null) {
      ranges.push([start, end]);
      start = null;
      end = null;
    }
  }

  return videojs__default["default"].createTimeRanges(ranges);
};
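// Illustrative (hypothetical values): overlapping ranges intersect on the
// shared interval.
//
//   bufferIntersection(
//     videojs__default["default"].createTimeRanges([[0, 10]]),
//     videojs__default["default"].createTimeRanges([[5, 15]])
//   );
//   // => a TimeRanges containing only [5, 10]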
/**
 * Gets a human readable string for a TimeRange
 *
 * @param {TimeRange} range
 * @return {string} a human readable string
 */

var printableRange = function printableRange(range) {
  var strArr = [];

  if (!range || !range.length) {
    return '';
  }

  for (var i = 0; i < range.length; i++) {
    strArr.push(range.start(i) + ' => ' + range.end(i));
  }

  return strArr.join(', ');
};
/**
 * Calculates the amount of time left in seconds until the player hits the end of the
 * buffer and causes a rebuffer
 *
 * @param {TimeRange} buffered
 *        The state of the buffer
 * @param {number} currentTime
 *        The current time of the player
 * @param {number} playbackRate
 *        The current playback rate of the player. Defaults to 1.
 * @return {number}
 *         Time until the player has to start rebuffering in seconds.
 * @function timeUntilRebuffer
 */

var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime, playbackRate) {
  if (playbackRate === void 0) {
    playbackRate = 1;
  }

  var bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
  return (bufferedEnd - currentTime) / playbackRate;
};
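// Illustrative (hypothetical values): 30s buffered, playhead at 12s,
// playing at 2x drains the remaining buffer twice as fast.
//
//   timeUntilRebuffer(videojs__default["default"].createTimeRanges([[0, 30]]), 12, 2);
//   // => (30 - 12) / 2 = 9 seconds until rebuffer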
/**
 * Converts a TimeRanges object into an array representation
 *
 * @param {TimeRanges} timeRanges
 * @return {Array}
 */

var timeRangesToArray = function timeRangesToArray(timeRanges) {
  var timeRangesList = [];

  for (var i = 0; i < timeRanges.length; i++) {
    timeRangesList.push({
      start: timeRanges.start(i),
      end: timeRanges.end(i)
    });
  }

  return timeRangesList;
};
/**
 * Determines if two time range objects are different.
 *
 * @param {TimeRange} a
 *        the first time range object to check
 *
 * @param {TimeRange} b
 *        the second time range object to check
 *
 * @return {Boolean}
 *         Whether the time range objects differ
 */

var isRangeDifferent = function isRangeDifferent(a, b) {
  // same object
  if (a === b) {
    return false;
  } // one or the other is undefined


  if (!a && b || !b && a) {
    return true;
  } // length is different


  if (a.length !== b.length) {
    return true;
  } // see if any start/end pair is different


  for (var i = 0; i < a.length; i++) {
    if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
      return true;
    }
  } // if the length and every pair is the same
  // this is the same time range


  return false;
};
var lastBufferedEnd = function lastBufferedEnd(a) {
  if (!a || !a.length || !a.end) {
    return;
  }

  return a.end(a.length - 1);
};
/**
 * A utility function to add up the amount of time in a timeRange
 * after a specified startTime.
 * e.g. [[0, 10], [20, 40], [50, 60]] with a startTime of 0
 * would return 40 as there are 40 seconds after 0 in the timeRange
 *
 * @param {TimeRange} range
 *        The range to check against
 * @param {number} startTime
 *        The time in the time range that you should start counting from
 *
 * @return {number}
 *          The number of seconds in the buffer past the specified time.
 */

var timeAheadOf = function timeAheadOf(range, startTime) {
  var time = 0;

  if (!range || !range.length) {
    return time;
  }

  for (var i = 0; i < range.length; i++) {
    var start = range.start(i);
    var end = range.end(i); // startTime is after this range entirely

    if (startTime > end) {
      continue;
    } // startTime is within this range


    if (startTime > start && startTime <= end) {
      time += end - startTime;
      continue;
    } // startTime is before this range.


    time += end - start;
  }

  return time;
};
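// Illustrative (hypothetical values), matching the example in the docblock:
//
//   timeAheadOf(videojs__default["default"].createTimeRanges([[0, 10], [20, 40], [50, 60]]), 0);
//   // => 10 + 20 + 10 = 40 seconds ahead of t = 0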

/**
 * @file playlist.js
 *
 * Playlist related utilities.
 */
var createTimeRange = videojs__default["default"].createTimeRange;
/**
 * Get the duration of a segment, with special cases for
 * llhls segments that do not have a duration yet.
 *
 * @param {Object} playlist
 *        the playlist that the segment belongs to.
 * @param {Object} segment
 *        the segment to get a duration for.
 *
 * @return {number}
 *          the segment duration
 */

var segmentDurationWithParts = function segmentDurationWithParts(playlist, segment) {
  // if this isn't a preload segment
  // then we will have a segment duration that is accurate.
  if (!segment.preload) {
    return segment.duration;
  } // otherwise we have to add up parts and preload hints
  // to get an up to date duration.


  var result = 0;
  (segment.parts || []).forEach(function (p) {
    result += p.duration;
  }); // for preload hints we have to use partTargetDuration
  // as they won't even have a duration yet.

  (segment.preloadHints || []).forEach(function (p) {
    if (p.type === 'PART') {
      result += playlist.partTargetDuration;
    }
  });
  return result;
};
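// Illustrative (hypothetical values): a preload segment with two 1s parts
// plus one PART preload hint, in a playlist whose partTargetDuration is 1.
//
//   segmentDurationWithParts(
//     { partTargetDuration: 1 },
//     {
//       preload: true,
//       parts: [{ duration: 1 }, { duration: 1 }],
//       preloadHints: [{ type: 'PART' }]
//     }
//   );
//   // => 1 + 1 + 1 = 3 seconds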
/**
 * A function to get a combined list of parts and segments with durations
 * and indexes.
 *
 * @param {Playlist} playlist the playlist to get the list for.
 *
 * @return {Array} The part/segment list.
 */

var getPartsAndSegments = function getPartsAndSegments(playlist) {
  return (playlist.segments || []).reduce(function (acc, segment, si) {
    if (segment.parts) {
      segment.parts.forEach(function (part, pi) {
        acc.push({
          duration: part.duration,
          segmentIndex: si,
          partIndex: pi,
          part: part,
          segment: segment
        });
      });
    } else {
      acc.push({
        duration: segment.duration,
        segmentIndex: si,
        partIndex: null,
        segment: segment,
        part: null
      });
    }

    return acc;
  }, []);
};
var getLastParts = function getLastParts(media) {
  var lastSegment = media.segments && media.segments.length && media.segments[media.segments.length - 1];
  return lastSegment && lastSegment.parts || [];
};
var getKnownPartCount = function getKnownPartCount(_ref) {
  var preloadSegment = _ref.preloadSegment;

  if (!preloadSegment) {
    return;
  }

  var parts = preloadSegment.parts,
      preloadHints = preloadSegment.preloadHints;
  var partCount = (preloadHints || []).reduce(function (count, hint) {
    return count + (hint.type === 'PART' ? 1 : 0);
  }, 0);
  partCount += parts && parts.length ? parts.length : 0;
  return partCount;
};
/**
 * Get the number of seconds to delay from the end of a
 * live playlist.
 *
 * @param {Playlist} master the master playlist
 * @param {Playlist} media the media playlist
 * @return {number} the hold back in seconds.
 */

var liveEdgeDelay = function liveEdgeDelay(master, media) {
  if (media.endList) {
    return 0;
  } // dash suggestedPresentationDelay trumps everything


  if (master && master.suggestedPresentationDelay) {
    return master.suggestedPresentationDelay;
  }

  var hasParts = getLastParts(media).length > 0; // look for "part" delays from ll-hls first

  if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
    return media.serverControl.partHoldBack;
  } else if (hasParts && media.partTargetDuration) {
    return media.partTargetDuration * 3; // finally look for full segment delays
  } else if (media.serverControl && media.serverControl.holdBack) {
    return media.serverControl.holdBack;
  } else if (media.targetDuration) {
    return media.targetDuration * 3;
  }

  return 0;
};
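// Illustrative (hypothetical playlist): a live playlist with no server
// control and no parts falls through to three target durations of hold back.
//
//   liveEdgeDelay(null, { endList: false, targetDuration: 6 });
//   // => 6 * 3 = 18 seconds behind the live edge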
/**
 * walk backward until we find a duration we can use
 * or return a failure
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {Number} endSequence the mediaSequence to stop walking on
 */

var backwardDuration = function backwardDuration(playlist, endSequence) {
  var result = 0;
  var i = endSequence - playlist.mediaSequence; // if a start time is available for segment immediately following
  // the interval, use it

  var segment = playlist.segments[i]; // Walk backward until we find the latest segment with timeline
  // information that is earlier than endSequence

  if (segment) {
    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start,
        precise: true
      };
    }

    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - segment.duration,
        precise: true
      };
    }
  }

  while (i--) {
    segment = playlist.segments[i];

    if (typeof segment.end !== 'undefined') {
      return {
        result: result + segment.end,
        precise: true
      };
    }

    result += segmentDurationWithParts(playlist, segment);

    if (typeof segment.start !== 'undefined') {
      return {
        result: result + segment.start,
        precise: true
      };
    }
  }

  return {
    result: result,
    precise: false
  };
};
/**
 * walk forward until we find a duration we can use
 * or return a failure
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {number} endSequence the mediaSequence to stop walking on
 */


var forwardDuration = function forwardDuration(playlist, endSequence) {
  var result = 0;
  var segment;
  var i = endSequence - playlist.mediaSequence; // Walk forward until we find the earliest segment with timeline
  // information

  for (; i < playlist.segments.length; i++) {
    segment = playlist.segments[i];

    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start - result,
        precise: true
      };
    }

    result += segmentDurationWithParts(playlist, segment);

    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - result,
        precise: true
      };
    }
  } // indicate we didn't find a useful duration estimate


  return {
    result: -1,
    precise: false
  };
};
/**
 * Calculate the media duration from the segments associated with a
 * playlist. The duration of a subinterval of the available segments
 * may be calculated by specifying an end index.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper boundary
 * for the playlist. Defaults to playlist length.
 * @param {number} expired the amount of time that has dropped
 * off the front of the playlist in a live scenario
 * @return {number} the duration between the first available segment
 * and end index.
 */


var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
  if (typeof endSequence === 'undefined') {
    endSequence = playlist.mediaSequence + playlist.segments.length;
  }

  if (endSequence < playlist.mediaSequence) {
    return 0;
  } // do a backward walk to estimate the duration


  var backward = backwardDuration(playlist, endSequence);

  if (backward.precise) {
    // if we were able to base our duration estimate on timing
    // information provided directly from the Media Source, return
    // it
    return backward.result;
  } // walk forward to see if a precise duration estimate can be made
  // that way


  var forward = forwardDuration(playlist, endSequence);

  if (forward.precise) {
    // we found a segment that has been buffered and so its
    // position is known precisely
    return forward.result;
  } // return the less-precise, playlist-based duration estimate


  return backward.result + expired;
};
/**
 * Calculates the duration of a playlist. If a start and end index
 * are specified, the duration will be for the subset of the media
 * timeline between those two indices. The total duration for live
 * playlists is always Infinity.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper
 * boundary for the playlist. Defaults to the playlist media
 * sequence number plus its length.
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @return {number} the duration between the start index and end
 * index.
 */


var duration = function duration(playlist, endSequence, expired) {
  if (!playlist) {
    return 0;
  }

  if (typeof expired !== 'number') {
    expired = 0;
  } // if a slice of the total duration is not requested, use
  // playlist-level duration indicators when they're present


  if (typeof endSequence === 'undefined') {
    // if present, use the duration specified in the playlist
    if (playlist.totalDuration) {
      return playlist.totalDuration;
    } // duration should be Infinity for live playlists


    if (!playlist.endList) {
      return window__default["default"].Infinity;
    }
  } // calculate the total duration based on the segment durations


  return intervalDuration(playlist, endSequence, expired);
};
/**
 * Calculate the time between two indexes in the current playlist.
 * Neither the start index nor the end index needs to be within the
 * current playlist, in which case the targetDuration of the playlist
 * is used to approximate the durations of the segments
 *
 * @param {Array} options.durationList list to iterate over for durations.
 * @param {number} options.defaultDuration duration to use for elements before or after the durationList
 * @param {number} options.startIndex partsAndSegments index to start
 * @param {number} options.endIndex partsAndSegments index to end.
 * @return {number} the number of seconds between startIndex and endIndex
 */

var sumDurations = function sumDurations(_ref2) {
  var defaultDuration = _ref2.defaultDuration,
      durationList = _ref2.durationList,
      startIndex = _ref2.startIndex,
      endIndex = _ref2.endIndex;
  var durations = 0;

  if (startIndex > endIndex) {
    var _ref3 = [endIndex, startIndex];
    startIndex = _ref3[0];
    endIndex = _ref3[1];
  }

  if (startIndex < 0) {
    for (var i = startIndex; i < Math.min(0, endIndex); i++) {
      durations += defaultDuration;
    }

    startIndex = 0;
  }

  for (var _i = startIndex; _i < endIndex; _i++) {
    durations += durationList[_i].duration;
  }

  return durations;
};
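// Illustrative (hypothetical values): an out-of-playlist start index is
// padded with defaultDuration before real durations are summed.
//
//   sumDurations({
//     defaultDuration: 6,
//     durationList: [{ duration: 4 }, { duration: 5 }, { duration: 6 }],
//     startIndex: -1,
//     endIndex: 2
//   });
//   // => 6 (one default) + 4 + 5 = 15 seconds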
/**
 * Calculates the playlist end time
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @param {boolean=} useSafeLiveEnd a boolean value indicating whether or not the
 * playlist end calculation should consider the safe live end
 * (truncate the playlist end by three segments). This is normally
 * used for calculating the end of the playlist's seekable range.
 * This takes into account the value of liveEdgePadding.
 * Setting liveEdgePadding to 0 is equivalent to setting this to false.
 * @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
 * If this is provided, it is used in the safe live end calculation.
 * Setting useSafeLiveEnd=false or liveEdgePadding=0 is equivalent.
 * Corresponds to suggestedPresentationDelay in DASH manifests.
 * @return {number} the end time of playlist
 * @function playlistEnd
 */

var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
  if (!playlist || !playlist.segments) {
    return null;
  }

  if (playlist.endList) {
    return duration(playlist);
  }

  if (expired === null) {
    return null;
  }

  expired = expired || 0;
  var lastSegmentEndTime = intervalDuration(playlist, playlist.mediaSequence + playlist.segments.length, expired);

  if (useSafeLiveEnd) {
    liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
    lastSegmentEndTime -= liveEdgePadding;
  } // don't return a time less than zero


  return Math.max(0, lastSegmentEndTime);
};
/**
 * Calculates the interval of time that is currently seekable in a
 * playlist. The returned time ranges are relative to the earliest
 * moment in the specified playlist that is still available. A full
 * seekable implementation for live streams would need to offset
 * these values by the duration of content that has expired from the
 * stream.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
 * Corresponds to suggestedPresentationDelay in DASH manifests.
 * @return {TimeRanges} the periods of time that are valid targets
 * for seeking
 */

var seekable = function seekable(playlist, expired, liveEdgePadding) {
  var useSafeLiveEnd = true;
  var seekableStart = expired || 0;
  var seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);

  if (seekableEnd === null) {
    return createTimeRange();
  }

  return createTimeRange(seekableStart, seekableEnd);
};
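// Illustrative (hypothetical playlist): for a completed (endList) playlist
// the seekable window runs from `expired` to the full duration; for a live
// playlist the end is pulled back from the live edge by liveEdgePadding, or
// by liveEdgeDelay(null, playlist) when no padding is given.
//
//   seekable(playlist, 0);     // VOD: => [0, duration(playlist)]
//   seekable(playlist, 0, 12); // live: end is playlistEnd minus 12s of padding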
/**
 * Determine the index and estimated starting time of the segment that
 * contains a specified playback position in a media playlist.
 *
 * @param {Object} options.playlist the media playlist to query
 * @param {number} options.currentTime The number of seconds since the earliest
 * possible position to determine the containing segment for
 * @param {number} options.startTime the time when the segment/part starts
 * @param {number} options.startingSegmentIndex the segment index to start looking at.
 * @param {number?} [options.startingPartIndex] the part index to look at within the segment.
 *
 * @return {Object} an object with partIndex, segmentIndex, and startTime.
 */

var getMediaInfoForTime = function getMediaInfoForTime(_ref4) {
  var playlist = _ref4.playlist,
      currentTime = _ref4.currentTime,
      startingSegmentIndex = _ref4.startingSegmentIndex,
      startingPartIndex = _ref4.startingPartIndex,
      startTime = _ref4.startTime,
      experimentalExactManifestTimings = _ref4.experimentalExactManifestTimings;
  var time = currentTime - startTime;
  var partsAndSegments = getPartsAndSegments(playlist);
  var startIndex = 0;

  for (var i = 0; i < partsAndSegments.length; i++) {
    var partAndSegment = partsAndSegments[i];

    if (startingSegmentIndex !== partAndSegment.segmentIndex) {
      continue;
    } // skip this if part index does not match.


    if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {
      continue;
    }

    startIndex = i;
    break;
  }

  if (time < 0) {
    // Walk backward from startIndex in the playlist, adding durations
    // until we find a segment that contains `time` and return it
    if (startIndex > 0) {
      for (var _i2 = startIndex - 1; _i2 >= 0; _i2--) {
        var _partAndSegment = partsAndSegments[_i2];
        time += _partAndSegment.duration;

        if (experimentalExactManifestTimings) {
          if (time < 0) {
            continue;
          }
        } else if (time + TIME_FUDGE_FACTOR <= 0) {
          continue;
        }

        return {
          partIndex: _partAndSegment.partIndex,
          segmentIndex: _partAndSegment.segmentIndex,
          startTime: startTime - sumDurations({
            defaultDuration: playlist.targetDuration,
            durationList: partsAndSegments,
            startIndex: startIndex,
            endIndex: _i2
          })
        };
      }
    } // We were unable to find a good segment within the playlist
    // so select the first segment


    return {
      partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
      segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
      startTime: currentTime
    };
  } // When startIndex is negative, we first walk forward to the first segment
  // adding target durations. If we "run out of time" before getting to
  // the first segment, return the first segment


  if (startIndex < 0) {
    for (var _i3 = startIndex; _i3 < 0; _i3++) {
      time -= playlist.targetDuration;

      if (time < 0) {
        return {
          partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
          segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
          startTime: currentTime
        };
      }
    }

    startIndex = 0;
  } // Walk forward from startIndex in the playlist, subtracting durations
  // until we find a segment that contains `time` and return it


  for (var _i4 = startIndex; _i4 < partsAndSegments.length; _i4++) {
    var _partAndSegment2 = partsAndSegments[_i4];
    time -= _partAndSegment2.duration;

    if (experimentalExactManifestTimings) {
      if (time > 0) {
        continue;
      }
    } else if (time - TIME_FUDGE_FACTOR >= 0) {
      continue;
    }

    return {
      partIndex: _partAndSegment2.partIndex,
      segmentIndex: _partAndSegment2.segmentIndex,
      startTime: startTime + sumDurations({
        defaultDuration: playlist.targetDuration,
        durationList: partsAndSegments,
        startIndex: startIndex,
        endIndex: _i4
      })
    };
  } // We are out of possible candidates so load the last one...


  return {
    segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
    partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
    startTime: currentTime
  };
};
/**
 * Check whether the playlist is blacklisted or not.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is blacklisted or not
 * @function isBlacklisted
 */

var isBlacklisted = function isBlacklisted(playlist) {
  return playlist.excludeUntil && playlist.excludeUntil > Date.now();
};
/**
 * Check whether the playlist is compatible with current playback configuration or has
 * been blacklisted permanently for being incompatible.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is incompatible or not
 * @function isIncompatible
 */

var isIncompatible = function isIncompatible(playlist) {
  return playlist.excludeUntil && playlist.excludeUntil === Infinity;
};
/**
 * Check whether the playlist is enabled or not.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is enabled or not
 * @function isEnabled
 */

var isEnabled = function isEnabled(playlist) {
  var blacklisted = isBlacklisted(playlist);
  return !playlist.disabled && !blacklisted;
};
/**
 * Check whether the playlist has been manually disabled through the representations api.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is disabled manually or not
 * @function isDisabled
 */

var isDisabled = function isDisabled(playlist) {
  return playlist.disabled;
};
/**
 * Returns whether the current playlist is an AES encrypted HLS stream
 *
 * @return {boolean} true if it's an AES encrypted HLS stream
 */

var isAes = function isAes(media) {
  for (var i = 0; i < media.segments.length; i++) {
    if (media.segments[i].key) {
      return true;
    }
  }

  return false;
};
/**
 * Checks if the playlist has a value for the specified attribute
 *
 * @param {string} attr
 *        Attribute to check for
 * @param {Object} playlist
 *        The media playlist object
 * @return {boolean}
 *         Whether the playlist contains a value for the attribute or not
 * @function hasAttribute
 */

var hasAttribute = function hasAttribute(attr, playlist) {
  return playlist.attributes && playlist.attributes[attr];
};
/**
 * Estimates the time required to complete a segment download from the specified playlist
 *
 * @param {number} segmentDuration
 *        Duration of requested segment
 * @param {number} bandwidth
 *        Current measured bandwidth of the player
 * @param {Object} playlist
 *        The media playlist object
 * @param {number=} bytesReceived
 *        Number of bytes already received for the request. Defaults to 0
 * @return {number|NaN}
 *         The estimated time to request the segment. NaN if bandwidth information for
 *         the given playlist is unavailable
 * @function estimateSegmentRequestTime
 */

var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist, bytesReceived) {
  if (bytesReceived === void 0) {
    bytesReceived = 0;
  }

  if (!hasAttribute('BANDWIDTH', playlist)) {
    return NaN;
  }

  var size = segmentDuration * playlist.attributes.BANDWIDTH;
  return (size - bytesReceived * 8) / bandwidth;
};
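// Illustrative (hypothetical values): a 6s segment from a 2 Mbps rendition
// over a 1 Mbps measured connection, with nothing downloaded yet.
//
//   estimateSegmentRequestTime(6, 1e6, { attributes: { BANDWIDTH: 2e6 } });
//   // => (6 * 2e6 - 0) / 1e6 = 12 seconds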
/*
 * Returns whether the current playlist is the lowest rendition
 *
 * @return {Boolean} true if on lowest rendition
 */

var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
  if (master.playlists.length === 1) {
    return true;
  }

  var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
  return master.playlists.filter(function (playlist) {
    if (!isEnabled(playlist)) {
      return false;
    }

    return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
  }).length === 0;
};
var playlistMatch = function playlistMatch(a, b) {
  // both playlists are null
  // or only one playlist is non-null
  // no match
  if (!a && !b || !a && b || a && !b) {
    return false;
  } // playlist objects are the same, match


  if (a === b) {
    return true;
  } // first try to use id as it should be the most
  // accurate


  if (a.id && b.id && a.id === b.id) {
    return true;
  } // next try to use resolvedUri as it should be the
  // second most accurate.


  if (a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri) {
    return true;
  } // finally try to use uri as it should be accurate
  // but might miss a few cases for relative uris


  if (a.uri && b.uri && a.uri === b.uri) {
    return true;
  }

  return false;
};

var someAudioVariant = function someAudioVariant(master, callback) {
  var AUDIO = master && master.mediaGroups && master.mediaGroups.AUDIO || {};
  var found = false;

  for (var groupName in AUDIO) {
    for (var label in AUDIO[groupName]) {
      found = callback(AUDIO[groupName][label]);

      if (found) {
        break;
      }
    }

    if (found) {
      break;
    }
  }

  return !!found;
};

var isAudioOnly = function isAudioOnly(master) {
  // we are audio only if we have no main playlists but do
  // have media group playlists.
  if (!master || !master.playlists || !master.playlists.length) {
    // without audio variants or playlists this
    // is not an audio only master.
    var found = someAudioVariant(master, function (variant) {
      return variant.playlists && variant.playlists.length || variant.uri;
    });
    return found;
  } // if every playlist has only an audio codec it is audio only


  var _loop = function _loop(i) {
    var playlist = master.playlists[i];
    var CODECS = playlist.attributes && playlist.attributes.CODECS; // all codecs are audio, this is an audio playlist.

    if (CODECS && CODECS.split(',').every(function (c) {
      return codecs_js.isAudioCodec(c);
    })) {
      return "continue";
    } // if the playlist is in an audio group, it is audio only


    var found = someAudioVariant(master, function (variant) {
      return playlistMatch(playlist, variant);
    });

    if (found) {
      return "continue";
    } // if we make it here this playlist isn't audio and we
    // are not audio only


    return {
      v: false
    };
  };

  for (var i = 0; i < master.playlists.length; i++) {
    var _ret = _loop(i);

    if (_ret === "continue") continue;
    if (typeof _ret === "object") return _ret.v;
  } // if we make it past every playlist without returning, then
  // this is an audio only playlist.


  return true;
}; // exports

var Playlist = {
  liveEdgeDelay: liveEdgeDelay,
  duration: duration,
  seekable: seekable,
  getMediaInfoForTime: getMediaInfoForTime,
  isEnabled: isEnabled,
  isDisabled: isDisabled,
  isBlacklisted: isBlacklisted,
  isIncompatible: isIncompatible,
  playlistEnd: playlistEnd,
  isAes: isAes,
  hasAttribute: hasAttribute,
  estimateSegmentRequestTime: estimateSegmentRequestTime,
  isLowestEnabledRendition: isLowestEnabledRendition,
  isAudioOnly: isAudioOnly,
  playlistMatch: playlistMatch,
  segmentDurationWithParts: segmentDurationWithParts
};

var log = videojs__default["default"].log;
var createPlaylistID = function createPlaylistID(index, uri) {
  return index + "-" + uri;
};
/**
 * Parses a given m3u8 playlist
 *
 * @param {Function} [onwarn]
 *        a function to call when the parser triggers a warning event.
 * @param {Function} [oninfo]
 *        a function to call when the parser triggers an info event.
 * @param {string} manifestString
 *        The downloaded manifest string
 * @param {Object[]} [customTagParsers]
 *        An array of custom tag parsers for the m3u8-parser instance
 * @param {Object[]} [customTagMappers]
 *        An array of custom tag mappers for the m3u8-parser instance
 * @param {boolean} [experimentalLLHLS=false]
 *        Whether to keep ll-hls features in the manifest after parsing.
 * @return {Object}
 *         The manifest object
 */

var parseManifest = function parseManifest(_ref) {
  var onwarn = _ref.onwarn,
      oninfo = _ref.oninfo,
      manifestString = _ref.manifestString,
      _ref$customTagParsers = _ref.customTagParsers,
      customTagParsers = _ref$customTagParsers === void 0 ? [] : _ref$customTagParsers,
      _ref$customTagMappers = _ref.customTagMappers,
      customTagMappers = _ref$customTagMappers === void 0 ? [] : _ref$customTagMappers,
      experimentalLLHLS = _ref.experimentalLLHLS;
  var parser = new m3u8Parser.Parser();

  if (onwarn) {
    parser.on('warn', onwarn);
  }

  if (oninfo) {
    parser.on('info', oninfo);
  }

  customTagParsers.forEach(function (customParser) {
    return parser.addParser(customParser);
  });
  customTagMappers.forEach(function (mapper) {
    return parser.addTagMapper(mapper);
  });
  parser.push(manifestString);
  parser.end();
  var manifest = parser.manifest; // remove llhls features from the parsed manifest
  // if we don't want llhls support.

  if (!experimentalLLHLS) {
    ['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration'].forEach(function (k) {
      if (manifest.hasOwnProperty(k)) {
        delete manifest[k];
      }
    });

    if (manifest.segments) {
      manifest.segments.forEach(function (segment) {
        ['parts', 'preloadHints'].forEach(function (k) {
          if (segment.hasOwnProperty(k)) {
            delete segment[k];
          }
        });
      });
    }
  }

  if (!manifest.targetDuration) {
    var targetDuration = 10;

    if (manifest.segments && manifest.segments.length) {
      targetDuration = manifest.segments.reduce(function (acc, s) {
        return Math.max(acc, s.duration);
      }, 0);
    }

    if (onwarn) {
      onwarn("manifest has no targetDuration defaulting to " + targetDuration);
    }

    manifest.targetDuration = targetDuration;
  }

  var parts = getLastParts(manifest);

  if (parts.length && !manifest.partTargetDuration) {
    var partTargetDuration = parts.reduce(function (acc, p) {
      return Math.max(acc, p.duration);
    }, 0);

    if (onwarn) {
      onwarn("manifest has no partTargetDuration defaulting to " + partTargetDuration);
      log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. Playback is not guaranteed.');
    }

    manifest.partTargetDuration = partTargetDuration;
  }

  return manifest;
};
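// Illustrative usage (a minimal sketch with a hypothetical manifest string):
//
//   var manifest = parseManifest({
//     manifestString: [
//       '#EXTM3U',
//       '#EXT-X-TARGETDURATION:6',
//       '#EXTINF:6,',
//       'segment0.ts',
//       '#EXT-X-ENDLIST'
//     ].join('\n'),
//     onwarn: function (warning) { console.warn(warning); }
//   });
//   // manifest.targetDuration => 6, manifest.segments.length => 1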
/**
 * Loops through all supported media groups in master and calls the provided
 * callback for each group
 *
 * @param {Object} master
 *        The parsed master manifest object
 * @param {Function} callback
 *        Callback to call for each media group
 */

var forEachMediaGroup = function forEachMediaGroup(master, callback) {
  if (!master.mediaGroups) {
    return;
  }

  ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
    if (!master.mediaGroups[mediaType]) {
      return;
    }

    for (var groupKey in master.mediaGroups[mediaType]) {
      for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
        var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
        callback(mediaProperties, mediaType, groupKey, labelKey);
      }
    }
  });
};
/**
 * Adds properties and attributes to the playlist to keep consistent functionality for
 * playlists throughout VHS.
 *
 * @param {Object} config
 *        Arguments object
 * @param {Object} config.playlist
 *        The media playlist
 * @param {string} [config.uri]
 *        The uri to the media playlist (if media playlist is not from within a master
 *        playlist)
 * @param {string} config.id
 *        ID to use for the playlist
 */

var setupMediaPlaylist = function setupMediaPlaylist(_ref2) {
  var playlist = _ref2.playlist,
      uri = _ref2.uri,
      id = _ref2.id;
  playlist.id = id;
  playlist.playlistErrors_ = 0;

  if (uri) {
    // For media playlists, m3u8-parser does not have access to a URI, as HLS media
    // playlists do not contain their own source URI, but one is needed for consistency in
    // VHS.
    playlist.uri = uri;
  } // For HLS master playlists, even though certain attributes MUST be defined, the
  // stream may still be played without them.
  // For HLS media playlists, m3u8-parser does not attach an attributes object to the
  // manifest.
  //
  // To avoid undefined reference errors throughout the project, and to make the code
  // easier to write/read, add an empty attributes object for these cases.


  playlist.attributes = playlist.attributes || {};
};
/**
 * Adds ID, resolvedUri, and attributes properties to each playlist of the master, where
 * necessary. In addition, creates playlist IDs for each playlist and adds both ID and
 * URI references for each playlist to the master's playlists array.
 *
 * @param {Object} master
 *        The master playlist
 */

var setupMediaPlaylists = function setupMediaPlaylists(master) {
  var i = master.playlists.length;

  while (i--) {
    var playlist = master.playlists[i];
    setupMediaPlaylist({
      playlist: playlist,
      id: createPlaylistID(i, playlist.uri)
    });
    playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
    master.playlists[playlist.id] = playlist; // URI reference added for backwards compatibility

    master.playlists[playlist.uri] = playlist; // Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,
    // the stream can be played without it. Although an attributes property may have been
    // added to the playlist to prevent undefined references, issue a warning to fix the
    // manifest.

    if (!playlist.attributes.BANDWIDTH) {
      log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
    }
  }
};
/**
 * Adds resolvedUri properties to each media group.
 *
 * @param {Object} master
 *        The master playlist
 */

var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
  forEachMediaGroup(master, function (properties) {
    if (properties.uri) {
      properties.resolvedUri = resolveUrl(master.uri, properties.uri);
    }
  });
};
/**
 * Creates a master playlist wrapper to insert a sole media playlist into.
 *
 * @param {Object} media
 *        Media playlist
 * @param {string} uri
 *        The media URI
 *
 * @return {Object}
 *         Master playlist
 */

var masterForMedia = function masterForMedia(media, uri) {
  var id = createPlaylistID(0, uri);
  var master = {
    mediaGroups: {
      'AUDIO': {},
      'VIDEO': {},
      'CLOSED-CAPTIONS': {},
      'SUBTITLES': {}
    },
    uri: window__default["default"].location.href,
    resolvedUri: window__default["default"].location.href,
    playlists: [{
      uri: uri,
      id: id,
      resolvedUri: uri,
      // m3u8-parser does not attach an attributes property to media playlists so make
      // sure that the property is attached to avoid undefined reference errors
      attributes: {}
    }]
  }; // set up ID reference

  master.playlists[id] = master.playlists[0]; // URI reference added for backwards compatibility

  master.playlists[uri] = master.playlists[0];
  return master;
};
/**
 * Does an in-place update of the master manifest to add updated playlist URI references
 * as well as other properties needed by VHS that aren't included by the parser.
 *
 * @param {Object} master
 *        Master manifest object
 * @param {string} uri
 *        The source URI
 */

var addPropertiesToMaster = function addPropertiesToMaster(master, uri) {
  master.uri = uri;

  for (var i = 0; i < master.playlists.length; i++) {
    if (!master.playlists[i].uri) {
      // Set up phony URIs for the playlists since playlists are referenced by their URIs
      // throughout VHS, but some formats (e.g., DASH) don't have external URIs
      // TODO: consider adding dummy URIs in mpd-parser
      var phonyUri = "placeholder-uri-" + i;
      master.playlists[i].uri = phonyUri;
    }
  }

  var audioOnlyMaster = isAudioOnly(master);
  forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
    var groupId = "placeholder-uri-" + mediaType + "-" + groupKey + "-" + labelKey; // add a playlist array under properties

    if (!properties.playlists || !properties.playlists.length) {
      // If the manifest is audio only and this media group does not have a uri, check
      // if the media group is located in the main list of playlists. If it is, don't add
      // placeholder properties as it shouldn't be considered an alternate audio track.
      if (audioOnlyMaster && mediaType === 'AUDIO' && !properties.uri) {
        for (var _i = 0; _i < master.playlists.length; _i++) {
          var p = master.playlists[_i];

          if (p.attributes && p.attributes.AUDIO && p.attributes.AUDIO === groupKey) {
            return;
          }
        }
      }

      properties.playlists = [_extends__default["default"]({}, properties)];
    }

    properties.playlists.forEach(function (p, i) {
      var id = createPlaylistID(i, groupId);

      if (p.uri) {
        p.resolvedUri = p.resolvedUri || resolveUrl(master.uri, p.uri);
      } else {
        // DEPRECATED, this has been added to prevent a breaking change.
        // previously we only ever had a single media group playlist, so
        // we mark the first playlist uri without prepending the index as we used to
        // ideally we would do all of the playlists the same way.
        p.uri = i === 0 ? groupId : id; // don't resolve a placeholder uri to an absolute url, just use
        // the placeholder again

        p.resolvedUri = p.uri;
      }

      p.id = p.id || id; // add an empty attributes object, all playlists are
      // expected to have this.

      p.attributes = p.attributes || {}; // setup ID and URI references (URI for backwards compatibility)

      master.playlists[p.id] = p;
      master.playlists[p.uri] = p;
    });
  });
  setupMediaPlaylists(master);
  resolveMediaGroupUris(master);
};

var mergeOptions$2 = videojs__default["default"].mergeOptions,
    EventTarget$1 = videojs__default["default"].EventTarget;

var addLLHLSQueryDirectives = function addLLHLSQueryDirectives(uri, media) {
  if (media.endList || !media.serverControl) {
    return uri;
  }

  var parameters = {};

  if (media.serverControl.canBlockReload) {
    var preloadSegment = media.preloadSegment; // next msn is a zero based value, length is not.

    var nextMSN = media.mediaSequence + media.segments.length; // If preload segment has parts then it is likely
    // that we are going to request a part of that preload segment.
    // the logic below is used to determine that.

    if (preloadSegment) {
      var parts = preloadSegment.parts || []; // _HLS_part is a zero based index

      var nextPart = getKnownPartCount(media) - 1; // if nextPart is > -1 and not equal to just the
      // length of parts, then we know we had part preload hints
      // and we need to add the _HLS_part= query

      if (nextPart > -1 && nextPart !== parts.length - 1) {
        // add existing parts to our preload hints
        // eslint-disable-next-line
        parameters._HLS_part = nextPart;
      } // this if statement makes sure that we request the msn
      // of the preload segment if:
      // 1. the preload segment had parts (and was not yet a full segment)
      //    but was added to our segments array
      // 2. the preload segment had preload hints for parts that are not in
      //    the manifest yet.
      // in all other cases we want the segment after the preload segment
      // which will be given by using media.segments.length because it is 1 based
      // rather than 0 based.


      if (nextPart > -1 || parts.length) {
        nextMSN--;
      }
    } // add _HLS_msn= in front of any _HLS_part query
    // eslint-disable-next-line


    parameters._HLS_msn = nextMSN;
  }

  if (media.serverControl && media.serverControl.canSkipUntil) {
    // add _HLS_skip= in front of all other queries.
    // eslint-disable-next-line
    parameters._HLS_skip = media.serverControl.canSkipDateranges ? 'v2' : 'YES';
  }

  if (Object.keys(parameters).length) {
    var parsedUri = new window__default["default"].URL(uri);
    ['_HLS_skip', '_HLS_msn', '_HLS_part'].forEach(function (name) {
      if (!parameters.hasOwnProperty(name)) {
        return;
      }

      parsedUri.searchParams.set(name, parameters[name]);
    });
    uri = parsedUri.toString();
  }

  return uri;
};
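// Illustrative (hypothetical LL-HLS media playlist): with blocking reload
// enabled, mediaSequence 100 and five complete segments, the next request
// blocks on media sequence number 105.
//
//   addLLHLSQueryDirectives('https://example.com/media.m3u8', {
//     serverControl: { canBlockReload: true },
//     mediaSequence: 100,
//     segments: [seg0, seg1, seg2, seg3, seg4] // hypothetical segment objects
//   });
//   // => 'https://example.com/media.m3u8?_HLS_msn=105'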
/**
 * Returns a new segment object with properties and
 * the parts array merged.
 *
 * @param {Object} a the old segment
 * @param {Object} b the new segment
 *
 * @return {Object} the merged segment
 */


var updateSegment = function updateSegment(a, b) {
  if (!a) {
    return b;
  }

  var result = mergeOptions$2(a, b); // if only the old segment has preload hints
  // and the new one does not, remove preload hints.

  if (a.preloadHints && !b.preloadHints) {
    delete result.preloadHints;
  } // if only the old segment has parts
  // then the parts are no longer valid


  if (a.parts && !b.parts) {
    delete result.parts; // if both segments have parts
    // copy part properties from the old segment
    // to the new one.
  } else if (a.parts && b.parts) {
    for (var i = 0; i < b.parts.length; i++) {
      if (a.parts && a.parts[i]) {
        result.parts[i] = mergeOptions$2(a.parts[i], b.parts[i]);
      }
    }
  } // set skipped to false for segments that
  // have had information merged from the old segment.


  if (!a.skipped && b.skipped) {
    result.skipped = false;
  } // set preload to false for segments that have
  // had information added in the new segment.


  if (a.preload && !b.preload) {
    result.preload = false;
  }

  return result;
};
/**
 * Returns a new array of segments that is the result of merging
 * properties from an older list of segments onto an updated
 * list. No properties on the updated playlist will be overwritten.
 *
 * @param {Array} original the outdated list of segments
 * @param {Array} update the updated list of segments
 * @param {number=} offset the index of the first update
 * segment in the original segment list. For non-live playlists,
 * this should always be zero and does not need to be
 * specified. For live playlists, it should be the difference
 * between the media sequence numbers in the original and updated
 * playlists.
 * @return {Array} a list of merged segment objects
 */

var updateSegments = function updateSegments(original, update, offset) {
  var oldSegments = original.slice();
  var newSegments = update.slice();
  offset = offset || 0;
  var result = [];
  var currentMap;

  for (var newIndex = 0; newIndex < newSegments.length; newIndex++) {
    var oldSegment = oldSegments[newIndex + offset];
    var newSegment = newSegments[newIndex];

    if (oldSegment) {
      currentMap = oldSegment.map || currentMap;
      result.push(updateSegment(oldSegment, newSegment));
    } else {
      // carry over map to new segment if it is missing
      if (currentMap && !newSegment.map) {
        newSegment.map = currentMap;
      }

      result.push(newSegment);
    }
  }

  return result;
};
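// Illustrative (hypothetical segment objects): the update starts one media
// sequence later, so old timing info is merged onto the overlapping segment.
//
//   updateSegments(
//     [{ uri: 's1.ts', start: 0 }, { uri: 's2.ts', start: 6 }],
//     [{ uri: 's2.ts' }, { uri: 's3.ts' }],
//     1
//   );
//   // => [{ uri: 's2.ts', start: 6 }, { uri: 's3.ts' }]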
var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
  // preloadSegment will not have a uri at all
  // as the segment isn't actually in the manifest yet, only parts
  if (!segment.resolvedUri && segment.uri) {
    segment.resolvedUri = resolveUrl(baseUri, segment.uri);
  }

  if (segment.key && !segment.key.resolvedUri) {
    segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);
  }

  if (segment.map && !segment.map.resolvedUri) {
    segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);
  }

  if (segment.map && segment.map.key && !segment.map.key.resolvedUri) {
    segment.map.key.resolvedUri = resolveUrl(baseUri, segment.map.key.uri);
  }

  if (segment.parts && segment.parts.length) {
    segment.parts.forEach(function (p) {
      if (p.resolvedUri) {
        return;
      }

      p.resolvedUri = resolveUrl(baseUri, p.uri);
    });
  }

  if (segment.preloadHints && segment.preloadHints.length) {
    segment.preloadHints.forEach(function (p) {
      if (p.resolvedUri) {
        return;
      }

      p.resolvedUri = resolveUrl(baseUri, p.uri);
    });
  }
};

var getAllSegments = function getAllSegments(media) {
  var segments = media.segments || [];
  var preloadSegment = media.preloadSegment; // a preloadSegment with only preloadHints is not currently
  // a usable segment, only include a preloadSegment that has
  // parts.

  if (preloadSegment && preloadSegment.parts && preloadSegment.parts.length) {
    // if preloadHints has a MAP that means that the
    // init segment is going to change. We cannot use any of the parts
    // from this preload segment.
    if (preloadSegment.preloadHints) {
      for (var i = 0; i < preloadSegment.preloadHints.length; i++) {
        if (preloadSegment.preloadHints[i].type === 'MAP') {
          return segments;
        }
      }
    } // set the duration for our preload segment to target duration.


    preloadSegment.duration = media.targetDuration;
    preloadSegment.preload = true;
    segments.push(preloadSegment);
  }

  return segments;
}; // consider the playlist unchanged if the playlist object is the same or
// the number of segments is equal, the media sequence number is unchanged,
// and this playlist hasn't become the end of the playlist


var isPlaylistUnchanged = function isPlaylistUnchanged(a, b) {
  return a === b || a.segments && b.segments && a.segments.length === b.segments.length && a.endList === b.endList && a.mediaSequence === b.mediaSequence && a.preloadSegment === b.preloadSegment;
};
1753/**
1754 * Returns a new master playlist that is the result of merging an
1755 * updated media playlist into the original version. If the
1756 * updated media playlist does not match any of the playlist
1757 * entries in the original master playlist, null is returned.
1758 *
1759 * @param {Object} master a parsed master M3U8 object
1760 * @param {Object} newMedia a parsed updated media M3U8 object
1761 * @return {Object} a new object that represents the original
1762 * master playlist with the updated media playlist merged in, or
1763 * null if the merge produced no change.
1764 */
1765
1766var updateMaster$1 = function updateMaster(master, newMedia, unchangedCheck) {
1767 if (unchangedCheck === void 0) {
1768 unchangedCheck = isPlaylistUnchanged;
1769 }
1770
1771 var result = mergeOptions$2(master, {});
1772 var oldMedia = result.playlists[newMedia.id];
1773
1774 if (!oldMedia) {
1775 return null;
1776 }
1777
1778 if (unchangedCheck(oldMedia, newMedia)) {
1779 return null;
1780 }
1781
1782 newMedia.segments = getAllSegments(newMedia);
1783 var mergedPlaylist = mergeOptions$2(oldMedia, newMedia); // always use the new media's preload segment
1784
1785 if (mergedPlaylist.preloadSegment && !newMedia.preloadSegment) {
1786 delete mergedPlaylist.preloadSegment;
1787 } // if the update could overlap existing segment information, merge the two segment lists
1788
1789
1790 if (oldMedia.segments) {
1791 if (newMedia.skip) {
1792 newMedia.segments = newMedia.segments || []; // add back in objects for skipped segments, so that we merge
1793 // old properties into the new segments
1794
1795 for (var i = 0; i < newMedia.skip.skippedSegments; i++) {
1796 newMedia.segments.unshift({
1797 skipped: true
1798 });
1799 }
1800 }
1801
1802 mergedPlaylist.segments = updateSegments(oldMedia.segments, newMedia.segments, newMedia.mediaSequence - oldMedia.mediaSequence);
1803 } // resolve any segment URIs to prevent us from having to do it later
1804
1805
1806 mergedPlaylist.segments.forEach(function (segment) {
1807 resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
1808 }); // TODO Right now in the playlists array there are two references to each playlist, one
1809 // that is referenced by index, and one by URI. The index reference may no longer be
1810 // necessary.
1811
1812 for (var _i = 0; _i < result.playlists.length; _i++) {
1813 if (result.playlists[_i].id === newMedia.id) {
1814 result.playlists[_i] = mergedPlaylist;
1815 }
1816 }
1817
1818 result.playlists[newMedia.id] = mergedPlaylist; // URI reference added for backwards compatibility
1819
1820 result.playlists[newMedia.uri] = mergedPlaylist; // update media group playlist references.
1821
1822 forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
1823 if (!properties.playlists) {
1824 return;
1825 }
1826
1827 for (var _i2 = 0; _i2 < properties.playlists.length; _i2++) {
1828 if (newMedia.id === properties.playlists[_i2].id) {
1829 properties.playlists[_i2] = mergedPlaylist;
1830 }
1831 }
1832 });
1833 return result;
1834};
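// Example usage (illustrative; `master` is a previously parsed master object
// and `refreshed` a newly parsed media playlist whose id already exists in it):
//
//   var updated = updateMaster$1(master, refreshed);
//   if (updated) {
//     master = updated; // the refresh contained changes
//   } else {
//     // null: the playlist is unknown to this master, or nothing changed
//   }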
1835/**
1836 * Calculates the time to wait before refreshing a live playlist
1837 *
1838 * @param {Object} media
1839 * The current media
1840 * @param {boolean} update
1841 * True if there were any updates from the last refresh, false otherwise
1842 * @return {number}
1843 * The time in ms to wait before refreshing the live playlist
1844 */
1845
1846var refreshDelay = function refreshDelay(media, update) {
1847 var segments = media.segments || [];
1848 var lastSegment = segments[segments.length - 1];
1849 var lastPart = lastSegment && lastSegment.parts && lastSegment.parts[lastSegment.parts.length - 1];
1850 var lastDuration = lastPart && lastPart.duration || lastSegment && lastSegment.duration;
1851
1852 if (update && lastDuration) {
1853 return lastDuration * 1000;
1854 } // if the playlist is unchanged since the last reload or the last segment duration
1855 // cannot be determined, try again after half the target duration
1856
1857
1858 return (media.partTargetDuration || media.targetDuration || 10) * 500;
1859};
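// Worked example (hypothetical values): with a 6 second target duration and a
// 5.8 second final segment,
//
//   refreshDelay({ targetDuration: 6, segments: [{ duration: 5.8 }] }, true);  // 5800ms
//   refreshDelay({ targetDuration: 6, segments: [{ duration: 5.8 }] }, false); // 3000ms
//
// i.e. wait one last-segment duration after an update, and half the target
// duration otherwise.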
1860/**
1861 * Load a playlist from a remote location
1862 *
1863 * @class PlaylistLoader
1864 * @extends Stream
1865 * @param {string|Object} src url or object of manifest
1866 * @param {boolean} withCredentials the withCredentials xhr option
1867 * @param {Object=} options optional loader options, e.g. withCredentials and handleManifestRedirects
1868 */
1869
1870var PlaylistLoader = /*#__PURE__*/function (_EventTarget) {
1871 _inheritsLoose__default["default"](PlaylistLoader, _EventTarget);
1872
1873 function PlaylistLoader(src, vhs, options) {
1874 var _this;
1875
1876 if (options === void 0) {
1877 options = {};
1878 }
1879
1880 _this = _EventTarget.call(this) || this;
1881
1882 if (!src) {
1883 throw new Error('A non-empty playlist URL or object is required');
1884 }
1885
1886 _this.logger_ = logger('PlaylistLoader');
1887 var _options = options,
1888 _options$withCredenti = _options.withCredentials,
1889 withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
1890 _options$handleManife = _options.handleManifestRedirects,
1891 handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
1892 _this.src = src;
1893 _this.vhs_ = vhs;
1894 _this.withCredentials = withCredentials;
1895 _this.handleManifestRedirects = handleManifestRedirects;
1896 var vhsOptions = vhs.options_;
1897 _this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];
1898 _this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || [];
1899 _this.experimentalLLHLS = vhsOptions && vhsOptions.experimentalLLHLS || false; // force experimentalLLHLS off for IE 11
1900
1901 if (videojs__default["default"].browser.IE_VERSION) {
1902 _this.experimentalLLHLS = false;
1903 } // initialize the loader state
1904
1905
1906 _this.state = 'HAVE_NOTHING'; // live playlist staleness timeout
1907
1908 _this.handleMediaupdatetimeout_ = _this.handleMediaupdatetimeout_.bind(_assertThisInitialized__default["default"](_this));
1909
1910 _this.on('mediaupdatetimeout', _this.handleMediaupdatetimeout_);
1911
1912 return _this;
1913 }
1914
1915 var _proto = PlaylistLoader.prototype;
1916
1917 _proto.handleMediaupdatetimeout_ = function handleMediaupdatetimeout_() {
1918 var _this2 = this;
1919
1920 if (this.state !== 'HAVE_METADATA') {
1921 // only refresh the media playlist if no other activity is going on
1922 return;
1923 }
1924
1925 var media = this.media();
1926 var uri = resolveUrl(this.master.uri, media.uri);
1927
1928 if (this.experimentalLLHLS) {
1929 uri = addLLHLSQueryDirectives(uri, media);
1930 }
1931
1932 this.state = 'HAVE_CURRENT_METADATA';
1933 this.request = this.vhs_.xhr({
1934 uri: uri,
1935 withCredentials: this.withCredentials
1936 }, function (error, req) {
1937 // disposed
1938 if (!_this2.request) {
1939 return;
1940 }
1941
1942 if (error) {
1943 return _this2.playlistRequestError(_this2.request, _this2.media(), 'HAVE_METADATA');
1944 }
1945
1946 _this2.haveMetadata({
1947 playlistString: _this2.request.responseText,
1948 url: _this2.media().uri,
1949 id: _this2.media().id
1950 });
1951 });
1952 };
1953
1954 _proto.playlistRequestError = function playlistRequestError(xhr, playlist, startingState) {
1955 var uri = playlist.uri,
1956 id = playlist.id; // any in-flight request is now finished
1957
1958 this.request = null;
1959
1960 if (startingState) {
1961 this.state = startingState;
1962 }
1963
1964 this.error = {
1965 playlist: this.master.playlists[id],
1966 status: xhr.status,
1967 message: "HLS playlist request error at URL: " + uri + ".",
1968 responseText: xhr.responseText,
1969 code: xhr.status >= 500 ? 4 : 2
1970 };
1971 this.trigger('error');
1972 };
1973
1974 _proto.parseManifest_ = function parseManifest_(_ref) {
1975 var _this3 = this;
1976
1977 var url = _ref.url,
1978 manifestString = _ref.manifestString;
1979 return parseManifest({
1980 onwarn: function onwarn(_ref2) {
1981 var message = _ref2.message;
1982 return _this3.logger_("m3u8-parser warn for " + url + ": " + message);
1983 },
1984 oninfo: function oninfo(_ref3) {
1985 var message = _ref3.message;
1986 return _this3.logger_("m3u8-parser info for " + url + ": " + message);
1987 },
1988 manifestString: manifestString,
1989 customTagParsers: this.customTagParsers,
1990 customTagMappers: this.customTagMappers,
1991 experimentalLLHLS: this.experimentalLLHLS
1992 });
1993 }
1994 /**
1995 * Update the playlist loader's state in response to a new or updated playlist.
1996 *
1997 * @param {string} [playlistString]
1998 * Playlist string (if playlistObject is not provided)
1999 * @param {Object} [playlistObject]
2000 * Playlist object (if playlistString is not provided)
2001 * @param {string} url
2002 * URL of playlist
2003 * @param {string} id
2004 * ID to use for playlist
2005 */
2006 ;
2007
2008 _proto.haveMetadata = function haveMetadata(_ref4) {
2009 var playlistString = _ref4.playlistString,
2010 playlistObject = _ref4.playlistObject,
2011 url = _ref4.url,
2012 id = _ref4.id;
2013 // any in-flight request is now finished
2014 this.request = null;
2015 this.state = 'HAVE_METADATA';
2016 var playlist = playlistObject || this.parseManifest_({
2017 url: url,
2018 manifestString: playlistString
2019 });
2020 playlist.lastRequest = Date.now();
2021 setupMediaPlaylist({
2022 playlist: playlist,
2023 uri: url,
2024 id: id
2025 }); // merge this playlist into the master
2026
2027 var update = updateMaster$1(this.master, playlist);
2028 this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;
2029 this.pendingMedia_ = null;
2030
2031 if (update) {
2032 this.master = update;
2033 this.media_ = this.master.playlists[id];
2034 } else {
2035 this.trigger('playlistunchanged');
2036 }
2037
2038 this.updateMediaUpdateTimeout_(refreshDelay(this.media(), !!update));
2039 this.trigger('loadedplaylist');
2040 }
2041 /**
2042 * Abort any outstanding work and clean up.
2043 */
2044 ;
2045
2046 _proto.dispose = function dispose() {
2047 this.trigger('dispose');
2048 this.stopRequest();
2049 window__default["default"].clearTimeout(this.mediaUpdateTimeout);
2050 window__default["default"].clearTimeout(this.finalRenditionTimeout);
2051 this.off();
2052 };
2053
2054 _proto.stopRequest = function stopRequest() {
2055 if (this.request) {
2056 var oldRequest = this.request;
2057 this.request = null;
2058 oldRequest.onreadystatechange = null;
2059 oldRequest.abort();
2060 }
2061 }
2062 /**
2063 * When called without any arguments, returns the currently
2064 * active media playlist. When called with a single argument,
2065 * triggers the playlist loader to asynchronously switch to the
2066 * specified media playlist. Calling this method while the
2067 * loader is in the HAVE_NOTHING state causes an error to be thrown
2068 * but otherwise has no effect.
2069 *
2070 * @param {Object=} playlist the parsed media playlist
2071 * object to switch to
2072 * @param {boolean=} shouldDelay whether we should delay the request by half target duration
2073 *
2074 * @return {Playlist} the current loaded media
2075 */
2076 ;
2077
2078 _proto.media = function media(playlist, shouldDelay) {
2079 var _this4 = this;
2080
2081 // getter
2082 if (!playlist) {
2083 return this.media_;
2084 } // setter
2085
2086
2087 if (this.state === 'HAVE_NOTHING') {
2088 throw new Error('Cannot switch media playlist from ' + this.state);
2089 } // find the playlist object if the target playlist has been
2090 // specified by URI
2091
2092
2093 if (typeof playlist === 'string') {
2094 if (!this.master.playlists[playlist]) {
2095 throw new Error('Unknown playlist URI: ' + playlist);
2096 }
2097
2098 playlist = this.master.playlists[playlist];
2099 }
2100
2101 window__default["default"].clearTimeout(this.finalRenditionTimeout);
2102
2103 if (shouldDelay) {
2104 var delay = (playlist.partTargetDuration || playlist.targetDuration) / 2 * 1000 || 5 * 1000;
2105 this.finalRenditionTimeout = window__default["default"].setTimeout(this.media.bind(this, playlist, false), delay);
2106 return;
2107 }
2108
2109 var startingState = this.state;
2110 var mediaChange = !this.media_ || playlist.id !== this.media_.id;
2111 var masterPlaylistRef = this.master.playlists[playlist.id]; // switch to fully loaded playlists immediately
2112
2113 if (masterPlaylistRef && masterPlaylistRef.endList || // handle the case of a playlist object (e.g., if using vhs-json with a resolved
2114 // media playlist or, for the case of demuxed audio, a resolved audio media group)
2115 playlist.endList && playlist.segments.length) {
2116 // abort outstanding playlist requests
2117 if (this.request) {
2118 this.request.onreadystatechange = null;
2119 this.request.abort();
2120 this.request = null;
2121 }
2122
2123 this.state = 'HAVE_METADATA';
2124 this.media_ = playlist; // trigger media change if the active media has been updated
2125
2126 if (mediaChange) {
2127 this.trigger('mediachanging');
2128
2129 if (startingState === 'HAVE_MASTER') {
2130 // The initial playlist was a master manifest, and the first media selected was
2131 // also provided (in the form of a resolved playlist object) as part of the
2132 // source object (rather than just a URL). Therefore, since the media playlist
2133 // doesn't need to be requested, loadedmetadata won't trigger as part of the
2134 // normal flow, and needs an explicit trigger here.
2135 this.trigger('loadedmetadata');
2136 } else {
2137 this.trigger('mediachange');
2138 }
2139 }
2140
2141 return;
2142 } // We update/set the timeout here so that live playlists
2143 // that are not a media change will "start" the loader as expected.
2144 // We expect that this function will start the media update timeout
2145 // cycle again. This also prevents a playlist switch failure from
2146 // causing us to stall during live.
2147
2148
2149 this.updateMediaUpdateTimeout_(refreshDelay(playlist, true)); // switching to the active playlist is a no-op
2150
2151 if (!mediaChange) {
2152 return;
2153 }
2154
2155 this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist request
2156
2157 if (this.request) {
2158 if (playlist.resolvedUri === this.request.url) {
2159 // requesting to switch to the same playlist multiple times
2160 // has no effect after the first
2161 return;
2162 }
2163
2164 this.request.onreadystatechange = null;
2165 this.request.abort();
2166 this.request = null;
2167 } // request the new playlist
2168
2169
2170 if (this.media_) {
2171 this.trigger('mediachanging');
2172 }
2173
2174 this.pendingMedia_ = playlist;
2175 this.request = this.vhs_.xhr({
2176 uri: playlist.resolvedUri,
2177 withCredentials: this.withCredentials
2178 }, function (error, req) {
2179 // disposed
2180 if (!_this4.request) {
2181 return;
2182 }
2183
2184 playlist.lastRequest = Date.now();
2185 playlist.resolvedUri = resolveManifestRedirect(_this4.handleManifestRedirects, playlist.resolvedUri, req);
2186
2187 if (error) {
2188 return _this4.playlistRequestError(_this4.request, playlist, startingState);
2189 }
2190
2191 _this4.haveMetadata({
2192 playlistString: req.responseText,
2193 url: playlist.uri,
2194 id: playlist.id
2195 }); // fire loadedmetadata the first time a media playlist is loaded
2196
2197
2198 if (startingState === 'HAVE_MASTER') {
2199 _this4.trigger('loadedmetadata');
2200 } else {
2201 _this4.trigger('mediachange');
2202 }
2203 });
2204 }
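// Example usage (illustrative; `loader` is a started PlaylistLoader and
// `master` its parsed master playlist):
//
//   var current = loader.media();       // getter: the active media playlist
//   loader.media(master.playlists[1]);  // setter: asynchronously switch renditions
//   loader.media('playlist-1.m3u8');    // playlists may also be referenced by URI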
2205 /**
2206 * pause loading of the playlist
2207 */
2208 ;
2209
2210 _proto.pause = function pause() {
2211 if (this.mediaUpdateTimeout) {
2212 window__default["default"].clearTimeout(this.mediaUpdateTimeout);
2213 this.mediaUpdateTimeout = null;
2214 }
2215
2216 this.stopRequest();
2217
2218 if (this.state === 'HAVE_NOTHING') {
2219 // If we pause the loader before any data has been retrieved, it's as if we never
2220 // started, so reset to an unstarted state.
2221 this.started = false;
2222 } // Need to restore state now that no activity is happening
2223
2224
2225 if (this.state === 'SWITCHING_MEDIA') {
2226 // if the loader was in the process of switching media, it should either return to
2227 // HAVE_MASTER or HAVE_METADATA depending on whether the loader has loaded a media
2228 // playlist yet. This is determined by the existence of loader.media_
2229 if (this.media_) {
2230 this.state = 'HAVE_METADATA';
2231 } else {
2232 this.state = 'HAVE_MASTER';
2233 }
2234 } else if (this.state === 'HAVE_CURRENT_METADATA') {
2235 this.state = 'HAVE_METADATA';
2236 }
2237 }
2238 /**
2239 * start loading of the playlist
2240 */
2241 ;
2242
2243 _proto.load = function load(shouldDelay) {
2244 var _this5 = this;
2245
2246 if (this.mediaUpdateTimeout) {
2247 window__default["default"].clearTimeout(this.mediaUpdateTimeout);
2248 this.mediaUpdateTimeout = null;
2249 }
2250
2251 var media = this.media();
2252
2253 if (shouldDelay) {
2254 var delay = media ? (media.partTargetDuration || media.targetDuration) / 2 * 1000 : 5 * 1000;
2255 this.mediaUpdateTimeout = window__default["default"].setTimeout(function () {
2256 _this5.mediaUpdateTimeout = null;
2257
2258 _this5.load();
2259 }, delay);
2260 return;
2261 }
2262
2263 if (!this.started) {
2264 this.start();
2265 return;
2266 }
2267
2268 if (media && !media.endList) {
2269 this.trigger('mediaupdatetimeout');
2270 } else {
2271 this.trigger('loadedplaylist');
2272 }
2273 };
2274
2275 _proto.updateMediaUpdateTimeout_ = function updateMediaUpdateTimeout_(delay) {
2276 var _this6 = this;
2277
2278 if (this.mediaUpdateTimeout) {
2279 window__default["default"].clearTimeout(this.mediaUpdateTimeout);
2280 this.mediaUpdateTimeout = null;
2281 } // we only use mediaupdatetimeout for live playlists.
2282
2283
2284 if (!this.media() || this.media().endList) {
2285 return;
2286 }
2287
2288 this.mediaUpdateTimeout = window__default["default"].setTimeout(function () {
2289 _this6.mediaUpdateTimeout = null;
2290
2291 _this6.trigger('mediaupdatetimeout');
2292
2293 _this6.updateMediaUpdateTimeout_(delay);
2294 }, delay);
2295 }
2296 /**
2297 * start loading of the playlist
2298 */
2299 ;
2300
2301 _proto.start = function start() {
2302 var _this7 = this;
2303
2304 this.started = true;
2305
2306 if (typeof this.src === 'object') {
2307 // in the case of an entirely constructed manifest object (meaning there's no actual
2308 // manifest on a server), default the uri to the page's href
2309 if (!this.src.uri) {
2310 this.src.uri = window__default["default"].location.href;
2311 } // resolvedUri is added on internally after the initial request. Since there's no
2312 // request for pre-resolved manifests, add on resolvedUri here.
2313
2314
2315 this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first
2316 // request can be skipped (since the top level of the manifest, at a minimum, is
2317 // already available as a parsed manifest object). However, if the manifest object
2318 // represents a master playlist, some media playlists may need to be resolved before
2319 // the starting segment list is available. Therefore, go directly to setup of the
2320 // initial playlist, and let the normal flow continue from there.
2321 //
2322 // Note that the call to setup is asynchronous, as other sections of VHS may assume
2323 // that the first request is asynchronous.
2324
2325 setTimeout(function () {
2326 _this7.setupInitialPlaylist(_this7.src);
2327 }, 0);
2328 return;
2329 } // request the specified URL
2330
2331
2332 this.request = this.vhs_.xhr({
2333 uri: this.src,
2334 withCredentials: this.withCredentials
2335 }, function (error, req) {
2336 // disposed
2337 if (!_this7.request) {
2338 return;
2339 } // clear the loader's request reference
2340
2341
2342 _this7.request = null;
2343
2344 if (error) {
2345 _this7.error = {
2346 status: req.status,
2347 message: "HLS playlist request error at URL: " + _this7.src + ".",
2348 responseText: req.responseText,
2349 // MEDIA_ERR_NETWORK
2350 code: 2
2351 };
2352
2353 if (_this7.state === 'HAVE_NOTHING') {
2354 _this7.started = false;
2355 }
2356
2357 return _this7.trigger('error');
2358 }
2359
2360 _this7.src = resolveManifestRedirect(_this7.handleManifestRedirects, _this7.src, req);
2361
2362 var manifest = _this7.parseManifest_({
2363 manifestString: req.responseText,
2364 url: _this7.src
2365 });
2366
2367 _this7.setupInitialPlaylist(manifest);
2368 });
2369 };
2370
2371 _proto.srcUri = function srcUri() {
2372 return typeof this.src === 'string' ? this.src : this.src.uri;
2373 }
2374 /**
2375 * Given a manifest object that's either a master or media playlist, trigger the proper
2376 * events and set the state of the playlist loader.
2377 *
2378 * If the manifest object represents a master playlist, `loadedplaylist` will be
2379 * triggered to allow listeners to select a playlist. If none is selected, the loader
2380 * will default to the first one in the playlists array.
2381 *
2382 * If the manifest object represents a media playlist, `loadedplaylist` will be
2383 * triggered followed by `loadedmetadata`, as the only available playlist is loaded.
2384 *
2385 * In the case of a media playlist, a master playlist object wrapper with one playlist
2386 * will be created so that all logic can handle playlists in the same fashion (as an
2387 * assumed manifest object schema).
2388 *
2389 * @param {Object} manifest
2390 * The parsed manifest object
2391 */
2392 ;
2393
2394 _proto.setupInitialPlaylist = function setupInitialPlaylist(manifest) {
2395 this.state = 'HAVE_MASTER';
2396
2397 if (manifest.playlists) {
2398 this.master = manifest;
2399 addPropertiesToMaster(this.master, this.srcUri()); // If the initial master playlist has playlists with segments already resolved,
2400 // then resolve segment URIs in advance, as that is usually done after a playlist request,
2401 // which may not happen if the playlist is already resolved.
2402
2403 manifest.playlists.forEach(function (playlist) {
2404 playlist.segments = getAllSegments(playlist);
2405 playlist.segments.forEach(function (segment) {
2406 resolveSegmentUris(segment, playlist.resolvedUri);
2407 });
2408 });
2409 this.trigger('loadedplaylist');
2410
2411 if (!this.request) {
2412 // no media playlist was specifically selected so start
2413 // from the first listed one
2414 this.media(this.master.playlists[0]);
2415 }
2416
2417 return;
2418 } // In order to support media playlists passed in as vhs-json, the case where the uri
2419 // is not provided as part of the manifest should be considered, and an appropriate
2420 // default used.
2421
2422
2423 var uri = this.srcUri() || window__default["default"].location.href;
2424 this.master = masterForMedia(manifest, uri);
2425 this.haveMetadata({
2426 playlistObject: manifest,
2427 url: uri,
2428 id: this.master.playlists[0].id
2429 });
2430 this.trigger('loadedmetadata');
2431 };
2432
2433 return PlaylistLoader;
2434}(EventTarget$1);
2435
2436/**
2437 * @file xhr.js
2438 */
2439var videojsXHR = videojs__default["default"].xhr,
2440 mergeOptions$1 = videojs__default["default"].mergeOptions;
2441
2442var callbackWrapper = function callbackWrapper(request, error, response, callback) {
2443 var reqResponse = request.responseType === 'arraybuffer' ? request.response : request.responseText;
2444
2445 if (!error && reqResponse) {
2446 request.responseTime = Date.now();
2447 request.roundTripTime = request.responseTime - request.requestTime;
2448 request.bytesReceived = reqResponse.byteLength || reqResponse.length;
2449
2450 if (!request.bandwidth) {
2451 request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
2452 }
2453 }
2454
2455 if (response.headers) {
2456 request.responseHeaders = response.headers;
2457 } // videojs.xhr now uses a specific code on the error
2458 // object to signal that a request has timed out instead
2459 // of setting a boolean on the request object
2460
2461
2462 if (error && error.code === 'ETIMEDOUT') {
2463 request.timedout = true;
2464 } // videojs.xhr no longer considers status codes outside of 200 and 0
2465 // (for file uris) to be errors, but the old XHR did, so emulate that
2466 // behavior. Status 206 may be used in response to byterange requests.
2467
2468
2469 if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
2470 error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
2471 }
2472
2473 callback(error, request);
2474};
2475
2476var xhrFactory = function xhrFactory() {
2477 var xhr = function XhrFunction(options, callback) {
2478 // Add a default timeout
2479 options = mergeOptions$1({
2480 timeout: 45e3
2481 }, options); // Allow an optional user-specified function to modify the option
2482 // object before we construct the xhr request
2483
2484 var beforeRequest = XhrFunction.beforeRequest || videojs__default["default"].Vhs.xhr.beforeRequest;
2485
2486 if (beforeRequest && typeof beforeRequest === 'function') {
2487 var newOptions = beforeRequest(options);
2488
2489 if (newOptions) {
2490 options = newOptions;
2491 }
2492 } // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overridden
2493 // TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11
2494
2495
2496 var xhrMethod = videojs__default["default"].Vhs.xhr.original === true ? videojsXHR : videojs__default["default"].Vhs.xhr;
2497 var request = xhrMethod(options, function (error, response) {
2498 return callbackWrapper(request, error, response, callback);
2499 });
2500 var originalAbort = request.abort;
2501
2502 request.abort = function () {
2503 request.aborted = true;
2504 return originalAbort.apply(request, arguments);
2505 };
2506
2507 request.uri = options.uri;
2508 request.requestTime = Date.now();
2509 return request;
2510 };
2511
2512 xhr.original = true;
2513 return xhr;
2514};
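// Example (illustrative): a global beforeRequest hook may rewrite the options
// for every request made through this factory, e.g. to append a hypothetical
// auth token before the xhr is constructed:
//
//   videojs.Vhs.xhr.beforeRequest = function (options) {
//     options.uri += (options.uri.indexOf('?') === -1 ? '?' : '&') + 'token=abc';
//     return options; // returning a value replaces the options object
//   };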
2515/**
2516 * Turns segment byterange into a string suitable for use in
2517 * HTTP Range requests
2518 *
2519 * @param {Object} byterange - an object with two values defining the start and end
2520 * of a byte-range
2521 */
2522
2523
2524var byterangeStr = function byterangeStr(byterange) {
2525 // `byterangeEnd` is one less than `offset + length` because the HTTP range
2526 // header uses inclusive ranges
2527 var byterangeEnd;
2528 var byterangeStart = byterange.offset;
2529
2530 if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {
2531 byterangeEnd = window__default["default"].BigInt(byterange.offset) + window__default["default"].BigInt(byterange.length) - window__default["default"].BigInt(1);
2532 } else {
2533 byterangeEnd = byterange.offset + byterange.length - 1;
2534 }
2535
2536 return 'bytes=' + byterangeStart + '-' + byterangeEnd;
2537};
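// Example: a byterange of 100 bytes starting at offset 0 maps to the inclusive
// HTTP range 0-99:
//
//   byterangeStr({ offset: 0, length: 100 }); // 'bytes=0-99'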
2538/**
2539 * Defines headers for use in the xhr request for a particular segment.
2540 *
2541 * @param {Object} segment - a simplified copy of the segmentInfo object
2542 * from SegmentLoader
2543 */
2544
2545var segmentXhrHeaders = function segmentXhrHeaders(segment) {
2546 var headers = {};
2547
2548 if (segment.byterange) {
2549 headers.Range = byterangeStr(segment.byterange);
2550 }
2551
2552 return headers;
2553};
2554
2555/**
2556 * @file bin-utils.js
2557 */
2558
2559/**
2560 * convert a TimeRange to text
2561 *
2562 * @param {TimeRange} range the timerange to use for conversion
2563 * @param {number} i the iterator on the range to convert
2564 * @return {string} the range in string format
2565 */
2566
2567var textRange = function textRange(range, i) {
2568 return range.start(i) + '-' + range.end(i);
2569};
2570/**
2571 * format a number as hex string
2572 *
2573 * @param {number} e The number
2574 * @param {number} i the iterator
2575 * @return {string} the hex formatted number as a string
2576 */
2577
2578
2579var formatHexString = function formatHexString(e, i) {
2580 var value = e.toString(16);
2581 return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
2582};
2583
2584var formatAsciiString = function formatAsciiString(e) {
2585 if (e >= 0x20 && e < 0x7e) {
2586 return String.fromCharCode(e);
2587 }
2588
2589 return '.';
2590};
2591/**
2592 * Creates an object for sending to a web worker modifying properties that are TypedArrays
2593 * into a new object with separated properties for the buffer, byteOffset, and byteLength.
2594 *
2595 * @param {Object} message
2596 * Object of properties and values to send to the web worker
2597 * @return {Object}
2598 * Modified message with TypedArray values expanded
2599 * @function createTransferableMessage
2600 */
2601
2602
2603var createTransferableMessage = function createTransferableMessage(message) {
2604 var transferable = {};
2605 Object.keys(message).forEach(function (key) {
2606 var value = message[key];
2607
2608 if (byteHelpers.isArrayBufferView(value)) {
2609 transferable[key] = {
2610 bytes: value.buffer,
2611 byteOffset: value.byteOffset,
2612 byteLength: value.byteLength
2613 };
2614 } else {
2615 transferable[key] = value;
2616 }
2617 });
2618 return transferable;
2619};
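// Example usage (illustrative): typed arrays are flattened so their underlying
// ArrayBuffers can be listed as transferables when posting to a worker:
//
//   var msg = createTransferableMessage({ action: 'push', data: new Uint8Array(4) });
//   // msg.data => { bytes: <ArrayBuffer>, byteOffset: 0, byteLength: 4 }
//   // worker.postMessage(msg, [msg.data.bytes]);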
2620/**
2621 * Returns a unique string identifier for a media initialization
2622 * segment.
2623 *
2624 * @param {Object} initSegment
2625 * the init segment object.
2626 *
2627 * @return {string} the generated init segment id
2628 */
2629
2630var initSegmentId = function initSegmentId(initSegment) {
2631 var byterange = initSegment.byterange || {
2632 length: Infinity,
2633 offset: 0
2634 };
2635 return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
2636};
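// Example: ids take the form '<length>,<offset>,<resolvedUri>', with
// 'Infinity,0' standing in when no byterange was specified:
//
//   initSegmentId({ resolvedUri: 'https://example.com/init.mp4' });
//   // => 'Infinity,0,https://example.com/init.mp4'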
2637/**
2638 * Returns a unique string identifier for a media segment key.
2639 *
2640 * @param {Object} key the encryption key
2641 * @return {string} the unique id for the media segment key.
2642 */
2643
2644var segmentKeyId = function segmentKeyId(key) {
2645 return key.resolvedUri;
2646};
2647/**
2648 * utils to help dump binary data to the console
2649 *
2650 * @param {Array|TypedArray} data
2651 * data to dump to a string
2652 *
2653 * @return {string} the data as a hex string.
2654 */
2655
2656var hexDump = function hexDump(data) {
2657 var bytes = Array.prototype.slice.call(data);
2658 var step = 16;
2659 var result = '';
2660 var hex;
2661 var ascii;
2662
2663 for (var j = 0; j < bytes.length / step; j++) {
2664 hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
2665 ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
2666 result += hex + ' ' + ascii + '\n';
2667 }
2668
2669 return result;
2670};
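// Example (illustrative): bytes are printed 16 per row as zero-padded hex
// pairs followed by an ASCII rendering ('.' for non-printable bytes):
//
//   hexDump(new Uint8Array([0x47, 0x40, 0x11, 0x10, 0x00]));
//   // => '4740 1110 00 G@...\n'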
2671var tagDump = function tagDump(_ref) {
2672 var bytes = _ref.bytes;
2673 return hexDump(bytes);
2674};
2675var textRanges = function textRanges(ranges) {
2676 var result = '';
2677 var i;
2678
2679 for (i = 0; i < ranges.length; i++) {
2680 result += textRange(ranges, i) + ' ';
2681 }
2682
2683 return result;
2684};
2685
2686var utils = /*#__PURE__*/Object.freeze({
2687 __proto__: null,
2688 createTransferableMessage: createTransferableMessage,
2689 initSegmentId: initSegmentId,
2690 segmentKeyId: segmentKeyId,
2691 hexDump: hexDump,
2692 tagDump: tagDump,
2693 textRanges: textRanges
2694});
2695
2696// TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
2697// 25% was arbitrarily chosen, and may need to be refined over time.
2698
2699var SEGMENT_END_FUDGE_PERCENT = 0.25;
2700/**
2701 * Converts a player time (any time that can be gotten/set from player.currentTime(),
2702 * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
2703 * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
2704 *
2705 * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
2706 * point" (a point where we have a mapping from program time to player time, with player
2707 * time being the post transmux start of the segment).
2708 *
2709 * For more details, see [this doc](../../docs/program-time-from-player-time.md).
2710 *
2711 * @param {number} playerTime the player time
2712 * @param {Object} segment the segment which contains the player time
2713 * @return {Date} program time
2714 */
2715
2716var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
2717 if (!segment.dateTimeObject) {
2718 // Can't convert without an "anchor point" for the program time (i.e., a time that can
2719 // be used to map the start of a segment with a real world time).
2720 return null;
2721 }
2722
2723 var transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
2724 var transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart; // get the start of the content from before old content is prepended
2725
2726 var startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
2727 var offsetFromSegmentStart = playerTime - startOfSegment;
2728 return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
2729};
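// Worked example (hypothetical values): for a segment anchored at
// 2022-01-01T00:00:00Z whose post-transmux start is 10 seconds, a player time
// of 12.5 seconds falls 2.5 seconds into the segment:
//
//   playerTimeToProgramTime(12.5, {
//     dateTimeObject: new Date('2022-01-01T00:00:00.000Z'),
//     videoTimingInfo: { transmuxerPrependedSeconds: 0, transmuxedPresentationStart: 10 }
//   });
//   // => Date for 2022-01-01T00:00:02.500Z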
2730var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
2731 return videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart - videoTimingInfo.transmuxerPrependedSeconds;
2732};
2733/**
2734 * Finds a segment that contains the time requested given as an ISO-8601 string. The
2735 * returned segment might be an estimate or an accurate match.
2736 *
2737 * @param {string} programTime The ISO-8601 programTime to find a match for
2738 * @param {Object} playlist A playlist object to search within
2739 */
2740
2741var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
2742 // Assumptions:
2743 // - verifyProgramDateTimeTags has already been run
2744 // - live streams have been started
2745 var dateTimeObject;
2746
2747 try {
2748 dateTimeObject = new Date(programTime);
2749 } catch (e) {
2750 return null;
2751 }
2752
2753 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
2754 return null;
2755 }
2756
2757 var segment = playlist.segments[0];
2758
2759 if (dateTimeObject < segment.dateTimeObject) {
2760 // Requested time is before stream start.
2761 return null;
2762 }
2763
2764 for (var i = 0; i < playlist.segments.length - 1; i++) {
2765 segment = playlist.segments[i];
2766 var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;
2767
2768 if (dateTimeObject < nextSegmentStart) {
2769 break;
2770 }
2771 }
2772
2773 var lastSegment = playlist.segments[playlist.segments.length - 1];
2774 var lastSegmentStart = lastSegment.dateTimeObject;
2775 var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
2776 var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
2777
2778 if (dateTimeObject > lastSegmentEnd) {
2779 // Beyond the end of the stream, or our best guess of the end of the stream.
2780 return null;
2781 }
2782
2783 if (dateTimeObject > lastSegmentStart) {
2784 segment = lastSegment;
2785 }
2786
2787 return {
2788 segment: segment,
2789 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
2790 // Although the segment selected should be accurate, given that all segments
2791 // have accurate date time objects, the segment's "player time" (the start
2792 // time in the player) can't be considered accurate unless the video has been
2793 // transmuxed at some point (determined by the presence of videoTimingInfo).
2794 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
2795 };
2796};
2797/**
2798 * Finds a segment that contains the given player time(in seconds).
2799 *
2800 * @param {number} time The player time to find a match for
2801 * @param {Object} playlist A playlist object to search within
2802 */
2803
2804var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
2805 // Assumptions:
2806 // - there will always be a segment.duration
2807 // - we can start from zero
2808 // - segments are in time order
2809 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
2810 return null;
2811 }
2812
2813 var segmentEnd = 0;
2814 var segment;
2815
2816 for (var i = 0; i < playlist.segments.length; i++) {
2817 segment = playlist.segments[i]; // videoTimingInfo is set after the segment is downloaded and transmuxed, and
2818 // should contain the most accurate values we have for the segment's player times.
2819 //
2820 // Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
2821 // back to an estimate based on the manifest derived (inaccurate) segment.duration, to
2822 // calculate an end value.
2823
2824 segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
2825
2826 if (time <= segmentEnd) {
2827 break;
2828 }
2829 }
2830
2831 var lastSegment = playlist.segments[playlist.segments.length - 1];
2832
2833 if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
2834 // The time requested is beyond the stream end.
2835 return null;
2836 }
2837
2838 if (time > segmentEnd) {
2839 // The time is within or beyond the last segment.
2840 //
2841 // Check to see if the time is beyond a reasonable guess of the end of the stream.
2842 if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
2843 // Technically, because the duration value is only an estimate, the time may still
2844 // exist in the last segment, however, there isn't enough information to make even
2845 // a reasonable estimate.
2846 return null;
2847 }
2848
2849 segment = lastSegment;
2850 }
2851
2852 return {
2853 segment: segment,
2854 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
2855 // Because videoTimingInfo is only set after transmux, it is the only way to get
2856 // accurate timing values.
2857 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
2858 };
2859};
2860/**
2861 * Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
2862 * If the offset returned is positive, the programTime occurs after the
2863 * comparisonTimestamp.
2864 * If the offset is negative, the programTime occurs before the comparisonTimestamp.
2865 *
2866 * @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
2867 * @param {string} programTime The programTime as an ISO-8601 string
2868 * @return {number} offset
2869 */
2870
2871var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
2872 var segmentDateTime;
2873 var programDateTime;
2874
2875 try {
2876 segmentDateTime = new Date(comparisonTimeStamp);
2877 programDateTime = new Date(programTime);
2878 } catch (e) { // TODO handle error
2879 }
2880
2881 var segmentTimeEpoch = segmentDateTime.getTime();
2882 var programTimeEpoch = programDateTime.getTime();
2883 return (programTimeEpoch - segmentTimeEpoch) / 1000;
2884};
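// Example: a program time 5 seconds after the comparison timestamp yields a
// positive offset of 5 seconds:
//
//   getOffsetFromTimestamp('2022-01-01T00:00:00Z', '2022-01-01T00:00:05Z'); // 5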
2885/**
2886 * Checks that all segments in this playlist have programDateTime tags.
2887 *
2888 * @param {Object} playlist A playlist object
2889 */
2890
2891var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
2892 if (!playlist.segments || playlist.segments.length === 0) {
2893 return false;
2894 }
2895
2896 for (var i = 0; i < playlist.segments.length; i++) {
2897 var segment = playlist.segments[i];
2898
2899 if (!segment.dateTimeObject) {
2900 return false;
2901 }
2902 }
2903
2904 return true;
2905};
2906/**
2907 * Returns the programTime of the media given a playlist and a playerTime.
2908 * The playlist must have programDateTime tags for a programDateTime tag to be returned.
2909 * If the segments containing the time requested have not been buffered yet, an estimate
2910 * may be returned to the callback.
2911 *
2912 * @param {Object} args
2913 * @param {Object} args.playlist A playlist object to search within
2914 * @param {number} time A playerTime in seconds
2915 * @param {Function} callback(err, programTime)
2916 * @return {string} err.message A detailed error message
2917 * @return {Object} programTime
2918 * @return {number} programTime.mediaSeconds The streamTime in seconds
2919 * @return {string} programTime.programDateTime The programTime as an ISO-8601 String
2920 */
2921
2922var getProgramTime = function getProgramTime(_ref) {
2923 var playlist = _ref.playlist,
2924 _ref$time = _ref.time,
2925 time = _ref$time === void 0 ? undefined : _ref$time,
2926 callback = _ref.callback;
2927
2928 if (!callback) {
2929 throw new Error('getProgramTime: callback must be provided');
2930 }
2931
2932 if (!playlist || time === undefined) {
2933 return callback({
2934 message: 'getProgramTime: playlist and time must be provided'
2935 });
2936 }
2937
2938 var matchedSegment = findSegmentForPlayerTime(time, playlist);
2939
2940 if (!matchedSegment) {
2941 return callback({
2942 message: 'valid programTime was not found'
2943 });
2944 }
2945
2946 if (matchedSegment.type === 'estimate') {
2947 return callback({
2948 message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
2949 seekTime: matchedSegment.estimatedStart
2950 });
2951 }
2952
2953 var programTimeObject = {
2954 mediaSeconds: time
2955 };
2956 var programTime = playerTimeToProgramTime(time, matchedSegment.segment);
2957
2958 if (programTime) {
2959 programTimeObject.programDateTime = programTime.toISOString();
2960 }
2961
2962 return callback(null, programTimeObject);
2963};
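// Example usage (illustrative; `playlist` is a parsed media playlist whose
// segments carry EXT-X-PROGRAM-DATE-TIME anchors):
//
//   getProgramTime({
//     playlist: playlist,
//     time: 30,
//     callback: function (err, programTime) {
//       if (err) {
//         // err.seekTime is set when only an estimate was possible
//         return;
//       }
//       // programTime.mediaSeconds === 30
//       // programTime.programDateTime is an ISO-8601 string
//     }
//   });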
2964/**
2965 * Seeks in the player to a time that matches the given programTime ISO-8601 string.
2966 *
2967 * @param {Object} args
2968 * @param {string} args.programTime A programTime to seek to as an ISO-8601 String
2969 * @param {Object} args.playlist A playlist to look within
2970 * @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
2971 * @param {Function} args.seekTo A method to perform a seek
2972 * @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
2973 * @param {Object} args.tech The tech to seek on
2974 * @param {Function} args.callback(err, newTime) A callback to return the new time to
2975 * @return {string} err.message A detailed error message
2976 * @return {number} newTime The exact time that was seeked to in seconds
2977 */
2978
2979var seekToProgramTime = function seekToProgramTime(_ref2) {
2980 var programTime = _ref2.programTime,
2981 playlist = _ref2.playlist,
2982 _ref2$retryCount = _ref2.retryCount,
2983 retryCount = _ref2$retryCount === void 0 ? 2 : _ref2$retryCount,
2984 seekTo = _ref2.seekTo,
2985 _ref2$pauseAfterSeek = _ref2.pauseAfterSeek,
2986 pauseAfterSeek = _ref2$pauseAfterSeek === void 0 ? true : _ref2$pauseAfterSeek,
2987 tech = _ref2.tech,
2988 callback = _ref2.callback;
2989
2990 if (!callback) {
2991 throw new Error('seekToProgramTime: callback must be provided');
2992 }
2993
2994 if (typeof programTime === 'undefined' || !playlist || !seekTo) {
2995 return callback({
2996 message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
2997 });
2998 }
2999
3000 if (!playlist.endList && !tech.hasStarted_) {
3001 return callback({
3002 message: 'player must be playing a live stream to start buffering'
3003 });
3004 }
3005
3006 if (!verifyProgramDateTimeTags(playlist)) {
3007 return callback({
3008 message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
3009 });
3010 }
3011
3012 var matchedSegment = findSegmentForProgramTime(programTime, playlist); // no match
3013
3014 if (!matchedSegment) {
3015 return callback({
3016 message: programTime + " was not found in the stream"
3017 });
3018 }
3019
3020 var segment = matchedSegment.segment;
3021 var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);
3022
3023 if (matchedSegment.type === 'estimate') {
3024 // we've run out of retries
3025 if (retryCount === 0) {
3026 return callback({
3027 message: programTime + " is not buffered yet. Try again"
3028 });
3029 }
3030
3031 seekTo(matchedSegment.estimatedStart + mediaOffset);
3032 tech.one('seeked', function () {
3033 seekToProgramTime({
3034 programTime: programTime,
3035 playlist: playlist,
3036 retryCount: retryCount - 1,
3037 seekTo: seekTo,
3038 pauseAfterSeek: pauseAfterSeek,
3039 tech: tech,
3040 callback: callback
3041 });
3042 });
3043 return;
3044 } // Since the segment.start value is determined from the buffered end or ending time
3045 // of the prior segment, the seekToTime doesn't need to account for any transmuxer
3046 // modifications.
3047
3048
3049 var seekToTime = segment.start + mediaOffset;
3050
3051 var seekedCallback = function seekedCallback() {
3052 return callback(null, tech.currentTime());
3053 }; // listen for seeked event
3054
3055
3056 tech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this state
3057
3058 if (pauseAfterSeek) {
3059 tech.pause();
3060 }
3061
3062 seekTo(seekToTime);
3063};
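// Example usage (illustrative; `playlist`, `tech` and the `player` reference
// are hypothetical and come from the surrounding player integration):
//
//   seekToProgramTime({
//     programTime: '2022-01-01T00:00:30.000Z',
//     playlist: playlist,
//     seekTo: function (time) { player.currentTime(time); },
//     tech: tech,
//     callback: function (err, newTime) {
//       if (!err) {
//         // newTime is the exact time seeked to, in seconds
//       }
//     }
//   });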
3064
3065// invoke the callback only once the request is complete (readyState 4).
3066
3067var callbackOnCompleted = function callbackOnCompleted(request, cb) {
3068 if (request.readyState === 4) {
3069 return cb();
3070 }
3071
3072 return;
3073};
3074
3075var containerRequest = function containerRequest(uri, xhr, cb) {
3076 var bytes = [];
3077 var id3Offset;
3078 var finished = false;
3079
3080 var endRequestAndCallback = function endRequestAndCallback(err, req, type, _bytes) {
3081 req.abort();
3082 finished = true;
3083 return cb(err, req, type, _bytes);
3084 };
3085
3086 var progressListener = function progressListener(error, request) {
3087 if (finished) {
3088 return;
3089 }
3090
3091 if (error) {
3092 return endRequestAndCallback(error, request, '', bytes);
3093 } // grab the new part of content that was just downloaded
3094
3095
3096 var newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytes
3097
3098 bytes = byteHelpers.concatTypedArrays(bytes, byteHelpers.stringToBytes(newPart, true));
3099 id3Offset = id3Offset || id3Helpers.getId3Offset(bytes); // we need at least 10 bytes to determine a type
3100 // or we need at least two bytes after an id3Offset
3101
3102 if (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {
3103 return callbackOnCompleted(request, function () {
3104 return endRequestAndCallback(error, request, '', bytes);
3105 });
3106 }
3107
3108 var type = containers.detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data
3109 // to see the second sync byte, wait until we have enough data
3110 // before declaring it ts
3111
3112 if (type === 'ts' && bytes.length < 188) {
3113 return callbackOnCompleted(request, function () {
3114 return endRequestAndCallback(error, request, '', bytes);
3115 });
3116 } // this may be an unsynced ts segment;
3117 // wait for 376 bytes (two 188-byte ts packets) before detecting no container
3118
3119
3120 if (!type && bytes.length < 376) {
3121 return callbackOnCompleted(request, function () {
3122 return endRequestAndCallback(error, request, '', bytes);
3123 });
3124 }
3125
3126 return endRequestAndCallback(null, request, type, bytes);
3127 };
3128
3129 var options = {
3130 uri: uri,
3131 beforeSend: function beforeSend(request) {
3132 // this forces the browser to pass the bytes to us unprocessed
3133 request.overrideMimeType('text/plain; charset=x-user-defined');
3134 request.addEventListener('progress', function (_ref) {
3135 _ref.total; // unused reads left over from parameter destructuring in the pre-bundled source
3136 _ref.loaded;
3137 return callbackWrapper(request, null, {
3138 statusCode: request.status
3139 }, progressListener);
3140 });
3141 }
3142 };
3143 var request = xhr(options, function (error, response) {
3144 return callbackWrapper(request, error, response, progressListener);
3145 });
3146 return request;
3147};
3148
3149var EventTarget = videojs__default["default"].EventTarget,
3150 mergeOptions = videojs__default["default"].mergeOptions;
3151
3152var dashPlaylistUnchanged = function dashPlaylistUnchanged(a, b) {
3153 if (!isPlaylistUnchanged(a, b)) {
3154 return false;
3155 } // for dash, the above check will often return true even when the playlist
3156 // has actually changed, because mediaSequence isn't a dash concept and we
3157 // often set it to 1, so playlists with the same number of segments compare
3158 // as unchanged.
3159 // For dash we therefore need to check whether the underlying segments differ.
3160 // if sidx changed then the playlists are different.
3161
3162
3163 if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
3164 return false;
3165 } else if (!a.sidx && b.sidx || a.sidx && !b.sidx) {
3166 return false;
3167 } // one or the other does not have segments
3168 // there was a change.
3169
3170
3171 if (a.segments && !b.segments || !a.segments && b.segments) {
3172 return false;
3173 } // neither has segments; nothing changed
3174
3175
3176 if (!a.segments && !b.segments) {
3177 return true;
3178 } // check segments themselves
3179
3180
3181 for (var i = 0; i < a.segments.length; i++) {
3182 var aSegment = a.segments[i];
3183 var bSegment = b.segments[i]; // if uris are different between segments there was a change
3184
3185 if (aSegment.uri !== bSegment.uri) {
3186 return false;
3187 } // neither segment has a byterange, there will be no byterange change.
3188
3189
3190 if (!aSegment.byterange && !bSegment.byterange) {
3191 continue;
3192 }
3193
3194 var aByterange = aSegment.byterange;
3195 var bByterange = bSegment.byterange; // if byterange only exists on one of the segments, there was a change.
3196
3197 if (aByterange && !bByterange || !aByterange && bByterange) {
3198 return false;
3199 } // if both segments have byterange with different offsets, there was a change.
3200
3201
3202 if (aByterange.offset !== bByterange.offset || aByterange.length !== bByterange.length) {
3203 return false;
3204 }
3205 } // if everything was the same with segments, this is the same playlist.
3206
3207
3208 return true;
3209};
3210/**
3211 * Parses the master XML string and updates playlist URI references.
3212 *
3213 * @param {Object} config
3214 * Object of arguments
3215 * @param {string} config.masterXml
3216 * The mpd XML
3217 * @param {string} config.srcUrl
3218 * The mpd URL
3219 * @param {number} config.clientOffset
3220 * A time difference between server and client
3221 * @param {Object} config.sidxMapping
3222 * SIDX mappings for moof/mdat URIs and byte ranges
3223 * @return {Object}
3224 * The parsed mpd manifest object
3225 */
3226
3227
3228var parseMasterXml = function parseMasterXml(_ref) {
3229 var masterXml = _ref.masterXml,
3230 srcUrl = _ref.srcUrl,
3231 clientOffset = _ref.clientOffset,
3232 sidxMapping = _ref.sidxMapping,
3233 previousManifest = _ref.previousManifest;
3234 var manifest = mpdParser.parse(masterXml, {
3235 manifestUri: srcUrl,
3236 clientOffset: clientOffset,
3237 sidxMapping: sidxMapping,
3238 previousManifest: previousManifest
3239 });
3240 addPropertiesToMaster(manifest, srcUrl);
3241 return manifest;
3242};
3243/**
3244 * Returns a new master manifest that is the result of merging an updated master manifest
3245 * into the original version.
3246 *
3247 * @param {Object} oldMaster
3248 * The old parsed mpd object
3249 * @param {Object} newMaster
3250 * The updated parsed mpd object
3251 * @return {Object}
3252 * A new object representing the original master manifest with the updated media
3253 * playlists merged in
3254 */
3255
3256var updateMaster = function updateMaster(oldMaster, newMaster, sidxMapping) {
3257 var noChanges = true;
3258 var update = mergeOptions(oldMaster, {
3259 // These are top level properties that can be updated
3260 duration: newMaster.duration,
3261 minimumUpdatePeriod: newMaster.minimumUpdatePeriod,
3262 timelineStarts: newMaster.timelineStarts
3263 }); // First update the playlists in playlist list
3264
3265 for (var i = 0; i < newMaster.playlists.length; i++) {
3266 var playlist = newMaster.playlists[i];
3267
3268 if (playlist.sidx) {
3269 var sidxKey = mpdParser.generateSidxKey(playlist.sidx); // add sidx segments to the playlist if we have all the sidx info already
3270
3271 if (sidxMapping && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx) {
3272 mpdParser.addSidxSegmentsToPlaylist(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
3273 }
3274 }
3275
3276 var playlistUpdate = updateMaster$1(update, playlist, dashPlaylistUnchanged);
3277
3278 if (playlistUpdate) {
3279 update = playlistUpdate;
3280 noChanges = false;
3281 }
3282 } // Then update media group playlists
3283
3284
3285 forEachMediaGroup(newMaster, function (properties, type, group, label) {
3286 if (properties.playlists && properties.playlists.length) {
3287 var id = properties.playlists[0].id;
3288
3289 var _playlistUpdate = updateMaster$1(update, properties.playlists[0], dashPlaylistUnchanged);
3290
3291 if (_playlistUpdate) {
3292 update = _playlistUpdate; // update the playlist reference within media groups
3293
3294 update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
3295 noChanges = false;
3296 }
3297 }
3298 });
3299
3300 if (newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
3301 noChanges = false;
3302 }
3303
3304 if (noChanges) {
3305 return null;
3306 }
3307
3308 return update;
3309}; // SIDX should be equivalent if the URI and byteranges of the SIDX match.
3310// If the SIDXs have maps, the two maps should match,
3311// both `a` and `b` missing SIDXs is considered matching.
3312// If `a` or `b` but not both have a map, they aren't matching.
3313
3314var equivalentSidx = function equivalentSidx(a, b) {
3315 var neitherMap = Boolean(!a.map && !b.map);
3316 var equivalentMap = neitherMap || Boolean(a.map && b.map && a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length);
3317 return equivalentMap && a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;
3318}; // exported for testing
3319
3320
3321var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
3322 var newSidxMapping = {};
3323
3324 for (var id in playlists) {
3325 var playlist = playlists[id];
3326 var currentSidxInfo = playlist.sidx;
3327
3328 if (currentSidxInfo) {
3329 var key = mpdParser.generateSidxKey(currentSidxInfo);
3330
3331 if (!oldSidxMapping[key]) {
3332 break;
3333 }
3334
3335 var savedSidxInfo = oldSidxMapping[key].sidxInfo;
3336
3337 if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
3338 newSidxMapping[key] = oldSidxMapping[key];
3339 }
3340 }
3341 }
3342
3343 return newSidxMapping;
3344};
3345/**
3346 * A function that filters out changed items as they need to be requested separately.
3347 *
3348 * The method is exported for testing
3349 *
3350 * @param {Object} master the parsed mpd XML returned via mpd-parser
3351 * @param {Object} oldSidxMapping the SIDX to compare against
3352 */
3353
3354var filterChangedSidxMappings = function filterChangedSidxMappings(master, oldSidxMapping) {
3355 var videoSidx = compareSidxEntry(master.playlists, oldSidxMapping);
3356 var mediaGroupSidx = videoSidx;
3357 forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
3358 if (properties.playlists && properties.playlists.length) {
3359 var playlists = properties.playlists;
3360 mediaGroupSidx = mergeOptions(mediaGroupSidx, compareSidxEntry(playlists, oldSidxMapping));
3361 }
3362 });
3363 return mediaGroupSidx;
3364};
3365
3366var DashPlaylistLoader = /*#__PURE__*/function (_EventTarget) {
3367 _inheritsLoose__default["default"](DashPlaylistLoader, _EventTarget);
3368
3369 // DashPlaylistLoader must accept either a src url or a playlist because subsequent
3370 // playlist loader setups from media groups will expect to be able to pass a playlist
3371 // (since there aren't external URLs to media playlists with DASH)
3372 function DashPlaylistLoader(srcUrlOrPlaylist, vhs, options, masterPlaylistLoader) {
3373 var _this;
3374
3375 if (options === void 0) {
3376 options = {};
3377 }
3378
3379 _this = _EventTarget.call(this) || this;
3380 _this.masterPlaylistLoader_ = masterPlaylistLoader || _assertThisInitialized__default["default"](_this);
3381
3382 if (!masterPlaylistLoader) {
3383 _this.isMaster_ = true;
3384 }
3385
3386 var _options = options,
3387 _options$withCredenti = _options.withCredentials,
3388 withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
3389 _options$handleManife = _options.handleManifestRedirects,
3390 handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
3391 _this.vhs_ = vhs;
3392 _this.withCredentials = withCredentials;
3393 _this.handleManifestRedirects = handleManifestRedirects;
3394
3395 if (!srcUrlOrPlaylist) {
3396 throw new Error('A non-empty playlist URL or object is required');
3397 } // event naming?
3398
3399
3400 _this.on('minimumUpdatePeriod', function () {
3401 _this.refreshXml_();
3402 }); // live playlist staleness timeout
3403
3404
3405 _this.on('mediaupdatetimeout', function () {
3406 _this.refreshMedia_(_this.media().id);
3407 });
3408
3409 _this.state = 'HAVE_NOTHING';
3410 _this.loadedPlaylists_ = {};
3411 _this.logger_ = logger('DashPlaylistLoader'); // initialize the loader state
3412 // The masterPlaylistLoader will be created with a string
3413
3414 if (_this.isMaster_) {
3415 _this.masterPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes
3416 // once multi-period is refactored
3417
3418 _this.masterPlaylistLoader_.sidxMapping_ = {};
3419 } else {
3420 _this.childPlaylist_ = srcUrlOrPlaylist;
3421 }
3422
3423 return _this;
3424 }
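 // Usage sketch (assumes a `vhs` object exposing an `xhr` method, as supplied
 // at runtime; the URL is a placeholder):
 //
 //   var loader = new DashPlaylistLoader('https://example.com/dash.mpd', vhs, {
 //     withCredentials: false,
 //     handleManifestRedirects: true
 //   });
 //   loader.on('loadedplaylist', function () {
 //     var master = loader.master; // parsed manifest object
 //   });
 //   loader.load();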
3425
3426 var _proto = DashPlaylistLoader.prototype;
3427
3428 _proto.requestErrored_ = function requestErrored_(err, request, startingState) {
3429 // disposed
3430 if (!this.request) {
3431 return true;
3432 } // pending request is cleared
3433
3434
3435 this.request = null;
3436
3437 if (err) {
3438 // use the provided error object or create one
3439 // based on the request/response
3440 this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
3441 status: request.status,
3442 message: 'DASH request error at URL: ' + request.uri,
3443 response: request.response,
3444 // MEDIA_ERR_NETWORK
3445 code: 2
3446 };
3447
3448 if (startingState) {
3449 this.state = startingState;
3450 }
3451
3452 this.trigger('error');
3453 return true;
3454 }
3455 }
3456 /**
3457 * Verify that the container of the sidx segment can be parsed
3458 * and if it can, get and parse that segment.
3459 */
3460 ;
3461
3462 _proto.addSidxSegments_ = function addSidxSegments_(playlist, startingState, cb) {
3463 var _this2 = this;
3464
3465 var sidxKey = playlist.sidx && mpdParser.generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.
3466
3467 if (!playlist.sidx || !sidxKey || this.masterPlaylistLoader_.sidxMapping_[sidxKey]) {
3468 // keep this function async
3469 this.mediaRequest_ = window__default["default"].setTimeout(function () {
3470 return cb(false);
3471 }, 0);
3472 return;
3473 } // resolve the segment URL relative to the playlist
3474
3475
3476 var uri = resolveManifestRedirect(this.handleManifestRedirects, playlist.sidx.resolvedUri);
3477
3478 var fin = function fin(err, request) {
3479 if (_this2.requestErrored_(err, request, startingState)) {
3480 return;
3481 }
3482
3483 var sidxMapping = _this2.masterPlaylistLoader_.sidxMapping_;
3484 var sidx;
3485
3486 try {
3487 sidx = parseSidx__default["default"](byteHelpers.toUint8(request.response).subarray(8));
3488 } catch (e) {
3489 // sidx parsing failed.
3490 _this2.requestErrored_(e, request, startingState);
3491
3492 return;
3493 }
3494
3495 sidxMapping[sidxKey] = {
3496 sidxInfo: playlist.sidx,
3497 sidx: sidx
3498 };
3499 mpdParser.addSidxSegmentsToPlaylist(playlist, sidx, playlist.sidx.resolvedUri);
3500 return cb(true);
3501 };
3502
3503 this.request = containerRequest(uri, this.vhs_.xhr, function (err, request, container, bytes) {
3504 if (err) {
3505 return fin(err, request);
3506 }
3507
3508 if (!container || container !== 'mp4') {
3509 return fin({
3510 status: request.status,
3511 message: "Unsupported " + (container || 'unknown') + " container type for sidx segment at URL: " + uri,
3512 // response is just bytes in this case
3513 // but we really don't want to return that.
3514 response: '',
3515 playlist: playlist,
3516 internal: true,
3517 blacklistDuration: Infinity,
3518 // MEDIA_ERR_NETWORK
3519 code: 2
3520 }, request);
3521 } // if we already downloaded the sidx bytes in the container request, use them
3522
3523
3524 var _playlist$sidx$bytera = playlist.sidx.byterange,
3525 offset = _playlist$sidx$bytera.offset,
3526 length = _playlist$sidx$bytera.length;
3527
3528 if (bytes.length >= length + offset) {
3529 return fin(err, {
3530 response: bytes.subarray(offset, offset + length),
3531 status: request.status,
3532 uri: request.uri
3533 });
3534 } // otherwise request sidx bytes
3535
3536
3537 _this2.request = _this2.vhs_.xhr({
3538 uri: uri,
3539 responseType: 'arraybuffer',
3540 headers: segmentXhrHeaders({
3541 byterange: playlist.sidx.byterange
3542 })
3543 }, fin);
3544 });
3545 };
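 // After a successful sidx fetch, the master loader's sidxMapping_ gains one
 // entry per key generated by mpd-parser's generateSidxKey (the literal key
 // below is illustrative only):
 //
 //   this.masterPlaylistLoader_.sidxMapping_['sidx-uri-0-600'] = {
 //     sidxInfo: playlist.sidx, // the reference from the playlist
 //     sidx: sidx               // the parsed sidx box
 //   };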
3546
3547 _proto.dispose = function dispose() {
3548 this.trigger('dispose');
3549 this.stopRequest();
3550 this.loadedPlaylists_ = {};
3551 window__default["default"].clearTimeout(this.minimumUpdatePeriodTimeout_);
3552 window__default["default"].clearTimeout(this.mediaRequest_);
3553 window__default["default"].clearTimeout(this.mediaUpdateTimeout);
3554 this.mediaUpdateTimeout = null;
3555 this.mediaRequest_ = null;
3556 this.minimumUpdatePeriodTimeout_ = null;
3557
3558 if (this.masterPlaylistLoader_.createMupOnMedia_) {
3559 this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
3560 this.masterPlaylistLoader_.createMupOnMedia_ = null;
3561 }
3562
3563 this.off();
3564 };
3565
3566 _proto.hasPendingRequest = function hasPendingRequest() {
3567 return this.request || this.mediaRequest_;
3568 };
3569
3570 _proto.stopRequest = function stopRequest() {
3571 if (this.request) {
3572 var oldRequest = this.request;
3573 this.request = null;
3574 oldRequest.onreadystatechange = null;
3575 oldRequest.abort();
3576 }
3577 };
3578
3579 _proto.media = function media(playlist) {
3580 var _this3 = this;
3581
3582 // getter
3583 if (!playlist) {
3584 return this.media_;
3585 } // setter
3586
3587
3588 if (this.state === 'HAVE_NOTHING') {
3589 throw new Error('Cannot switch media playlist from ' + this.state);
3590 }
3591
3592 var startingState = this.state; // find the playlist object if the target playlist has been specified by URI
3593
3594 if (typeof playlist === 'string') {
3595 if (!this.masterPlaylistLoader_.master.playlists[playlist]) {
3596 throw new Error('Unknown playlist URI: ' + playlist);
3597 }
3598
3599 playlist = this.masterPlaylistLoader_.master.playlists[playlist];
3600 }
3601
3602 var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediately
3603
3604 if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
3605 this.state = 'HAVE_METADATA';
3606 this.media_ = playlist; // trigger media change if the active media has been updated
3607
3608 if (mediaChange) {
3609 this.trigger('mediachanging');
3610 this.trigger('mediachange');
3611 }
3612
3613 return;
3614 } // switching to the active playlist is a no-op
3615
3616
3617 if (!mediaChange) {
3618 return;
3619 } // switching from an already loaded playlist
3620
3621
3622 if (this.media_) {
3623 this.trigger('mediachanging');
3624 }
3625
3626 this.addSidxSegments_(playlist, startingState, function (sidxChanged) {
3627 // everything is ready just continue to haveMetadata
3628 _this3.haveMetadata({
3629 startingState: startingState,
3630 playlist: playlist
3631 });
3632 });
3633 };
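 // Usage sketch: with no argument `media()` is a getter for the active
 // playlist; given a playlist object or an id string it acts as a setter:
 //
 //   var active = loader.media();              // current playlist or undefined
 //   loader.media(loader.master.playlists[0]); // switch by object
 //   loader.media('0-placeholder-uri-0');      // or by id (illustrative id)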
3634
3635 _proto.haveMetadata = function haveMetadata(_ref2) {
3636 var startingState = _ref2.startingState,
3637 playlist = _ref2.playlist;
3638 this.state = 'HAVE_METADATA';
3639 this.loadedPlaylists_[playlist.id] = playlist;
3640 this.mediaRequest_ = null; // This will trigger loadedplaylist
3641
3642 this.refreshMedia_(playlist.id); // fire loadedmetadata the first time a media playlist is loaded
3643 // to resolve setup of media groups
3644
3645 if (startingState === 'HAVE_MASTER') {
3646 this.trigger('loadedmetadata');
3647 } else {
3648 // trigger media change if the active media has been updated
3649 this.trigger('mediachange');
3650 }
3651 };
3652
3653 _proto.pause = function pause() {
3654 if (this.masterPlaylistLoader_.createMupOnMedia_) {
3655 this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
3656 this.masterPlaylistLoader_.createMupOnMedia_ = null;
3657 }
3658
3659 this.stopRequest();
3660 window__default["default"].clearTimeout(this.mediaUpdateTimeout);
3661 this.mediaUpdateTimeout = null;
3662
3663 if (this.isMaster_) {
3664 window__default["default"].clearTimeout(this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_);
3665 this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;
3666 }
3667
3668 if (this.state === 'HAVE_NOTHING') {
3669 // If we pause the loader before any data has been retrieved, it's as if we never
3670 // started, so reset to an unstarted state.
3671 this.started = false;
3672 }
3673 };
3674
3675 _proto.load = function load(isFinalRendition) {
3676 var _this4 = this;
3677
3678 window__default["default"].clearTimeout(this.mediaUpdateTimeout);
3679 this.mediaUpdateTimeout = null;
3680 var media = this.media();
3681
3682 if (isFinalRendition) {
3683 var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
3684 this.mediaUpdateTimeout = window__default["default"].setTimeout(function () {
3685 return _this4.load();
3686 }, delay);
3687 return;
3688 } // because the playlists are internal to the manifest, load should either load the
3689 // main manifest, or do nothing but trigger an event
3690
3691
3692 if (!this.started) {
3693 this.start();
3694 return;
3695 }
3696
3697 if (media && !media.endList) {
3698 // Check to see if this is the master loader and the MUP was cleared (this happens
3699 // when the loader was paused). `media` should be set at this point since one is always
3700 // set during `start()`.
3701 if (this.isMaster_ && !this.minimumUpdatePeriodTimeout_) {
3702 // Trigger minimumUpdatePeriod to refresh the master manifest
3703 this.trigger('minimumUpdatePeriod'); // Since there was no prior minimumUpdatePeriodTimeout it should be recreated
3704
3705 this.updateMinimumUpdatePeriodTimeout_();
3706 }
3707
3708 this.trigger('mediaupdatetimeout');
3709 } else {
3710 this.trigger('loadedplaylist');
3711 }
3712 };
3713
3714 _proto.start = function start() {
3715 var _this5 = this;
3716
3717 this.started = true; // We don't need to request the master manifest again
3718 // Call this asynchronously to match the xhr request behavior below
3719
3720 if (!this.isMaster_) {
3721 this.mediaRequest_ = window__default["default"].setTimeout(function () {
3722 return _this5.haveMaster_();
3723 }, 0);
3724 return;
3725 }
3726
3727 this.requestMaster_(function (req, masterChanged) {
3728 _this5.haveMaster_();
3729
3730 if (!_this5.hasPendingRequest() && !_this5.media_) {
3731 _this5.media(_this5.masterPlaylistLoader_.master.playlists[0]);
3732 }
3733 });
3734 };
3735
3736 _proto.requestMaster_ = function requestMaster_(cb) {
3737 var _this6 = this;
3738
3739 this.request = this.vhs_.xhr({
3740 uri: this.masterPlaylistLoader_.srcUrl,
3741 withCredentials: this.withCredentials
3742 }, function (error, req) {
3743 if (_this6.requestErrored_(error, req)) {
3744 if (_this6.state === 'HAVE_NOTHING') {
3745 _this6.started = false;
3746 }
3747
3748 return;
3749 }
3750
3751 var masterChanged = req.responseText !== _this6.masterPlaylistLoader_.masterXml_;
3752 _this6.masterPlaylistLoader_.masterXml_ = req.responseText;
3753
3754 if (req.responseHeaders && req.responseHeaders.date) {
3755 _this6.masterLoaded_ = Date.parse(req.responseHeaders.date);
3756 } else {
3757 _this6.masterLoaded_ = Date.now();
3758 }
3759
3760 _this6.masterPlaylistLoader_.srcUrl = resolveManifestRedirect(_this6.handleManifestRedirects, _this6.masterPlaylistLoader_.srcUrl, req);
3761
3762 if (masterChanged) {
3763 _this6.handleMaster_();
3764
3765 _this6.syncClientServerClock_(function () {
3766 return cb(req, masterChanged);
3767 });
3768
3769 return;
3770 }
3771
3772 return cb(req, masterChanged);
3773 });
3774 }
3775 /**
3776 * Parses the master xml for UTCTiming node to sync the client clock to the server
3777 * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
3778 *
3779 * @param {Function} done
3780 * Function to call when clock sync has completed
3781 */
3782 ;
3783
3784 _proto.syncClientServerClock_ = function syncClientServerClock_(done) {
3785 var _this7 = this;
3786
3787 var utcTiming = mpdParser.parseUTCTiming(this.masterPlaylistLoader_.masterXml_); // No UTCTiming element found in the mpd. Use Date header from mpd request as the
3788 // server clock
3789
3790 if (utcTiming === null) {
3791 this.masterPlaylistLoader_.clientOffset_ = this.masterLoaded_ - Date.now();
3792 return done();
3793 }
3794
3795 if (utcTiming.method === 'DIRECT') {
3796 this.masterPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
3797 return done();
3798 }
3799
3800 this.request = this.vhs_.xhr({
3801 uri: resolveUrl(this.masterPlaylistLoader_.srcUrl, utcTiming.value),
3802 method: utcTiming.method,
3803 withCredentials: this.withCredentials
3804 }, function (error, req) {
3805 // disposed
3806 if (!_this7.request) {
3807 return;
3808 }
3809
3810 if (error) {
3811 // sync request failed, fall back to using date header from mpd
3812 // TODO: log warning
3813 _this7.masterPlaylistLoader_.clientOffset_ = _this7.masterLoaded_ - Date.now();
3814 return done();
3815 }
3816
3817 var serverTime;
3818
3819 if (utcTiming.method === 'HEAD') {
3820 if (!req.responseHeaders || !req.responseHeaders.date) {
3821 // expected date header not present, fall back to using date header from mpd
3822 // TODO: log warning
3823 serverTime = _this7.masterLoaded_;
3824 } else {
3825 serverTime = Date.parse(req.responseHeaders.date);
3826 }
3827 } else {
3828 serverTime = Date.parse(req.responseText);
3829 }
3830
3831 _this7.masterPlaylistLoader_.clientOffset_ = serverTime - Date.now();
3832 done();
3833 });
3834 };
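 // Offset arithmetic sketch: given a DIRECT UTCTiming node such as
 //
 //   <UTCTiming schemeIdUri="urn:mpeg:dash:utc:direct:2014"
 //              value="2024-01-01T00:00:10Z"/>
 //
 // mpd-parser yields { method: 'DIRECT', value: Date.parse(...) }, so if the
 // client clock reads 2024-01-01T00:00:00Z at parse time,
 // clientOffset_ = value - Date.now() = 10000 (the server is ~10s ahead).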
3835
3836 _proto.haveMaster_ = function haveMaster_() {
3837 this.state = 'HAVE_MASTER';
3838
3839 if (this.isMaster_) {
3840 // We have the master playlist at this point, so
3841 // trigger this to allow MasterPlaylistController
3842 // to make an initial playlist selection
3843 this.trigger('loadedplaylist');
3844 } else if (!this.media_) {
3845 // no media playlist was specifically selected so select
3846 // the one the child playlist loader was created with
3847 this.media(this.childPlaylist_);
3848 }
3849 };
3850
3851 _proto.handleMaster_ = function handleMaster_() {
3852 // clear media request
3853 this.mediaRequest_ = null;
3854 var oldMaster = this.masterPlaylistLoader_.master;
3855 var newMaster = parseMasterXml({
3856 masterXml: this.masterPlaylistLoader_.masterXml_,
3857 srcUrl: this.masterPlaylistLoader_.srcUrl,
3858 clientOffset: this.masterPlaylistLoader_.clientOffset_,
3859 sidxMapping: this.masterPlaylistLoader_.sidxMapping_,
3860 previousManifest: oldMaster
3861 }); // if we have an old master to compare the new master against
3862
3863 if (oldMaster) {
3864 newMaster = updateMaster(oldMaster, newMaster, this.masterPlaylistLoader_.sidxMapping_);
3865 } // only update master if we have a new master
3866
3867
3868 this.masterPlaylistLoader_.master = newMaster ? newMaster : oldMaster;
3869 var location = this.masterPlaylistLoader_.master.locations && this.masterPlaylistLoader_.master.locations[0];
3870
3871 if (location && location !== this.masterPlaylistLoader_.srcUrl) {
3872 this.masterPlaylistLoader_.srcUrl = location;
3873 }
3874
3875 if (!oldMaster || newMaster && newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
3876 this.updateMinimumUpdatePeriodTimeout_();
3877 }
3878
3879 return Boolean(newMaster);
3880 };
3881
3882 _proto.updateMinimumUpdatePeriodTimeout_ = function updateMinimumUpdatePeriodTimeout_() {
3883 var mpl = this.masterPlaylistLoader_; // cancel any pending creation of mup on media
3884 // a new one will be added if needed.
3885
3886 if (mpl.createMupOnMedia_) {
3887 mpl.off('loadedmetadata', mpl.createMupOnMedia_);
3888 mpl.createMupOnMedia_ = null;
3889 } // clear any pending timeouts
3890
3891
3892 if (mpl.minimumUpdatePeriodTimeout_) {
3893 window__default["default"].clearTimeout(mpl.minimumUpdatePeriodTimeout_);
3894 mpl.minimumUpdatePeriodTimeout_ = null;
3895 }
3896
3897 var mup = mpl.master && mpl.master.minimumUpdatePeriod; // If the minimumUpdatePeriod has a value of 0, that indicates that the current
3898 // MPD has no future validity, so a new one will need to be acquired when new
3899 // media segments are to be made available. Thus, we use the target duration
3900 // in this case
3901
3902 if (mup === 0) {
3903 if (mpl.media()) {
3904 mup = mpl.media().targetDuration * 1000;
3905 } else {
3906 mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;
3907 mpl.one('loadedmetadata', mpl.createMupOnMedia_);
3908 }
3909 } // if minimumUpdatePeriod is invalid or <= zero (which can
3910 // happen when a live video becomes VOD), skip timeout
3911 // creation.
3912
3913
3914 if (typeof mup !== 'number' || mup <= 0) {
3915 if (mup < 0) {
3916 this.logger_("found invalid minimumUpdatePeriod of " + mup + ", not setting a timeout");
3917 }
3918
3919 return;
3920 }
3921
3922 this.createMUPTimeout_(mup);
3923 };
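 // Refresh cadence sketch (values illustrative):
 //
 //   minimumUpdatePeriod="PT2S" -> createMUPTimeout_(2000): refetch the MPD every 2s
 //   minimumUpdatePeriod="PT0S" -> mup falls back to media().targetDuration * 1000
 //   attribute absent (VOD)     -> no timeout; the manifest is never refetched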
3924
3925 _proto.createMUPTimeout_ = function createMUPTimeout_(mup) {
3926 var mpl = this.masterPlaylistLoader_;
3927 mpl.minimumUpdatePeriodTimeout_ = window__default["default"].setTimeout(function () {
3928 mpl.minimumUpdatePeriodTimeout_ = null;
3929 mpl.trigger('minimumUpdatePeriod');
3930 mpl.createMUPTimeout_(mup);
3931 }, mup);
3932 }
3933 /**
3934 * Sends request to refresh the master xml and updates the parsed master manifest
3935 */
3936 ;
3937
3938 _proto.refreshXml_ = function refreshXml_() {
3939 var _this8 = this;
3940
3941 this.requestMaster_(function (req, masterChanged) {
3942 if (!masterChanged) {
3943 return;
3944 }
3945
3946 if (_this8.media_) {
3947 _this8.media_ = _this8.masterPlaylistLoader_.master.playlists[_this8.media_.id];
3948 } // This will filter out updated sidx info from the mapping
3949
3950
3951 _this8.masterPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(_this8.masterPlaylistLoader_.master, _this8.masterPlaylistLoader_.sidxMapping_);
3952
3953 _this8.addSidxSegments_(_this8.media(), _this8.state, function (sidxChanged) {
3954 // TODO: do we need to reload the current playlist?
3955 _this8.refreshMedia_(_this8.media().id);
3956 });
3957 });
3958 }
3959 /**
3960 * Refreshes the media playlist by re-parsing the master xml and updating playlist
3961 * references. If this is an alternate loader, the updated parsed manifest is retrieved
3962 * from the master loader.
3963 */
3964 ;
3965
3966 _proto.refreshMedia_ = function refreshMedia_(mediaID) {
3967 var _this9 = this;
3968
3969 if (!mediaID) {
3970 throw new Error('refreshMedia_ must take a media id');
3971 } // for master we have to reparse the master xml
3972 // to re-create segments based on current timing values
3973 // which may change media. We only skip updating master
3974 // if this is the first time this.media_ is being set.
3975 // as master was just parsed in that case.
3976
3977
3978 if (this.media_ && this.isMaster_) {
3979 this.handleMaster_();
3980 }
3981
3982 var playlists = this.masterPlaylistLoader_.master.playlists;
3983 var mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];
3984
3985 if (mediaChanged) {
3986 this.media_ = playlists[mediaID];
3987 } else {
3988 this.trigger('playlistunchanged');
3989 }
3990
3991 if (!this.mediaUpdateTimeout) {
3992 var createMediaUpdateTimeout = function createMediaUpdateTimeout() {
3993 if (_this9.media().endList) {
3994 return;
3995 }
3996
3997 _this9.mediaUpdateTimeout = window__default["default"].setTimeout(function () {
3998 _this9.trigger('mediaupdatetimeout');
3999
4000 createMediaUpdateTimeout();
4001 }, refreshDelay(_this9.media(), Boolean(mediaChanged)));
4002 };
4003
4004 createMediaUpdateTimeout();
4005 }
4006
4007 this.trigger('loadedplaylist');
4008 };
4009
4010 return DashPlaylistLoader;
4011}(EventTarget);
4012
4013var Config = {
4014 GOAL_BUFFER_LENGTH: 30,
4015 MAX_GOAL_BUFFER_LENGTH: 60,
4016 BACK_BUFFER_LENGTH: 30,
4017 GOAL_BUFFER_LENGTH_RATE: 1,
4018 // 0.5 MB/s
4019 INITIAL_BANDWIDTH: 4194304,
4020 // A fudge factor to apply to advertised playlist bitrates to account for
4021 // temporary fluctuations in client bandwidth
4022 BANDWIDTH_VARIANCE: 1.2,
4023 // How much of the buffer must be filled before we consider upswitching
4024 BUFFER_LOW_WATER_LINE: 0,
4025 MAX_BUFFER_LOW_WATER_LINE: 30,
4026 // TODO: Remove this when experimentalBufferBasedABR is removed
4027 EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
4028 BUFFER_LOW_WATER_LINE_RATE: 1,
4029 // If the buffer is greater than the high water line, we won't switch down
4030 BUFFER_HIGH_WATER_LINE: 30
4031};
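// These defaults are surfaced on the Vhs export and can be tuned at runtime,
// e.g. (property names mirror the keys above; adjust cautiously):
//
//   videojs.Vhs.GOAL_BUFFER_LENGTH = 60;  // buffer further ahead
//   videojs.Vhs.BANDWIDTH_VARIANCE = 1.1; // trust advertised bitrates more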
4032
4033var stringToArrayBuffer = function stringToArrayBuffer(string) {
4034 var view = new Uint8Array(new ArrayBuffer(string.length));
4035
4036 for (var i = 0; i < string.length; i++) {
4037 view[i] = string.charCodeAt(i);
4038 }
4039
4040 return view.buffer;
4041};
4042
4043/* global Blob, BlobBuilder, Worker */
4044// unify worker interface
4045var browserWorkerPolyFill = function browserWorkerPolyFill(workerObj) {
4046 // node only supports on/off
4047 workerObj.on = workerObj.addEventListener;
4048 workerObj.off = workerObj.removeEventListener;
4049 return workerObj;
4050};
4051
4052var createObjectURL = function createObjectURL(str) {
4053 try {
4054 return URL.createObjectURL(new Blob([str], {
4055 type: 'application/javascript'
4056 }));
4057 } catch (e) {
4058 var blob = new BlobBuilder();
4059 blob.append(str);
4060 return URL.createObjectURL(blob.getBlob());
4061 }
4062};
4063
4064var factory = function factory(code) {
4065 return function () {
4066 var objectUrl = createObjectURL(code);
4067 var worker = browserWorkerPolyFill(new Worker(objectUrl));
4068 worker.objURL = objectUrl;
4069 var terminate = worker.terminate;
4070 worker.on = worker.addEventListener;
4071 worker.off = worker.removeEventListener;
4072
4073 worker.terminate = function () {
4074 URL.revokeObjectURL(objectUrl);
4075 return terminate.call(this);
4076 };
4077
4078 return worker;
4079 };
4080};
4081var transform = function transform(code) {
4082 return "var browserWorkerPolyFill = " + browserWorkerPolyFill.toString() + ";\n" + 'browserWorkerPolyFill(self);\n' + code;
4083};
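// Worker factory sketch (browser-only; assumes Blob/Worker support and an
// inline worker script string):
//
//   var createEchoWorker = factory(transform(
//     'self.onmessage = function (e) { self.postMessage(e.data); };'
//   ));
//   var worker = createEchoWorker(); // Blob-URL backed Worker with on/off helpers
//   worker.on('message', function (e) { console.log(e.data); });
//   worker.postMessage('ping');
//   worker.terminate();              // also revokes the underlying object URL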
4084
4085var getWorkerString = function getWorkerString(fn) {
4086 return fn.toString().replace(/^function.+?{/, '').slice(0, -1);
4087};
4088
4089/* rollup-plugin-worker-factory start for worker!/Users/ddashkevich/projects/vhs-release/src/transmuxer-worker.js */
4090var workerCode$1 = transform(getWorkerString(function () {
4091 /**
4092 * mux.js
4093 *
4094 * Copyright (c) Brightcove
4095 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4096 *
4097 * A lightweight readable stream implementation that handles event dispatching.
4098 * Objects that inherit from streams should call init in their constructors.
4099 */
4100
4101 var Stream = function Stream() {
4102 this.init = function () {
4103 var listeners = {};
4104 /**
4105 * Add a listener for a specified event type.
4106 * @param type {string} the event name
4107 * @param listener {function} the callback to be invoked when an event of
4108 * the specified type occurs
4109 */
4110
4111 this.on = function (type, listener) {
4112 if (!listeners[type]) {
4113 listeners[type] = [];
4114 }
4115
4116 listeners[type] = listeners[type].concat(listener);
4117 };
4118 /**
4119 * Remove a listener for a specified event type.
4120 * @param type {string} the event name
4121 * @param listener {function} a function previously registered for this
4122 * type of event through `on`
4123 */
4124
4125
4126 this.off = function (type, listener) {
4127 var index;
4128
4129 if (!listeners[type]) {
4130 return false;
4131 }
4132
4133 index = listeners[type].indexOf(listener);
4134 listeners[type] = listeners[type].slice();
4135 if (index > -1) { listeners[type].splice(index, 1); } // guard: splice(-1, 1) would remove the last listener when `listener` was never registered
4136 return index > -1;
4137 };
4138 /**
4139 * Trigger an event of the specified type on this stream. Any additional
4140 * arguments to this function are passed as parameters to event listeners.
4141 * @param type {string} the event name
4142 */
4143
4144
4145 this.trigger = function (type) {
4146 var callbacks, i, length, args;
4147 callbacks = listeners[type];
4148
4149 if (!callbacks) {
4150 return;
4151 } // Slicing the arguments on every invocation of this method
4152 // can add a significant amount of overhead. Avoid the
4153 // intermediate object creation for the common case of a
4154 // single callback argument
4155
4156
4157 if (arguments.length === 2) {
4158 length = callbacks.length;
4159
4160 for (i = 0; i < length; ++i) {
4161 callbacks[i].call(this, arguments[1]);
4162 }
4163 } else {
4164 args = [];
4165 i = arguments.length;
4166
4167 for (i = 1; i < arguments.length; ++i) {
4168 args.push(arguments[i]);
4169 }
4170
4171 length = callbacks.length;
4172
4173 for (i = 0; i < length; ++i) {
4174 callbacks[i].apply(this, args);
4175 }
4176 }
4177 };
4178 /**
4179 * Destroys the stream and cleans up.
4180 */
4181
4182
4183 this.dispose = function () {
4184 listeners = {};
4185 };
4186 };
4187 };
4188 /**
4189 * Forwards all `data` events on this stream to the destination stream. The
4190 * destination stream should provide a method `push` to receive the data
4191 * events as they arrive.
4192 * @param destination {stream} the stream that will receive all `data` events
4193 * @param autoFlush {boolean} if false, we will not call `flush` on the destination
4194 * when the current stream emits a 'done' event
4195 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
4196 */
4197
4198
4199 Stream.prototype.pipe = function (destination) {
4200 this.on('data', function (data) {
4201 destination.push(data);
4202 });
4203 this.on('done', function (flushSource) {
4204 destination.flush(flushSource);
4205 });
4206 this.on('partialdone', function (flushSource) {
4207 destination.partialFlush(flushSource);
4208 });
4209 this.on('endedtimeline', function (flushSource) {
4210 destination.endTimeline(flushSource);
4211 });
4212 this.on('reset', function (flushSource) {
4213 destination.reset(flushSource);
4214 });
4215 return destination;
4216 }; // Default stream functions that are expected to be overridden to perform
4217 // actual work. These are provided by the prototype as a sort of no-op
4218 // implementation so that we don't have to check for their existence in the
4219 // `pipe` function above.
4220
4221
4222 Stream.prototype.push = function (data) {
4223 this.trigger('data', data);
4224 };
4225
4226 Stream.prototype.flush = function (flushSource) {
4227 this.trigger('done', flushSource);
4228 };
4229
4230 Stream.prototype.partialFlush = function (flushSource) {
4231 this.trigger('partialdone', flushSource);
4232 };
4233
4234 Stream.prototype.endTimeline = function (flushSource) {
4235 this.trigger('endedtimeline', flushSource);
4236 };
4237
4238 Stream.prototype.reset = function (flushSource) {
4239 this.trigger('reset', flushSource);
4240 };
4241
4242 var stream = Stream;
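 // Minimal usage sketch: call init() to gain on/off/trigger, then pipe()
 // forwards 'data'/'done' events to a downstream stage:
 //
 //   var source = new Stream();
 //   source.init();
 //   var sink = new Stream();
 //   sink.init();
 //   sink.push = function (data) { /* handle each 'data' event */ };
 //   source.pipe(sink);
 //   source.push('sample'); // -> sink.push('sample')
 //   source.flush();        // -> sink.flush() via the 'done' event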
4243 var MAX_UINT32$1 = Math.pow(2, 32);
4244
4245 var getUint64$2 = function getUint64(uint8) {
4246 var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);
4247 var value;
4248
4249 if (dv.getBigUint64) {
4250 value = dv.getBigUint64(0);
4251
4252 if (value < Number.MAX_SAFE_INTEGER) {
4253 return Number(value);
4254 }
4255
4256 return value;
4257 }
4258
4259 return dv.getUint32(0) * MAX_UINT32$1 + dv.getUint32(4);
4260 };
4261
4262 var numbers = {
4263 getUint64: getUint64$2,
4264 MAX_UINT32: MAX_UINT32$1
4265 };
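 // Example: the big-endian bytes for 2^32 decode as below (a BigInt is only
 // returned when the value exceeds Number.MAX_SAFE_INTEGER):
 //
 //   getUint64$2(new Uint8Array([0, 0, 0, 1, 0, 0, 0, 0])); // 4294967296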
4266 var MAX_UINT32 = numbers.MAX_UINT32;
4267 var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants
4268
4269 (function () {
4270 var i;
4271 types = {
4272 avc1: [],
4273 // codingname
4274 avcC: [],
4275 btrt: [],
4276 dinf: [],
4277 dref: [],
4278 esds: [],
4279 ftyp: [],
4280 hdlr: [],
4281 mdat: [],
4282 mdhd: [],
4283 mdia: [],
4284 mfhd: [],
4285 minf: [],
4286 moof: [],
4287 moov: [],
4288 mp4a: [],
4289 // codingname
4290 mvex: [],
4291 mvhd: [],
4292 pasp: [],
4293 sdtp: [],
4294 smhd: [],
4295 stbl: [],
4296 stco: [],
4297 stsc: [],
4298 stsd: [],
4299 stsz: [],
4300 stts: [],
4301 styp: [],
4302 tfdt: [],
4303 tfhd: [],
4304 traf: [],
4305 trak: [],
4306 trun: [],
4307 trex: [],
4308 tkhd: [],
4309 vmhd: []
4310 }; // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we
4311 // don't throw an error
4312
4313 if (typeof Uint8Array === 'undefined') {
4314 return;
4315 }
4316
4317 for (i in types) {
4318 if (types.hasOwnProperty(i)) {
4319 types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
4320 }
4321 }
4322
4323 MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
4324 AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
4325 MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
4326 VIDEO_HDLR = new Uint8Array([0x00, // version 0
4327 0x00, 0x00, 0x00, // flags
4328 0x00, 0x00, 0x00, 0x00, // pre_defined
4329 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
4330 0x00, 0x00, 0x00, 0x00, // reserved
4331 0x00, 0x00, 0x00, 0x00, // reserved
4332 0x00, 0x00, 0x00, 0x00, // reserved
4333 0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
4334 ]);
4335 AUDIO_HDLR = new Uint8Array([0x00, // version 0
4336 0x00, 0x00, 0x00, // flags
4337 0x00, 0x00, 0x00, 0x00, // pre_defined
4338 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
4339 0x00, 0x00, 0x00, 0x00, // reserved
4340 0x00, 0x00, 0x00, 0x00, // reserved
4341 0x00, 0x00, 0x00, 0x00, // reserved
4342 0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
4343 ]);
4344 HDLR_TYPES = {
4345 video: VIDEO_HDLR,
4346 audio: AUDIO_HDLR
4347 };
4348 DREF = new Uint8Array([0x00, // version 0
4349 0x00, 0x00, 0x00, // flags
4350 0x00, 0x00, 0x00, 0x01, // entry_count
4351 0x00, 0x00, 0x00, 0x0c, // entry_size
4352 0x75, 0x72, 0x6c, 0x20, // 'url' type
4353 0x00, // version 0
4354 0x00, 0x00, 0x01 // entry_flags
4355 ]);
4356 SMHD = new Uint8Array([0x00, // version
4357 0x00, 0x00, 0x00, // flags
4358 0x00, 0x00, // balance, 0 means centered
4359 0x00, 0x00 // reserved
4360 ]);
4361 STCO = new Uint8Array([0x00, // version
4362 0x00, 0x00, 0x00, // flags
4363 0x00, 0x00, 0x00, 0x00 // entry_count
4364 ]);
4365 STSC = STCO;
4366 STSZ = new Uint8Array([0x00, // version
4367 0x00, 0x00, 0x00, // flags
4368 0x00, 0x00, 0x00, 0x00, // sample_size
4369 0x00, 0x00, 0x00, 0x00 // sample_count
4370 ]);
4371 STTS = STCO;
4372 VMHD = new Uint8Array([0x00, // version
4373 0x00, 0x00, 0x01, // flags
4374 0x00, 0x00, // graphicsmode
4375 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
4376 ]);
4377 })();
4378
4379 box = function box(type) {
4380 var payload = [],
4381 size = 0,
4382 i,
4383 result,
4384 view;
4385
4386 for (i = 1; i < arguments.length; i++) {
4387 payload.push(arguments[i]);
4388 }
4389
4390 i = payload.length; // calculate the total size we need to allocate
4391
4392 while (i--) {
4393 size += payload[i].byteLength;
4394 }
4395
4396 result = new Uint8Array(size + 8);
4397 view = new DataView(result.buffer, result.byteOffset, result.byteLength);
4398 view.setUint32(0, result.byteLength);
4399 result.set(type, 4); // copy the payload into the result
4400
4401 for (i = 0, size = 8; i < payload.length; i++) {
4402 result.set(payload[i], size);
4403 size += payload[i].byteLength;
4404 }
4405
4406 return result;
4407 };
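 // Layout sketch: every box is [ 32-bit big-endian size | 4-byte type | payload ],
 // so for example:
 //
 //   var b = box(types.mdat, new Uint8Array([1, 2]));
 //   // b.byteLength === 10; bytes 0-3 encode 10, bytes 4-7 spell 'mdat'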
4408
4409 dinf = function dinf() {
4410 return box(types.dinf, box(types.dref, DREF));
4411 };
4412
4413 esds = function esds(track) {
4414 return box(types.esds, new Uint8Array([0x00, // version
4415 0x00, 0x00, 0x00, // flags
4416 // ES_Descriptor
4417 0x03, // tag, ES_DescrTag
4418 0x19, // length
4419 0x00, 0x00, // ES_ID
4420 0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
4421 // DecoderConfigDescriptor
4422 0x04, // tag, DecoderConfigDescrTag
4423 0x11, // length
4424 0x40, // object type
4425 0x15, // streamType
4426 0x00, 0x06, 0x00, // bufferSizeDB
4427 0x00, 0x00, 0xda, 0xc0, // maxBitrate
4428 0x00, 0x00, 0xda, 0xc0, // avgBitrate
4429 // DecoderSpecificInfo
4430 0x05, // tag, DecoderSpecificInfoTag
4431 0x02, // length
4432 // ISO/IEC 14496-3, AudioSpecificConfig
4433 // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
4434 track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
4435 ]));
4436 };
4437
4438 ftyp = function ftyp() {
4439 return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
4440 };
4441
4442 hdlr = function hdlr(type) {
4443 return box(types.hdlr, HDLR_TYPES[type]);
4444 };
4445
4446 mdat = function mdat(data) {
4447 return box(types.mdat, data);
4448 };
4449
4450 mdhd = function mdhd(track) {
4451 var result = new Uint8Array([0x00, // version 0
4452 0x00, 0x00, 0x00, // flags
4453 0x00, 0x00, 0x00, 0x02, // creation_time
4454 0x00, 0x00, 0x00, 0x03, // modification_time
4455 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
4456 track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
4457 0x55, 0xc4, // 'und' language (undetermined)
4458 0x00, 0x00]); // Use the sample rate from the track metadata, when it is
4459 // defined. The sample rate can be parsed out of an ADTS header, for
4460 // instance.
4461
4462 if (track.samplerate) {
4463 result[12] = track.samplerate >>> 24 & 0xFF;
4464 result[13] = track.samplerate >>> 16 & 0xFF;
4465 result[14] = track.samplerate >>> 8 & 0xFF;
4466 result[15] = track.samplerate & 0xFF;
4467 }
4468
4469 return box(types.mdhd, result);
4470 };
4471
4472 mdia = function mdia(track) {
4473 return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
4474 };
4475
4476 mfhd = function mfhd(sequenceNumber) {
4477 return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
4478 (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
4479 ]));
4480 };
4481
4482 minf = function minf(track) {
4483 return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
4484 };
4485
4486 moof = function moof(sequenceNumber, tracks) {
4487 var trackFragments = [],
4488 i = tracks.length; // build traf boxes for each track fragment
4489
4490 while (i--) {
4491 trackFragments[i] = traf(tracks[i]);
4492 }
4493
4494 return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
4495 };
4496 /**
4497 * Returns a movie box.
4498 * @param tracks {array} the tracks associated with this movie
4499 * @see ISO/IEC 14496-12:2012(E), section 8.2.1
4500 */
4501
4502
4503 moov = function moov(tracks) {
4504 var i = tracks.length,
4505 boxes = [];
4506
4507 while (i--) {
4508 boxes[i] = trak(tracks[i]);
4509 }
4510
4511 return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
4512 };
4513
4514 mvex = function mvex(tracks) {
4515 var i = tracks.length,
4516 boxes = [];
4517
4518 while (i--) {
4519 boxes[i] = trex(tracks[i]);
4520 }
4521
4522 return box.apply(null, [types.mvex].concat(boxes));
4523 };
4524
4525 mvhd = function mvhd(duration) {
4526 var bytes = new Uint8Array([0x00, // version 0
4527 0x00, 0x00, 0x00, // flags
4528 0x00, 0x00, 0x00, 0x01, // creation_time
4529 0x00, 0x00, 0x00, 0x02, // modification_time
4530 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
4531 (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
4532 0x00, 0x01, 0x00, 0x00, // 1.0 rate
4533 0x01, 0x00, // 1.0 volume
4534 0x00, 0x00, // reserved
4535 0x00, 0x00, 0x00, 0x00, // reserved
4536 0x00, 0x00, 0x00, 0x00, // reserved
4537 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
4538 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
4539 0xff, 0xff, 0xff, 0xff // next_track_ID
4540 ]);
4541 return box(types.mvhd, bytes);
4542 };
4543
4544 sdtp = function sdtp(track) {
4545 var samples = track.samples || [],
4546 bytes = new Uint8Array(4 + samples.length),
4547 flags,
4548 i; // leave the full box header (4 bytes) all zero
4549 // write the sample table
4550
4551 for (i = 0; i < samples.length; i++) {
4552 flags = samples[i].flags;
4553 bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
4554 }
4555
4556 return box(types.sdtp, bytes);
4557 };
4558
4559 stbl = function stbl(track) {
4560 return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
4561 };
4562
4563 (function () {
4564 var videoSample, audioSample;
4565
4566 stsd = function stsd(track) {
4567 return box(types.stsd, new Uint8Array([0x00, // version 0
4568 0x00, 0x00, 0x00, // flags
4569 0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
4570 };
4571
4572 videoSample = function videoSample(track) {
4573 var sps = track.sps || [],
4574 pps = track.pps || [],
4575 sequenceParameterSets = [],
4576 pictureParameterSets = [],
4577 i,
4578 avc1Box; // assemble the SPSs
4579
4580 for (i = 0; i < sps.length; i++) {
4581 sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
4582 sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength
4583
4584 sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
4585 } // assemble the PPSs
4586
4587
4588 for (i = 0; i < pps.length; i++) {
4589 pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
4590 pictureParameterSets.push(pps[i].byteLength & 0xFF);
4591 pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
4592 }
4593
4594 avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
4595 0x00, 0x01, // data_reference_index
4596 0x00, 0x00, // pre_defined
4597 0x00, 0x00, // reserved
4598 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
4599 (track.width & 0xff00) >> 8, track.width & 0xff, // width
4600 (track.height & 0xff00) >> 8, track.height & 0xff, // height
4601 0x00, 0x48, 0x00, 0x00, // horizresolution
4602 0x00, 0x48, 0x00, 0x00, // vertresolution
4603 0x00, 0x00, 0x00, 0x00, // reserved
4604 0x00, 0x01, // frame_count
4605 0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
4606 0x00, 0x18, // depth = 24
4607 0xff, 0xff // pre_defined = -1
4608 ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
4609 track.profileIdc, // AVCProfileIndication
4610 track.profileCompatibility, // profile_compatibility
4611 track.levelIdc, // AVCLevelIndication
4612 0xff // lengthSizeMinusOne, hard-coded to 4 bytes
4613 ].concat([sps.length], // numOfSequenceParameterSets
4614 sequenceParameterSets, // "SPS"
4615 [pps.length], // numOfPictureParameterSets
4616 pictureParameterSets // "PPS"
4617 ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
4618 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
4619 0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
4620 ]))];
4621
4622 if (track.sarRatio) {
4623 var hSpacing = track.sarRatio[0],
4624 vSpacing = track.sarRatio[1];
4625 avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
4626 }
4627
4628 return box.apply(null, avc1Box);
4629 };
4630
4631 audioSample = function audioSample(track) {
4632 return box(types.mp4a, new Uint8Array([// SampleEntry, ISO/IEC 14496-12
4633 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
4634 0x00, 0x01, // data_reference_index
4635 // AudioSampleEntry, ISO/IEC 14496-12
4636 0x00, 0x00, 0x00, 0x00, // reserved
4637 0x00, 0x00, 0x00, 0x00, // reserved
4638 (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount
4639 (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
4640 0x00, 0x00, // pre_defined
4641 0x00, 0x00, // reserved
4642 (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16
4643 // MP4AudioSampleEntry, ISO/IEC 14496-14
4644 ]), esds(track));
4645 };
4646 })();
4647
4648 tkhd = function tkhd(track) {
4649 var result = new Uint8Array([0x00, // version 0
4650 0x00, 0x00, 0x07, // flags
4651 0x00, 0x00, 0x00, 0x00, // creation_time
4652 0x00, 0x00, 0x00, 0x00, // modification_time
4653 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
4654 0x00, 0x00, 0x00, 0x00, // reserved
4655 (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
4656 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
4657 0x00, 0x00, // layer
4658 0x00, 0x00, // alternate_group
4659 0x01, 0x00, // non-audio track volume
4660 0x00, 0x00, // reserved
4661 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
4662 (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
4663 (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
4664 ]);
4665 return box(types.tkhd, result);
4666 };
4667 /**
4668 * Generate a track fragment (traf) box. A traf box collects metadata
4669 * about tracks in a movie fragment (moof) box.
4670 */
4671
4672
4673 traf = function traf(track) {
4674 var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
4675 trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
4676 0x00, 0x00, 0x3a, // flags
4677 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
4678 0x00, 0x00, 0x00, 0x01, // sample_description_index
4679 0x00, 0x00, 0x00, 0x00, // default_sample_duration
4680 0x00, 0x00, 0x00, 0x00, // default_sample_size
4681 0x00, 0x00, 0x00, 0x00 // default_sample_flags
4682 ]));
4683 upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / MAX_UINT32);
4684 lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % MAX_UINT32);
4685 trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
4686 0x00, 0x00, 0x00, // flags
4687 // baseMediaDecodeTime
4688 upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of
4689 // the containing moof to the first payload byte of the associated
4690 // mdat
4691
4692 dataOffset = 32 + // tfhd
4693 20 + // tfdt
4694 8 + // traf header
4695 16 + // mfhd
4696 8 + // moof header
4697 8; // mdat header
4698 // audio tracks require less metadata
4699
4700 if (track.type === 'audio') {
4701 trackFragmentRun = trun$1(track, dataOffset);
4702 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
4703 } // video tracks should contain an independent and disposable samples
4704 // box (sdtp)
4705 // generate one and adjust offsets to match
4706
4707
4708 sampleDependencyTable = sdtp(track);
4709 trackFragmentRun = trun$1(track, sampleDependencyTable.length + dataOffset);
4710 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
4711 };
4712 /**
4713 * Generate a track box.
4714 * @param track {object} a track definition
4715 * @return {Uint8Array} the track box
4716 */
4717
4718
4719 trak = function trak(track) {
4720 track.duration = track.duration || 0xffffffff;
4721 return box(types.trak, tkhd(track), mdia(track));
4722 };
4723
4724 trex = function trex(track) {
4725 var result = new Uint8Array([0x00, // version 0
4726 0x00, 0x00, 0x00, // flags
4727 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
4728 0x00, 0x00, 0x00, 0x01, // default_sample_description_index
4729 0x00, 0x00, 0x00, 0x00, // default_sample_duration
4730 0x00, 0x00, 0x00, 0x00, // default_sample_size
4731 0x00, 0x01, 0x00, 0x01 // default_sample_flags
4732 ]); // the last two bytes of default_sample_flags is the sample
4733 // degradation priority, a hint about the importance of this sample
4734 // relative to others. Lower the degradation priority for all sample
4735 // types other than video.
4736
4737 if (track.type !== 'video') {
4738 result[result.length - 1] = 0x00;
4739 }
4740
4741 return box(types.trex, result);
4742 };
4743
4744 (function () {
4745 var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a
4746 // duration is present for the first sample, it will be present for
4747 // all subsequent samples.
4748 // see ISO/IEC 14496-12:2012, Section 8.8.8.1
4749
4750 trunHeader = function trunHeader(samples, offset) {
4751 var durationPresent = 0,
4752 sizePresent = 0,
4753 flagsPresent = 0,
4754 compositionTimeOffset = 0; // trun flag constants
4755
4756 if (samples.length) {
4757 if (samples[0].duration !== undefined) {
4758 durationPresent = 0x1;
4759 }
4760
4761 if (samples[0].size !== undefined) {
4762 sizePresent = 0x2;
4763 }
4764
4765 if (samples[0].flags !== undefined) {
4766 flagsPresent = 0x4;
4767 }
4768
4769 if (samples[0].compositionTimeOffset !== undefined) {
4770 compositionTimeOffset = 0x8;
4771 }
4772 }
4773
4774 return [0x00, // version 0
4775 0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags
4776 (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
4777 (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
4778 ];
4779 };
4780
4781 videoTrun = function videoTrun(track, offset) {
4782 var bytesOffset, bytes, header, samples, sample, i;
4783 samples = track.samples || [];
4784 offset += 8 + 12 + 16 * samples.length;
4785 header = trunHeader(samples, offset);
4786 bytes = new Uint8Array(header.length + samples.length * 16);
4787 bytes.set(header);
4788 bytesOffset = header.length;
4789
4790 for (i = 0; i < samples.length; i++) {
4791 sample = samples[i];
4792 bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
4793 bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
4794 bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
4795 bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
4796
4797 bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
4798 bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
4799 bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
4800 bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
4801
4802 bytes[bytesOffset++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
4803 bytes[bytesOffset++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
4804 bytes[bytesOffset++] = (sample.flags.degradationPriority >>> 8) & 0xFF; // degradation_priority, high byte
4805 bytes[bytesOffset++] = sample.flags.degradationPriority & 0xFF; // sample_flags (degradation_priority, low byte)
4806
4807 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
4808 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
4809 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
4810 bytes[bytesOffset++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
4811 }
4812
4813 return box(types.trun, bytes);
4814 };
4815
4816 audioTrun = function audioTrun(track, offset) {
4817 var bytes, bytesOffset, header, samples, sample, i;
4818 samples = track.samples || [];
4819 offset += 8 + 12 + 8 * samples.length;
4820 header = trunHeader(samples, offset);
4821 bytes = new Uint8Array(header.length + samples.length * 8);
4822 bytes.set(header);
4823 bytesOffset = header.length;
4824
4825 for (i = 0; i < samples.length; i++) {
4826 sample = samples[i];
4827 bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
4828 bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
4829 bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
4830 bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
4831
4832 bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
4833 bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
4834 bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
4835 bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
4836 }
4837
4838 return box(types.trun, bytes);
4839 };
4840
4841 trun$1 = function trun(track, offset) {
4842 if (track.type === 'audio') {
4843 return audioTrun(track, offset);
4844 }
4845
4846 return videoTrun(track, offset);
4847 };
4848 })();
4849
4850 var mp4Generator = {
4851 ftyp: ftyp,
4852 mdat: mdat,
4853 moof: moof,
4854 moov: moov,
4855 initSegment: function initSegment(tracks) {
4856 var fileType = ftyp(),
4857 movie = moov(tracks),
4858 result;
4859 result = new Uint8Array(fileType.byteLength + movie.byteLength);
4860 result.set(fileType);
4861 result.set(movie, fileType.byteLength);
4862 return result;
4863 }
4864 };
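 // Usage sketch (track fields are illustrative; real values come from the
 // transmuxer's parsed track metadata):
 //
 //   var init = mp4Generator.initSegment([{
 //     id: 1, type: 'video', duration: 0, width: 640, height: 360,
 //     profileIdc: 66, profileCompatibility: 192, levelIdc: 30,
 //     sps: [], pps: []
 //   }]);
 //   // init is a Uint8Array: an ftyp box followed by a moov box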
4865 /**
4866 * mux.js
4867 *
4868 * Copyright (c) Brightcove
4869 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4870 */
4871 // Convert an array of nal units into an array of frames with each frame being
4872 // composed of the nal units that make up that frame
4873 // Also keep track of cumulative data about the frame from the nal units such
4874 // as the frame duration, starting pts, etc.
4875
4876 var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
4877 var i,
4878 currentNal,
4879 currentFrame = [],
4880 frames = []; // TODO added for LHLS, make sure this is OK
4881
4882 frames.byteLength = 0;
4883 frames.nalCount = 0;
4884 frames.duration = 0;
4885 currentFrame.byteLength = 0;
4886
4887 for (i = 0; i < nalUnits.length; i++) {
4888 currentNal = nalUnits[i]; // Split on 'aud'-type nal units
4889
4890 if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
4891 // Since the very first nal unit is expected to be an AUD
4892 // only push to the frames array when currentFrame is not empty
4893 if (currentFrame.length) {
4894 currentFrame.duration = currentNal.dts - currentFrame.dts; // TODO added for LHLS, make sure this is OK
4895
4896 frames.byteLength += currentFrame.byteLength;
4897 frames.nalCount += currentFrame.length;
4898 frames.duration += currentFrame.duration;
4899 frames.push(currentFrame);
4900 }
4901
4902 currentFrame = [currentNal];
4903 currentFrame.byteLength = currentNal.data.byteLength;
4904 currentFrame.pts = currentNal.pts;
4905 currentFrame.dts = currentNal.dts;
4906 } else {
4907 // Specifically flag key frames for ease of use later
4908 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
4909 currentFrame.keyFrame = true;
4910 }
4911
4912 currentFrame.duration = currentNal.dts - currentFrame.dts;
4913 currentFrame.byteLength += currentNal.data.byteLength;
4914 currentFrame.push(currentNal);
4915 }
4916 } // For the last frame, use the duration of the previous frame if we
4917 // have nothing better to go on
4918
4919
4920 if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
4921 currentFrame.duration = frames[frames.length - 1].duration;
4922 } // Push the final frame
4923 // TODO added for LHLS, make sure this is OK
4924
4925
4926 frames.byteLength += currentFrame.byteLength;
4927 frames.nalCount += currentFrame.length;
4928 frames.duration += currentFrame.duration;
4929 frames.push(currentFrame);
4930 return frames;
4931 }; // Convert an array of frames into an array of Gop with each Gop being composed
4932 // of the frames that make up that Gop
4933 // Also keep track of cumulative data about the Gop from the frames such as the
4934 // Gop duration, starting pts, etc.
4935
4936
4937 var groupFramesIntoGops = function groupFramesIntoGops(frames) {
4938 var i,
4939 currentFrame,
4940 currentGop = [],
4941 gops = []; // We must pre-set some of the values on the Gop since we
4942 // keep running totals of these values
4943
4944 currentGop.byteLength = 0;
4945 currentGop.nalCount = 0;
4946 currentGop.duration = 0;
4947 currentGop.pts = frames[0].pts;
4948 currentGop.dts = frames[0].dts; // store some metadata about all the Gops
4949
4950 gops.byteLength = 0;
4951 gops.nalCount = 0;
4952 gops.duration = 0;
4953 gops.pts = frames[0].pts;
4954 gops.dts = frames[0].dts;
4955
4956 for (i = 0; i < frames.length; i++) {
4957 currentFrame = frames[i];
4958
4959 if (currentFrame.keyFrame) {
4960 // Since the very first frame is expected to be a keyframe
4961 // only push to the gops array when currentGop is not empty
4962 if (currentGop.length) {
4963 gops.push(currentGop);
4964 gops.byteLength += currentGop.byteLength;
4965 gops.nalCount += currentGop.nalCount;
4966 gops.duration += currentGop.duration;
4967 }
4968
4969 currentGop = [currentFrame];
4970 currentGop.nalCount = currentFrame.length;
4971 currentGop.byteLength = currentFrame.byteLength;
4972 currentGop.pts = currentFrame.pts;
4973 currentGop.dts = currentFrame.dts;
4974 currentGop.duration = currentFrame.duration;
4975 } else {
4976 currentGop.duration += currentFrame.duration;
4977 currentGop.nalCount += currentFrame.length;
4978 currentGop.byteLength += currentFrame.byteLength;
4979 currentGop.push(currentFrame);
4980 }
4981 }
4982
4983 if (gops.length && currentGop.duration <= 0) {
4984 currentGop.duration = gops[gops.length - 1].duration;
4985 }
4986
4987 gops.byteLength += currentGop.byteLength;
4988 gops.nalCount += currentGop.nalCount;
4989 gops.duration += currentGop.duration; // push the final Gop
4990
4991 gops.push(currentGop);
4992 return gops;
4993 };
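 // Shape sketch: both grouping helpers return plain arrays annotated with
 // running totals, e.g.
 //
 //   var gops = groupFramesIntoGops(frames);
 //   // gops[0]         -> an array of frames (each frame an array of nal units)
 //   // gops.byteLength -> total payload bytes across all gops
 //   // gops.nalCount   -> total nal units
 //   // gops.duration   -> summed frame durations
 //   // gops.pts/dts    -> timing of the first frame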
4994 /*
4995 * Search for the first keyframe in the GOPs and throw away all frames
4996 * until that keyframe. Then extend the duration of the pulled keyframe
4997 * and pull the PTS and DTS of the keyframe so that it covers the time
4998 * range of the frames that were disposed.
4999 *
5000 * @param {Array} gops video GOPs
5001 * @returns {Array} modified video GOPs
5002 */
5003
5004
5005 var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
5006 var currentGop;
5007
5008 if (!gops[0][0].keyFrame && gops.length > 1) {
5009 // Remove the first GOP
5010 currentGop = gops.shift();
5011 gops.byteLength -= currentGop.byteLength;
5012 gops.nalCount -= currentGop.nalCount; // Extend the first frame of what is now the
5013 // first gop to cover the time period of the
5014 // frames we just removed
5015
5016 gops[0][0].dts = currentGop.dts;
5017 gops[0][0].pts = currentGop.pts;
5018 gops[0][0].duration += currentGop.duration;
5019 }
5020
5021 return gops;
5022 };
5023 /**
5024 * Default sample object
5025 * see ISO/IEC 14496-12:2012, section 8.6.4.3
5026 */
5027
5028
5029 var createDefaultSample = function createDefaultSample() {
5030 return {
5031 size: 0,
5032 flags: {
5033 isLeading: 0,
5034 dependsOn: 1,
5035 isDependedOn: 0,
5036 hasRedundancy: 0,
5037 degradationPriority: 0,
5038 isNonSyncSample: 1
5039 }
5040 };
5041 };
5042 /*
5043 * Collates information from a video frame into an object for eventual
5044 * entry into an MP4 sample table.
5045 *
5046 * @param {Object} frame the video frame
5047 * @param {Number} dataOffset the byte offset to position the sample
5048 * @return {Object} object containing sample table info for a frame
5049 */
5050
5051
5052 var sampleForFrame = function sampleForFrame(frame, dataOffset) {
5053 var sample = createDefaultSample();
5054 sample.dataOffset = dataOffset;
5055 sample.compositionTimeOffset = frame.pts - frame.dts;
5056 sample.duration = frame.duration;
5057 sample.size = 4 * frame.length; // Space for nal unit size
5058
5059 sample.size += frame.byteLength;
5060
5061 if (frame.keyFrame) {
5062 sample.flags.dependsOn = 2;
5063 sample.flags.isNonSyncSample = 0;
5064 }
5065
5066 return sample;
5067 }; // generate the track's sample table from an array of gops
5068
5069
5070 var generateSampleTable$1 = function generateSampleTable(gops, baseDataOffset) {
5071 var h,
5072 i,
5073 sample,
5074 currentGop,
5075 currentFrame,
5076 dataOffset = baseDataOffset || 0,
5077 samples = [];
5078
5079 for (h = 0; h < gops.length; h++) {
5080 currentGop = gops[h];
5081
5082 for (i = 0; i < currentGop.length; i++) {
5083 currentFrame = currentGop[i];
5084 sample = sampleForFrame(currentFrame, dataOffset);
5085 dataOffset += sample.size;
5086 samples.push(sample);
5087 }
5088 }
5089
5090 return samples;
5091 }; // generate the track's raw mdat data from an array of gops
5092
5093
5094 var concatenateNalData = function concatenateNalData(gops) {
5095 var h,
5096 i,
5097 j,
5098 currentGop,
5099 currentFrame,
5100 currentNal,
5101 dataOffset = 0,
5102 nalsByteLength = gops.byteLength,
5103 numberOfNals = gops.nalCount,
5104 totalByteLength = nalsByteLength + 4 * numberOfNals,
5105 data = new Uint8Array(totalByteLength),
5106 view = new DataView(data.buffer); // For each Gop..
5107
5108 for (h = 0; h < gops.length; h++) {
5109 currentGop = gops[h]; // For each Frame..
5110
5111 for (i = 0; i < currentGop.length; i++) {
5112 currentFrame = currentGop[i]; // For each NAL..
5113
5114 for (j = 0; j < currentFrame.length; j++) {
5115 currentNal = currentFrame[j];
5116 view.setUint32(dataOffset, currentNal.data.byteLength);
5117 dataOffset += 4;
5118 data.set(currentNal.data, dataOffset);
5119 dataOffset += currentNal.data.byteLength;
5120 }
5121 }
5122 }
5123
5124 return data;
5125 }; // generate the track's sample table from a frame
5126
5127
5128 var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
5129 var sample,
5130 dataOffset = baseDataOffset || 0,
5131 samples = [];
5132 sample = sampleForFrame(frame, dataOffset);
5133 samples.push(sample);
5134 return samples;
5135 }; // generate the track's raw mdat data from a frame
5136
5137
5138 var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
5139 var i,
5140 currentNal,
5141 dataOffset = 0,
5142 nalsByteLength = frame.byteLength,
5143 numberOfNals = frame.length,
5144 totalByteLength = nalsByteLength + 4 * numberOfNals,
5145 data = new Uint8Array(totalByteLength),
5146 view = new DataView(data.buffer); // For each NAL..
5147
5148 for (i = 0; i < frame.length; i++) {
5149 currentNal = frame[i];
5150 view.setUint32(dataOffset, currentNal.data.byteLength);
5151 dataOffset += 4;
5152 data.set(currentNal.data, dataOffset);
5153 dataOffset += currentNal.data.byteLength;
5154 }
5155
5156 return data;
5157 };
5158
5159 var frameUtils = {
5160 groupNalsIntoFrames: groupNalsIntoFrames,
5161 groupFramesIntoGops: groupFramesIntoGops,
5162 extendFirstKeyFrame: extendFirstKeyFrame,
5163 generateSampleTable: generateSampleTable$1,
5164 concatenateNalData: concatenateNalData,
5165 generateSampleTableForFrame: generateSampleTableForFrame,
5166 concatenateNalDataForFrame: concatenateNalDataForFrame
5167 };
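// Illustrative sketch (not part of the bundle) of how these helpers chain
// together when building an mp4 video fragment; `nalUnits` is a hypothetical
// array of parsed NAL unit objects in decode order:
//
//   var frames = frameUtils.groupNalsIntoFrames(nalUnits);
//   var gops = frameUtils.groupFramesIntoGops(frames);
//   gops = frameUtils.extendFirstKeyFrame(gops);
//   var samples = frameUtils.generateSampleTable(gops, 0); // trun entries
//   var mdatData = frameUtils.concatenateNalData(gops);    // raw mdat payload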
5168 /**
5169 * mux.js
5170 *
5171 * Copyright (c) Brightcove
5172 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5173 */
5174
5175 var highPrefix = [33, 16, 5, 32, 164, 27];
5176 var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
5177
5178 var zeroFill = function zeroFill(count) {
5179 var a = [];
5180
5181 while (count--) {
5182 a.push(0);
5183 }
5184
5185 return a;
5186 };
5187
5188 var makeTable = function makeTable(metaTable) {
5189 return Object.keys(metaTable).reduce(function (obj, key) {
5190 obj[key] = new Uint8Array(metaTable[key].reduce(function (arr, part) {
5191 return arr.concat(part);
5192 }, []));
5193 return obj;
5194 }, {});
5195 };
5196
5197 var silence;
5198
5199 var silence_1 = function silence_1() {
5200 if (!silence) {
5201 // Frames-of-silence to use for filling in missing AAC frames
5202 var coneOfSilence = {
5203 96000: [highPrefix, [227, 64], zeroFill(154), [56]],
5204 88200: [highPrefix, [231], zeroFill(170), [56]],
5205 64000: [highPrefix, [248, 192], zeroFill(240), [56]],
5206 48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
5207 44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
5208 32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
5209 24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
5210 16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
5211 12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
5212 11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
5213 8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
5214 };
5215 silence = makeTable(coneOfSilence);
5216 }
5217
5218 return silence;
5219 };
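// Illustrative usage (not part of the bundle): the table is keyed by sample
// rate, so a 44.1kHz track gets its pre-built silent AAC frame like so:
//
//   var silentFrame = silence_1()[44100]; // Uint8Array, or undefined for
//                                         // sample rates not in the table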
5220 /**
5221 * mux.js
5222 *
5223 * Copyright (c) Brightcove
5224 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5225 */
5226
5227
5228 var ONE_SECOND_IN_TS$4 = 90000,
5229 // 90kHz clock
5230 secondsToVideoTs,
5231 secondsToAudioTs,
5232 videoTsToSeconds,
5233 audioTsToSeconds,
5234 audioTsToVideoTs,
5235 videoTsToAudioTs,
5236 metadataTsToSeconds;
5237
5238 secondsToVideoTs = function secondsToVideoTs(seconds) {
5239 return seconds * ONE_SECOND_IN_TS$4;
5240 };
5241
5242 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
5243 return seconds * sampleRate;
5244 };
5245
5246 videoTsToSeconds = function videoTsToSeconds(timestamp) {
5247 return timestamp / ONE_SECOND_IN_TS$4;
5248 };
5249
5250 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
5251 return timestamp / sampleRate;
5252 };
5253
5254 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
5255 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
5256 };
5257
5258 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
5259 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
5260 };
5261 /**
5262 * Adjust ID3 tag or caption timing information by the timeline pts values
5263 * (if keepOriginalTimestamps is false) and convert to seconds
5264 */
5265
5266
5267 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
5268 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
5269 };
5270
5271 var clock = {
5272 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$4,
5273 secondsToVideoTs: secondsToVideoTs,
5274 secondsToAudioTs: secondsToAudioTs,
5275 videoTsToSeconds: videoTsToSeconds,
5276 audioTsToSeconds: audioTsToSeconds,
5277 audioTsToVideoTs: audioTsToVideoTs,
5278 videoTsToAudioTs: videoTsToAudioTs,
5279 metadataTsToSeconds: metadataTsToSeconds
5280 };
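// A quick worked example (not part of the bundle) of the 90kHz clock
// conversions above:
//
//   clock.secondsToVideoTs(2);            // 180000 ticks
//   clock.videoTsToSeconds(45000);        // 0.5 seconds
//   clock.audioTsToVideoTs(44100, 44100); // 90000 (one second of 44.1kHz audio)
//   clock.metadataTsToSeconds(270000, 90000, false); // (270000 - 90000) / 90000 === 2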
5281 /**
5282 * mux.js
5283 *
5284 * Copyright (c) Brightcove
5285 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5286 */
5287
5288 /**
5289 * Sum the `byteLength` properties of the data in each AAC frame
5290 */
5291
5292 var sumFrameByteLengths = function sumFrameByteLengths(array) {
5293 var i,
5294 currentObj,
5295 sum = 0; // sum the byteLength of each frame's data payload
5296
5297 for (i = 0; i < array.length; i++) {
5298 currentObj = array[i];
5299 sum += currentObj.data.byteLength;
5300 }
5301
5302 return sum;
5303 }; // Possibly pad (prefix) the audio track with silence if appending this track
5304 // would lead to the introduction of a gap in the audio buffer
5305
5306
5307 var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
5308 var baseMediaDecodeTimeTs,
5309 frameDuration = 0,
5310 audioGapDuration = 0,
5311 audioFillFrameCount = 0,
5312 audioFillDuration = 0,
5313 silentFrame,
5314 i,
5315 firstFrame;
5316
5317 if (!frames.length) {
5318 return;
5319 }
5320
5321 baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfills
5322
5323 frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));
5324
5325 if (audioAppendStartTs && videoBaseMediaDecodeTime) {
5326 // insert the shortest possible amount (audio gap or audio to video gap)
5327 audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gap
5328
5329 audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
5330 audioFillDuration = audioFillFrameCount * frameDuration;
5331 } // don't attempt to fill gaps smaller than a single frame or larger
5332 // than a half second
5333
5334
5335 if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
5336 return;
5337 }
5338
5339 silentFrame = silence_1()[track.samplerate];
5340
5341 if (!silentFrame) {
5342 // we don't have a silent frame pregenerated for the sample rate, so use a frame
5343 // from the content instead
5344 silentFrame = frames[0].data;
5345 }
5346
5347 for (i = 0; i < audioFillFrameCount; i++) {
5348 firstFrame = frames[0];
5349 frames.splice(0, 0, {
5350 data: silentFrame,
5351 dts: firstFrame.dts - frameDuration,
5352 pts: firstFrame.pts - frameDuration
5353 });
5354 }
5355
5356 track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
5357 return audioFillDuration;
5358 }; // If the audio segment extends before the earliest allowed DTS
5359 // value, remove AAC frames until the segment starts at or after the
5360 // earliest allowed DTS so that we don't end up with a negative
5361 // baseMediaDecodeTime for the audio track
5362
5363
5364 var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
5365 if (track.minSegmentDts >= earliestAllowedDts) {
5366 return adtsFrames;
5367 } // We will need to recalculate the earliest segment DTS
5368
5369
5370 track.minSegmentDts = Infinity;
5371 return adtsFrames.filter(function (currentFrame) {
5372 // If this is an allowed frame, keep it and record its DTS
5373 if (currentFrame.dts >= earliestAllowedDts) {
5374 track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
5375 track.minSegmentPts = track.minSegmentDts;
5376 return true;
5377 } // Otherwise, discard it
5378
5379
5380 return false;
5381 });
5382 }; // generate the track's sample table from an array of frames
5383
5384
5385 var generateSampleTable = function generateSampleTable(frames) {
5386 var i,
5387 currentFrame,
5388 samples = [];
5389
5390 for (i = 0; i < frames.length; i++) {
5391 currentFrame = frames[i];
5392 samples.push({
5393 size: currentFrame.data.byteLength,
5394 duration: 1024 // every AAC frame decodes to 1024 audio samples
5395
5396 });
5397 }
5398
5399 return samples;
5400 }; // generate the track's raw mdat data from an array of frames
5401
5402
5403 var concatenateFrameData = function concatenateFrameData(frames) {
5404 var i,
5405 currentFrame,
5406 dataOffset = 0,
5407 data = new Uint8Array(sumFrameByteLengths(frames));
5408
5409 for (i = 0; i < frames.length; i++) {
5410 currentFrame = frames[i];
5411 data.set(currentFrame.data, dataOffset);
5412 dataOffset += currentFrame.data.byteLength;
5413 }
5414
5415 return data;
5416 };
5417
5418 var audioFrameUtils = {
5419 prefixWithSilence: prefixWithSilence,
5420 trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
5421 generateSampleTable: generateSampleTable,
5422 concatenateFrameData: concatenateFrameData
5423 };
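// Illustrative sketch (not part of the bundle): building the audio sample
// table and mdat payload from a hypothetical ADTS frame object:
//
//   var frames = [{ data: new Uint8Array(8), dts: 0, pts: 0 }];
//   audioFrameUtils.generateSampleTable(frames);
//   // => [{ size: 8, duration: 1024 }]
//   audioFrameUtils.concatenateFrameData(frames).byteLength; // 8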
5424 /**
5425 * mux.js
5426 *
5427 * Copyright (c) Brightcove
5428 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5429 */
5430
5431 var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS;
5432 /**
5433 * Store information about the start and end of the track and the
5434 * duration for each frame/sample we process in order to calculate
5435 * the baseMediaDecodeTime
5436 */
5437
5438 var collectDtsInfo = function collectDtsInfo(track, data) {
5439 if (typeof data.pts === 'number') {
5440 if (track.timelineStartInfo.pts === undefined) {
5441 track.timelineStartInfo.pts = data.pts;
5442 }
5443
5444 if (track.minSegmentPts === undefined) {
5445 track.minSegmentPts = data.pts;
5446 } else {
5447 track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
5448 }
5449
5450 if (track.maxSegmentPts === undefined) {
5451 track.maxSegmentPts = data.pts;
5452 } else {
5453 track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
5454 }
5455 }
5456
5457 if (typeof data.dts === 'number') {
5458 if (track.timelineStartInfo.dts === undefined) {
5459 track.timelineStartInfo.dts = data.dts;
5460 }
5461
5462 if (track.minSegmentDts === undefined) {
5463 track.minSegmentDts = data.dts;
5464 } else {
5465 track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
5466 }
5467
5468 if (track.maxSegmentDts === undefined) {
5469 track.maxSegmentDts = data.dts;
5470 } else {
5471 track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
5472 }
5473 }
5474 };
5475 /**
5476 * Clear values used to calculate the baseMediaDecodeTime between
5477 * tracks
5478 */
5479
5480
5481 var clearDtsInfo = function clearDtsInfo(track) {
5482 delete track.minSegmentDts;
5483 delete track.maxSegmentDts;
5484 delete track.minSegmentPts;
5485 delete track.maxSegmentPts;
5486 };
5487 /**
5488 * Calculate the track's baseMediaDecodeTime based on the earliest
5489 * DTS the transmuxer has ever seen and the minimum DTS for the
5490 * current track
5491 * @param track {object} track metadata configuration
5492 * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
5493 * in the source; false to adjust the first segment to start at 0.
5494 */
5495
5496
5497 var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
5498 var baseMediaDecodeTime,
5499 scale,
5500 minSegmentDts = track.minSegmentDts; // Optionally adjust the time so the first segment starts at zero.
5501
5502 if (!keepOriginalTimestamps) {
5503 minSegmentDts -= track.timelineStartInfo.dts;
5504 } // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
5505 // we want the start of the first segment to be placed
5506
5507
5508 baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime; // Add to that the distance this segment is from the very first
5509
5510 baseMediaDecodeTime += minSegmentDts; // baseMediaDecodeTime must not become negative
5511
5512 baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
5513
5514 if (track.type === 'audio') {
5515 // Audio uses a clock equal to its sample rate, so we need to
5516 // scale the timestamp into the clock rate of the track
5517 scale = track.samplerate / ONE_SECOND_IN_TS$3;
5518 baseMediaDecodeTime *= scale;
5519 baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
5520 }
5521
5522 return baseMediaDecodeTime;
5523 };
5524
5525 var trackDecodeInfo = {
5526 clearDtsInfo: clearDtsInfo,
5527 calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
5528 collectDtsInfo: collectDtsInfo
5529 };
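// Worked example (not part of the bundle): with keepOriginalTimestamps off,
// the baseMediaDecodeTime is the distance of this segment from the first
// one, clamped at zero; audio is additionally rescaled to its sample rate:
//
//   var track = {
//     type: 'audio',
//     samplerate: 44100,
//     minSegmentDts: 180000,
//     timelineStartInfo: { dts: 90000, baseMediaDecodeTime: 0 }
//   };
//   trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, false);
//   // => Math.floor((180000 - 90000) * (44100 / 90000)) === 44100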
5530 /**
5531 * mux.js
5532 *
5533 * Copyright (c) Brightcove
5534 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5535 *
5536 * Reads in-band caption information from a video elementary
5537 * stream. Captions must follow the CEA-708 standard for injection
5538 * into an MPEG-2 transport streams.
5539 * @see https://en.wikipedia.org/wiki/CEA-708
5540 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
5541 */
5542 // SEI NAL units carry a payload type field to indicate how they are to be
5543 // interpreted. CEA-708 caption content is always transmitted with
5544 // payload type 0x04.
5545
5546 var USER_DATA_REGISTERED_ITU_T_T35 = 4,
5547 RBSP_TRAILING_BITS = 128;
5548 /**
5549 * Parse a supplemental enhancement information (SEI) NAL unit.
5550 * Stops parsing once a message of type ITU T T35 has been found.
5551 *
5552 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
5553 * @return {object} the parsed SEI payload
5554 * @see Rec. ITU-T H.264, 7.3.2.3.1
5555 */
5556
5557 var parseSei = function parseSei(bytes) {
5558 var i = 0,
5559 result = {
5560 payloadType: -1,
5561 payloadSize: 0
5562 },
5563 payloadType = 0,
5564 payloadSize = 0; // go through the sei_rbsp parsing each individual sei_message
5565
5566 while (i < bytes.byteLength) {
5567 // stop once we have hit the end of the sei_rbsp
5568 if (bytes[i] === RBSP_TRAILING_BITS) {
5569 break;
5570 } // Parse payload type
5571
5572
5573 while (bytes[i] === 0xFF) {
5574 payloadType += 255;
5575 i++;
5576 }
5577
5578 payloadType += bytes[i++]; // Parse payload size
5579
5580 while (bytes[i] === 0xFF) {
5581 payloadSize += 255;
5582 i++;
5583 }
5584
5585 payloadSize += bytes[i++]; // this sei_message is a 608/708 caption so save it and break
5586 // there can only ever be one caption message in a frame's sei
5587
5588 if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
5589 var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);
5590
5591 if (userIdentifier === 'GA94') {
5592 result.payloadType = payloadType;
5593 result.payloadSize = payloadSize;
5594 result.payload = bytes.subarray(i, i + payloadSize);
5595 break;
5596 } else {
5597 result.payload = void 0;
5598 }
5599 } // skip the payload and parse the next message
5600
5601
5602 i += payloadSize;
5603 payloadType = 0;
5604 payloadSize = 0;
5605 }
5606
5607 return result;
5608 }; // see ANSI/SCTE 128-1 (2013), section 8.1
5609
5610
5611 var parseUserData = function parseUserData(sei) {
5612 // itu_t_t35_country_code must be 181 (United States) for
5613 // captions
5614 if (sei.payload[0] !== 181) {
5615 return null;
5616 } // itu_t_t35_provider_code should be 49 (ATSC) for captions
5617
5618
5619 if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
5620 return null;
5621 } // the user_identifier should be "GA94" to indicate ATSC1 data
5622
5623
5624 if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
5625 return null;
5626 } // finally, user_data_type_code should be 0x03 for caption data
5627
5628
5629 if (sei.payload[7] !== 0x03) {
5630 return null;
5631 } // return the user_data_type_structure and strip the trailing
5632 // marker bits
5633
5634
5635 return sei.payload.subarray(8, sei.payload.length - 1);
5636 }; // see CEA-708-D, section 4.4
5637
5638
5639 var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
5640 var results = [],
5641 i,
5642 count,
5643 offset,
5644 data; // if this is just filler, return immediately
5645
5646 if (!(userData[0] & 0x40)) {
5647 return results;
5648 } // parse out the cc_data_1 and cc_data_2 fields
5649
5650
5651 count = userData[0] & 0x1f;
5652
5653 for (i = 0; i < count; i++) {
5654 offset = i * 3;
5655 data = {
5656 type: userData[offset + 2] & 0x03,
5657 pts: pts
5658 }; // capture cc data when cc_valid is 1
5659
5660 if (userData[offset + 2] & 0x04) {
5661 data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
5662 results.push(data);
5663 }
5664 }
5665
5666 return results;
5667 };
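// Illustrative sketch (not part of the bundle): a hypothetical cc_data block
// whose header byte has process_cc_data_flag (0x40) set and cc_count === 1,
// followed by one em_data byte and one valid (0x04) field-1 byte pair:
//
//   var userData = new Uint8Array([0x41, 0x00, 0x04, 0x14, 0x2c]);
//   parseCaptionPackets(90000, userData);
//   // => [{ type: 0, pts: 90000, ccData: 0x142c }]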
5668
5669 var discardEmulationPreventionBytes$1 = function discardEmulationPreventionBytes(data) {
5670 var length = data.byteLength,
5671 emulationPreventionBytesPositions = [],
5672 i = 1,
5673 newLength,
5674 newData; // Find all `Emulation Prevention Bytes`
5675
5676 while (i < length - 2) {
5677 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
5678 emulationPreventionBytesPositions.push(i + 2);
5679 i += 2;
5680 } else {
5681 i++;
5682 }
5683 } // If no Emulation Prevention Bytes were found just return the original
5684 // array
5685
5686
5687 if (emulationPreventionBytesPositions.length === 0) {
5688 return data;
5689 } // Create a new array to hold the NAL unit data
5690
5691
5692 newLength = length - emulationPreventionBytesPositions.length;
5693 newData = new Uint8Array(newLength);
5694 var sourceIndex = 0;
5695
5696 for (i = 0; i < newLength; sourceIndex++, i++) {
5697 if (sourceIndex === emulationPreventionBytesPositions[0]) {
5698 // Skip this byte
5699 sourceIndex++; // Remove this position index
5700
5701 emulationPreventionBytesPositions.shift();
5702 }
5703
5704 newData[i] = data[sourceIndex];
5705 }
5706
5707 return newData;
5708 }; // exports
5709
5710
5711 var captionPacketParser = {
5712 parseSei: parseSei,
5713 parseUserData: parseUserData,
5714 parseCaptionPackets: parseCaptionPackets,
5715 discardEmulationPreventionBytes: discardEmulationPreventionBytes$1,
5716 USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
5717 }; // Link To Transport
5718 // -----------------
5719
5720 var CaptionStream$1 = function CaptionStream(options) {
5721 options = options || {};
5722 CaptionStream.prototype.init.call(this); // parse708captions flag, default to true
5723
5724 this.parse708captions_ = typeof options.parse708captions === 'boolean' ? options.parse708captions : true;
5725 this.captionPackets_ = [];
5726 this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
5727 new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
5728 new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
5729 new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
5730 ];
5731
5732 if (this.parse708captions_) {
5733 this.cc708Stream_ = new Cea708Stream({
5734 captionServices: options.captionServices
5735 }); // eslint-disable-line no-use-before-define
5736 }
5737
5738 this.reset(); // forward data and done events from CCs to this CaptionStream
5739
5740 this.ccStreams_.forEach(function (cc) {
5741 cc.on('data', this.trigger.bind(this, 'data'));
5742 cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
5743 cc.on('done', this.trigger.bind(this, 'done'));
5744 }, this);
5745
5746 if (this.parse708captions_) {
5747 this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));
5748 this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));
5749 this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));
5750 }
5751 };
5752
5753 CaptionStream$1.prototype = new stream();
5754
5755 CaptionStream$1.prototype.push = function (event) {
5756 var sei, userData, newCaptionPackets; // only examine SEI NALs
5757
5758 if (event.nalUnitType !== 'sei_rbsp') {
5759 return;
5760 } // parse the sei
5761
5762
5763 sei = captionPacketParser.parseSei(event.escapedRBSP); // no payload data, skip
5764
5765 if (!sei.payload) {
5766 return;
5767 } // ignore everything but user_data_registered_itu_t_t35
5768
5769
5770 if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
5771 return;
5772 } // parse out the user data payload
5773
5774
5775 userData = captionPacketParser.parseUserData(sei); // ignore unrecognized userData
5776
5777 if (!userData) {
5778 return;
5779 } // Sometimes, the same segment # will be downloaded twice. To stop the
5780 // caption data from being processed twice, we track the latest dts we've
5781 // received and ignore everything with a dts before that. However, since
5782 // data for a specific dts can be split across packets on either side of
5783 // a segment boundary, we need to make sure we *don't* ignore the packets
5784 // from the *next* segment that have dts === this.latestDts_. By constantly
5785 // tracking the number of packets received with dts === this.latestDts_, we
5786 // know how many should be ignored once we start receiving duplicates.
5787
5788
5789 if (event.dts < this.latestDts_) {
5790 // We've started getting older data, so set the flag.
5791 this.ignoreNextEqualDts_ = true;
5792 return;
5793 } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
5794 this.numSameDts_--;
5795
5796 if (!this.numSameDts_) {
5797 // We've received the last duplicate packet, time to start processing again
5798 this.ignoreNextEqualDts_ = false;
5799 }
5800
5801 return;
5802 } // parse out CC data packets and save them for later
5803
5804
5805 newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
5806 this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
5807
5808 if (this.latestDts_ !== event.dts) {
5809 this.numSameDts_ = 0;
5810 }
5811
5812 this.numSameDts_++;
5813 this.latestDts_ = event.dts;
5814 };
5815
5816 CaptionStream$1.prototype.flushCCStreams = function (flushType) {
5817 this.ccStreams_.forEach(function (cc) {
5818 return flushType === 'flush' ? cc.flush() : cc.partialFlush();
5819 }, this);
5820 };
5821
5822 CaptionStream$1.prototype.flushStream = function (flushType) {
5823 // make sure we actually parsed captions before proceeding
5824 if (!this.captionPackets_.length) {
5825 this.flushCCStreams(flushType);
5826 return;
5827 } // In Chrome, the Array#sort function is not stable so add a
5828 // presortIndex that we can use to ensure we get a stable sort
5829
5830
5831 this.captionPackets_.forEach(function (elem, idx) {
5832 elem.presortIndex = idx;
5833 }); // sort caption byte-pairs based on their PTS values
5834
5835 this.captionPackets_.sort(function (a, b) {
5836 if (a.pts === b.pts) {
5837 return a.presortIndex - b.presortIndex;
5838 }
5839
5840 return a.pts - b.pts;
5841 });
5842 this.captionPackets_.forEach(function (packet) {
5843 if (packet.type < 2) {
5844 // Dispatch packet to the right Cea608Stream
5845 this.dispatchCea608Packet(packet);
5846 } else {
5847 // Dispatch packet to the Cea708Stream
5848 this.dispatchCea708Packet(packet);
5849 }
5850 }, this);
5851 this.captionPackets_.length = 0;
5852 this.flushCCStreams(flushType);
5853 };
5854
5855 CaptionStream$1.prototype.flush = function () {
5856 return this.flushStream('flush');
5857 }; // Only called if handling partial data
5858
5859
5860 CaptionStream$1.prototype.partialFlush = function () {
5861 return this.flushStream('partialFlush');
5862 };
5863
5864 CaptionStream$1.prototype.reset = function () {
5865 this.latestDts_ = null;
5866 this.ignoreNextEqualDts_ = false;
5867 this.numSameDts_ = 0;
5868 this.activeCea608Channel_ = [null, null];
5869 this.ccStreams_.forEach(function (ccStream) {
5870 ccStream.reset();
5871 });
5872 }; // From the CEA-608 spec:
5873
5874 /*
5875 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
5876 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
5877 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
5878 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
5879 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
5880 * to switch to captioning or Text.
5881 */
5882 // With that in mind, we ignore any data between an XDS control code and a
5883 // subsequent closed-captioning control code.
5884
5885
5886 CaptionStream$1.prototype.dispatchCea608Packet = function (packet) {
5887 // NOTE: packet.type is the CEA608 field
5888 if (this.setsTextOrXDSActive(packet)) {
5889 this.activeCea608Channel_[packet.type] = null;
5890 } else if (this.setsChannel1Active(packet)) {
5891 this.activeCea608Channel_[packet.type] = 0;
5892 } else if (this.setsChannel2Active(packet)) {
5893 this.activeCea608Channel_[packet.type] = 1;
5894 }
5895
5896 if (this.activeCea608Channel_[packet.type] === null) {
5897 // If we haven't received anything to set the active channel, or the
5898 // packets are Text/XDS data, discard the data; we don't want jumbled
5899 // captions
5900 return;
5901 }
5902
5903 this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
5904 };
5905
5906 CaptionStream$1.prototype.setsChannel1Active = function (packet) {
5907 return (packet.ccData & 0x7800) === 0x1000;
5908 };
5909
5910 CaptionStream$1.prototype.setsChannel2Active = function (packet) {
5911 return (packet.ccData & 0x7800) === 0x1800;
5912 };
5913
5914 CaptionStream$1.prototype.setsTextOrXDSActive = function (packet) {
5915 return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
5916 };
5917
5918 CaptionStream$1.prototype.dispatchCea708Packet = function (packet) {
5919 if (this.parse708captions_) {
5920 this.cc708Stream_.push(packet);
5921 }
5922 }; // ----------------------
5923 // Session to Application
5924 // ----------------------
5925 // This hash maps special and extended character codes to their
5926 // proper Unicode equivalent. The first one-byte key is just a
5927 // non-standard character code. The two-byte keys that follow are
5928 // the extended CEA708 character codes, along with the preceding
5929 // 0x10 extended character byte to distinguish these codes from
5930 // non-extended character codes. Every CEA708 character code that
5931 // is not in this object maps directly to a standard unicode
5932 // character code.
5933 // The transparent space and non-breaking transparent space are
5934 // technically not fully supported since there is no code to
5935 // make them transparent, so they have normal non-transparent
5936 // stand-ins.
5937 // The special closed caption (CC) character isn't a standard
5938 // unicode character, so a fairly similar unicode character was
5939 // chosen in its place.
5940
5941
5942 var CHARACTER_TRANSLATION_708 = {
5943 0x7f: 0x266a,
5944 // ♪
5945 0x1020: 0x20,
5946 // Transparent Space
5947 0x1021: 0xa0,
5948 // Non-breaking Transparent Space
5949 0x1025: 0x2026,
5950 // …
5951 0x102a: 0x0160,
5952 // Š
5953 0x102c: 0x0152,
5954 // Œ
5955 0x1030: 0x2588,
5956 // █
5957 0x1031: 0x2018,
5958 // ‘
5959 0x1032: 0x2019,
5960 // ’
5961 0x1033: 0x201c,
5962 // “
5963 0x1034: 0x201d,
5964 // ”
5965 0x1035: 0x2022,
5966 // •
5967 0x1039: 0x2122,
5968 // ™
5969 0x103a: 0x0161,
5970 // š
5971 0x103c: 0x0153,
5972 // œ
5973 0x103d: 0x2120,
5974 // ℠
5975 0x103f: 0x0178,
5976 // Ÿ
5977 0x1076: 0x215b,
5978 // ⅛
5979 0x1077: 0x215c,
5980 // ⅜
5981 0x1078: 0x215d,
5982 // ⅝
5983 0x1079: 0x215e,
5984 // ⅞
5985 0x107a: 0x23d0,
5986 // ⏐
5987 0x107b: 0x23a4,
5988 // ⎤
5989 0x107c: 0x23a3,
5990 // ⎣
5991 0x107d: 0x23af,
5992 // ⎯
5993 0x107e: 0x23a6,
5994 // ⎦
5995 0x107f: 0x23a1,
5996 // ⎡
5997 0x10a0: 0x3138 // ㄸ (CC char)
5998
5999 };
6000
6001 var get708CharFromCode = function get708CharFromCode(code) {
6002 var newCode = CHARACTER_TRANSLATION_708[code] || code;
6003
6004 if (code & 0x1000 && code === newCode) {
6005 // Invalid extended code
6006 return '';
6007 }
6008
6009 return String.fromCharCode(newCode);
6010 };
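// Example (not part of the bundle): translated vs. pass-through codes:
//
//   get708CharFromCode(0x7f);   // '♪' (translated via the table above)
//   get708CharFromCode(0x41);   // 'A' (direct Unicode mapping)
//   get708CharFromCode(0x1041); // ''  (extended code with no translation)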
6011
6012 var within708TextBlock = function within708TextBlock(b) {
6013 return 0x20 <= b && b <= 0x7f || 0xa0 <= b && b <= 0xff;
6014 };
6015
6016 var Cea708Window = function Cea708Window(windowNum) {
6017 this.windowNum = windowNum;
6018 this.reset();
6019 };
6020
6021 Cea708Window.prototype.reset = function () {
6022 this.clearText();
6023 this.pendingNewLine = false;
6024 this.winAttr = {};
6025 this.penAttr = {};
6026 this.penLoc = {};
6027 this.penColor = {}; // These default values are arbitrary,
6028 // defineWindow will usually override them
6029
6030 this.visible = 0;
6031 this.rowLock = 0;
6032 this.columnLock = 0;
6033 this.priority = 0;
6034 this.relativePositioning = 0;
6035 this.anchorVertical = 0;
6036 this.anchorHorizontal = 0;
6037 this.anchorPoint = 0;
6038 this.rowCount = 1;
6039 this.virtualRowCount = this.rowCount + 1;
6040 this.columnCount = 41;
6041 this.windowStyle = 0;
6042 this.penStyle = 0;
6043 };
6044
6045 Cea708Window.prototype.getText = function () {
6046 return this.rows.join('\n');
6047 };
6048
6049 Cea708Window.prototype.clearText = function () {
6050 this.rows = [''];
6051 this.rowIdx = 0;
6052 };
6053
6054 Cea708Window.prototype.newLine = function (pts) {
6055 if (this.rows.length >= this.virtualRowCount && typeof this.beforeRowOverflow === 'function') {
6056 this.beforeRowOverflow(pts);
6057 }
6058
6059 if (this.rows.length > 0) {
6060 this.rows.push('');
6061 this.rowIdx++;
6062 } // Show all virtual rows since there's no visible scrolling
6063
6064
6065 while (this.rows.length > this.virtualRowCount) {
6066 this.rows.shift();
6067 this.rowIdx--;
6068 }
6069 };
6070
6071 Cea708Window.prototype.isEmpty = function () {
6072 if (this.rows.length === 0) {
6073 return true;
6074 } else if (this.rows.length === 1) {
6075 return this.rows[0] === '';
6076 }
6077
6078 return false;
6079 };
6080
6081 Cea708Window.prototype.addText = function (text) {
6082 this.rows[this.rowIdx] += text;
6083 };
6084
6085 Cea708Window.prototype.backspace = function () {
6086 if (!this.isEmpty()) {
6087 var row = this.rows[this.rowIdx];
6088 this.rows[this.rowIdx] = row.substr(0, row.length - 1);
6089 }
6090 };
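// Illustrative sketch (not part of the bundle) of the window text buffer:
//
//   var win = new Cea708Window(0);
//   win.addText('HELLO');
//   win.newLine(0);   // virtualRowCount defaults to 2, so no overflow yet
//   win.addText('WORLD');
//   win.getText();    // 'HELLO\nWORLD'
//   win.backspace();
//   win.getText();    // 'HELLO\nWORL'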
6091
6092 var Cea708Service = function Cea708Service(serviceNum, encoding, stream) {
6093 this.serviceNum = serviceNum;
6094 this.text = '';
6095 this.currentWindow = new Cea708Window(-1);
6096 this.windows = [];
6097 this.stream = stream; // Try to setup a TextDecoder if an `encoding` value was provided
6098
6099 if (typeof encoding === 'string') {
6100 this.createTextDecoder(encoding);
6101 }
6102 };
6103 /**
6104 * Initialize service windows
6105 * Must be run before service use
6106 *
6107 * @param {Integer} pts PTS value
6108 * @param {Function} beforeRowOverflow Function to execute before row overflow of a window
6109 */
6110
6111
6112 Cea708Service.prototype.init = function (pts, beforeRowOverflow) {
6113 this.startPts = pts;
6114
6115 for (var win = 0; win < 8; win++) {
6116 this.windows[win] = new Cea708Window(win);
6117
6118 if (typeof beforeRowOverflow === 'function') {
6119 this.windows[win].beforeRowOverflow = beforeRowOverflow;
6120 }
6121 }
6122 };
6123 /**
6124 * Set current window of service to be affected by commands
6125 *
6126 * @param {Integer} windowNum Window number
6127 */
6128
6129
6130 Cea708Service.prototype.setCurrentWindow = function (windowNum) {
6131 this.currentWindow = this.windows[windowNum];
6132 };
6133 /**
6134 * Try to create a TextDecoder if it is natively supported
6135 */
6136
6137
6138 Cea708Service.prototype.createTextDecoder = function (encoding) {
6139 if (typeof TextDecoder === 'undefined') {
6140 this.stream.trigger('log', {
6141 level: 'warn',
6142 message: 'The `encoding` option is unsupported without TextDecoder support'
6143 });
6144 } else {
6145 try {
6146 this.textDecoder_ = new TextDecoder(encoding);
6147 } catch (error) {
6148 this.stream.trigger('log', {
6149 level: 'warn',
6150 message: 'TextDecoder could not be created with ' + encoding + ' encoding. ' + error
6151 });
6152 }
6153 }
6154 };
6155
6156 var Cea708Stream = function Cea708Stream(options) {
6157 options = options || {};
6158 Cea708Stream.prototype.init.call(this);
6159 var self = this;
6160 var captionServices = options.captionServices || {};
6161 var captionServiceEncodings = {};
6162 var serviceProps; // Get service encodings from captionServices option block
6163
6164 Object.keys(captionServices).forEach(function (serviceName) {
6165 serviceProps = captionServices[serviceName];
6166
6167 if (/^SERVICE/.test(serviceName)) {
6168 captionServiceEncodings[serviceName] = serviceProps.encoding;
6169 }
6170 });
6171 this.serviceEncodings = captionServiceEncodings;
6172 this.current708Packet = null;
6173 this.services = {};
6174
6175 this.push = function (packet) {
6176 if (packet.type === 3) {
6177 // 708 packet start
6178 self.new708Packet();
6179 self.add708Bytes(packet);
6180 } else {
6181 if (self.current708Packet === null) {
6182 // This should only happen at the start of a file if there's no packet start.
6183 self.new708Packet();
6184 }
6185
6186 self.add708Bytes(packet);
6187 }
6188 };
6189 };
6190
6191 Cea708Stream.prototype = new stream();
6192 /**
6193 * Push current 708 packet, create new 708 packet.
6194 */
6195
6196 Cea708Stream.prototype.new708Packet = function () {
6197 if (this.current708Packet !== null) {
6198 this.push708Packet();
6199 }
6200
6201 this.current708Packet = {
6202 data: [],
6203 ptsVals: []
6204 };
6205 };
6206 /**
6207 * Add pts and both bytes from packet into current 708 packet.
6208 */
6209
6210
6211 Cea708Stream.prototype.add708Bytes = function (packet) {
6212 var data = packet.ccData;
6213 var byte0 = data >>> 8;
6214 var byte1 = data & 0xff; // I would just keep a list of packets instead of bytes, but it isn't clear in the spec
6215 // that service blocks will always line up with byte pairs.
6216
6217 this.current708Packet.ptsVals.push(packet.pts);
6218 this.current708Packet.data.push(byte0);
6219 this.current708Packet.data.push(byte1);
6220 };
6221 /**
6222 * Parse completed 708 packet into service blocks and push each service block.
6223 */
6224
6225
6226 Cea708Stream.prototype.push708Packet = function () {
6227 var packet708 = this.current708Packet;
6228 var packetData = packet708.data;
6229 var serviceNum = null;
6230 var blockSize = null;
6231 var i = 0;
6232 var b = packetData[i++];
6233 packet708.seq = b >> 6;
6234 packet708.sizeCode = b & 0x3f; // 0b00111111;
6235
6236 for (; i < packetData.length; i++) {
6237 b = packetData[i++];
6238 serviceNum = b >> 5;
6239 blockSize = b & 0x1f; // 0b00011111
6240
6241 if (serviceNum === 7 && blockSize > 0) {
6242 // Extended service num
6243 b = packetData[i++];
6244 serviceNum = b;
6245 }
6246
6247 this.pushServiceBlock(serviceNum, i, blockSize);
6248
6249 if (blockSize > 0) {
6250 i += blockSize - 1;
6251 }
6252 }
6253 };
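// Worked example (not part of the bundle) of the packet-header parse above:
//
//   var b = 0xc2;
//   b >> 6;   // 3 -> sequence number
//   b & 0x3f; // 2 -> packet size code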
6254 /**
6255 * Parse service block, execute commands, read text.
6256 *
6257 * Note: While many of these commands serve important purposes,
6258 * many others just parse out the parameters or attributes, but
6259 * nothing is done with them because this is not a full and complete
6260 * implementation of the entire 708 spec.
6261 *
6262 * @param {Integer} serviceNum Service number
6263 * @param {Integer} start Start index of the 708 packet data
6264 * @param {Integer} size Block size
6265 */
6266
6267
6268 Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {
6269 var b;
6270 var i = start;
6271 var packetData = this.current708Packet.data;
6272 var service = this.services[serviceNum];
6273
6274 if (!service) {
6275 service = this.initService(serviceNum, i);
6276 }
6277
6278 for (; i < start + size && i < packetData.length; i++) {
6279 b = packetData[i];
6280
6281 if (within708TextBlock(b)) {
6282 i = this.handleText(i, service);
6283 } else if (b === 0x18) {
6284 i = this.multiByteCharacter(i, service);
6285 } else if (b === 0x10) {
6286 i = this.extendedCommands(i, service);
6287 } else if (0x80 <= b && b <= 0x87) {
6288 i = this.setCurrentWindow(i, service);
6289 } else if (0x98 <= b && b <= 0x9f) {
6290 i = this.defineWindow(i, service);
6291 } else if (b === 0x88) {
6292 i = this.clearWindows(i, service);
6293 } else if (b === 0x8c) {
6294 i = this.deleteWindows(i, service);
6295 } else if (b === 0x89) {
6296 i = this.displayWindows(i, service);
6297 } else if (b === 0x8a) {
6298 i = this.hideWindows(i, service);
6299 } else if (b === 0x8b) {
6300 i = this.toggleWindows(i, service);
6301 } else if (b === 0x97) {
6302 i = this.setWindowAttributes(i, service);
6303 } else if (b === 0x90) {
6304 i = this.setPenAttributes(i, service);
6305 } else if (b === 0x91) {
6306 i = this.setPenColor(i, service);
6307 } else if (b === 0x92) {
6308 i = this.setPenLocation(i, service);
6309 } else if (b === 0x8f) {
6310 service = this.reset(i, service);
6311 } else if (b === 0x08) {
6312 // BS: Backspace
6313 service.currentWindow.backspace();
6314 } else if (b === 0x0c) {
6315 // FF: Form feed
6316 service.currentWindow.clearText();
6317 } else if (b === 0x0d) {
6318 // CR: Carriage return
6319 service.currentWindow.pendingNewLine = true;
6320 } else if (b === 0x0e) {
6321 // HCR: Horizontal carriage return
6322 service.currentWindow.clearText();
6323 } else if (b === 0x8d) {
6324 // DLY: Delay, nothing to do
6325 i++;
6326 } else ; // unrecognized command: ignore this byte
6327 }
6328 };
6329 /**
6330 * Execute an extended command
6331 *
6332 * @param {Integer} i Current index in the 708 packet
6333 * @param {Service} service The service object to be affected
6334 * @return {Integer} New index after parsing
6335 */
6336
6337
6338 Cea708Stream.prototype.extendedCommands = function (i, service) {
6339 var packetData = this.current708Packet.data;
6340 var b = packetData[++i];
6341
6342 if (within708TextBlock(b)) {
6343 i = this.handleText(i, service, {
6344 isExtended: true
6345 });
6346 }
6347
6348 return i;
6349 };
6350 /**
6351 * Get PTS value of a given byte index
6352 *
6353 * @param {Integer} byteIndex Index of the byte
6354 * @return {Integer} PTS
6355 */
6356
6357
6358 Cea708Stream.prototype.getPts = function (byteIndex) {
6359 // There's 1 pts value per 2 bytes
6360 return this.current708Packet.ptsVals[Math.floor(byteIndex / 2)];
6361 };
6362 /**
6363 * Initializes a service
6364 *
6365 * @param {Integer} serviceNum Service number
6366 * @return {Service} Initialized service object
6367 */
6368
6369
6370 Cea708Stream.prototype.initService = function (serviceNum, i) {
6371 var serviceName = 'SERVICE' + serviceNum;
6372 var self = this;
6374 var encoding;
6375
6376 if (serviceName in this.serviceEncodings) {
6377 encoding = this.serviceEncodings[serviceName];
6378 }
6379
6380 this.services[serviceNum] = new Cea708Service(serviceNum, encoding, self);
6381 this.services[serviceNum].init(this.getPts(i), function (pts) {
6382 self.flushDisplayed(pts, self.services[serviceNum]);
6383 });
6384 return this.services[serviceNum];
6385 };
6386 /**
6387 * Execute text writing to current window
6388 *
6389 * @param {Integer} i Current index in the 708 packet
6390 * @param {Service} service The service object to be affected
6391 * @return {Integer} New index after parsing
6392 */
6393
6394
6395 Cea708Stream.prototype.handleText = function (i, service, options) {
6396 var isExtended = options && options.isExtended;
6397 var isMultiByte = options && options.isMultiByte;
6398 var packetData = this.current708Packet.data;
6399 var extended = isExtended ? 0x1000 : 0x0000;
6400 var currentByte = packetData[i];
6401 var nextByte = packetData[i + 1];
6402 var win = service.currentWindow;
6403 var char;
6404 var charCodeArray; // Use the TextDecoder if one was created for this service
6405
6406 if (service.textDecoder_ && !isExtended) {
6407 if (isMultiByte) {
6408 charCodeArray = [currentByte, nextByte];
6409 i++;
6410 } else {
6411 charCodeArray = [currentByte];
6412 }
6413
6414 char = service.textDecoder_.decode(new Uint8Array(charCodeArray));
6415 } else {
6416 char = get708CharFromCode(extended | currentByte);
6417 }
6418
6419 if (win.pendingNewLine && !win.isEmpty()) {
6420 win.newLine(this.getPts(i));
6421 }
6422
6423 win.pendingNewLine = false;
6424 win.addText(char);
6425 return i;
6426 };
6427 /**
6428 * Handle decoding of multibyte character
6429 *
6430 * @param {Integer} i Current index in the 708 packet
6431 * @param {Service} service The service object to be affected
6432 * @return {Integer} New index after parsing
6433 */
6434
6435
6436 Cea708Stream.prototype.multiByteCharacter = function (i, service) {
6437 var packetData = this.current708Packet.data;
6438 var firstByte = packetData[i + 1];
6439 var secondByte = packetData[i + 2];
6440
6441 if (within708TextBlock(firstByte) && within708TextBlock(secondByte)) {
6442 i = this.handleText(++i, service, {
6443 isMultiByte: true
6444 });
6445 }
6446
6447 return i;
6448 };
6449 /**
6450 * Parse and execute the CW# command.
6451 *
6452 * Set the current window.
6453 *
6454 * @param {Integer} i Current index in the 708 packet
6455 * @param {Service} service The service object to be affected
6456 * @return {Integer} New index after parsing
6457 */
6458
6459
6460 Cea708Stream.prototype.setCurrentWindow = function (i, service) {
6461 var packetData = this.current708Packet.data;
6462 var b = packetData[i];
6463 var windowNum = b & 0x07;
6464 service.setCurrentWindow(windowNum);
6465 return i;
6466 };
6467 /**
6468 * Parse and execute the DF# command.
6469 *
6470 * Define a window and set it as the current window.
6471 *
6472 * @param {Integer} i Current index in the 708 packet
6473 * @param {Service} service The service object to be affected
6474 * @return {Integer} New index after parsing
6475 */
6476
6477
6478 Cea708Stream.prototype.defineWindow = function (i, service) {
6479 var packetData = this.current708Packet.data;
6480 var b = packetData[i];
6481 var windowNum = b & 0x07;
6482 service.setCurrentWindow(windowNum);
6483 var win = service.currentWindow;
6484 b = packetData[++i];
6485 win.visible = (b & 0x20) >> 5; // v
6486
6487 win.rowLock = (b & 0x10) >> 4; // rl
6488
6489 win.columnLock = (b & 0x08) >> 3; // cl
6490
6491 win.priority = b & 0x07; // p
6492
6493 b = packetData[++i];
6494 win.relativePositioning = (b & 0x80) >> 7; // rp
6495
6496 win.anchorVertical = b & 0x7f; // av
6497
6498 b = packetData[++i];
6499 win.anchorHorizontal = b; // ah
6500
6501 b = packetData[++i];
6502 win.anchorPoint = (b & 0xf0) >> 4; // ap
6503
6504 win.rowCount = b & 0x0f; // rc
6505
6506 b = packetData[++i];
6507 win.columnCount = b & 0x3f; // cc
6508
6509 b = packetData[++i];
6510 win.windowStyle = (b & 0x38) >> 3; // ws
6511
6512 win.penStyle = b & 0x07; // ps
6513 // The spec says there are (rowCount+1) "virtual rows"
6514
6515 win.virtualRowCount = win.rowCount + 1;
6516 return i;
6517 };
6518 /**
6519 * Parse and execute the SWA command.
6520 *
6521 * Set attributes of the current window.
6522 *
6523 * @param {Integer} i Current index in the 708 packet
6524 * @param {Service} service The service object to be affected
6525 * @return {Integer} New index after parsing
6526 */
6527
6528
6529 Cea708Stream.prototype.setWindowAttributes = function (i, service) {
6530 var packetData = this.current708Packet.data;
6531 var b = packetData[i];
6532 var winAttr = service.currentWindow.winAttr;
6533 b = packetData[++i];
6534 winAttr.fillOpacity = (b & 0xc0) >> 6; // fo
6535
6536 winAttr.fillRed = (b & 0x30) >> 4; // fr
6537
6538 winAttr.fillGreen = (b & 0x0c) >> 2; // fg
6539
6540 winAttr.fillBlue = b & 0x03; // fb
6541
6542 b = packetData[++i];
6543 winAttr.borderType = (b & 0xc0) >> 6; // bt
6544
6545 winAttr.borderRed = (b & 0x30) >> 4; // br
6546
6547 winAttr.borderGreen = (b & 0x0c) >> 2; // bg
6548
6549 winAttr.borderBlue = b & 0x03; // bb
6550
6551 b = packetData[++i];
6552 winAttr.borderType += (b & 0x80) >> 5; // bt
6553
6554 winAttr.wordWrap = (b & 0x40) >> 6; // ww
6555
6556 winAttr.printDirection = (b & 0x30) >> 4; // pd
6557
6558 winAttr.scrollDirection = (b & 0x0c) >> 2; // sd
6559
6560 winAttr.justify = b & 0x03; // j
6561
6562 b = packetData[++i];
6563 winAttr.effectSpeed = (b & 0xf0) >> 4; // es
6564
6565 winAttr.effectDirection = (b & 0x0c) >> 2; // ed
6566
6567 winAttr.displayEffect = b & 0x03; // de
6568
6569 return i;
6570 };
6571 /**
6572 * Gather text from all displayed windows and push a caption to output.
6573 *
6574 * @param {Integer} pts PTS value at flush time
6575 * @param {Service} service The service object to be affected
6576 */
6577
6578
6579 Cea708Stream.prototype.flushDisplayed = function (pts, service) {
6580 var displayedText = []; // TODO: Positioning not supported, displaying multiple windows will not necessarily
6581 // display text in the correct order, but sample files so far have not shown any issue.
6582
6583 for (var winId = 0; winId < 8; winId++) {
6584 if (service.windows[winId].visible && !service.windows[winId].isEmpty()) {
6585 displayedText.push(service.windows[winId].getText());
6586 }
6587 }
6588
6589 service.endPts = pts;
6590 service.text = displayedText.join('\n\n');
6591 this.pushCaption(service);
6592 service.startPts = pts;
6593 };
6594 /**
6595 * Push a caption to output if the caption contains text.
6596 *
6597 * @param {Service} service The service object to be affected
6598 */
6599
6600
6601 Cea708Stream.prototype.pushCaption = function (service) {
6602 if (service.text !== '') {
6603 this.trigger('data', {
6604 startPts: service.startPts,
6605 endPts: service.endPts,
6606 text: service.text,
6607 stream: 'cc708_' + service.serviceNum
6608 });
6609 service.text = '';
6610 service.startPts = service.endPts;
6611 }
6612 };
6613 /**
6614 * Parse and execute the DSW command.
6615 *
6616 * Set visible property of windows based on the parsed bitmask.
6617 *
6618 * @param {Integer} i Current index in the 708 packet
6619 * @param {Service} service The service object to be affected
6620 * @return {Integer} New index after parsing
6621 */
6622
6623
6624 Cea708Stream.prototype.displayWindows = function (i, service) {
6625 var packetData = this.current708Packet.data;
6626 var b = packetData[++i];
6627 var pts = this.getPts(i);
6628 this.flushDisplayed(pts, service);
6629
6630 for (var winId = 0; winId < 8; winId++) {
6631 if (b & 0x01 << winId) {
6632 service.windows[winId].visible = 1;
6633 }
6634 }
6635
6636 return i;
6637 };
6638 /**
6639 * Parse and execute the HDW command.
6640 *
6641 * Set visible property of windows based on the parsed bitmask.
6642 *
6643 * @param {Integer} i Current index in the 708 packet
6644 * @param {Service} service The service object to be affected
6645 * @return {Integer} New index after parsing
6646 */
6647
6648
6649 Cea708Stream.prototype.hideWindows = function (i, service) {
6650 var packetData = this.current708Packet.data;
6651 var b = packetData[++i];
6652 var pts = this.getPts(i);
6653 this.flushDisplayed(pts, service);
6654
6655 for (var winId = 0; winId < 8; winId++) {
6656 if (b & 0x01 << winId) {
6657 service.windows[winId].visible = 0;
6658 }
6659 }
6660
6661 return i;
6662 };
6663 /**
6664 * Parse and execute the TGW command.
6665 *
6666 * Set visible property of windows based on the parsed bitmask.
6667 *
6668 * @param {Integer} i Current index in the 708 packet
6669 * @param {Service} service The service object to be affected
6670 * @return {Integer} New index after parsing
6671 */
6672
6673
6674 Cea708Stream.prototype.toggleWindows = function (i, service) {
6675 var packetData = this.current708Packet.data;
6676 var b = packetData[++i];
6677 var pts = this.getPts(i);
6678 this.flushDisplayed(pts, service);
6679
6680 for (var winId = 0; winId < 8; winId++) {
6681 if (b & 0x01 << winId) {
6682 service.windows[winId].visible ^= 1;
6683 }
6684 }
6685
6686 return i;
6687 };
6688 /**
6689 * Parse and execute the CLW command.
6690 *
6691 * Clear text of windows based on the parsed bitmask.
6692 *
6693 * @param {Integer} i Current index in the 708 packet
6694 * @param {Service} service The service object to be affected
6695 * @return {Integer} New index after parsing
6696 */
6697
6698
6699 Cea708Stream.prototype.clearWindows = function (i, service) {
6700 var packetData = this.current708Packet.data;
6701 var b = packetData[++i];
6702 var pts = this.getPts(i);
6703 this.flushDisplayed(pts, service);
6704
6705 for (var winId = 0; winId < 8; winId++) {
6706 if (b & 0x01 << winId) {
6707 service.windows[winId].clearText();
6708 }
6709 }
6710
6711 return i;
6712 };
6713 /**
6714 * Parse and execute the DLW command.
6715 *
6716 * Re-initialize windows based on the parsed bitmask.
6717 *
6718 * @param {Integer} i Current index in the 708 packet
6719 * @param {Service} service The service object to be affected
6720 * @return {Integer} New index after parsing
6721 */
6722
6723
6724 Cea708Stream.prototype.deleteWindows = function (i, service) {
6725 var packetData = this.current708Packet.data;
6726 var b = packetData[++i];
6727 var pts = this.getPts(i);
6728 this.flushDisplayed(pts, service);
6729
6730 for (var winId = 0; winId < 8; winId++) {
6731 if (b & 0x01 << winId) {
6732 service.windows[winId].reset();
6733 }
6734 }
6735
6736 return i;
6737 };
6738 /**
6739 * Parse and execute the SPA command.
6740 *
6741 * Set pen attributes of the current window.
6742 *
6743 * @param {Integer} i Current index in the 708 packet
6744 * @param {Service} service The service object to be affected
6745 * @return {Integer} New index after parsing
6746 */
6747
6748
6749 Cea708Stream.prototype.setPenAttributes = function (i, service) {
6750 var packetData = this.current708Packet.data;
6751 var b = packetData[i];
6752 var penAttr = service.currentWindow.penAttr;
6753 b = packetData[++i];
6754 penAttr.textTag = (b & 0xf0) >> 4; // tt
6755
6756 penAttr.offset = (b & 0x0c) >> 2; // o
6757
6758 penAttr.penSize = b & 0x03; // s
6759
  b = packetData[++i];
  penAttr.italics = (b & 0x80) >> 7; // i
  penAttr.underline = (b & 0x40) >> 6; // u
  penAttr.edgeType = (b & 0x38) >> 3; // et
  penAttr.fontStyle = b & 0x07; // fs
  return i;
};
/**
 * Parse and execute the SPC command.
 *
 * Set pen color of the current window.
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Integer} New index after parsing
 */

Cea708Stream.prototype.setPenColor = function (i, service) {
  var packetData = this.current708Packet.data;
  var b = packetData[i];
  var penColor = service.currentWindow.penColor;
  b = packetData[++i];
  penColor.fgOpacity = (b & 0xc0) >> 6; // fo
  penColor.fgRed = (b & 0x30) >> 4; // fr
  penColor.fgGreen = (b & 0x0c) >> 2; // fg
  penColor.fgBlue = b & 0x03; // fb
  b = packetData[++i];
  penColor.bgOpacity = (b & 0xc0) >> 6; // bo
  penColor.bgRed = (b & 0x30) >> 4; // br
  penColor.bgGreen = (b & 0x0c) >> 2; // bg
  penColor.bgBlue = b & 0x03; // bb
  b = packetData[++i];
  penColor.edgeRed = (b & 0x30) >> 4; // er
  penColor.edgeGreen = (b & 0x0c) >> 2; // eg
  penColor.edgeBlue = b & 0x03; // eb
  return i;
};
/**
 * Parse and execute the SPL command.
 *
 * Set pen location of the current window.
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Integer} New index after parsing
 */

Cea708Stream.prototype.setPenLocation = function (i, service) {
  var packetData = this.current708Packet.data;
  var b = packetData[i];
  var penLoc = service.currentWindow.penLoc;

  // Positioning isn't really supported at the moment, so this essentially
  // just inserts a linebreak
  service.currentWindow.pendingNewLine = true;
  b = packetData[++i];
  penLoc.row = b & 0x0f; // r
  b = packetData[++i];
  penLoc.column = b & 0x3f; // c
  return i;
};
/**
 * Execute the RST command.
 *
 * Reset service to a clean slate. Re-initialize.
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Service} Re-initialized service
 */

Cea708Stream.prototype.reset = function (i, service) {
  var pts = this.getPts(i);
  this.flushDisplayed(pts, service);
  return this.initService(service.serviceNum, i);
};

// This hash maps non-ASCII, special, and extended character codes to their
// proper Unicode equivalent. The first keys, which are only a single byte,
// are the non-standard ASCII characters, which simply map the CEA608 byte
// to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
// character codes, but have their MSB bitmasked with 0x03 so that a lookup
// can be performed regardless of the field and data channel on which the
// character code was received.
var CHARACTER_TRANSLATION = {
  0x2a: 0xe1, // á
  0x5c: 0xe9, // é
  0x5e: 0xed, // í
  0x5f: 0xf3, // ó
  0x60: 0xfa, // ú
  0x7b: 0xe7, // ç
  0x7c: 0xf7, // ÷
  0x7d: 0xd1, // Ñ
  0x7e: 0xf1, // ñ
  0x7f: 0x2588, // █
  0x0130: 0xae, // ®
  0x0131: 0xb0, // °
  0x0132: 0xbd, // ½
  0x0133: 0xbf, // ¿
  0x0134: 0x2122, // ™
  0x0135: 0xa2, // ¢
  0x0136: 0xa3, // £
  0x0137: 0x266a, // ♪
  0x0138: 0xe0, // à
  0x0139: 0xa0, // (non-breaking space)
  0x013a: 0xe8, // è
  0x013b: 0xe2, // â
  0x013c: 0xea, // ê
  0x013d: 0xee, // î
  0x013e: 0xf4, // ô
  0x013f: 0xfb, // û
  0x0220: 0xc1, // Á
  0x0221: 0xc9, // É
  0x0222: 0xd3, // Ó
  0x0223: 0xda, // Ú
  0x0224: 0xdc, // Ü
  0x0225: 0xfc, // ü
  0x0226: 0x2018, // ‘
  0x0227: 0xa1, // ¡
  0x0228: 0x2a, // *
  0x0229: 0x27, // '
  0x022a: 0x2014, // —
  0x022b: 0xa9, // ©
  0x022c: 0x2120, // ℠
  0x022d: 0x2022, // •
  0x022e: 0x201c, // “
  0x022f: 0x201d, // ”
  0x0230: 0xc0, // À
  0x0231: 0xc2, // Â
  0x0232: 0xc7, // Ç
  0x0233: 0xc8, // È
  0x0234: 0xca, // Ê
  0x0235: 0xcb, // Ë
  0x0236: 0xeb, // ë
  0x0237: 0xce, // Î
  0x0238: 0xcf, // Ï
  0x0239: 0xef, // ï
  0x023a: 0xd4, // Ô
  0x023b: 0xd9, // Ù
  0x023c: 0xf9, // ù
  0x023d: 0xdb, // Û
  0x023e: 0xab, // «
  0x023f: 0xbb, // »
  0x0320: 0xc3, // Ã
  0x0321: 0xe3, // ã
  0x0322: 0xcd, // Í
  0x0323: 0xcc, // Ì
  0x0324: 0xec, // ì
  0x0325: 0xd2, // Ò
  0x0326: 0xf2, // ò
  0x0327: 0xd5, // Õ
  0x0328: 0xf5, // õ
  0x0329: 0x7b, // {
  0x032a: 0x7d, // }
  0x032b: 0x5c, // \
  0x032c: 0x5e, // ^
  0x032d: 0x5f, // _
  0x032e: 0x7c, // |
  0x032f: 0x7e, // ~
  0x0330: 0xc4, // Ä
  0x0331: 0xe4, // ä
  0x0332: 0xd6, // Ö
  0x0333: 0xf6, // ö
  0x0334: 0xdf, // ß
  0x0335: 0xa5, // ¥
  0x0336: 0xa4, // ¤
  0x0337: 0x2502, // │
  0x0338: 0xc5, // Å
  0x0339: 0xe5, // å
  0x033a: 0xd8, // Ø
  0x033b: 0xf8, // ø
  0x033c: 0x250c, // ┌
  0x033d: 0x2510, // ┐
  0x033e: 0x2514, // └
  0x033f: 0x2518 // ┘
};

var getCharFromCode = function getCharFromCode(code) {
  if (code === null) {
    return '';
  }

  code = CHARACTER_TRANSLATION[code] || code;
  return String.fromCharCode(code);
};
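// For example, the special-character pair char0 = 0x11, char1 = 0x38 is
// combined by the push handler below as ((0x11 & 0x03) << 8) | 0x38 ===
// 0x0138, which CHARACTER_TRANSLATION resolves to 0xe0, so
// getCharFromCode(0x0138) returns 'à' regardless of the field or data
// channel the pair arrived on.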

// the index of the last row in a CEA-608 display buffer
var BOTTOM_ROW = 14;

// This array is used for mapping PACs -> row #, since there's no way of
// getting it through bit logic.
var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420];

// CEA-608 captions are rendered onto a 34x15 matrix of character
// cells. The "bottom" row is the last element in the outer array.
var createDisplayBuffer = function createDisplayBuffer() {
  var result = [],
      i = BOTTOM_ROW + 1;

  while (i--) {
    result.push('');
  }

  return result;
};

var Cea608Stream = function Cea608Stream(field, dataChannel) {
  Cea608Stream.prototype.init.call(this);
  this.field_ = field || 0;
  this.dataChannel_ = dataChannel || 0;
  this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);
  this.setConstants();
  this.reset();

  this.push = function (packet) {
    var data, swap, char0, char1, text;

    // remove the parity bits
    data = packet.ccData & 0x7f7f;

    // ignore duplicate control codes; the spec demands they're sent twice
    if (data === this.lastControlCode_) {
      this.lastControlCode_ = null;
      return;
    }

    // Store control codes
    if ((data & 0xf000) === 0x1000) {
      this.lastControlCode_ = data;
    } else if (data !== this.PADDING_) {
      this.lastControlCode_ = null;
    }

    char0 = data >>> 8;
    char1 = data & 0xff;

    if (data === this.PADDING_) {
      return;
    } else if (data === this.RESUME_CAPTION_LOADING_) {
      this.mode_ = 'popOn';
    } else if (data === this.END_OF_CAPTION_) {
      // If an EOC is received while in paint-on mode, the displayed caption
      // text should be swapped to non-displayed memory as if it was a pop-on
      // caption. Because of that, we should explicitly switch back to pop-on
      // mode
      this.mode_ = 'popOn';
      this.clearFormatting(packet.pts);

      // if a caption was being displayed, it's gone now
      this.flushDisplayed(packet.pts);

      // flip memory
      swap = this.displayed_;
      this.displayed_ = this.nonDisplayed_;
      this.nonDisplayed_ = swap;

      // start measuring the time to display the caption
      this.startPts_ = packet.pts;
    } else if (data === this.ROLL_UP_2_ROWS_) {
      this.rollUpRows_ = 2;
      this.setRollUp(packet.pts);
    } else if (data === this.ROLL_UP_3_ROWS_) {
      this.rollUpRows_ = 3;
      this.setRollUp(packet.pts);
    } else if (data === this.ROLL_UP_4_ROWS_) {
      this.rollUpRows_ = 4;
      this.setRollUp(packet.pts);
    } else if (data === this.CARRIAGE_RETURN_) {
      this.clearFormatting(packet.pts);
      this.flushDisplayed(packet.pts);
      this.shiftRowsUp_();
      this.startPts_ = packet.pts;
    } else if (data === this.BACKSPACE_) {
      if (this.mode_ === 'popOn') {
        this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
      } else {
        this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
      }
    } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
      this.flushDisplayed(packet.pts);
      this.displayed_ = createDisplayBuffer();
    } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
      this.nonDisplayed_ = createDisplayBuffer();
    } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
      if (this.mode_ !== 'paintOn') {
        // NOTE: This should be removed when proper caption positioning is
        // implemented
        this.flushDisplayed(packet.pts);
        this.displayed_ = createDisplayBuffer();
      }

      this.mode_ = 'paintOn';
      this.startPts_ = packet.pts;

    // Append special characters to caption text
    } else if (this.isSpecialCharacter(char0, char1)) {
      // Bitmask char0 so that we can apply character transformations
      // regardless of field and data channel.
      // Then byte-shift to the left and OR with char1 so we can pass the
      // entire character code to `getCharFromCode`.
      char0 = (char0 & 0x03) << 8;
      text = getCharFromCode(char0 | char1);
      this[this.mode_](packet.pts, text);
      this.column_++;

    // Append extended characters to caption text
    } else if (this.isExtCharacter(char0, char1)) {
      // Extended characters always follow their "non-extended" equivalents.
      // I.e., if an "è" is desired, you'll always receive "eè"; non-compliant
      // decoders are supposed to drop the "è", while compliant decoders
      // backspace the "e" and insert "è".

      // Delete the previous character
      if (this.mode_ === 'popOn') {
        this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
      } else {
        this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
      }

      // Bitmask char0 so that we can apply character transformations
      // regardless of field and data channel.
      // Then byte-shift to the left and OR with char1 so we can pass the
      // entire character code to `getCharFromCode`.
      char0 = (char0 & 0x03) << 8;
      text = getCharFromCode(char0 | char1);
      this[this.mode_](packet.pts, text);
      this.column_++;

    // Process mid-row codes
    } else if (this.isMidRowCode(char0, char1)) {
      // Attributes are not additive, so clear all formatting
      this.clearFormatting(packet.pts);

      // According to the standard, mid-row codes
      // should be replaced with spaces, so add one now
      this[this.mode_](packet.pts, ' ');
      this.column_++;

      if ((char1 & 0xe) === 0xe) {
        this.addFormatting(packet.pts, ['i']);
      }

      if ((char1 & 0x1) === 0x1) {
        this.addFormatting(packet.pts, ['u']);
      }

    // Detect offset control codes and adjust cursor
    } else if (this.isOffsetControlCode(char0, char1)) {
      // Cursor position is set by indent PAC (see below) in 4-column
      // increments, with an additional offset code of 1-3 to reach any
      // of the 32 columns specified by CEA-608. So all we need to do
      // here is increment the column cursor by the given offset.
      this.column_ += char1 & 0x03;

    // Detect PACs (Preamble Address Codes)
    } else if (this.isPAC(char0, char1)) {
      // There's no logic for PAC -> row mapping, so we have to just
      // find the row code in an array and use its index :(
      var row = ROWS.indexOf(data & 0x1f20);

      // Configure the caption window if we're in roll-up mode
      if (this.mode_ === 'rollUp') {
        // This implies that the base row is incorrectly set.
        // As per the recommendation in CEA-608 (Base Row Implementation),
        // defer to the number of roll-up rows set.
        if (row - this.rollUpRows_ + 1 < 0) {
          row = this.rollUpRows_ - 1;
        }

        this.setRollUp(packet.pts, row);
      }

      if (row !== this.row_) {
        // formatting is only persistent for current row
        this.clearFormatting(packet.pts);
        this.row_ = row;
      }

      // All PACs can apply underline, so detect and apply
      // (All odd-numbered second bytes set underline)
      if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
        this.addFormatting(packet.pts, ['u']);
      }

      if ((data & 0x10) === 0x10) {
        // We've got an indent level code. Each successive even number
        // increments the column cursor by 4, so we can get the desired
        // column position by bit-shifting to the right (to get n/2)
        // and multiplying by 4.
        this.column_ = ((data & 0xe) >> 1) * 4;
      }

      if (this.isColorPAC(char1)) {
        // it's a color code, though we only support white, which
        // can be either normal or italicized. white italics can be
        // either 0x4e or 0x6e depending on the row, so we just
        // bitwise-and with 0xe to see if italics should be turned on
        if ((char1 & 0xe) === 0xe) {
          this.addFormatting(packet.pts, ['i']);
        }
      }

    // We have a normal character in char0, and possibly one in char1
    } else if (this.isNormalChar(char0)) {
      if (char1 === 0x00) {
        char1 = null;
      }

      text = getCharFromCode(char0);
      text += getCharFromCode(char1);
      this[this.mode_](packet.pts, text);
      this.column_ += text.length;
    } // finish data processing
  };
};

Cea608Stream.prototype = new stream();

// Trigger a cue point that captures the current state of the
// display buffer
Cea608Stream.prototype.flushDisplayed = function (pts) {
  var content = this.displayed_
    // remove spaces from the start and end of the string
    .map(function (row, index) {
      try {
        return row.trim();
      } catch (e) {
        // Ordinarily, this shouldn't happen. However, caption
        // parsing errors should not throw exceptions and
        // break playback.
        this.trigger('log', {
          level: 'warn',
          message: 'Skipping a malformed 608 caption at index ' + index + '.'
        });
        return '';
      }
    }, this)
    // combine all text rows to display in one cue
    .join('\n')
    // and remove blank rows from the start and end, but not the middle
    .replace(/^\n+|\n+$/g, '');

  if (content.length) {
    this.trigger('data', {
      startPts: this.startPts_,
      endPts: pts,
      text: content,
      stream: this.name_
    });
  }
};
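// A flushed cue arrives on 'data' listeners shaped like, e.g. (hypothetical
// values, pts in 90kHz clock units):
//
//   { startPts: 90000, endPts: 180000, text: 'HELLO\nWORLD', stream: 'CC1' }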
/**
 * Zero out the data, used for startup and on seek
 */
Cea608Stream.prototype.reset = function () {
  this.mode_ = 'popOn';

  // When in roll-up mode, the index of the last row that will
  // actually display captions. If a caption is shifted to a row
  // with a lower index than this, it is cleared from the display
  // buffer
  this.topRow_ = 0;
  this.startPts_ = 0;
  this.displayed_ = createDisplayBuffer();
  this.nonDisplayed_ = createDisplayBuffer();
  this.lastControlCode_ = null;

  // Track row and column for proper line-breaking and spacing
  this.column_ = 0;
  this.row_ = BOTTOM_ROW;
  this.rollUpRows_ = 2;

  // This variable holds currently-applied formatting
  this.formatting_ = [];
};

/**
 * Sets up control code and related constants for this instance
 */
Cea608Stream.prototype.setConstants = function () {
  // The following attributes have these uses:
  // ext_ : char0 for mid-row codes, and the base for extended
  // chars (ext_+0, ext_+1, and ext_+2 are char0s for
  // extended codes)
  // control_: char0 for control codes, except byte-shifted to the
  // left so that we can do this.control_ | CONTROL_CODE
  // offset_: char0 for tab offset codes
  //
  // It's also worth noting that control codes, and _only_ control codes,
  // differ between field 1 and field 2. Field 2 control codes are always
  // their field 1 value plus 1. That's why there's the "| field" on the
  // control value.
  if (this.dataChannel_ === 0) {
    this.BASE_ = 0x10;
    this.EXT_ = 0x11;
    this.CONTROL_ = (0x14 | this.field_) << 8;
    this.OFFSET_ = 0x17;
  } else if (this.dataChannel_ === 1) {
    this.BASE_ = 0x18;
    this.EXT_ = 0x19;
    this.CONTROL_ = (0x1c | this.field_) << 8;
    this.OFFSET_ = 0x1f;
  }

  // Constants for the LSByte command codes recognized by Cea608Stream. This
  // list is not exhaustive. For a more comprehensive listing and semantics see
  // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf

  // Padding
  this.PADDING_ = 0x0000;

  // Pop-on Mode
  this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
  this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f;

  // Roll-up Mode
  this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
  this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
  this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
  this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d;

  // paint-on mode
  this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29;

  // Erasure
  this.BACKSPACE_ = this.CONTROL_ | 0x21;
  this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
  this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
};
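// Worked example: for field 0, data channel 0 (i.e. CC1), CONTROL_ is
// (0x14 | 0) << 8 === 0x1400, so RESUME_CAPTION_LOADING_ becomes 0x1420,
// END_OF_CAPTION_ 0x142f, and CARRIAGE_RETURN_ 0x142d. For field 1 the
// same codes become 0x1520, 0x152f, and 0x152d.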

/**
 * Detects if the 2-byte packet data is a special character
 *
 * Special characters have a second byte in the range 0x30 to 0x3f,
 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
 * data channel 2).
 *
 * @param {Integer} char0 The first byte
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the 2 bytes are a special character
 */
Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
  return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
};

/**
 * Detects if the 2-byte packet data is an extended character
 *
 * Extended characters have a second byte in the range 0x20 to 0x3f,
 * with the first byte being 0x12 or 0x13 (for data channel 1) or
 * 0x1a or 0x1b (for data channel 2).
 *
 * @param {Integer} char0 The first byte
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the 2 bytes are an extended character
 */
Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
  return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
};

/**
 * Detects if the 2-byte packet is a mid-row code
 *
 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
 * channel 2).
 *
 * @param {Integer} char0 The first byte
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the 2 bytes are a mid-row code
 */
Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
  return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
};

/**
 * Detects if the 2-byte packet is an offset control code
 *
 * Offset control codes have a second byte in the range 0x21 to 0x23,
 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
 * data channel 2).
 *
 * @param {Integer} char0 The first byte
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the 2 bytes are an offset control code
 */
Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
  return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
};

/**
 * Detects if the 2-byte packet is a Preamble Address Code
 *
 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
 * range 0x40 to 0x7f.
 *
 * @param {Integer} char0 The first byte
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the 2 bytes are a PAC
 */
Cea608Stream.prototype.isPAC = function (char0, char1) {
  return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
};
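// Worked example: the pair char0 = 0x15, char1 = 0x74 is a PAC on data
// channel 1 (0x15 falls within BASE_..BASE_+7 and 0x74 within 0x40..0x7f).
// The push handler above then computes data & 0x1f20 === 0x1520, which
// ROWS maps to row 5; since data & 0x10 is set, the indent bits give
// column ((data & 0xe) >> 1) * 4 === 8, and the even second byte applies
// no underline.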

/**
 * Detects if a packet's second byte is in the range of a PAC color code
 *
 * PAC color codes have a second byte in the range 0x40 to 0x4f, or
 * 0x60 to 0x7f.
 *
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the byte is a color PAC
 */
Cea608Stream.prototype.isColorPAC = function (char1) {
  return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
};

/**
 * Detects if a single byte is in the range of a normal character
 *
 * Normal text bytes are in the range 0x20 to 0x7f.
 *
 * @param {Integer} char The byte
 * @return {Boolean} Whether the byte is a normal character
 */
Cea608Stream.prototype.isNormalChar = function (char) {
  return char >= 0x20 && char <= 0x7f;
};

/**
 * Configures roll-up
 *
 * @param {Integer} pts Current PTS
 * @param {Integer} newBaseRow Used by PACs to slide the current window to
 * a new position
 */
Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
  // Reset the base row to the bottom row when switching modes
  if (this.mode_ !== 'rollUp') {
    this.row_ = BOTTOM_ROW;
    this.mode_ = 'rollUp';

    // Spec says to wipe memories when switching to roll-up
    this.flushDisplayed(pts);
    this.nonDisplayed_ = createDisplayBuffer();
    this.displayed_ = createDisplayBuffer();
  }

  if (newBaseRow !== undefined && newBaseRow !== this.row_) {
    // move currently displayed captions (up or down) to the new base row
    for (var i = 0; i < this.rollUpRows_; i++) {
      this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
      this.displayed_[this.row_ - i] = '';
    }
  }

  if (newBaseRow === undefined) {
    newBaseRow = this.row_;
  }

  this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
};

// Adds the opening HTML tag for the passed character to the caption text,
// and keeps track of it for later closing
Cea608Stream.prototype.addFormatting = function (pts, format) {
  this.formatting_ = this.formatting_.concat(format);
  var text = format.reduce(function (text, format) {
    return text + '<' + format + '>';
  }, '');
  this[this.mode_](pts, text);
};

// Adds HTML closing tags for current formatting to caption text and
// clears remembered formatting
Cea608Stream.prototype.clearFormatting = function (pts) {
  if (!this.formatting_.length) {
    return;
  }

  var text = this.formatting_.reverse().reduce(function (text, format) {
    return text + '</' + format + '>';
  }, '');
  this.formatting_ = [];
  this[this.mode_](pts, text);
};

// Mode Implementations
Cea608Stream.prototype.popOn = function (pts, text) {
  var baseRow = this.nonDisplayed_[this.row_];

  // buffer characters
  baseRow += text;
  this.nonDisplayed_[this.row_] = baseRow;
};

Cea608Stream.prototype.rollUp = function (pts, text) {
  var baseRow = this.displayed_[this.row_];
  baseRow += text;
  this.displayed_[this.row_] = baseRow;
};

Cea608Stream.prototype.shiftRowsUp_ = function () {
  var i;

  // clear out inactive rows
  for (i = 0; i < this.topRow_; i++) {
    this.displayed_[i] = '';
  }

  for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
    this.displayed_[i] = '';
  }

  // shift displayed rows up
  for (i = this.topRow_; i < this.row_; i++) {
    this.displayed_[i] = this.displayed_[i + 1];
  }

  // clear out the bottom row
  this.displayed_[this.row_] = '';
};

Cea608Stream.prototype.paintOn = function (pts, text) {
  var baseRow = this.displayed_[this.row_];
  baseRow += text;
  this.displayed_[this.row_] = baseRow;
};

// exports
var captionStream = {
  CaptionStream: CaptionStream$1,
  Cea608Stream: Cea608Stream,
  Cea708Stream: Cea708Stream
};
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 */

var streamTypes = {
  H264_STREAM_TYPE: 0x1B,
  ADTS_STREAM_TYPE: 0x0F,
  METADATA_STREAM_TYPE: 0x15
};
var MAX_TS = 8589934592;
var RO_THRESH = 4294967296;
var TYPE_SHARED = 'shared';

var handleRollover$1 = function handleRollover(value, reference) {
  var direction = 1;

  if (value > reference) {
    // If the current timestamp value is greater than our reference timestamp and we detect a
    // timestamp rollover, this means the roll over is happening in the opposite direction.
    // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
    // point will be set to a small number, e.g. 1. The user then seeks backwards over the
    // rollover point. In loading this segment, the timestamp values will be very large,
    // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
    // the time stamp to be `value - 2^33`.
    direction = -1;
  }

  // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
  // cause an incorrect adjustment.
  while (Math.abs(reference - value) > RO_THRESH) {
    value += direction * MAX_TS;
  }

  return value;
};
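// Worked example (MAX_TS === 2^33, RO_THRESH === 2^32):
//
//   handleRollover$1(8589934591, 1); // -> -1
//
// The raw 33-bit timestamp 2^33 - 1 sits just "before" the rollover point,
// so relative to a reference of 1 it is re-mapped to -1 rather than being
// treated as a jump of roughly 26.5 hours forward.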

var TimestampRolloverStream$1 = function TimestampRolloverStream(type) {
  var lastDTS, referenceDTS;
  TimestampRolloverStream.prototype.init.call(this);

  // The "shared" type is used in cases where a stream will contain muxed
  // video and audio. We could use `undefined` here, but having a string
  // makes debugging a little clearer.
  this.type_ = type || TYPE_SHARED;

  this.push = function (data) {
    // Any "shared" rollover streams will accept _all_ data. Otherwise,
    // streams will only accept data that matches their type.
    if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
      return;
    }

    if (referenceDTS === undefined) {
      referenceDTS = data.dts;
    }

    data.dts = handleRollover$1(data.dts, referenceDTS);
    data.pts = handleRollover$1(data.pts, referenceDTS);
    lastDTS = data.dts;
    this.trigger('data', data);
  };

  this.flush = function () {
    referenceDTS = lastDTS;
    this.trigger('done');
  };

  this.endTimeline = function () {
    this.flush();
    this.trigger('endedtimeline');
  };

  this.discontinuity = function () {
    referenceDTS = void 0;
    lastDTS = void 0;
  };

  this.reset = function () {
    this.discontinuity();
    this.trigger('reset');
  };
};

TimestampRolloverStream$1.prototype = new stream();
var timestampRolloverStream = {
  TimestampRolloverStream: TimestampRolloverStream$1,
  handleRollover: handleRollover$1
};

var percentEncode$1 = function percentEncode(bytes, start, end) {
  var i,
      result = '';

  for (i = start; i < end; i++) {
    result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
  }

  return result;
},
    // return the string representation of the specified byte range,
    // interpreted as UTF-8.
    parseUtf8 = function parseUtf8(bytes, start, end) {
  return decodeURIComponent(percentEncode$1(bytes, start, end));
},
    // return the string representation of the specified byte range,
    // interpreted as ISO-8859-1.
    parseIso88591$1 = function parseIso88591(bytes, start, end) {
  return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
},
    parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
  return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
},
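    // For example, the syncsafe bytes [0x00, 0x00, 0x02, 0x01] decode to
    // (2 << 7) | 1 === 257: each byte contributes only its low 7 bits,
    // which is how ID3 keeps size fields free of false sync patterns.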
    tagParsers = {
  TXXX: function TXXX(tag) {
    var i;

    if (tag.data[0] !== 3) {
      // ignore frames with unrecognized character encodings
      return;
    }

    for (i = 1; i < tag.data.length; i++) {
      if (tag.data[i] === 0) {
        // parse the text fields
        tag.description = parseUtf8(tag.data, 1, i);

        // do not include the null terminator in the tag value
        tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
        break;
      }
    }

    tag.data = tag.value;
  },
  WXXX: function WXXX(tag) {
    var i;

    if (tag.data[0] !== 3) {
      // ignore frames with unrecognized character encodings
      return;
    }

    for (i = 1; i < tag.data.length; i++) {
      if (tag.data[i] === 0) {
        // parse the description and URL fields
        tag.description = parseUtf8(tag.data, 1, i);
        tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
        break;
      }
    }
  },
  PRIV: function PRIV(tag) {
    var i;

    for (i = 0; i < tag.data.length; i++) {
      if (tag.data[i] === 0) {
        // parse the owner field
        tag.owner = parseIso88591$1(tag.data, 0, i);
        break;
      }
    }

    tag.privateData = tag.data.subarray(i + 1);
    tag.data = tag.privateData;
  }
},
    _MetadataStream;

_MetadataStream = function MetadataStream(options) {
  var settings = {
    // the bytes of the program-level descriptor field in MP2T
    // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
    // program element descriptors"
    descriptor: options && options.descriptor
  },
      // the total size in bytes of the ID3 tag being parsed
      tagSize = 0,
      // tag data that is not complete enough to be parsed
      buffer = [],
      // the total number of bytes currently in the buffer
      bufferSize = 0,
      i;

  _MetadataStream.prototype.init.call(this);

  // calculate the text track in-band metadata track dispatch type
  // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
  this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);

  if (settings.descriptor) {
    for (i = 0; i < settings.descriptor.length; i++) {
      this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
    }
  }

  this.push = function (chunk) {
    var tag, frameStart, frameSize, frame, i, frameHeader;

    if (chunk.type !== 'timed-metadata') {
      return;
    }

    // if data_alignment_indicator is set in the PES header,
    // we must have the start of a new ID3 tag. Assume anything
    // remaining in the buffer was malformed and throw it out
    if (chunk.dataAlignmentIndicator) {
      bufferSize = 0;
      buffer.length = 0;
    }

    // ignore events that don't look like ID3 data
    if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
      this.trigger('log', {
        level: 'warn',
        message: 'Skipping unrecognized metadata packet'
      });
      return;
    }

    // add this chunk to the data we've collected so far
    buffer.push(chunk);
    bufferSize += chunk.data.byteLength;

    // grab the size of the entire frame from the ID3 header
    if (buffer.length === 1) {
      // the frame size is transmitted as a 28-bit integer in the
      // last four bytes of the ID3 header.
      // The most significant bit of each byte is dropped and the
      // results concatenated to recover the actual value.
      tagSize = parseSyncSafeInteger$1(chunk.data.subarray(6, 10));

      // ID3 reports the tag size excluding the header but it's more
      // convenient for our comparisons to include it
      tagSize += 10;
    }

    // if the entire frame has not arrived, wait for more data
    if (bufferSize < tagSize) {
      return;
    }

    // collect the entire frame so it can be parsed
    tag = {
      data: new Uint8Array(tagSize),
      frames: [],
      pts: buffer[0].pts,
      dts: buffer[0].dts
    };

    for (i = 0; i < tagSize;) {
      tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
      i += buffer[0].data.byteLength;
      bufferSize -= buffer[0].data.byteLength;
      buffer.shift();
    }

    // find the start of the first frame and the end of the tag
    frameStart = 10;

    if (tag.data[5] & 0x40) {
      // advance the frame start past the extended header
      frameStart += 4; // header size field
      frameStart += parseSyncSafeInteger$1(tag.data.subarray(10, 14));

      // clip any padding off the end
      tagSize -= parseSyncSafeInteger$1(tag.data.subarray(16, 20));
    }

    // parse one or more ID3 frames
    // http://id3.org/id3v2.3.0#ID3v2_frame_overview
    do {
      // determine the number of bytes in this frame
      frameSize = parseSyncSafeInteger$1(tag.data.subarray(frameStart + 4, frameStart + 8));

      if (frameSize < 1) {
        this.trigger('log', {
          level: 'warn',
          message: 'Malformed ID3 frame encountered. Skipping metadata parsing.'
        });
        return;
      }

      frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);
      frame = {
        id: frameHeader,
        data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
      };
      frame.key = frame.id;

      if (tagParsers[frame.id]) {
        tagParsers[frame.id](frame);

        // handle the special PRIV frame used to indicate the start
        // time for raw AAC data
        if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
          var d = frame.data,
              size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
          size *= 4;
          size += d[7] & 0x03;
          frame.timeStamp = size;

          // in raw AAC, all subsequent data will be timestamped based
          // on the value of this frame
          // we couldn't have known the appropriate pts and dts before
          // parsing this ID3 tag so set those values now
          if (tag.pts === undefined && tag.dts === undefined) {
            tag.pts = frame.timeStamp;
            tag.dts = frame.timeStamp;
          }

          this.trigger('timestamp', frame);
        }
      }

      tag.frames.push(frame);

      frameStart += 10; // advance past the frame header
      frameStart += frameSize; // advance past the frame body
    } while (frameStart < tagSize);

    this.trigger('data', tag);
  };
};

_MetadataStream.prototype = new stream();
var metadataStream = _MetadataStream;
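// The transportStreamTimestamp handling in MetadataStream's push above
// rebuilds a 33-bit, 90kHz PES timestamp without 32-bit overflow: the top
// 31 bits are assembled with bitwise ops, multiplied by 4 (a safe 2-bit
// left shift in double-precision math), then the last 2 bits are added.
// E.g., for a hypothetical payload whose relevant bytes are all zero except
// d[7] === 0x07:
//
//   size = 0x07 >>> 2;               // 1
//   size = size * 4 + (0x07 & 0x03); // 7 -> frame.timeStamp === 7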
var TimestampRolloverStream = timestampRolloverStream.TimestampRolloverStream;

// object types
var _TransportPacketStream, _TransportParseStream, _ElementaryStream;

// constants
var MP2T_PACKET_LENGTH$1 = 188, // bytes
    SYNC_BYTE$1 = 0x47;

/**
 * Splits an incoming stream of binary data into MPEG-2 Transport
 * Stream packets.
 */
_TransportPacketStream = function TransportPacketStream() {
  var buffer = new Uint8Array(MP2T_PACKET_LENGTH$1),
      bytesInBuffer = 0;

  _TransportPacketStream.prototype.init.call(this);

  // Deliver new bytes to the stream.

  /**
   * Split a stream of data into M2TS packets
   **/
  this.push = function (bytes) {
    var startIndex = 0,
        endIndex = MP2T_PACKET_LENGTH$1,
        everything;

    // If there are bytes remaining from the last segment, prepend them to the
    // bytes that were pushed in
    if (bytesInBuffer) {
      everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
      everything.set(buffer.subarray(0, bytesInBuffer));
      everything.set(bytes, bytesInBuffer);
      bytesInBuffer = 0;
    } else {
      everything = bytes;
    }

    // While we have enough data for a packet
    while (endIndex < everything.byteLength) {
      // Look for a pair of start and end sync bytes in the data.
      if (everything[startIndex] === SYNC_BYTE$1 && everything[endIndex] === SYNC_BYTE$1) {
        // We found a packet so emit it and jump one whole packet forward in
        // the stream
        this.trigger('data', everything.subarray(startIndex, endIndex));
        startIndex += MP2T_PACKET_LENGTH$1;
        endIndex += MP2T_PACKET_LENGTH$1;
        continue;
      }

      // If we get here, we have somehow become de-synchronized and we need to step
      // forward one byte at a time until we find a pair of sync bytes that denote
      // a packet
      startIndex++;
      endIndex++;
    }

    // If there was some data left over at the end of the segment that couldn't
    // possibly be a whole packet, keep it because it might be the start of a packet
    // that continues in the next segment
    if (startIndex < everything.byteLength) {
      buffer.set(everything.subarray(startIndex), 0);
      bytesInBuffer = everything.byteLength - startIndex;
    }
  };

  /**
   * Passes identified M2TS packets to the TransportParseStream to be parsed
   **/
  this.flush = function () {
    // If the buffer contains a whole packet when we are being flushed, emit it
    // and empty the buffer. Otherwise hold onto the data because it may be
    // important for decoding the next segment
    if (bytesInBuffer === MP2T_PACKET_LENGTH$1 && buffer[0] === SYNC_BYTE$1) {
      this.trigger('data', buffer);
      bytesInBuffer = 0;
    }

    this.trigger('done');
  };

  this.endTimeline = function () {
    this.flush();
    this.trigger('endedtimeline');
  };

  this.reset = function () {
    bytesInBuffer = 0;
    this.trigger('reset');
  };
};

_TransportPacketStream.prototype = new stream();
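// Usage sketch (hypothetical input): pushing a 376-byte Uint8Array that
// holds two back-to-back packets (0x47 at offsets 0 and 188) emits one
// 'data' event for the first packet; the second is buffered, since its
// trailing sync byte can't be verified yet, and is emitted by flush().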
/**
 * Accepts an MP2T TransportPacketStream and emits data events with parsed
 * forms of the individual transport stream packets.
 */
_TransportParseStream = function TransportParseStream() {
  var parsePsi, parsePat, parsePmt, self;

  _TransportParseStream.prototype.init.call(this);

  self = this;
  this.packetsWaitingForPmt = [];
  this.programMapTable = undefined;

  parsePsi = function parsePsi(payload, psi) {
    var offset = 0;

    // PSI packets may be split into multiple sections and those
    // sections may be split into multiple packets. If a PSI
    // section starts in this packet, the payload_unit_start_indicator
    // will be true and the first byte of the payload will indicate
    // the offset from the current position to the start of the
    // section.
    if (psi.payloadUnitStartIndicator) {
      offset += payload[offset] + 1;
    }

    if (psi.type === 'pat') {
      parsePat(payload.subarray(offset), psi);
    } else {
      parsePmt(payload.subarray(offset), psi);
    }
  };

  parsePat = function parsePat(payload, pat) {
    pat.section_number = payload[7]; // eslint-disable-line camelcase
    pat.last_section_number = payload[8]; // eslint-disable-line camelcase

    // skip the PSI header and parse the first PMT entry
    self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
    pat.pmtPid = self.pmtPid;
  };
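  // For instance, PAT bytes payload[10] === 0x10 and payload[11] === 0x00
  // yield pmtPid === ((0x10 & 0x1F) << 8) | 0x00 === 0x1000 (4096), the PID
  // on which this parser will then expect the PMT to arrive.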
  /**
   * Parse out the relevant fields of a Program Map Table (PMT).
   * @param payload {Uint8Array} the PMT-specific portion of an MP2T
   * packet. The first byte in this array should be the table_id
   * field.
   * @param pmt {object} the object that should be decorated with
   * fields parsed from the PMT.
   */
  parsePmt = function parsePmt(payload, pmt) {
    var sectionLength, tableEnd, programInfoLength, offset;

    // PMTs can be sent ahead of the time when they should actually
    // take effect. We don't believe this should ever be the case
    // for HLS but we'll ignore "forward" PMT declarations if we see
    // them. Future PMT declarations have the current_next_indicator
    // set to zero.
    if (!(payload[5] & 0x01)) {
      return;
    }

    // overwrite any existing program map table
    self.programMapTable = {
      video: null,
      audio: null,
      'timed-metadata': {}
    };

    // the mapping table ends at the end of the current section
    sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
    tableEnd = 3 + sectionLength - 4;

    // to determine where the table is, we have to figure out how
    // long the program info descriptors are
    programInfoLength = (payload[10] & 0x0f) << 8 | payload[11];

    // advance the offset to the first entry in the mapping table
    offset = 12 + programInfoLength;

    while (offset < tableEnd) {
      var streamType = payload[offset];
      var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2];

      // only map a single elementary_pid for audio and video stream types
      // TODO: should this be done for metadata too? for now maintain behavior of
      // multiple metadata streams
      if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
        self.programMapTable.video = pid;
      } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
        self.programMapTable.audio = pid;
      } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
        // map pid to stream type for metadata streams
        self.programMapTable['timed-metadata'][pid] = streamType;
      }

      // move to the next table entry
      // skip past the elementary stream descriptors, if present
      offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
    }

    // record the map on the packet as well
    pmt.programMapTable = self.programMapTable;
  };

  /**
   * Deliver a new MP2T packet to the next stream in the pipeline.
   */
  this.push = function (packet) {
    var result = {},
        offset = 4;

    result.payloadUnitStartIndicator = !!(packet[1] & 0x40);

    // pid is a 13-bit field starting at the last bit of packet[1]
    result.pid = packet[1] & 0x1f;
    result.pid <<= 8;
    result.pid |= packet[2];

    // if an adaptation field is present, its length is specified by the
    // fifth byte of the TS packet header. The adaptation field is
    // used to add stuffing to PES packets that don't fill a complete
    // TS packet, and to specify some forms of timing and control data
    // that we do not currently use.
    if ((packet[3] & 0x30) >>> 4 > 0x01) {
      offset += packet[offset] + 1;
    }

    // parse the rest of the packet based on the type
    if (result.pid === 0) {
      result.type = 'pat';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result);
    } else if (result.pid === this.pmtPid) {
      result.type = 'pmt';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result);

      // if there are any packets waiting for a PMT to be found, process them now
      while (this.packetsWaitingForPmt.length) {
        this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
      }
    } else if (this.programMapTable === undefined) {
      // When we have not seen a PMT yet, defer further processing of
      // PES packets until one has been parsed
      this.packetsWaitingForPmt.push([packet, offset, result]);
    } else {
      this.processPes_(packet, offset, result);
    }
  };

  this.processPes_ = function (packet, offset, result) {
    // set the appropriate stream type
    if (result.pid === this.programMapTable.video) {
      result.streamType = streamTypes.H264_STREAM_TYPE;
    } else if (result.pid === this.programMapTable.audio) {
      result.streamType = streamTypes.ADTS_STREAM_TYPE;
    } else {
      // if not video or audio, it is timed-metadata or unknown
      // if unknown, streamType will be undefined
      result.streamType = this.programMapTable['timed-metadata'][result.pid];
    }

    result.type = 'pes';
    result.data = packet.subarray(offset);
    this.trigger('data', result);
  };
};

_TransportParseStream.prototype = new stream();
_TransportParseStream.STREAM_TYPES = {
  h264: 0x1b,
  adts: 0x0f
};
/**
 * Reconstitutes program elementary stream (PES) packets from parsed
 * transport stream packets. That is, if you pipe an
 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
 * events will be events which capture the bytes for individual PES
 * packets plus relevant metadata that has been extracted from the
 * container.
 */
_ElementaryStream = function ElementaryStream() {
  var self = this,
      segmentHadPmt = false,
      // PES packet fragments
      video = {
    data: [],
    size: 0
  },
      audio = {
    data: [],
    size: 0
  },
      timedMetadata = {
    data: [],
    size: 0
  },
      programMapTable,
      parsePes = function parsePes(payload, pes) {
    var ptsDtsFlags;
    var startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2];

    // default to an empty array
    pes.data = new Uint8Array();

    // In certain live streams, the start of a TS fragment contains TS packets
    // whose frame data continues from the previous fragment. This check
    // verifies that the PES data is the start of a new PES payload
    if (startPrefix !== 1) {
      return;
    }

    // get the packet length, this will be 0 for video
    pes.packetLength = 6 + (payload[4] << 8 | payload[5]);

    // find out if this packet starts a new keyframe
    pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0;

    // PES packets may be annotated with a PTS value, or a PTS value
    // and a DTS value. Determine what combination of values is
    // available to work with.
    ptsDtsFlags = payload[7];

    // PTS and DTS are normally stored as a 33-bit number. JavaScript
    // performs all bitwise operations on 32-bit integers but supports a
    // much greater range (52 bits) of integers using standard
    // mathematical operations.
    // We construct a 31-bit value using bitwise operators over the 31
    // most significant bits and then multiply by 4 (equal to a left-shift
    // of 2) before we add the final 2 least significant bits of the
    // timestamp (equal to an OR.)
    if (ptsDtsFlags & 0xC0) {
      // the PTS and DTS are not written out directly. For information
      // on how they are encoded, see
      // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
      pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
      pes.pts *= 4; // Left shift by 2
      pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
      pes.dts = pes.pts;

      if (ptsDtsFlags & 0x40) {
        pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
        pes.dts *= 4; // Left shift by 2
        pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
      }
    }

    // the data section starts immediately after the PES header.
    // pes_header_data_length specifies the number of header bytes
    // that follow the last byte of the field.
    pes.data = payload.subarray(9 + payload[8]);
  },
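      // Worked PTS example (hypothetical header bytes 0x21 0x00 0x01 0x00
      // 0x05 at payload[9..13], marker bits included): the 31 high bits all
      // mask to zero, so pes.pts === 0 * 4 + ((0x05 & 0x06) >>> 1) === 2,
      // i.e. two ticks of the 90kHz clock. The `* 4` plus 2-bit add-on
      // reconstructs a 33-bit value that a single 32-bit bitwise
      // expression could not hold.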

  /**
   * Pass completely parsed PES packets to the next stream in the pipeline
   **/
  flushStream = function flushStream(stream, type, forceFlush) {
    var packetData = new Uint8Array(stream.size),
        event = {
      type: type
    },
        i = 0,
        offset = 0,
        packetFlushable = false,
        fragment;

    // do nothing if there is not enough buffered data for a complete
    // PES header
    if (!stream.data.length || stream.size < 9) {
      return;
    }

    event.trackId = stream.data[0].pid;

    // reassemble the packet
    for (i = 0; i < stream.data.length; i++) {
      fragment = stream.data[i];
      packetData.set(fragment.data, offset);
      offset += fragment.data.byteLength;
    }

    // parse assembled packet's PES header
    parsePes(packetData, event);

    // non-video PES packets MUST have a non-zero PES_packet_length
    // check that there is enough stream data to fill the packet
    packetFlushable = type === 'video' || event.packetLength <= stream.size;

    // flush pending packets if the conditions are right
    if (forceFlush || packetFlushable) {
      stream.size = 0;
      stream.data.length = 0;
    }

    // only emit packets that are complete. this is to avoid assembling
    // incomplete PES packets due to poor segmentation
    if (packetFlushable) {
      self.trigger('data', event);
    }
  };

  _ElementaryStream.prototype.init.call(this);

  /**
   * Identifies M2TS packet types and parses PES packets using metadata
   * parsed from the PMT
   **/
  this.push = function (data) {
    ({
      pat: function pat() {
        // we have to wait for the PMT to arrive as well before we
        // have any meaningful metadata
      },
      pes: function pes() {
        var stream, streamType;

        switch (data.streamType) {
          case streamTypes.H264_STREAM_TYPE:
            stream = video;
            streamType = 'video';
            break;

          case streamTypes.ADTS_STREAM_TYPE:
            stream = audio;
            streamType = 'audio';
            break;

          case streamTypes.METADATA_STREAM_TYPE:
            stream = timedMetadata;
            streamType = 'timed-metadata';
            break;

          default:
            // ignore unknown stream types
            return;
        }

        // if a new packet is starting, we can flush the completed
        // packet
        if (data.payloadUnitStartIndicator) {
          flushStream(stream, streamType, true);
        }

        // buffer this fragment until we are sure we've received the
        // complete payload
        stream.data.push(data);
        stream.size += data.data.byteLength;
      },
      pmt: function pmt() {
        var event = {
          type: 'metadata',
          tracks: []
        };
        programMapTable = data.programMapTable;

        // translate audio and video streams to tracks
        if (programMapTable.video !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.video,
            codec: 'avc',
            type: 'video'
          });
        }

        if (programMapTable.audio !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.audio,
            codec: 'adts',
            type: 'audio'
          });
        }

        segmentHadPmt = true;
        self.trigger('data', event);
      }
    })[data.type]();
  };

  this.reset = function () {
    video.size = 0;
    video.data.length = 0;
    audio.size = 0;
    audio.data.length = 0;
    this.trigger('reset');
  };

  /**
   * Flush any remaining input. Video PES packets may be of variable
   * length. Normally, the start of a new video packet can trigger the
   * finalization of the previous packet. That is not possible if no
   * more video is forthcoming, however. In that case, some other
   * mechanism (like the end of the file) has to be employed. When it is
   * clear that no additional data is forthcoming, calling this method
   * will flush the buffered packets.
   */
  this.flushStreams_ = function () {
    // !!THIS ORDER IS IMPORTANT!!
    // video first then audio
    flushStream(video, 'video');
    flushStream(audio, 'audio');
    flushStream(timedMetadata, 'timed-metadata');
  };

  this.flush = function () {
    // if on flush we haven't had a pmt emitted
    // and we have a pmt to emit, emit the pmt
    // so that we trigger a trackinfo downstream.
    if (!segmentHadPmt && programMapTable) {
      var pmt = {
        type: 'metadata',
        tracks: []
      };

      // translate audio and video streams to tracks
      if (programMapTable.video !== null) {
        pmt.tracks.push({
          timelineStartInfo: {
            baseMediaDecodeTime: 0
          },
          id: +programMapTable.video,
          codec: 'avc',
          type: 'video'
        });
      }

      if (programMapTable.audio !== null) {
        pmt.tracks.push({
          timelineStartInfo: {
            baseMediaDecodeTime: 0
          },
          id: +programMapTable.audio,
          codec: 'adts',
          type: 'audio'
        });
      }

      self.trigger('data', pmt);
    }

    segmentHadPmt = false;
    this.flushStreams_();
    this.trigger('done');
  };
};

_ElementaryStream.prototype = new stream();
var m2ts = {
  PAT_PID: 0x0000,
  MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH$1,
  TransportPacketStream: _TransportPacketStream,
  TransportParseStream: _TransportParseStream,
  ElementaryStream: _ElementaryStream,
  TimestampRolloverStream: TimestampRolloverStream,
  CaptionStream: captionStream.CaptionStream,
  Cea608Stream: captionStream.Cea608Stream,
  Cea708Stream: captionStream.Cea708Stream,
  MetadataStream: metadataStream
};

for (var type in streamTypes) {
  if (streamTypes.hasOwnProperty(type)) {
    m2ts[type] = streamTypes[type];
  }
}

var m2ts_1 = m2ts;
var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;

var _AdtsStream;

var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];

/*
 * Accepts an ElementaryStream and emits data events with parsed
 * AAC Audio Frames of the individual packets. Input audio in ADTS
 * format is unpacked and re-emitted as AAC frames.
 *
 * @see http://wiki.multimedia.cx/index.php?title=ADTS
 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
 */
_AdtsStream = function AdtsStream(handlePartialSegments) {
  var buffer,
      frameNum = 0;

  _AdtsStream.prototype.init.call(this);

  this.skipWarn_ = function (start, end) {
    this.trigger('log', {
      level: 'warn',
      message: "adts skipping bytes " + start + " to " + end + " in frame " + frameNum + " outside syncword"
    });
  };

  this.push = function (packet) {
    var i = 0,
        frameLength,
        protectionSkipBytes,
        oldBuffer,
        sampleCount,
        adtsFrameDuration;

    if (!handlePartialSegments) {
      frameNum = 0;
    }

    if (packet.type !== 'audio') {
      // ignore non-audio data
      return;
    }

    // Prepend any data in the buffer to the input data so that we can parse
    // aac frames that cross a PES packet boundary
    if (buffer && buffer.length) {
      oldBuffer = buffer;
      buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
      buffer.set(oldBuffer);
      buffer.set(packet.data, oldBuffer.byteLength);
    } else {
      buffer = packet.data;
    }

    // unpack any ADTS frames which have been fully received
    // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
    var skip;

    // We use i + 7 here because we want to be able to parse the entire header.
    // If we don't have enough bytes to do that, then we definitely won't have a full frame.
    while (i + 7 < buffer.length) {
      // Look for the start of an ADTS header.
      if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
        if (typeof skip !== 'number') {
          skip = i;
        }

        // If a valid header was not found, jump one forward and attempt to
        // find a valid ADTS header starting at the next byte
        i++;
        continue;
      }

      if (typeof skip === 'number') {
        this.skipWarn_(skip, i);
        skip = null;
      }

      // The protection skip bit tells us if we have 2 bytes of CRC data at the
      // end of the ADTS header
      protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2;

      // Frame length is a 13 bit integer starting 16 bits from the
      // end of the sync sequence
      // NOTE: frame length includes the size of the header
      frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
      sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
      adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2];

      // If we don't have enough data to actually finish this ADTS frame,
      // then we have to wait for more data
      if (buffer.byteLength - i < frameLength) {
        break;
      }

      // Otherwise, deliver the complete AAC frame
      this.trigger('data', {
        pts: packet.pts + frameNum * adtsFrameDuration,
        dts: packet.dts + frameNum * adtsFrameDuration,
        sampleCount: sampleCount,
        audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
        channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
        samplerate: ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2],
        samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
        // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
        samplesize: 16,
        // data is the frame without its header
        data: buffer.subarray(i + 7 + protectionSkipBytes, i + frameLength)
      });

      frameNum++;
      i += frameLength;
    }

    if (typeof skip === 'number') {
      this.skipWarn_(skip, i);
      skip = null;
    }

    // remove processed bytes from the buffer.
    buffer = buffer.subarray(i);
  };

  this.flush = function () {
    frameNum = 0;
    this.trigger('done');
  };

  this.reset = function () {
    buffer = void 0;
    this.trigger('reset');
  };

  this.endTimeline = function () {
    buffer = void 0;
    this.trigger('endedtimeline');
  };
};

_AdtsStream.prototype = new stream();
var adts = _AdtsStream;
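// Header math example (hypothetical 7-byte ADTS header 0xFF 0xF1 0x50 0x80
// 0x02 0x1F 0xFC): the syncword and layer bits match (0xF1 & 0xF6 ===
// 0xF0); protection_absent is set, so protectionSkipBytes === 0; the
// sampling frequency index (0x50 & 0x3c) >>> 2 === 4 selects 44100 Hz;
// channelcount === 2; audioobjecttype === 2 (AAC LC); and frameLength ===
// ((0x80 & 0x03) << 11) | (0x02 << 3) | ((0x1F & 0xe0) >> 5) === 16 bytes,
// header included.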
8598 /**
8599 * mux.js
8600 *
8601 * Copyright (c) Brightcove
8602 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
8603 */
8604
8605 var ExpGolomb;
8606 /**
8607 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
8608 * scheme used by h264.
8609 */
8610
8611 ExpGolomb = function ExpGolomb(workingData) {
8612 var // the number of bytes left to examine in workingData
8613 workingBytesAvailable = workingData.byteLength,
8614 // the current word being examined
8615 workingWord = 0,
8616 // :uint
8617 // the number of bits left to examine in the current word
8618 workingBitsAvailable = 0; // :uint;
8619 // ():uint
8620
8621 this.length = function () {
8622 return 8 * workingBytesAvailable;
8623 }; // ():uint
8624
8625
8626 this.bitsAvailable = function () {
8627 return 8 * workingBytesAvailable + workingBitsAvailable;
8628 }; // ():void
8629
8630
8631 this.loadWord = function () {
8632 var position = workingData.byteLength - workingBytesAvailable,
8633 workingBytes = new Uint8Array(4),
8634 availableBytes = Math.min(4, workingBytesAvailable);
8635
8636 if (availableBytes === 0) {
8637 throw new Error('no bytes available');
8638 }
8639
8640 workingBytes.set(workingData.subarray(position, position + availableBytes));
8641 workingWord = new DataView(workingBytes.buffer).getUint32(0); // track the amount of workingData that has been processed
8642
8643 workingBitsAvailable = availableBytes * 8;
8644 workingBytesAvailable -= availableBytes;
8645 }; // (count:int):void
8646
8647
8648 this.skipBits = function (count) {
8649 var skipBytes; // :int
8650
8651 if (workingBitsAvailable > count) {
8652 workingWord <<= count;
8653 workingBitsAvailable -= count;
8654 } else {
8655 count -= workingBitsAvailable;
8656 skipBytes = Math.floor(count / 8);
8657 count -= skipBytes * 8;
8658 workingBytesAvailable -= skipBytes;
8659 this.loadWord();
8660 workingWord <<= count;
8661 workingBitsAvailable -= count;
8662 }
8663 }; // (size:int):uint
8664
8665
8666 this.readBits = function (size) {
8667 var bits = Math.min(workingBitsAvailable, size),
8668 // :uint
8669 valu = workingWord >>> 32 - bits; // :uint
8670 // if size > 31, handle error
8671
8672 workingBitsAvailable -= bits;
8673
8674 if (workingBitsAvailable > 0) {
8675 workingWord <<= bits;
8676 } else if (workingBytesAvailable > 0) {
8677 this.loadWord();
8678 }
8679
8680 bits = size - bits;
8681
8682 if (bits > 0) {
8683 return valu << bits | this.readBits(bits);
8684 }
8685
8686 return valu;
8687 }; // ():uint
8688
8689
8690 this.skipLeadingZeros = function () {
8691 var leadingZeroCount; // :uint
8692
8693 for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
8694 if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {
8695 // the first bit of working word is 1
8696 workingWord <<= leadingZeroCount;
8697 workingBitsAvailable -= leadingZeroCount;
8698 return leadingZeroCount;
8699 }
8700 } // we exhausted workingWord and still have not found a 1
8701
8702
8703 this.loadWord();
8704 return leadingZeroCount + this.skipLeadingZeros();
8705 }; // ():void
8706
8707
8708 this.skipUnsignedExpGolomb = function () {
8709 this.skipBits(1 + this.skipLeadingZeros());
8710 }; // ():void
8711
8712
8713 this.skipExpGolomb = function () {
8714 this.skipBits(1 + this.skipLeadingZeros());
8715 }; // ():uint
8716
8717
8718 this.readUnsignedExpGolomb = function () {
8719 var clz = this.skipLeadingZeros(); // :uint
8720
8721 return this.readBits(clz + 1) - 1;
8722 }; // ():int
8723
8724
8725 this.readExpGolomb = function () {
8726 var valu = this.readUnsignedExpGolomb(); // :int
8727
8728 if (0x01 & valu) {
8729 // the number is odd if the low order bit is set
8730 return (1 + valu) >>> 1; // add 1 to make it even, and divide by 2
8731 }
8732
8733 return -1 * (valu >>> 1); // divide by two then make it negative
8734 }; // Some convenience functions
8735 // :Boolean
8736
8737
8738 this.readBoolean = function () {
8739 return this.readBits(1) === 1;
8740 }; // ():int
8741
8742
8743 this.readUnsignedByte = function () {
8744 return this.readBits(8);
8745 };
8746
8747 this.loadWord();
8748 };
8749
8750 var expGolomb = ExpGolomb;
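/* A minimal sketch of ue(v) decoding with the parser above: the codewords
 * '1', '010' and '011' decode to 0, 1 and 2, so the single (hypothetical)
 * byte 0b10100110 (0xa6) packs all three values:
 *
 *   var eg = new ExpGolomb(new Uint8Array([0xa6]));
 *   eg.readUnsignedExpGolomb(); // 0
 *   eg.readUnsignedExpGolomb(); // 1
 *   eg.readUnsignedExpGolomb(); // 2
 */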
8751
8752 var _H264Stream, _NalByteStream;
8753
8754 var PROFILES_WITH_OPTIONAL_SPS_DATA;
8755 /**
8756 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
8757 */
8758
8759 _NalByteStream = function NalByteStream() {
8760 var syncPoint = 0,
8761 i,
8762 buffer;
8763
8764 _NalByteStream.prototype.init.call(this);
8765 /*
8766 * Scans a byte stream and triggers a data event with the NAL units found.
8767 * @param {Object} data Event received from H264Stream
8768 * @param {Uint8Array} data.data The h264 byte stream to be scanned
8769 *
8770 * @see H264Stream.push
8771 */
8772
8773
8774 this.push = function (data) {
8775 var swapBuffer;
8776
8777 if (!buffer) {
8778 buffer = data.data;
8779 } else {
8780 swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
8781 swapBuffer.set(buffer);
8782 swapBuffer.set(data.data, buffer.byteLength);
8783 buffer = swapBuffer;
8784 }
8785
8786 var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B
8787 // scan for NAL unit boundaries
8788 // a match looks like this:
8789 // 0 0 1 .. NAL .. 0 0 1
8790 // ^ sync point ^ i
8791 // or this:
8792 // 0 0 1 .. NAL .. 0 0 0
8793 // ^ sync point ^ i
8794 // advance the sync point to a NAL start, if necessary
8795
8796 for (; syncPoint < len - 3; syncPoint++) {
8797 if (buffer[syncPoint + 2] === 1) {
8798 // the sync point is properly aligned
8799 i = syncPoint + 5;
8800 break;
8801 }
8802 }
8803
8804 while (i < len) {
8805 // look at the current byte to determine if we've hit the end of
8806 // a NAL unit boundary
8807 switch (buffer[i]) {
8808 case 0:
8809 // skip past non-sync sequences
8810 if (buffer[i - 1] !== 0) {
8811 i += 2;
8812 break;
8813 } else if (buffer[i - 2] !== 0) {
8814 i++;
8815 break;
8816 } // deliver the NAL unit if it isn't empty
8817
8818
8819 if (syncPoint + 3 !== i - 2) {
8820 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
8821 } // drop trailing zeroes
8822
8823
8824 do {
8825 i++;
8826 } while (buffer[i] !== 1 && i < len);
8827
8828 syncPoint = i - 2;
8829 i += 3;
8830 break;
8831
8832 case 1:
8833 // skip past non-sync sequences
8834 if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
8835 i += 3;
8836 break;
8837 } // deliver the NAL unit
8838
8839
8840 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
8841 syncPoint = i - 2;
8842 i += 3;
8843 break;
8844
8845 default:
8846 // the current byte isn't a one or zero, so it cannot be part
8847 // of a sync sequence
8848 i += 3;
8849 break;
8850 }
8851 } // filter out the NAL units that were delivered
8852
8853
8854 buffer = buffer.subarray(syncPoint);
8855 i -= syncPoint;
8856 syncPoint = 0;
8857 };
8858
8859 this.reset = function () {
8860 buffer = null;
8861 syncPoint = 0;
8862 this.trigger('reset');
8863 };
8864
8865 this.flush = function () {
8866 // deliver the last buffered NAL unit
8867 if (buffer && buffer.byteLength > 3) {
8868 this.trigger('data', buffer.subarray(syncPoint + 3));
8869 } // reset the stream state
8870
8871
8872 buffer = null;
8873 syncPoint = 0;
8874 this.trigger('done');
8875 };
8876
8877 this.endTimeline = function () {
8878 this.flush();
8879 this.trigger('endedtimeline');
8880 };
8881 };
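/* A minimal usage sketch (hypothetical bytes): two NAL units separated by
 * 0x000001 start codes produce two 'data' events, each payload delivered
 * without its start code:
 *
 *   var nals = new _NalByteStream();
 *   nals.on('data', function (nal) { console.log(nal); });
 *   nals.push({ data: new Uint8Array([0, 0, 1, 0x09, 0xf0, 0, 0, 1, 0x67, 0x42]) });
 *   // push emits [0x09, 0xf0]
 *   nals.flush(); // emits the trailing [0x67, 0x42], then 'done'
 */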
8882
8883 _NalByteStream.prototype = new stream(); // values of profile_idc that indicate additional fields are included in the SPS
8884 // see Recommendation ITU-T H.264 (4/2013),
8885 // 7.3.2.1.1 Sequence parameter set data syntax
8886
8887 PROFILES_WITH_OPTIONAL_SPS_DATA = {
8888 100: true,
8889 110: true,
8890 122: true,
8891 244: true,
8892 44: true,
8893 83: true,
8894 86: true,
8895 118: true,
8896 128: true,
8897 // TODO: the three profiles below don't
8898 // appear to have sps data in the specification anymore?
8899 138: true,
8900 139: true,
8901 134: true
8902 };
8903 /**
8904 * Accepts input from an ElementaryStream and produces H.264 NAL unit data
8905 * events.
8906 */
8907
8908 _H264Stream = function H264Stream() {
8909 var nalByteStream = new _NalByteStream(),
8910 self,
8911 trackId,
8912 currentPts,
8913 currentDts,
8914 discardEmulationPreventionBytes,
8915 readSequenceParameterSet,
8916 skipScalingList;
8917
8918 _H264Stream.prototype.init.call(this);
8919
8920 self = this;
8921 /*
8922 * Pushes a packet from a stream onto the NalByteStream
8923 *
8924 * @param {Object} packet - A packet received from a stream
8925 * @param {Uint8Array} packet.data - The raw bytes of the packet
8926 * @param {Number} packet.dts - Decode timestamp of the packet
8927 * @param {Number} packet.pts - Presentation timestamp of the packet
8928 * @param {Number} packet.trackId - The id of the h264 track this packet came from
8929 * @param {('video'|'audio')} packet.type - The type of packet
8930 *
8931 */
8932
8933 this.push = function (packet) {
8934 if (packet.type !== 'video') {
8935 return;
8936 }
8937
8938 trackId = packet.trackId;
8939 currentPts = packet.pts;
8940 currentDts = packet.dts;
8941 nalByteStream.push(packet);
8942 };
8943 /*
8944 * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
8945 * for the NALUs to the next stream component.
8946 * Also, preprocess caption and sequence parameter NALUs.
8947 *
8948 * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
8949 * @see NalByteStream.push
8950 */
8951
8952
8953 nalByteStream.on('data', function (data) {
8954 var event = {
8955 trackId: trackId,
8956 pts: currentPts,
8957 dts: currentDts,
8958 data: data,
8959 nalUnitTypeCode: data[0] & 0x1f
8960 };
8961
8962 switch (event.nalUnitTypeCode) {
8963 case 0x05:
8964 event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
8965 break;
8966
8967 case 0x06:
8968 event.nalUnitType = 'sei_rbsp';
8969 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
8970 break;
8971
8972 case 0x07:
8973 event.nalUnitType = 'seq_parameter_set_rbsp';
8974 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
8975 event.config = readSequenceParameterSet(event.escapedRBSP);
8976 break;
8977
8978 case 0x08:
8979 event.nalUnitType = 'pic_parameter_set_rbsp';
8980 break;
8981
8982 case 0x09:
8983 event.nalUnitType = 'access_unit_delimiter_rbsp';
8984 break;
8985 } // This triggers data on the H264Stream
8986
8987
8988 self.trigger('data', event);
8989 });
8990 nalByteStream.on('done', function () {
8991 self.trigger('done');
8992 });
8993 nalByteStream.on('partialdone', function () {
8994 self.trigger('partialdone');
8995 });
8996 nalByteStream.on('reset', function () {
8997 self.trigger('reset');
8998 });
8999 nalByteStream.on('endedtimeline', function () {
9000 self.trigger('endedtimeline');
9001 });
9002
9003 this.flush = function () {
9004 nalByteStream.flush();
9005 };
9006
9007 this.partialFlush = function () {
9008 nalByteStream.partialFlush();
9009 };
9010
9011 this.reset = function () {
9012 nalByteStream.reset();
9013 };
9014
9015 this.endTimeline = function () {
9016 nalByteStream.endTimeline();
9017 };
9018 /**
9019 * Advance the ExpGolomb decoder past a scaling list. The scaling
9020 * list is optionally transmitted as part of a sequence parameter
9021 * set and is not relevant to transmuxing.
9022 * @param count {number} the number of entries in this scaling list
9023 * @param expGolombDecoder {object} an ExpGolomb pointed to the
9024 * start of a scaling list
9025 * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
9026 */
9027
9028
9029 skipScalingList = function skipScalingList(count, expGolombDecoder) {
9030 var lastScale = 8,
9031 nextScale = 8,
9032 j,
9033 deltaScale;
9034
9035 for (j = 0; j < count; j++) {
9036 if (nextScale !== 0) {
9037 deltaScale = expGolombDecoder.readExpGolomb();
9038 nextScale = (lastScale + deltaScale + 256) % 256;
9039 }
9040
9041 lastScale = nextScale === 0 ? lastScale : nextScale;
9042 }
9043 };
9044 /**
9045 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
9046 * Sequence Payload"
9047 * @param data {Uint8Array} the bytes of a RBSP from a NAL
9048 * unit
9049 * @return {Uint8Array} the RBSP without any Emulation
9050 * Prevention Bytes
9051 */
9052
9053
9054 discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
9055 var length = data.byteLength,
9056 emulationPreventionBytesPositions = [],
9057 i = 1,
9058 newLength,
9059 newData; // Find all `Emulation Prevention Bytes`
9060
9061 while (i < length - 2) {
9062 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
9063 emulationPreventionBytesPositions.push(i + 2);
9064 i += 2;
9065 } else {
9066 i++;
9067 }
9068 } // If no Emulation Prevention Bytes were found just return the original
9069 // array
9070
9071
9072 if (emulationPreventionBytesPositions.length === 0) {
9073 return data;
9074 } // Create a new array to hold the NAL unit data
9075
9076
9077 newLength = length - emulationPreventionBytesPositions.length;
9078 newData = new Uint8Array(newLength);
9079 var sourceIndex = 0;
9080
9081 for (i = 0; i < newLength; sourceIndex++, i++) {
9082 if (sourceIndex === emulationPreventionBytesPositions[0]) {
9083 // Skip this byte
9084 sourceIndex++; // Remove this position index
9085
9086 emulationPreventionBytesPositions.shift();
9087 }
9088
9089 newData[i] = data[sourceIndex];
9090 }
9091
9092 return newData;
9093 };
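/* For illustration: encoders insert 0x03 after any 0x00 0x00 pair inside a
 * NAL unit so the payload can never mimic a start code; this function
 * reverses that. With hypothetical RBSP bytes:
 *
 *   discardEmulationPreventionBytes(new Uint8Array([0x67, 0x00, 0x00, 0x03, 0x01]));
 *   // => [0x67, 0x00, 0x00, 0x01]
 */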
9094 /**
9095 * Read a sequence parameter set and return some interesting video
9096 * properties. A sequence parameter set is the H264 metadata that
9097 * describes the properties of upcoming video frames.
9098 * @param data {Uint8Array} the bytes of a sequence parameter set
9099 * @return {object} an object with configuration parsed from the
9100 * sequence parameter set, including the dimensions of the
9101 * associated video frames.
9102 */
9103
9104
9105 readSequenceParameterSet = function readSequenceParameterSet(data) {
9106 var frameCropLeftOffset = 0,
9107 frameCropRightOffset = 0,
9108 frameCropTopOffset = 0,
9109 frameCropBottomOffset = 0,
9110 expGolombDecoder,
9111 profileIdc,
9112 levelIdc,
9113 profileCompatibility,
9114 chromaFormatIdc,
9115 picOrderCntType,
9116 numRefFramesInPicOrderCntCycle,
9117 picWidthInMbsMinus1,
9118 picHeightInMapUnitsMinus1,
9119 frameMbsOnlyFlag,
9120 scalingListCount,
9121 sarRatio = [1, 1],
9122 aspectRatioIdc,
9123 i;
9124 expGolombDecoder = new expGolomb(data);
9125 profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
9126
9127 profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
9128
9129 levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
9130
9131 expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
9132 // some profiles have more optional data we don't need
9133
9134 if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
9135 chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
9136
9137 if (chromaFormatIdc === 3) {
9138 expGolombDecoder.skipBits(1); // separate_colour_plane_flag
9139 }
9140
9141 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
9142
9143 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
9144
9145 expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
9146
9147 if (expGolombDecoder.readBoolean()) {
9148 // seq_scaling_matrix_present_flag
9149 scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
9150
9151 for (i = 0; i < scalingListCount; i++) {
9152 if (expGolombDecoder.readBoolean()) {
9153 // seq_scaling_list_present_flag[ i ]
9154 if (i < 6) {
9155 skipScalingList(16, expGolombDecoder);
9156 } else {
9157 skipScalingList(64, expGolombDecoder);
9158 }
9159 }
9160 }
9161 }
9162 }
9163
9164 expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
9165
9166 picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
9167
9168 if (picOrderCntType === 0) {
9169 expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
9170 } else if (picOrderCntType === 1) {
9171 expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
9172
9173 expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
9174
9175 expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
9176
9177 numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
9178
9179 for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
9180 expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
9181 }
9182 }
9183
9184 expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
9185
9186 expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
9187
9188 picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
9189 picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
9190 frameMbsOnlyFlag = expGolombDecoder.readBits(1);
9191
9192 if (frameMbsOnlyFlag === 0) {
9193 expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
9194 }
9195
9196 expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
9197
9198 if (expGolombDecoder.readBoolean()) {
9199 // frame_cropping_flag
9200 frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
9201 frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
9202 frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
9203 frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
9204 }
9205
9206 if (expGolombDecoder.readBoolean()) {
9207 // vui_parameters_present_flag
9208 if (expGolombDecoder.readBoolean()) {
9209 // aspect_ratio_info_present_flag
9210 aspectRatioIdc = expGolombDecoder.readUnsignedByte();
9211
9212 switch (aspectRatioIdc) {
9213 case 1:
9214 sarRatio = [1, 1];
9215 break;
9216
9217 case 2:
9218 sarRatio = [12, 11];
9219 break;
9220
9221 case 3:
9222 sarRatio = [10, 11];
9223 break;
9224
9225 case 4:
9226 sarRatio = [16, 11];
9227 break;
9228
9229 case 5:
9230 sarRatio = [40, 33];
9231 break;
9232
9233 case 6:
9234 sarRatio = [24, 11];
9235 break;
9236
9237 case 7:
9238 sarRatio = [20, 11];
9239 break;
9240
9241 case 8:
9242 sarRatio = [32, 11];
9243 break;
9244
9245 case 9:
9246 sarRatio = [80, 33];
9247 break;
9248
9249 case 10:
9250 sarRatio = [18, 11];
9251 break;
9252
9253 case 11:
9254 sarRatio = [15, 11];
9255 break;
9256
9257 case 12:
9258 sarRatio = [64, 33];
9259 break;
9260
9261 case 13:
9262 sarRatio = [160, 99];
9263 break;
9264
9265 case 14:
9266 sarRatio = [4, 3];
9267 break;
9268
9269 case 15:
9270 sarRatio = [3, 2];
9271 break;
9272
9273 case 16:
9274 sarRatio = [2, 1];
9275 break;
9276
9277 case 255:
9278 {
9279 sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
9280 break;
9281 }
9282 }
9283
9287 }
9288 }
9289
9290 return {
9291 profileIdc: profileIdc,
9292 levelIdc: levelIdc,
9293 profileCompatibility: profileCompatibility,
9294 width: (picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2,
9295 height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
9296 // sar is sample aspect ratio
9297 sarRatio: sarRatio
9298 };
9299 };
9300 };
9301
9302 _H264Stream.prototype = new stream();
9303 var h264 = {
9304 H264Stream: _H264Stream,
9305 NalByteStream: _NalByteStream
9306 };
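/* A worked example of the dimension math in readSequenceParameterSet above
 * (hypothetical SPS values): a 1080p stream encodes
 * pic_width_in_mbs_minus1 = 119, pic_height_in_map_units_minus1 = 67,
 * frame_mbs_only_flag = 1 and frame_crop_bottom_offset = 4, so
 *
 *   width  = (119 + 1) * 16 = 1920
 *   height = (2 - 1) * (67 + 1) * 16 - 4 * 2 = 1088 - 8 = 1080
 *
 * the crop trims the 8 rows of padding that the 16-pixel macroblock grid
 * forces onto a 1080-line picture.
 */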
9307 /**
9308 * mux.js
9309 *
9310 * Copyright (c) Brightcove
9311 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
9312 *
9313 * Utilities to detect basic properties and metadata about AAC data.
9314 */
9315
9316 var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
9317
9318 var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
9319 var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],
9320 flags = header[byteIndex + 5],
9321 footerPresent = (flags & 16) >> 4; // if we get a negative returnSize clamp it to 0
9322
9323 returnSize = returnSize >= 0 ? returnSize : 0;
9324
9325 if (footerPresent) {
9326 return returnSize + 20;
9327 }
9328
9329 return returnSize + 10;
9330 };
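/* ID3v2 sizes are "syncsafe": four bytes carrying seven bits each, so no
 * size byte can ever be 0xFF. A sketch with hypothetical header bytes:
 *
 *   // header[byteIndex + 6..9] = 0x00, 0x00, 0x02, 0x01
 *   // returnSize = 0 << 21 | 0 << 14 | 2 << 7 | 1 = 257 bytes of tag body
 *   // plus the 10-byte header (and 10 more when a footer is flagged)
 */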
9331
9332 var getId3Offset = function getId3Offset(data, offset) {
9333 if (data.length - offset < 10 || data[offset] !== 'I'.charCodeAt(0) || data[offset + 1] !== 'D'.charCodeAt(0) || data[offset + 2] !== '3'.charCodeAt(0)) {
9334 return offset;
9335 }
9336
9337 offset += parseId3TagSize(data, offset);
9338 return getId3Offset(data, offset);
9339 }; // TODO: use vhs-utils
9340
9341
9342 var isLikelyAacData$1 = function isLikelyAacData(data) {
9343 var offset = getId3Offset(data, 0);
9344 return data.length >= offset + 2 && (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0 && // verify that the 2 layer bits are 0, i.e. this
9345 // is AAC data rather than MP3 data.
9346 (data[offset + 1] & 0x16) === 0x10;
9347 };
9348
9349 var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
9350 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
9351 }; // return a percent-encoded representation of the specified byte range
9352 // @see http://en.wikipedia.org/wiki/Percent-encoding
9353
9354
9355 var percentEncode = function percentEncode(bytes, start, end) {
9356 var i,
9357 result = '';
9358
9359 for (i = start; i < end; i++) {
9360 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
9361 }
9362
9363 return result;
9364 }; // return the string representation of the specified byte range,
9365 // interpreted as ISO-8859-1.
9366
9367
9368 var parseIso88591 = function parseIso88591(bytes, start, end) {
9369 return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
9370 };
9371
9372 var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
9373 var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
9374 middle = header[byteIndex + 4] << 3,
9375 highTwo = (header[byteIndex + 3] & 0x3) << 11;
9376 return highTwo | middle | lowThree;
9377 };
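/* Sketch: the 13-bit ADTS frame length straddles three header bytes. With
 * hypothetical header bytes 3..5 of 0x01, 0x40, 0x20:
 *
 *   highTwo  = (0x01 & 0x3) << 11 = 2048
 *   middle   = 0x40 << 3          = 512
 *   lowThree = (0x20 & 0xE0) >> 5 = 1
 *   // frame size = 2048 | 512 | 1 = 2561 bytes
 */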
9378
9379 var parseType$2 = function parseType(header, byteIndex) {
9380 if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
9381 return 'timed-metadata';
9382 } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
9383 return 'audio';
9384 }
9385
9386 return null;
9387 };
9388
9389 var parseSampleRate = function parseSampleRate(packet) {
9390 var i = 0;
9391
9392 while (i + 5 < packet.length) {
9393 if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
9394 // If a valid header was not found, jump one forward and attempt to
9395 // find a valid ADTS header starting at the next byte
9396 i++;
9397 continue;
9398 }
9399
9400 return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
9401 }
9402
9403 return null;
9404 };
9405
9406 var parseAacTimestamp = function parseAacTimestamp(packet) {
9407 var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag
9408
9409 frameStart = 10;
9410
9411 if (packet[5] & 0x40) {
9412 // advance the frame start past the extended header
9413 frameStart += 4; // header size field
9414
9415 frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
9416 } // parse one or more ID3 frames
9417 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
9418
9419
9420 do {
9421 // determine the number of bytes in this frame
9422 frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
9423
9424 if (frameSize < 1) {
9425 return null;
9426 }
9427
9428 frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);
9429
9430 if (frameHeader === 'PRIV') {
9431 frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
9432
9433 for (var i = 0; i < frame.byteLength; i++) {
9434 if (frame[i] === 0) {
9435 var owner = parseIso88591(frame, 0, i);
9436
9437 if (owner === 'com.apple.streaming.transportStreamTimestamp') {
9438 var d = frame.subarray(i + 1);
9439 var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
9440 size *= 4;
9441 size += d[7] & 0x03;
9442 return size;
9443 }
9444
9445 break;
9446 }
9447 }
9448 }
9449
9450 frameStart += 10; // advance past the frame header
9451
9452 frameStart += frameSize; // advance past the frame body
9453 } while (frameStart < packet.byteLength);
9454
9455 return null;
9456 };
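/* The PRIV payload matched above carries the 33-bit MPEG-2 PES timestamp
 * defined for Apple HLS timed metadata, big-endian in eight bytes.
 * JavaScript bitwise operators work on 32 bits, so the top 31 bits are
 * assembled with shifts, then `size *= 4; size += d[7] & 0x03` appends the
 * low two bits arithmetically without overflow. A sketch with a hypothetical
 * payload d = [0, 0, 0, 1, 0, 0, 0, 0]:
 *
 *   // (1 & 0x01) << 30 = 2^30, then * 4 => 2^32 -- the 33rd bit survives
 */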
9457
9458 var utils = {
9459 isLikelyAacData: isLikelyAacData$1,
9460 parseId3TagSize: parseId3TagSize,
9461 parseAdtsSize: parseAdtsSize,
9462 parseType: parseType$2,
9463 parseSampleRate: parseSampleRate,
9464 parseAacTimestamp: parseAacTimestamp
9465 };
9466
9467 var _AacStream;
9468 /**
9469 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
9470 */
9471
9472
9473 _AacStream = function AacStream() {
9474 var everything = new Uint8Array(),
9475 timeStamp = 0;
9476
9477 _AacStream.prototype.init.call(this);
9478
9479 this.setTimestamp = function (timestamp) {
9480 timeStamp = timestamp;
9481 };
9482
9483 this.push = function (bytes) {
9484 var frameSize = 0,
9485 byteIndex = 0,
9486 bytesLeft,
9487 chunk,
9488 packet,
9489 tempLength; // If there are bytes remaining from the last segment, prepend them to the
9490 // bytes that were pushed in
9491
9492 if (everything.length) {
9493 tempLength = everything.length;
9494 var swapBuffer = new Uint8Array(bytes.byteLength + tempLength);
9495 swapBuffer.set(everything.subarray(0, tempLength)); // copy the previously buffered bytes first
9496 swapBuffer.set(bytes, tempLength);
everything = swapBuffer;
9497 } else {
9498 everything = bytes;
9499 }
9500
9501 while (everything.length - byteIndex >= 3) {
9502 if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
9503 // Exit early because we don't have enough to parse
9504 // the ID3 tag header
9505 if (everything.length - byteIndex < 10) {
9506 break;
9507 } // check framesize
9508
9509
9510 frameSize = utils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
9511 // to emit a full packet
9512 // Add to byteIndex to support multiple ID3 tags in sequence
9513
9514 if (byteIndex + frameSize > everything.length) {
9515 break;
9516 }
9517
9518 chunk = {
9519 type: 'timed-metadata',
9520 data: everything.subarray(byteIndex, byteIndex + frameSize)
9521 };
9522 this.trigger('data', chunk);
9523 byteIndex += frameSize;
9524 continue;
9525 } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
9526 // Exit early because we don't have enough to parse
9527 // the ADTS frame header
9528 if (everything.length - byteIndex < 7) {
9529 break;
9530 }
9531
9532 frameSize = utils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
9533 // to emit a full packet
9534
9535 if (byteIndex + frameSize > everything.length) {
9536 break;
9537 }
9538
9539 packet = {
9540 type: 'audio',
9541 data: everything.subarray(byteIndex, byteIndex + frameSize),
9542 pts: timeStamp,
9543 dts: timeStamp
9544 };
9545 this.trigger('data', packet);
9546 byteIndex += frameSize;
9547 continue;
9548 }
9549
9550 byteIndex++;
9551 }
9552
9553 bytesLeft = everything.length - byteIndex;
9554
9555 if (bytesLeft > 0) {
9556 everything = everything.subarray(byteIndex);
9557 } else {
9558 everything = new Uint8Array();
9559 }
9560 };
9561
9562 this.reset = function () {
9563 everything = new Uint8Array();
9564 this.trigger('reset');
9565 };
9566
9567 this.endTimeline = function () {
9568 everything = new Uint8Array();
9569 this.trigger('endedtimeline');
9570 };
9571 };
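/* A minimal usage sketch (hypothetical handler and input): push raw AAC
 * bytes in and receive typed chunks back out:
 *
 *   var aacStream = new _AacStream();
 *   aacStream.on('data', function (chunk) {
 *     // chunk.type is 'timed-metadata' (a whole ID3 tag) or 'audio' (one ADTS frame)
 *   });
 *   aacStream.push(segmentBytes); // segmentBytes: a Uint8Array of raw AAC
 */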
9572
9573 _AacStream.prototype = new stream();
9574 var aac = _AacStream; // constants
9575
9576 var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];
9577 var audioProperties = AUDIO_PROPERTIES;
9578 var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];
9579 var videoProperties = VIDEO_PROPERTIES;
9580 var H264Stream = h264.H264Stream;
9581 var isLikelyAacData = utils.isLikelyAacData;
9582 var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS; // object types
9583
9584 var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
9585
9586 var retriggerForStream = function retriggerForStream(key, event) {
9587 event.stream = key;
9588 this.trigger('log', event);
9589 };
9590
9591 var addPipelineLogRetriggers = function addPipelineLogRetriggers(transmuxer, pipeline) {
9592 var keys = Object.keys(pipeline);
9593
9594 for (var i = 0; i < keys.length; i++) {
9595 var key = keys[i]; // skip non-stream keys and headOfPipeline
9596 // which is just a duplicate
9597
9598 if (key === 'headOfPipeline' || !pipeline[key].on) {
9599 continue;
9600 }
9601
9602 pipeline[key].on('log', retriggerForStream.bind(transmuxer, key));
9603 }
9604 };
9605 /**
9606 * Compare two arrays (even typed) for same-ness
9607 */
9608
9609
9610 var arrayEquals = function arrayEquals(a, b) {
9611 var i;
9612
9613 if (a.length !== b.length) {
9614 return false;
9615 } // compare the value of each element in the array
9616
9617
9618 for (i = 0; i < a.length; i++) {
9619 if (a[i] !== b[i]) {
9620 return false;
9621 }
9622 }
9623
9624 return true;
9625 };
9626
9627 var generateSegmentTimingInfo = function generateSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
9628 var ptsOffsetFromDts = startPts - startDts,
9629 decodeDuration = endDts - startDts,
9630 presentationDuration = endPts - startPts; // The PTS and DTS values are based on the actual stream times from the segment,
9631 // however, the player time values will reflect a start from the baseMediaDecodeTime.
9632 // In order to provide relevant values for the player times, base timing info on the
9633 // baseMediaDecodeTime and the DTS and PTS durations of the segment.
9634
9635 return {
9636 start: {
9637 dts: baseMediaDecodeTime,
9638 pts: baseMediaDecodeTime + ptsOffsetFromDts
9639 },
9640 end: {
9641 dts: baseMediaDecodeTime + decodeDuration,
9642 pts: baseMediaDecodeTime + presentationDuration
9643 },
9644 prependedContentDuration: prependedContentDuration,
9645 baseMediaDecodeTime: baseMediaDecodeTime
9646 };
9647 };
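/* A worked example (hypothetical 90kHz values): a segment whose stream times
 * run from dts 9000 / pts 12000 to dts 99000 / pts 102000, rebased onto a
 * baseMediaDecodeTime of 0:
 *
 *   generateSegmentTimingInfo(0, 9000, 12000, 99000, 102000, 0);
 *   // => { start: { dts: 0, pts: 3000 },
 *   //      end: { dts: 90000, pts: 90000 },
 *   //      prependedContentDuration: 0,
 *   //      baseMediaDecodeTime: 0 }
 */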
9648 /**
9649 * Constructs a single-track, ISO BMFF media segment from AAC data
9650 * events. The output of this stream can be fed to a SourceBuffer
9651 * configured with a suitable initialization segment.
9652 * @param track {object} track metadata configuration
9653 * @param options {object} transmuxer options object
9654 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
9655 * in the source; false to adjust the first segment to start at 0.
9656 */
9657
9658
9659 _AudioSegmentStream = function AudioSegmentStream(track, options) {
9660 var adtsFrames = [],
9661 sequenceNumber,
9662 earliestAllowedDts = 0,
9663 audioAppendStartTs = 0,
9664 videoBaseMediaDecodeTime = Infinity;
9665 options = options || {};
9666 sequenceNumber = options.firstSequenceNumber || 0;
9667
9668 _AudioSegmentStream.prototype.init.call(this);
9669
9670 this.push = function (data) {
9671 trackDecodeInfo.collectDtsInfo(track, data);
9672
9673 if (track) {
9674 audioProperties.forEach(function (prop) {
9675 track[prop] = data[prop];
9676 });
9677 } // buffer audio data until end() is called
9678
9679
9680 adtsFrames.push(data);
9681 };
9682
9683 this.setEarliestDts = function (earliestDts) {
9684 earliestAllowedDts = earliestDts;
9685 };
9686
9687 this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
9688 videoBaseMediaDecodeTime = baseMediaDecodeTime;
9689 };
9690
9691 this.setAudioAppendStart = function (timestamp) {
9692 audioAppendStartTs = timestamp;
9693 };
9694
9695 this.flush = function () {
9696 var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed; // return early if no audio data has been observed
9697
9698 if (adtsFrames.length === 0) {
9699 this.trigger('done', 'AudioSegmentStream');
9700 return;
9701 }
9702
9703 frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
9704 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps); // the amount of audio prefixed as silence; the value is in video clock cycles rather than audio clock
9705
9706 videoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
9707 // samples (that is, adts frames) in the audio data
9708
9709 track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to construct the mdat
9710
9711 mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
9712 adtsFrames = [];
9713 moof = mp4Generator.moof(sequenceNumber, [track]);
9714 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // bump the sequence number for next time
9715
9716 sequenceNumber++;
9717 boxes.set(moof);
9718 boxes.set(mdat, moof.byteLength);
9719 trackDecodeInfo.clearDtsInfo(track);
9720 frameDuration = Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / track.samplerate); // TODO this check was added to maintain backwards compatibility (particularly with
9721 // tests) on adding the timingInfo event. However, it seems unlikely that there's a
9722 // valid use-case where an init segment/data should be triggered without associated
9723 // frames. Leaving for now, but should be looked into.
9724
9725 if (frames.length) {
9726 segmentDuration = frames.length * frameDuration;
9727 this.trigger('segmentTimingInfo', generateSegmentTimingInfo( // The audio track's baseMediaDecodeTime is in audio clock cycles, but the
9728 // frame info is in video clock cycles. Convert to match expectation of
9729 // listeners (that all timestamps will be based on video clock cycles).
9730 clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate), // frame times are already in video clock, as is segment duration
9731 frames[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));
9732 this.trigger('timingInfo', {
9733 start: frames[0].pts,
9734 end: frames[0].pts + segmentDuration
9735 });
9736 }
9737
9738 this.trigger('data', {
9739 track: track,
9740 boxes: boxes
9741 });
9742 this.trigger('done', 'AudioSegmentStream');
9743 };
9744
9745 this.reset = function () {
9746 trackDecodeInfo.clearDtsInfo(track);
9747 adtsFrames = [];
9748 this.trigger('reset');
9749 };
9750 };
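/* Worked numbers for the flush above: every AAC frame holds 1024 samples, so
 * at a (hypothetical) samplerate of 48000Hz
 *
 *   frameDuration = Math.ceil(90000 * 1024 / 48000) = 1920 ticks (~21.3ms)
 *
 * and a 100-frame segment spans 192000 ticks, about 2.13s on the 90kHz clock.
 */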
9751
9752 _AudioSegmentStream.prototype = new stream();
9753 /**
9754 * Constructs a single-track, ISO BMFF media segment from H264 data
9755 * events. The output of this stream can be fed to a SourceBuffer
9756 * configured with a suitable initialization segment.
9757 * @param track {object} track metadata configuration
9758 * @param options {object} transmuxer options object
9759 * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
9760 * gopsToAlignWith list when attempting to align gop pts
9761 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
9762 * in the source; false to adjust the first segment to start at 0.
9763 */
9764
9765 _VideoSegmentStream = function VideoSegmentStream(track, options) {
9766 var sequenceNumber,
9767 nalUnits = [],
9768 gopsToAlignWith = [],
9769 config,
9770 pps;
9771 options = options || {};
9772 sequenceNumber = options.firstSequenceNumber || 0;
9773
9774 _VideoSegmentStream.prototype.init.call(this);
9775
9776 delete track.minPTS;
9777 this.gopCache_ = [];
9778 /**
9779 * Constructs a ISO BMFF segment given H264 nalUnits
9780 * @param {Object} nalUnit A data event representing a nalUnit
9781 * @param {String} nalUnit.nalUnitType
9782 * @param {Object} nalUnit.config Properties for a mp4 track
9783 * @param {Uint8Array} nalUnit.data The nalUnit bytes
9784 * @see lib/codecs/h264.js
9785 **/
9786
9787 this.push = function (nalUnit) {
9788 trackDecodeInfo.collectDtsInfo(track, nalUnit); // record the track config
9789
9790 if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
9791 config = nalUnit.config;
9792 track.sps = [nalUnit.data];
9793 videoProperties.forEach(function (prop) {
9794 track[prop] = config[prop];
9795 }, this);
9796 }
9797
9798 if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
9799 pps = nalUnit.data;
9800 track.pps = [nalUnit.data];
9801 } // buffer video until flush() is called
9802
9803
9804 nalUnits.push(nalUnit);
9805 };
9806 /**
9807 * Pass constructed ISO BMFF track and boxes on to the
9808 * next stream in the pipeline
9809 **/
9810
9811
9812 this.flush = function () {
9813 var frames,
9814 gopForFusion,
9815 gops,
9816 moof,
9817 mdat,
9818 boxes,
9819 prependedContentDuration = 0,
9820 firstGop,
9821 lastGop; // Throw away nalUnits at the start of the byte stream until
9822 // we find the first AUD
9823
9824 while (nalUnits.length) {
9825 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
9826 break;
9827 }
9828
9829 nalUnits.shift();
9830 } // Return early if no video data has been observed
9831
9832
9833 if (nalUnits.length === 0) {
9834 this.resetStream_();
9835 this.trigger('done', 'VideoSegmentStream');
9836 return;
9837 } // Organize the raw nal-units into arrays that represent
9838 // higher-level constructs such as frames and gops
9839 // (group-of-pictures)
9840
9841
9842 frames = frameUtils.groupNalsIntoFrames(nalUnits);
9843 gops = frameUtils.groupFramesIntoGops(frames); // If the first frame of this fragment is not a keyframe we have
9844 // a problem since MSE (on Chrome) requires a leading keyframe.
9845 //
9846 // We have two approaches to repairing this situation:
9847 // 1) GOP-FUSION:
9848 // This is where we keep track of the GOPS (group-of-pictures)
9849 // from previous fragments and attempt to find one that we can
9850 // prepend to the current fragment in order to create a valid
9851 // fragment.
9852 // 2) KEYFRAME-PULLING:
9853 // Here we search for the first keyframe in the fragment and
9854 // throw away all the frames between the start of the fragment
9855 // and that keyframe. We then extend the duration and pull the
9856 // PTS of the keyframe forward so that it covers the time range
9857 // of the frames that were disposed of.
9858 //
9859 // #1 is far preferable to #2, which can cause "stuttering", but
9860 // it requires more things to be just right.
9861
9862 if (!gops[0][0].keyFrame) {
9863 // Search for a gop for fusion from our gopCache
9864 gopForFusion = this.getGopForFusion_(nalUnits[0], track);
9865
9866 if (gopForFusion) {
9867 // in order to provide more accurate timing information about the segment, save
9868 // the number of seconds prepended to the original segment due to GOP fusion
9869 prependedContentDuration = gopForFusion.duration;
9870 gops.unshift(gopForFusion); // Adjust Gops' metadata to account for the inclusion of the
9871 // new gop at the beginning
9872
9873 gops.byteLength += gopForFusion.byteLength;
9874 gops.nalCount += gopForFusion.nalCount;
9875 gops.pts = gopForFusion.pts;
9876 gops.dts = gopForFusion.dts;
9877 gops.duration += gopForFusion.duration;
9878 } else {
9879 // If we didn't find a candidate gop fall back to keyframe-pulling
9880 gops = frameUtils.extendFirstKeyFrame(gops);
9881 }
9882 } // Trim gops to align with gopsToAlignWith
9883
9884
9885 if (gopsToAlignWith.length) {
9886 var alignedGops;
9887
9888 if (options.alignGopsAtEnd) {
9889 alignedGops = this.alignGopsAtEnd_(gops);
9890 } else {
9891 alignedGops = this.alignGopsAtStart_(gops);
9892 }
9893
9894 if (!alignedGops) {
9895 // save all the nals in the last GOP into the gop cache
9896 this.gopCache_.unshift({
9897 gop: gops.pop(),
9898 pps: track.pps,
9899 sps: track.sps
9900 }); // Keep a maximum of 6 GOPs in the cache
9901
9902 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
9903
9904 nalUnits = []; // return early; no gops could be aligned with the desired gopsToAlignWith
9905
9906 this.resetStream_();
9907 this.trigger('done', 'VideoSegmentStream');
9908 return;
9909 } // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
9910 // when recalculated before sending off to CoalesceStream
9911
9912
9913 trackDecodeInfo.clearDtsInfo(track);
9914 gops = alignedGops;
9915 }
9916
9917 trackDecodeInfo.collectDtsInfo(track, gops); // First, we have to build the index from byte locations to
9918 // samples (that is, frames) in the video data
9919
9920 track.samples = frameUtils.generateSampleTable(gops); // Concatenate the video data and construct the mdat
9921
9922 mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));
9923 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
9924 this.trigger('processedGopsInfo', gops.map(function (gop) {
9925 return {
9926 pts: gop.pts,
9927 dts: gop.dts,
9928 byteLength: gop.byteLength
9929 };
9930 }));
9931 firstGop = gops[0];
9932 lastGop = gops[gops.length - 1];
9933 this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));
9934 this.trigger('timingInfo', {
9935 start: gops[0].pts,
9936 end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
9937 }); // save all the nals in the last GOP into the gop cache
9938
9939 this.gopCache_.unshift({
9940 gop: gops.pop(),
9941 pps: track.pps,
9942 sps: track.sps
9943 }); // Keep a maximum of 6 GOPs in the cache
9944
9945 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
9946
9947 nalUnits = [];
9948 this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
9949 this.trigger('timelineStartInfo', track.timelineStartInfo);
9950 moof = mp4Generator.moof(sequenceNumber, [track]); // it would be great to allocate this array up front instead of
9951 // throwing away hundreds of media segment fragments
9952
9953 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // Bump the sequence number for next time
9954
9955 sequenceNumber++;
9956 boxes.set(moof);
9957 boxes.set(mdat, moof.byteLength);
9958 this.trigger('data', {
9959 track: track,
9960 boxes: boxes
9961 });
9962 this.resetStream_(); // Continue with the flush process now
9963
9964 this.trigger('done', 'VideoSegmentStream');
9965 };
9966
9967 this.reset = function () {
9968 this.resetStream_();
9969 nalUnits = [];
9970 this.gopCache_.length = 0;
9971 gopsToAlignWith.length = 0;
9972 this.trigger('reset');
9973 };
9974
9975 this.resetStream_ = function () {
9976 trackDecodeInfo.clearDtsInfo(track); // reset config and pps because they may differ across segments
9977 // for instance, when we are rendition switching
9978
9979 config = undefined;
9980 pps = undefined;
9981 }; // Search for a candidate Gop for gop-fusion from the gop cache and
9982 // return it or return null if no good candidate was found
9983
9984
9985 this.getGopForFusion_ = function (nalUnit) {
9986 var halfSecond = 45000,
9987 // Half-a-second in a 90khz clock
9988 allowableOverlap = 10000,
9989 // About 3 frames @ 30fps
9990 nearestDistance = Infinity,
9991 dtsDistance,
9992 nearestGopObj,
9993 currentGop,
9994 currentGopObj,
9995 i; // Search for the GOP nearest to the beginning of this nal unit
9996
9997 for (i = 0; i < this.gopCache_.length; i++) {
9998 currentGopObj = this.gopCache_[i];
9999 currentGop = currentGopObj.gop; // Reject Gops with different SPS or PPS
10000
10001 if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
10002 continue;
10003 } // Reject Gops that would require a negative baseMediaDecodeTime
10004
10005
10006 if (currentGop.dts < track.timelineStartInfo.dts) {
10007 continue;
10008 } // The distance between the end of the gop and the start of the nalUnit
10009
10010
10011 dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration; // Only consider GOPs that start before the nal unit and end within
10012 // a half-second of the nal unit
10013
10014 if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {
10015 // Always use the closest GOP we found if there is more than
10016 // one candidate
10017 if (!nearestGopObj || nearestDistance > dtsDistance) {
10018 nearestGopObj = currentGopObj;
10019 nearestDistance = dtsDistance;
10020 }
10021 }
10022 }
10023
10024 if (nearestGopObj) {
10025 return nearestGopObj.gop;
10026 }
10027
10028 return null;
10029 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
10030 // of gopsToAlignWith starting from the START of the list
10031
10032
10033 this.alignGopsAtStart_ = function (gops) {
10034 var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
10035 byteLength = gops.byteLength;
10036 nalCount = gops.nalCount;
10037 duration = gops.duration;
10038 alignIndex = gopIndex = 0;
10039
10040 while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
10041 align = gopsToAlignWith[alignIndex];
10042 gop = gops[gopIndex];
10043
10044 if (align.pts === gop.pts) {
10045 break;
10046 }
10047
10048 if (gop.pts > align.pts) {
10049 // the current gop starts after the gop we want to align with, so increment the
10050 // align index
10051 alignIndex++;
10052 continue;
10053 } // the current gop starts before the gop we want to align with, so increment the
10054 // gop index
10055
10056
10057 gopIndex++;
10058 byteLength -= gop.byteLength;
10059 nalCount -= gop.nalCount;
10060 duration -= gop.duration;
10061 }
10062
10063 if (gopIndex === 0) {
10064 // no gops to trim
10065 return gops;
10066 }
10067
10068 if (gopIndex === gops.length) {
10069 // all gops trimmed, skip appending all gops
10070 return null;
10071 }
10072
10073 alignedGops = gops.slice(gopIndex);
10074 alignedGops.byteLength = byteLength;
10075 alignedGops.duration = duration;
10076 alignedGops.nalCount = nalCount;
10077 alignedGops.pts = alignedGops[0].pts;
10078 alignedGops.dts = alignedGops[0].dts;
10079 return alignedGops;
10080 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
10081 // of gopsToAlignWith starting from the END of the list
10082
10083
10084 this.alignGopsAtEnd_ = function (gops) {
10085 var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
10086 alignIndex = gopsToAlignWith.length - 1;
10087 gopIndex = gops.length - 1;
10088 alignEndIndex = null;
10089 matchFound = false;
10090
10091 while (alignIndex >= 0 && gopIndex >= 0) {
10092 align = gopsToAlignWith[alignIndex];
10093 gop = gops[gopIndex];
10094
10095 if (align.pts === gop.pts) {
10096 matchFound = true;
10097 break;
10098 }
10099
10100 if (align.pts > gop.pts) {
10101 alignIndex--;
10102 continue;
10103 }
10104
10105 if (alignIndex === gopsToAlignWith.length - 1) {
10106 // gop.pts is greater than the last alignment candidate. If no match is found
10107 // by the end of this loop, we still want to append gops that come after this
10108 // point
10109 alignEndIndex = gopIndex;
10110 }
10111
10112 gopIndex--;
10113 }
10114
10115 if (!matchFound && alignEndIndex === null) {
10116 return null;
10117 }
10118
10119 var trimIndex;
10120
10121 if (matchFound) {
10122 trimIndex = gopIndex;
10123 } else {
10124 trimIndex = alignEndIndex;
10125 }
10126
10127 if (trimIndex === 0) {
10128 return gops;
10129 }
10130
10131 var alignedGops = gops.slice(trimIndex);
10132 var metadata = alignedGops.reduce(function (total, gop) {
10133 total.byteLength += gop.byteLength;
10134 total.duration += gop.duration;
10135 total.nalCount += gop.nalCount;
10136 return total;
10137 }, {
10138 byteLength: 0,
10139 duration: 0,
10140 nalCount: 0
10141 });
10142 alignedGops.byteLength = metadata.byteLength;
10143 alignedGops.duration = metadata.duration;
10144 alignedGops.nalCount = metadata.nalCount;
10145 alignedGops.pts = alignedGops[0].pts;
10146 alignedGops.dts = alignedGops[0].dts;
10147 return alignedGops;
10148 };
10149
10150 this.alignGopsWith = function (newGopsToAlignWith) {
10151 gopsToAlignWith = newGopsToAlignWith;
10152 };
10153 };
10154
10155 _VideoSegmentStream.prototype = new stream();
10156 /**
10157 * A Stream that can combine multiple streams (i.e. audio and video)
10158 * into a single output segment for MSE. Also supports audio-only
10159 * and video-only streams.
10160 * @param options {object} transmuxer options object
10161 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
10162 * in the source; false to adjust the first segment to start at media timeline start.
10163 */
10164
10165 _CoalesceStream = function CoalesceStream(options, metadataStream) {
10166 // Number of Tracks per output segment
10167 // If greater than 1, we combine multiple
10168 // tracks into a single segment
10169 this.numberOfTracks = 0;
10170 this.metadataStream = metadataStream;
10171 options = options || {};
10172
10173 if (typeof options.remux !== 'undefined') {
10174 this.remuxTracks = !!options.remux;
10175 } else {
10176 this.remuxTracks = true;
10177 }
10178
10179 if (typeof options.keepOriginalTimestamps === 'boolean') {
10180 this.keepOriginalTimestamps = options.keepOriginalTimestamps;
10181 } else {
10182 this.keepOriginalTimestamps = false;
10183 }
10184
10185 this.pendingTracks = [];
10186 this.videoTrack = null;
10187 this.pendingBoxes = [];
10188 this.pendingCaptions = [];
10189 this.pendingMetadata = [];
10190 this.pendingBytes = 0;
10191 this.emittedTracks = 0;
10192
10193 _CoalesceStream.prototype.init.call(this); // Take output from multiple
10194
10195
10196 this.push = function (output) {
10197 // buffer incoming captions until the associated video segment
10198 // finishes
10199 if (output.text) {
10200 return this.pendingCaptions.push(output);
10201 } // buffer incoming id3 tags until the final flush
10202
10203
10204 if (output.frames) {
10205 return this.pendingMetadata.push(output);
10206 } // Add this track to the list of pending tracks and store
10207 // important information required for the construction of
10208 // the final segment
10209
10210
10211 this.pendingTracks.push(output.track);
10212 this.pendingBytes += output.boxes.byteLength; // TODO: is there an issue for this against chrome?
10213 // We unshift audio and push video because
10214 // as of Chrome 75 when switching from
10215 // one init segment to another if the video
10216 // mdat does not appear after the audio mdat
10217 // only audio will play for the duration of our transmux.
10218
10219 if (output.track.type === 'video') {
10220 this.videoTrack = output.track;
10221 this.pendingBoxes.push(output.boxes);
10222 }
10223
10224 if (output.track.type === 'audio') {
10225 this.audioTrack = output.track;
10226 this.pendingBoxes.unshift(output.boxes);
10227 }
10228 };
10229 };
10230
10231 _CoalesceStream.prototype = new stream();
10232
10233 _CoalesceStream.prototype.flush = function (flushSource) {
10234 var offset = 0,
10235 event = {
10236 captions: [],
10237 captionStreams: {},
10238 metadata: [],
10239 info: {}
10240 },
10241 caption,
10242 id3,
10243 initSegment,
10244 timelineStartPts = 0,
10245 i;
10246
10247 if (this.pendingTracks.length < this.numberOfTracks) {
10248 if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
10249 // Return because we haven't received a flush from a data-generating
10250 // portion of the segment (meaning that we have only received metadata
10251 // or captions.)
10252 return;
10253 } else if (this.remuxTracks) {
10254 // Return until we have enough tracks from the pipeline to remux (if we
10255 // are remuxing audio and video into a single MP4)
10256 return;
10257 } else if (this.pendingTracks.length === 0) {
10258 // In the case where we receive a flush without any data having been
10259 // received we consider it an emitted track for the purposes of coalescing
10260 // `done` events.
10261 // We do this for the case where there is an audio and video track in the
10262 // segment but no audio data. (seen in several playlists with alternate
10263 // audio tracks and no audio present in the main TS segments.)
10264 this.emittedTracks++;
10265
10266 if (this.emittedTracks >= this.numberOfTracks) {
10267 this.trigger('done');
10268 this.emittedTracks = 0;
10269 }
10270
10271 return;
10272 }
10273 }
10274
10275 if (this.videoTrack) {
10276 timelineStartPts = this.videoTrack.timelineStartInfo.pts;
10277 videoProperties.forEach(function (prop) {
10278 event.info[prop] = this.videoTrack[prop];
10279 }, this);
10280 } else if (this.audioTrack) {
10281 timelineStartPts = this.audioTrack.timelineStartInfo.pts;
10282 audioProperties.forEach(function (prop) {
10283 event.info[prop] = this.audioTrack[prop];
10284 }, this);
10285 }
10286
10287 if (this.videoTrack || this.audioTrack) {
10288 if (this.pendingTracks.length === 1) {
10289 event.type = this.pendingTracks[0].type;
10290 } else {
10291 event.type = 'combined';
10292 }
10293
10294 this.emittedTracks += this.pendingTracks.length;
10295 initSegment = mp4Generator.initSegment(this.pendingTracks); // Create a new typed array to hold the init segment
10296
10297 event.initSegment = new Uint8Array(initSegment.byteLength); // Create an init segment containing a moov
10298 // and track definitions
10299
10300 event.initSegment.set(initSegment); // Create a new typed array to hold the moof+mdats
10301
10302 event.data = new Uint8Array(this.pendingBytes); // Append each moof+mdat (one per track) together
10303
10304 for (i = 0; i < this.pendingBoxes.length; i++) {
10305 event.data.set(this.pendingBoxes[i], offset);
10306 offset += this.pendingBoxes[i].byteLength;
10307 } // Translate caption PTS times into second offsets to match the
10308 // video timeline for the segment, and add track info
10309
10310
10311 for (i = 0; i < this.pendingCaptions.length; i++) {
10312 caption = this.pendingCaptions[i];
10313 caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
10314 caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);
10315 event.captionStreams[caption.stream] = true;
10316 event.captions.push(caption);
10317 } // Translate ID3 frame PTS times into second offsets to match the
10318 // video timeline for the segment
10319
10320
10321 for (i = 0; i < this.pendingMetadata.length; i++) {
10322 id3 = this.pendingMetadata[i];
10323 id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);
10324 event.metadata.push(id3);
10325 } // We add this to every single emitted segment even though we only need
10326 // it for the first
10327
10328
10329 event.metadata.dispatchType = this.metadataStream.dispatchType; // Reset stream state
10330
10331 this.pendingTracks.length = 0;
10332 this.videoTrack = null;
10333 this.pendingBoxes.length = 0;
10334 this.pendingCaptions.length = 0;
10335 this.pendingBytes = 0;
10336 this.pendingMetadata.length = 0; // Emit the built segment
10337 // We include captions and ID3 tags for backwards compatibility,
10338 // ideally we should send only video and audio in the data event
10339
10340 this.trigger('data', event); // Emit each caption to the outside world
10341 // Ideally, this would happen immediately on parsing captions,
10342 // but we need to ensure that video data is sent back first
10343 // so that caption timing can be adjusted to match video timing
10344
10345 for (i = 0; i < event.captions.length; i++) {
10346 caption = event.captions[i];
10347 this.trigger('caption', caption);
10348 } // Emit each id3 tag to the outside world
10349 // Ideally, this would happen immediately on parsing the tag,
10350 // but we need to ensure that video data is sent back first
10351 // so that ID3 frame timing can be adjusted to match video timing
10352
10353
10354 for (i = 0; i < event.metadata.length; i++) {
10355 id3 = event.metadata[i];
10356 this.trigger('id3Frame', id3);
10357 }
10358 } // Only emit `done` if all tracks have been flushed and emitted
10359
10360
10361 if (this.emittedTracks >= this.numberOfTracks) {
10362 this.trigger('done');
10363 this.emittedTracks = 0;
10364 }
10365 };
10366
10367 _CoalesceStream.prototype.setRemux = function (val) {
10368 this.remuxTracks = val;
10369 };
10370 /**
10371 * A Stream that expects MP2T binary data as input and produces
10372 * corresponding media segments, suitable for use with Media Source
10373 * Extension (MSE) implementations that support the ISO BMFF byte
10374 * stream format, like Chrome.
10375 */
10376
10377
10378 _Transmuxer = function Transmuxer(options) {
10379 var self = this,
10380 hasFlushed = true,
10381 videoTrack,
10382 audioTrack;
10383
10384 _Transmuxer.prototype.init.call(this);
10385
10386 options = options || {};
10387 this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
10388 this.transmuxPipeline_ = {};
10389
10390 this.setupAacPipeline = function () {
10391 var pipeline = {};
10392 this.transmuxPipeline_ = pipeline;
10393 pipeline.type = 'aac';
10394 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
10395
10396 pipeline.aacStream = new aac();
10397 pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
10398 pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
10399 pipeline.adtsStream = new adts();
10400 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
10401 pipeline.headOfPipeline = pipeline.aacStream;
10402 pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
10403 pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
10404 pipeline.metadataStream.on('timestamp', function (frame) {
10405 pipeline.aacStream.setTimestamp(frame.timeStamp);
10406 });
10407 pipeline.aacStream.on('data', function (data) {
10408 if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
10409 return;
10410 }
10411
10412 audioTrack = audioTrack || {
10413 timelineStartInfo: {
10414 baseMediaDecodeTime: self.baseMediaDecodeTime
10415 },
10416 codec: 'adts',
10417 type: 'audio'
10418 }; // hook up the audio segment stream to the first track with aac data
10419
10420 pipeline.coalesceStream.numberOfTracks++;
10421 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
10422 pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
10423 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo')); // Set up the final part of the audio pipeline
10424
10425 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream); // emit pmt info
10426
10427 self.trigger('trackinfo', {
10428 hasAudio: !!audioTrack,
10429 hasVideo: !!videoTrack
10430 });
10431 }); // Re-emit any data coming from the coalesce stream to the outside world
10432
10433 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data')); // Let the consumer know we have finished flushing the entire pipeline
10434
10435 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
10436 addPipelineLogRetriggers(this, pipeline);
10437 };
10438
10439 this.setupTsPipeline = function () {
10440 var pipeline = {};
10441 this.transmuxPipeline_ = pipeline;
10442 pipeline.type = 'ts';
10443 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
10444
10445 pipeline.packetStream = new m2ts_1.TransportPacketStream();
10446 pipeline.parseStream = new m2ts_1.TransportParseStream();
10447 pipeline.elementaryStream = new m2ts_1.ElementaryStream();
10448 pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
10449 pipeline.adtsStream = new adts();
10450 pipeline.h264Stream = new H264Stream();
10451 pipeline.captionStream = new m2ts_1.CaptionStream(options);
10452 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
10453 pipeline.headOfPipeline = pipeline.packetStream; // disassemble MPEG2-TS packets into elementary streams
10454
10455 pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream); // !!THIS ORDER IS IMPORTANT!!
10456 // demux the streams
10457
10458 pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
10459 pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
10460 pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream); // Hook up CEA-608/708 caption stream
10461
10462 pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
10463 pipeline.elementaryStream.on('data', function (data) {
10464 var i;
10465
10466 if (data.type === 'metadata') {
10467 i = data.tracks.length; // scan the tracks listed in the metadata
10468
10469 while (i--) {
10470 if (!videoTrack && data.tracks[i].type === 'video') {
10471 videoTrack = data.tracks[i];
10472 videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
10473 } else if (!audioTrack && data.tracks[i].type === 'audio') {
10474 audioTrack = data.tracks[i];
10475 audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
10476 }
10477 } // hook up the video segment stream to the first track with h264 data
10478
10479
10480 if (videoTrack && !pipeline.videoSegmentStream) {
10481 pipeline.coalesceStream.numberOfTracks++;
10482 pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);
10483 pipeline.videoSegmentStream.on('log', self.getLogTrigger_('videoSegmentStream'));
10484 pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
10485 // When video emits timelineStartInfo data after a flush, we forward that
10486 // info to the AudioSegmentStream, if it exists, because video timeline
10487 // data takes precedence. Do not do this if keepOriginalTimestamps is set,
10488 // because this is a particularly subtle form of timestamp alteration.
10489 if (audioTrack && !options.keepOriginalTimestamps) {
10490 audioTrack.timelineStartInfo = timelineStartInfo; // On the first segment we trim AAC frames that exist before the
10491 // very earliest DTS we have seen in video because Chrome will
10492 // interpret any video track with a baseMediaDecodeTime that is
10493 // non-zero as a gap.
10494
10495 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
10496 }
10497 });
10498 pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
10499 pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
10500 pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
10501 if (audioTrack) {
10502 pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
10503 }
10504 });
10505 pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo')); // Set up the final part of the video pipeline
10506
10507 pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
10508 }
10509
10510 if (audioTrack && !pipeline.audioSegmentStream) {
10511 // hook up the audio segment stream to the first track with aac data
10512 pipeline.coalesceStream.numberOfTracks++;
10513 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
10514 pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
10515 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
10516 pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo')); // Set up the final part of the audio pipeline
10517
10518 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
10519 } // emit pmt info
10520
10521
10522 self.trigger('trackinfo', {
10523 hasAudio: !!audioTrack,
10524 hasVideo: !!videoTrack
10525 });
10526 }
10527 }); // Re-emit any data coming from the coalesce stream to the outside world
10528
10529 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
10530 pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
10531 id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
10532 self.trigger('id3Frame', id3Frame);
10533 });
10534 pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption')); // Let the consumer know we have finished flushing the entire pipeline
10535
10536 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
10537 addPipelineLogRetriggers(this, pipeline);
10538 }; // hook up the segment streams once track metadata is delivered
10539
10540
10541 this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
10542 var pipeline = this.transmuxPipeline_;
10543
10544 if (!options.keepOriginalTimestamps) {
10545 this.baseMediaDecodeTime = baseMediaDecodeTime;
10546 }
10547
10548 if (audioTrack) {
10549 audioTrack.timelineStartInfo.dts = undefined;
10550 audioTrack.timelineStartInfo.pts = undefined;
10551 trackDecodeInfo.clearDtsInfo(audioTrack);
10552
10553 if (pipeline.audioTimestampRolloverStream) {
10554 pipeline.audioTimestampRolloverStream.discontinuity();
10555 }
10556 }
10557
10558 if (videoTrack) {
10559 if (pipeline.videoSegmentStream) {
10560 pipeline.videoSegmentStream.gopCache_ = [];
10561 }
10562
10563 videoTrack.timelineStartInfo.dts = undefined;
10564 videoTrack.timelineStartInfo.pts = undefined;
10565 trackDecodeInfo.clearDtsInfo(videoTrack);
10566 pipeline.captionStream.reset();
10567 }
10568
10569 if (pipeline.timestampRolloverStream) {
10570 pipeline.timestampRolloverStream.discontinuity();
10571 }
10572 };
10573
10574 this.setAudioAppendStart = function (timestamp) {
10575 if (audioTrack) {
10576 this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
10577 }
10578 };
10579
10580 this.setRemux = function (val) {
10581 var pipeline = this.transmuxPipeline_;
10582 options.remux = val;
10583
10584 if (pipeline && pipeline.coalesceStream) {
10585 pipeline.coalesceStream.setRemux(val);
10586 }
10587 };
10588
10589 this.alignGopsWith = function (gopsToAlignWith) {
10590 if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
10591 this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
10592 }
10593 };
10594
10595 this.getLogTrigger_ = function (key) {
10596 var self = this;
10597 return function (event) {
10598 event.stream = key;
10599 self.trigger('log', event);
10600 };
10601 }; // feed incoming data to the front of the parsing pipeline
10602
10603
10604 this.push = function (data) {
10605 if (hasFlushed) {
10606 var isAac = isLikelyAacData(data);
10607
10608 if (isAac && this.transmuxPipeline_.type !== 'aac') {
10609 this.setupAacPipeline();
10610 } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
10611 this.setupTsPipeline();
10612 }
10613
10614 hasFlushed = false;
10615 }
10616
10617 this.transmuxPipeline_.headOfPipeline.push(data);
10618 }; // flush any buffered data
10619
10620
10621 this.flush = function () {
10622 hasFlushed = true; // Start at the top of the pipeline and flush all pending work
10623
10624 this.transmuxPipeline_.headOfPipeline.flush();
10625 };
10626
10627 this.endTimeline = function () {
10628 this.transmuxPipeline_.headOfPipeline.endTimeline();
10629 };
10630
10631 this.reset = function () {
10632 if (this.transmuxPipeline_.headOfPipeline) {
10633 this.transmuxPipeline_.headOfPipeline.reset();
10634 }
10635 }; // Caption data has to be reset when seeking outside buffered range
10636
10637
10638 this.resetCaptions = function () {
10639 if (this.transmuxPipeline_.captionStream) {
10640 this.transmuxPipeline_.captionStream.reset();
10641 }
10642 };
10643 };
10644
10645 _Transmuxer.prototype = new stream();
10646 var transmuxer = {
10647 Transmuxer: _Transmuxer,
10648 VideoSegmentStream: _VideoSegmentStream,
10649 AudioSegmentStream: _AudioSegmentStream,
10650 AUDIO_PROPERTIES: audioProperties,
10651 VIDEO_PROPERTIES: videoProperties,
10652 // exported for testing
10653 generateSegmentTimingInfo: generateSegmentTimingInfo
10654 };
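// Example usage (editor's sketch, not part of the library): a Transmuxer is
// fed MP2T bytes and emits transmuxed segments via its 'data' event. The
// exact fields on the emitted segment object are assumed here from the
// CoalesceStream code above.
//
//   var t = new transmuxer.Transmuxer({ remux: true });
//   t.on('data', function (segment) {
//     // segment carries the ISO BMFF bytes plus the captions and ID3
//     // metadata collected by the CoalesceStream
//   });
//   t.push(tsBytes); // tsBytes: a Uint8Array of 188-byte MP2T packets
//   t.flush();       // flush buffered work through headOfPipeline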
10655 /**
10656 * mux.js
10657 *
10658 * Copyright (c) Brightcove
10659 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10660 */
10661
10662 var toUnsigned$3 = function toUnsigned(value) {
10663 return value >>> 0;
10664 };
10665
10666 var toHexString$1 = function toHexString(value) {
10667 return ('00' + value.toString(16)).slice(-2);
10668 };
10669
10670 var bin = {
10671 toUnsigned: toUnsigned$3,
10672 toHexString: toHexString$1
10673 };
10674
10675 var parseType$1 = function parseType(buffer) {
10676 var result = '';
10677 result += String.fromCharCode(buffer[0]);
10678 result += String.fromCharCode(buffer[1]);
10679 result += String.fromCharCode(buffer[2]);
10680 result += String.fromCharCode(buffer[3]);
10681 return result;
10682 };
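// For instance, the four bytes [0x6d, 0x6f, 0x6f, 0x76] are the ASCII codes
// for 'moov':
//
//   parseType(new Uint8Array([0x6d, 0x6f, 0x6f, 0x76])); // => 'moov'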
10683
10684 var parseType_1 = parseType$1;
10685 var toUnsigned$2 = bin.toUnsigned;
10686
10687 var findBox = function findBox(data, path) {
10688 var results = [],
10689 i,
10690 size,
10691 type,
10692 end,
10693 subresults;
10694
10695 if (!path.length) {
10696 // short-circuit the search for empty paths
10697 return null;
10698 }
10699
10700 for (i = 0; i < data.byteLength;) {
10701 size = toUnsigned$2(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);
10702 type = parseType_1(data.subarray(i + 4, i + 8));
10703 end = size > 1 ? i + size : data.byteLength;
10704
10705 if (type === path[0]) {
10706 if (path.length === 1) {
10707 // this is the end of the path and we've found the box we were
10708 // looking for
10709 results.push(data.subarray(i + 8, end));
10710 } else {
10711 // recursively search for the next box along the path
10712 subresults = findBox(data.subarray(i + 8, end), path.slice(1));
10713
10714 if (subresults.length) {
10715 results = results.concat(subresults);
10716 }
10717 }
10718 }
10719
10720 i = end;
10721 } // we've finished searching all of data
10722
10723
10724 return results;
10725 };
10726
10727 var findBox_1 = findBox;
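// Example (editor's sketch): findBox walks a path of nested box types and
// returns the payload (the bytes after the 8-byte size/type header) of every
// match. segmentBytes below stands in for a Uint8Array of a complete fMP4
// segment.
//
//   var trafs = findBox_1(segmentBytes, ['moof', 'traf']); // all track fragments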
10728 var toUnsigned$1 = bin.toUnsigned;
10729 var getUint64$1 = numbers.getUint64;
10730
10731 var tfdt = function tfdt(data) {
10732 var result = {
10733 version: data[0],
10734 flags: new Uint8Array(data.subarray(1, 4))
10735 };
10736
10737 if (result.version === 1) {
10738 result.baseMediaDecodeTime = getUint64$1(data.subarray(4));
10739 } else {
10740 result.baseMediaDecodeTime = toUnsigned$1(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]);
10741 }
10742
10743 return result;
10744 };
10745
10746 var parseTfdt = tfdt;
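// Example (editor's sketch): a version-0 tfdt payload is one version byte,
// three flag bytes, then a 32-bit baseMediaDecodeTime (version 1 carries a
// 64-bit value instead, returned via getUint64):
//
//   parseTfdt(new Uint8Array([0, 0, 0, 0, 0, 0, 0x30, 0x39]));
//   // => { version: 0, flags: Uint8Array [0, 0, 0], baseMediaDecodeTime: 12345 }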
10747
10748 var parseSampleFlags = function parseSampleFlags(flags) {
10749 return {
10750 isLeading: (flags[0] & 0x0c) >>> 2,
10751 dependsOn: flags[0] & 0x03,
10752 isDependedOn: (flags[1] & 0xc0) >>> 6,
10753 hasRedundancy: (flags[1] & 0x30) >>> 4,
10754 paddingValue: (flags[1] & 0x0e) >>> 1,
10755 isNonSyncSample: flags[1] & 0x01,
10756 degradationPriority: flags[2] << 8 | flags[3]
10757 };
10758 };
10759
10760 var parseSampleFlags_1 = parseSampleFlags;
10761
10762 var trun = function trun(data) {
10763 var result = {
10764 version: data[0],
10765 flags: new Uint8Array(data.subarray(1, 4)),
10766 samples: []
10767 },
10768 view = new DataView(data.buffer, data.byteOffset, data.byteLength),
10769 // Flag interpretation
10770 dataOffsetPresent = result.flags[2] & 0x01,
10771 // compare with 2nd byte of 0x1
10772 firstSampleFlagsPresent = result.flags[2] & 0x04,
10773 // compare with 2nd byte of 0x4
10774 sampleDurationPresent = result.flags[1] & 0x01,
10775 // compare with 2nd byte of 0x100
10776 sampleSizePresent = result.flags[1] & 0x02,
10777 // compare with 2nd byte of 0x200
10778 sampleFlagsPresent = result.flags[1] & 0x04,
10779 // compare with 2nd byte of 0x400
10780 sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
10781 // compare with 2nd byte of 0x800
10782 sampleCount = view.getUint32(4),
10783 offset = 8,
10784 sample;
10785
10786 if (dataOffsetPresent) {
10787 // 32 bit signed integer
10788 result.dataOffset = view.getInt32(offset);
10789 offset += 4;
10790 } // Overrides the flags for the first sample only. The order of
10791 // optional values will be: duration, size, compositionTimeOffset
10792
10793
10794 if (firstSampleFlagsPresent && sampleCount) {
10795 sample = {
10796 flags: parseSampleFlags_1(data.subarray(offset, offset + 4))
10797 };
10798 offset += 4;
10799
10800 if (sampleDurationPresent) {
10801 sample.duration = view.getUint32(offset);
10802 offset += 4;
10803 }
10804
10805 if (sampleSizePresent) {
10806 sample.size = view.getUint32(offset);
10807 offset += 4;
10808 }
10809
10810 if (sampleCompositionTimeOffsetPresent) {
10811 if (result.version === 1) {
10812 sample.compositionTimeOffset = view.getInt32(offset);
10813 } else {
10814 sample.compositionTimeOffset = view.getUint32(offset);
10815 }
10816
10817 offset += 4;
10818 }
10819
10820 result.samples.push(sample);
10821 sampleCount--;
10822 }
10823
10824 while (sampleCount--) {
10825 sample = {};
10826
10827 if (sampleDurationPresent) {
10828 sample.duration = view.getUint32(offset);
10829 offset += 4;
10830 }
10831
10832 if (sampleSizePresent) {
10833 sample.size = view.getUint32(offset);
10834 offset += 4;
10835 }
10836
10837 if (sampleFlagsPresent) {
10838 sample.flags = parseSampleFlags_1(data.subarray(offset, offset + 4));
10839 offset += 4;
10840 }
10841
10842 if (sampleCompositionTimeOffsetPresent) {
10843 if (result.version === 1) {
10844 sample.compositionTimeOffset = view.getInt32(offset);
10845 } else {
10846 sample.compositionTimeOffset = view.getUint32(offset);
10847 }
10848
10849 offset += 4;
10850 }
10851
10852 result.samples.push(sample);
10853 }
10854
10855 return result;
10856 };
10857
10858 var parseTrun = trun;
10859
10860 var tfhd = function tfhd(data) {
10861 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
10862 result = {
10863 version: data[0],
10864 flags: new Uint8Array(data.subarray(1, 4)),
10865 trackId: view.getUint32(4)
10866 },
10867 baseDataOffsetPresent = result.flags[2] & 0x01,
10868 sampleDescriptionIndexPresent = result.flags[2] & 0x02,
10869 defaultSampleDurationPresent = result.flags[2] & 0x08,
10870 defaultSampleSizePresent = result.flags[2] & 0x10,
10871 defaultSampleFlagsPresent = result.flags[2] & 0x20,
10872 durationIsEmpty = result.flags[0] & 0x010000,
10873 defaultBaseIsMoof = result.flags[0] & 0x020000,
10874 i;
10875 i = 8;
10876
10877 if (baseDataOffsetPresent) {
10878 i += 4; // truncate top 4 bytes
10879 // FIXME: should we read the full 64 bits?
10880
10881 result.baseDataOffset = view.getUint32(12);
10882 i += 4;
10883 }
10884
10885 if (sampleDescriptionIndexPresent) {
10886 result.sampleDescriptionIndex = view.getUint32(i);
10887 i += 4;
10888 }
10889
10890 if (defaultSampleDurationPresent) {
10891 result.defaultSampleDuration = view.getUint32(i);
10892 i += 4;
10893 }
10894
10895 if (defaultSampleSizePresent) {
10896 result.defaultSampleSize = view.getUint32(i);
10897 i += 4;
10898 }
10899
10900 if (defaultSampleFlagsPresent) {
10901 result.defaultSampleFlags = view.getUint32(i);
10902 }
10903
10904 if (durationIsEmpty) {
10905 result.durationIsEmpty = true;
10906 }
10907
10908 if (!baseDataOffsetPresent && defaultBaseIsMoof) {
10909 result.baseDataOffsetIsMoof = true;
10910 }
10911
10912 return result;
10913 };
10914
10915 var parseTfhd = tfhd;
10916 var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
10917 var win;
10918
10919 if (typeof window !== "undefined") {
10920 win = window;
10921 } else if (typeof commonjsGlobal !== "undefined") {
10922 win = commonjsGlobal;
10923 } else if (typeof self !== "undefined") {
10924 win = self;
10925 } else {
10926 win = {};
10927 }
10928
10929 var window_1 = win;
10930 var discardEmulationPreventionBytes = captionPacketParser.discardEmulationPreventionBytes;
10931 var CaptionStream = captionStream.CaptionStream;
10932 /**
10933  * Maps an offset in the mdat to a sample based on the size of the samples.
10934 * Assumes that `parseSamples` has been called first.
10935 *
10936 * @param {Number} offset - The offset into the mdat
10937 * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
10938 * @return {?Object} The matching sample, or null if no match was found.
10939 *
10940 * @see ISO-BMFF-12/2015, Section 8.8.8
10941 **/
10942
10943 var mapToSample = function mapToSample(offset, samples) {
10944 var approximateOffset = offset;
10945
10946 for (var i = 0; i < samples.length; i++) {
10947 var sample = samples[i];
10948
10949 if (approximateOffset < sample.size) {
10950 return sample;
10951 }
10952
10953 approximateOffset -= sample.size;
10954 }
10955
10956 return null;
10957 };
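// Example (editor's sketch): with samples of sizes [100, 200, 300], an mdat
// offset of 150 lands in the second sample, because 150 is past the first
// sample's 100 bytes but within the next 200:
//
//   mapToSample(150, [{ size: 100 }, { size: 200 }, { size: 300 }]);
//   // => { size: 200 }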
10958 /**
10959 * Finds SEI nal units contained in a Media Data Box.
10960 * Assumes that `parseSamples` has been called first.
10961 *
10962 * @param {Uint8Array} avcStream - The bytes of the mdat
10963 * @param {Object[]} samples - The samples parsed out by `parseSamples`
10964 * @param {Number} trackId - The trackId of this video track
10965 * @return {Object[]} seiNals - the parsed SEI NALUs found.
10966 * The contents of the seiNal should match what is expected by
10967 * CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
10968 *
10969 * @see ISO-BMFF-12/2015, Section 8.1.1
10970 * @see Rec. ITU-T H.264, 7.3.2.3.1
10971 **/
10972
10973
10974 var findSeiNals = function findSeiNals(avcStream, samples, trackId) {
10975 var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
10976 result = {
10977 logs: [],
10978 seiNals: []
10979 },
10980 seiNal,
10981 i,
10982 length,
10983 lastMatchedSample;
10984
10985 for (i = 0; i + 4 < avcStream.length; i += length) {
10986 length = avcView.getUint32(i);
10987 i += 4; // Bail if this doesn't appear to be an H264 stream
10988
10989 if (length <= 0) {
10990 continue;
10991 }
10992
10993 switch (avcStream[i] & 0x1F) {
10994 case 0x06:
10995 var data = avcStream.subarray(i + 1, i + 1 + length);
10996 var matchingSample = mapToSample(i, samples);
10997 seiNal = {
10998 nalUnitType: 'sei_rbsp',
10999 size: length,
11000 data: data,
11001 escapedRBSP: discardEmulationPreventionBytes(data),
11002 trackId: trackId
11003 };
11004
11005 if (matchingSample) {
11006 seiNal.pts = matchingSample.pts;
11007 seiNal.dts = matchingSample.dts;
11008 lastMatchedSample = matchingSample;
11009 } else if (lastMatchedSample) {
11010 // If a matching sample cannot be found, use the last
11011 // sample's values as they should be as close as possible
11012 seiNal.pts = lastMatchedSample.pts;
11013 seiNal.dts = lastMatchedSample.dts;
11014 } else {
11015 result.logs.push({
11016 level: 'warn',
11017 message: 'We\'ve encountered a nal unit without data at ' + i + ' for trackId ' + trackId + '. See mux.js#223.'
11018 });
11019 break;
11020 }
11021
11022 result.seiNals.push(seiNal);
11023 break;
11024 }
11025 }
11026
11027 return result;
11028 };
11029 /**
11030 * Parses sample information out of Track Run Boxes and calculates
11031 * the absolute presentation and decode timestamps of each sample.
11032 *
11033  * @param {Array<Uint8Array>} truns - The trun (Track Run) boxes to be parsed
11034  * @param {Number|BigInt} baseMediaDecodeTime - base media decode time from tfdt
11035  * @see ISO-BMFF-12/2015, Section 8.8.12
11036 * @param {Object} tfhd - The parsed Track Fragment Header
11037 * @see inspect.parseTfhd
11038 * @return {Object[]} the parsed samples
11039 *
11040 * @see ISO-BMFF-12/2015, Section 8.8.8
11041 **/
11042
11043
11044 var parseSamples = function parseSamples(truns, baseMediaDecodeTime, tfhd) {
11045 var currentDts = baseMediaDecodeTime;
11046 var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
11047 var defaultSampleSize = tfhd.defaultSampleSize || 0;
11048 var trackId = tfhd.trackId;
11049 var allSamples = [];
11050 truns.forEach(function (trun) {
11051 // Note: We currently do not parse the sample table as well
11052 // as the trun. It's possible some sources will require this.
11053 // moov > trak > mdia > minf > stbl
11054 var trackRun = parseTrun(trun);
11055 var samples = trackRun.samples;
11056 samples.forEach(function (sample) {
11057 if (sample.duration === undefined) {
11058 sample.duration = defaultSampleDuration;
11059 }
11060
11061 if (sample.size === undefined) {
11062 sample.size = defaultSampleSize;
11063 }
11064
11065 sample.trackId = trackId;
11066 sample.dts = currentDts;
11067
11068 if (sample.compositionTimeOffset === undefined) {
11069 sample.compositionTimeOffset = 0;
11070 }
11071
11072 if (typeof currentDts === 'bigint') {
11073 sample.pts = currentDts + window_1.BigInt(sample.compositionTimeOffset);
11074 currentDts += window_1.BigInt(sample.duration);
11075 } else {
11076 sample.pts = currentDts + sample.compositionTimeOffset;
11077 currentDts += sample.duration;
11078 }
11079 });
11080 allSamples = allSamples.concat(samples);
11081 });
11082 return allSamples;
11083 };
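// Worked example (editor's sketch): with baseMediaDecodeTime = 1000, a tfhd
// defaultSampleDuration of 100, and two trun samples whose
// compositionTimeOffsets are 0 and 50, parseSamples assigns:
//
//   sample 1: dts = 1000, pts = 1000 + 0  = 1000
//   sample 2: dts = 1100, pts = 1100 + 50 = 1150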
11084 /**
11085 * Parses out caption nals from an FMP4 segment's video tracks.
11086 *
11087 * @param {Uint8Array} segment - The bytes of a single segment
11088 * @param {Number} videoTrackId - The trackId of a video track in the segment
11089 * @return {Object.<Number, Object[]>} A mapping of video trackId to
11090 * a list of seiNals found in that track
11091 **/
11092
11093
11094 var parseCaptionNals = function parseCaptionNals(segment, videoTrackId) {
11095 // To get the samples
11096 var trafs = findBox_1(segment, ['moof', 'traf']); // To get SEI NAL units
11097
11098 var mdats = findBox_1(segment, ['mdat']);
11099 var captionNals = {};
11100 var mdatTrafPairs = []; // Pair up each traf with a mdat as moofs and mdats are in pairs
11101
11102 mdats.forEach(function (mdat, index) {
11103 var matchingTraf = trafs[index];
11104 mdatTrafPairs.push({
11105 mdat: mdat,
11106 traf: matchingTraf
11107 });
11108 });
11109 mdatTrafPairs.forEach(function (pair) {
11110 var mdat = pair.mdat;
11111 var traf = pair.traf;
11112 var tfhd = findBox_1(traf, ['tfhd']); // Exactly 1 tfhd per traf
11113
11114 var headerInfo = parseTfhd(tfhd[0]);
11115 var trackId = headerInfo.trackId;
11116 var tfdt = findBox_1(traf, ['tfdt']); // Either 0 or 1 tfdt per traf
11117
11118 var baseMediaDecodeTime = tfdt.length > 0 ? parseTfdt(tfdt[0]).baseMediaDecodeTime : 0;
11119 var truns = findBox_1(traf, ['trun']);
11120 var samples;
11121 var result; // Only parse video data for the chosen video track
11122
11123 if (videoTrackId === trackId && truns.length > 0) {
11124 samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
11125 result = findSeiNals(mdat, samples, trackId);
11126
11127 if (!captionNals[trackId]) {
11128 captionNals[trackId] = {
11129 seiNals: [],
11130 logs: []
11131 };
11132 }
11133
11134 captionNals[trackId].seiNals = captionNals[trackId].seiNals.concat(result.seiNals);
11135 captionNals[trackId].logs = captionNals[trackId].logs.concat(result.logs);
11136 }
11137 });
11138 return captionNals;
11139 };
11140 /**
11141 * Parses out inband captions from an MP4 container and returns
11142 * caption objects that can be used by WebVTT and the TextTrack API.
11143 * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
11144 * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
11145 * Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
11146 *
11147 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
11148 * @param {Number} trackId - The id of the video track to parse
11149 * @param {Number} timescale - The timescale for the video track from the init segment
11150 *
11151 * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
11152 * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
11153 * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
11154 * @return {String} parsedCaptions[].text - The visible content of the caption
11155 **/
11156
11157
11158 var parseEmbeddedCaptions = function parseEmbeddedCaptions(segment, trackId, timescale) {
11159 var captionNals; // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
11160
11161 if (trackId === null) {
11162 return null;
11163 }
11164
11165 captionNals = parseCaptionNals(segment, trackId);
11166 var trackNals = captionNals[trackId] || {};
11167 return {
11168 seiNals: trackNals.seiNals,
11169 logs: trackNals.logs,
11170 timescale: timescale
11171 };
11172 };
11173 /**
11174 * Converts SEI NALUs into captions that can be used by video.js
11175 **/
11176
11177
11178 var CaptionParser = function CaptionParser() {
11179 var isInitialized = false;
11180 var captionStream; // Stores segments seen before trackId and timescale are set
11181
11182 var segmentCache; // Stores video track ID of the track being parsed
11183
11184 var trackId; // Stores the timescale of the track being parsed
11185
11186 var timescale; // Stores captions parsed so far
11187
11188 var parsedCaptions; // Stores whether we are receiving partial data or not
11189
11190 var parsingPartial;
11191 /**
11192  * A method to indicate whether a CaptionParser has been initialized
11193 * @returns {Boolean}
11194 **/
11195
11196 this.isInitialized = function () {
11197 return isInitialized;
11198 };
11199 /**
11200 * Initializes the underlying CaptionStream, SEI NAL parsing
11201 * and management, and caption collection
11202 **/
11203
11204
11205 this.init = function (options) {
11206 captionStream = new CaptionStream();
11207 isInitialized = true;
11208 parsingPartial = options ? options.isPartial : false; // Collect dispatched captions
11209
11210 captionStream.on('data', function (event) {
11211 // Convert to seconds in the source's timescale
11212 event.startTime = event.startPts / timescale;
11213 event.endTime = event.endPts / timescale;
11214 parsedCaptions.captions.push(event);
11215 parsedCaptions.captionStreams[event.stream] = true;
11216 });
11217 captionStream.on('log', function (log) {
11218 parsedCaptions.logs.push(log);
11219 });
11220 };
11221 /**
11222 * Determines if a new video track will be selected
11223 * or if the timescale changed
11224 * @return {Boolean}
11225 **/
11226
11227
11228 this.isNewInit = function (videoTrackIds, timescales) {
11229 if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
11230 return false;
11231 }
11232
11233 return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
11234 };
11235 /**
11236 * Parses out SEI captions and interacts with underlying
11237 * CaptionStream to return dispatched captions
11238 *
11239 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
11240 * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
11241 * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
11242 * @see parseEmbeddedCaptions
11243 * @see m2ts/caption-stream.js
11244 **/
11245
11246
11247 this.parse = function (segment, videoTrackIds, timescales) {
11248 var parsedData;
11249
11250 if (!this.isInitialized()) {
11251 return null; // This is not likely to be a video segment
11252 } else if (!videoTrackIds || !timescales) {
11253 return null;
11254 } else if (this.isNewInit(videoTrackIds, timescales)) {
11255 // Use the first video track only as there is no
11256 // mechanism to switch to other video tracks
11257 trackId = videoTrackIds[0];
11258 timescale = timescales[trackId]; // If an init segment has not been seen yet, hold onto segment
11259 // data until we have one.
11260 // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
11261 } else if (trackId === null || !timescale) {
11262 segmentCache.push(segment);
11263 return null;
11264 } // Now that a timescale and trackId is set, parse cached segments
11265
11266
11267 while (segmentCache.length > 0) {
11268 var cachedSegment = segmentCache.shift();
11269 this.parse(cachedSegment, videoTrackIds, timescales);
11270 }
11271
11272 parsedData = parseEmbeddedCaptions(segment, trackId, timescale);
11273
11274 if (parsedData && parsedData.logs) {
11275 parsedCaptions.logs = parsedCaptions.logs.concat(parsedData.logs);
11276 }
11277
11278 if (parsedData === null || !parsedData.seiNals) {
11279 if (parsedCaptions.logs.length) {
11280 return {
11281 logs: parsedCaptions.logs,
11282 captions: [],
11283  captionStreams: {}
11284 };
11285 }
11286
11287 return null;
11288 }
11289
11290 this.pushNals(parsedData.seiNals); // Force the parsed captions to be dispatched
11291
11292 this.flushStream();
11293 return parsedCaptions;
11294 };
11295 /**
11296 * Pushes SEI NALUs onto CaptionStream
11297 * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
11298 * Assumes that `parseCaptionNals` has been called first
11299 * @see m2ts/caption-stream.js
11300 **/
11301
11302
11303 this.pushNals = function (nals) {
11304 if (!this.isInitialized() || !nals || nals.length === 0) {
11305 return null;
11306 }
11307
11308 nals.forEach(function (nal) {
11309 captionStream.push(nal);
11310 });
11311 };
11312 /**
11313 * Flushes underlying CaptionStream to dispatch processed, displayable captions
11314 * @see m2ts/caption-stream.js
11315 **/
11316
11317
11318 this.flushStream = function () {
11319 if (!this.isInitialized()) {
11320 return null;
11321 }
11322
11323 if (!parsingPartial) {
11324 captionStream.flush();
11325 } else {
11326 captionStream.partialFlush();
11327 }
11328 };
11329 /**
11330 * Reset caption buckets for new data
11331 **/
11332
11333
11334 this.clearParsedCaptions = function () {
11335 parsedCaptions.captions = [];
11336 parsedCaptions.captionStreams = {};
11337 parsedCaptions.logs = [];
11338 };
11339 /**
11340 * Resets underlying CaptionStream
11341 * @see m2ts/caption-stream.js
11342 **/
11343
11344
11345 this.resetCaptionStream = function () {
11346 if (!this.isInitialized()) {
11347 return null;
11348 }
11349
11350 captionStream.reset();
11351 };
11352 /**
11353 * Convenience method to clear all captions flushed from the
11354 * CaptionStream and still being parsed
11355 * @see m2ts/caption-stream.js
11356 **/
11357
11358
11359 this.clearAllCaptions = function () {
11360 this.clearParsedCaptions();
11361 this.resetCaptionStream();
11362 };
11363 /**
11364 * Reset caption parser
11365 **/
11366
11367
11368 this.reset = function () {
11369 segmentCache = [];
11370 trackId = null;
11371 timescale = null;
11372
11373 if (!parsedCaptions) {
11374 parsedCaptions = {
11375 captions: [],
11376 // CC1, CC2, CC3, CC4
11377 captionStreams: {},
11378 logs: []
11379 };
11380 } else {
11381 this.clearParsedCaptions();
11382 }
11383
11384 this.resetCaptionStream();
11385 };
11386
11387 this.reset();
11388 };
11389
11390 var captionParser = CaptionParser;
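// Example usage (editor's sketch): trackIds and timescales would normally be
// probed from the init segment (see probe.videoTrackIds and probe.timescale
// below); the caption fields shown are assumed from the CaptionStream 'data'
// handler in init() above.
//
//   var parser = new captionParser();
//   parser.init();
//   var parsed = parser.parse(segmentBytes, videoTrackIds, timescales);
//   if (parsed) {
//     // parsed.captions: [{ startTime, endTime, ... }, ...], in seconds
//   }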
11391 var toUnsigned = bin.toUnsigned;
11392 var toHexString = bin.toHexString;
11393 var getUint64 = numbers.getUint64;
11394 var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks, getTimescaleFromMediaHeader;
11395 /**
11396 * Parses an MP4 initialization segment and extracts the timescale
11397 * values for any declared tracks. Timescale values indicate the
11398 * number of clock ticks per second to assume for time-based values
11399 * elsewhere in the MP4.
11400 *
11401 * To determine the start time of an MP4, you need two pieces of
11402 * information: the timescale unit and the earliest base media decode
11403 * time. Multiple timescales can be specified within an MP4 but the
11404 * base media decode time is always expressed in the timescale from
11405 * the media header box for the track:
11406 * ```
11407 * moov > trak > mdia > mdhd.timescale
11408 * ```
11409 * @param init {Uint8Array} the bytes of the init segment
11410 * @return {object} a hash of track ids to timescale values or null if
11411 * the init segment is malformed.
11412 */
11413
11414 timescale = function timescale(init) {
11415 var result = {},
11416 traks = findBox_1(init, ['moov', 'trak']); // mdhd timescale
11417
11418 return traks.reduce(function (result, trak) {
11419 var tkhd, version, index, id, mdhd;
11420 tkhd = findBox_1(trak, ['tkhd'])[0];
11421
11422 if (!tkhd) {
11423 return null;
11424 }
11425
11426 version = tkhd[0];
11427 index = version === 0 ? 12 : 20;
11428 id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);
11429 mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
11430
11431 if (!mdhd) {
11432 return null;
11433 }
11434
11435 version = mdhd[0];
11436 index = version === 0 ? 12 : 20;
11437 result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
11438 return result;
11439 }, result);
11440 };
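// Example (editor's sketch): for an init segment declaring a single video
// track with id 1 and a 90kHz mdhd timescale, the probe returns:
//
//   timescale(initSegmentBytes); // => { 1: 90000 }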
11441 /**
11442 * Determine the base media decode start time, in seconds, for an MP4
11443 * fragment. If multiple fragments are specified, the earliest time is
11444 * returned.
11445 *
11446 * The base media decode time can be parsed from track fragment
11447 * metadata:
11448 * ```
11449 * moof > traf > tfdt.baseMediaDecodeTime
11450 * ```
11451 * It requires the timescale value from the mdhd to interpret.
11452 *
11453 * @param timescale {object} a hash of track ids to timescale values.
11454 * @return {number} the earliest base media decode start time for the
11455 * fragment, in seconds
11456 */
11457
11458
11459 startTime = function startTime(timescale, fragment) {
11460  var trafs; // we need info from two children of each track fragment box
11461
11462 trafs = findBox_1(fragment, ['moof', 'traf']); // determine the start times for each track
11463
11464 var lowestTime = trafs.reduce(function (acc, traf) {
11465 var tfhd = findBox_1(traf, ['tfhd'])[0]; // get the track id from the tfhd
11466
11467 var id = toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]); // assume a 90kHz clock if no timescale was specified
11468
11469 var scale = timescale[id] || 90e3; // get the base media decode time from the tfdt
11470
11471 var tfdt = findBox_1(traf, ['tfdt'])[0];
11472 var dv = new DataView(tfdt.buffer, tfdt.byteOffset, tfdt.byteLength);
11473 var baseTime; // version 1 is 64 bit
11474
11475 if (tfdt[0] === 1) {
11476 baseTime = getUint64(tfdt.subarray(4, 12));
11477 } else {
11478 baseTime = dv.getUint32(4);
11479 } // convert base time to seconds if it is a valid number.
11480
11481
11482 var seconds;
11483
11484 if (typeof baseTime === 'bigint') {
11485 seconds = baseTime / window_1.BigInt(scale);
11486 } else if (typeof baseTime === 'number' && !isNaN(baseTime)) {
11487 seconds = baseTime / scale;
11488 }
11489
11490 if (seconds < Number.MAX_SAFE_INTEGER) {
11491 seconds = Number(seconds);
11492 }
11493
11494 if (seconds < acc) {
11495 acc = seconds;
11496 }
11497
11498 return acc;
11499 }, Infinity);
11500 return typeof lowestTime === 'bigint' || isFinite(lowestTime) ? lowestTime : 0;
11501 };
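// Example (editor's sketch): combining the two probes. If the fragment's tfdt
// carries a baseMediaDecodeTime of 900000 on a 90kHz track, the fragment
// starts at 900000 / 90000 = 10 seconds:
//
//   var scales = timescale(initSegmentBytes); // e.g. { 1: 90000 }
//   startTime(scales, fragmentBytes);         // => 10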
11502 /**
11503 * Determine the composition start, in seconds, for an MP4
11504 * fragment.
11505 *
11506 * The composition start time of a fragment can be calculated using the base
11507 * media decode time, composition time offset, and timescale, as follows:
11508 *
11509 * compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale
11510 *
11511 * All of the aforementioned information is contained within a media fragment's
11512 * `traf` box, except for timescale info, which comes from the initialization
11513 * segment, so a track id (also contained within a `traf`) is also necessary to
11514 * associate it with a timescale
11515 *
11516 *
11517 * @param timescales {object} - a hash of track ids to timescale values.
11518  * @param fragment {Uint8Array} - the bytes of a media segment
11519 * @return {number} the composition start time for the fragment, in seconds
11520 **/
11521
11522
11523 compositionStartTime = function compositionStartTime(timescales, fragment) {
11524 var trafBoxes = findBox_1(fragment, ['moof', 'traf']);
11525 var baseMediaDecodeTime = 0;
11526 var compositionTimeOffset = 0;
11527 var trackId;
11528
11529 if (trafBoxes && trafBoxes.length) {
11530 // The spec states that track run samples contained within a `traf` box are contiguous, but
11531 // it does not explicitly state whether the `traf` boxes themselves are contiguous.
11532 // We will assume that they are, so we only need the first to calculate start time.
11533 var tfhd = findBox_1(trafBoxes[0], ['tfhd'])[0];
11534 var trun = findBox_1(trafBoxes[0], ['trun'])[0];
11535 var tfdt = findBox_1(trafBoxes[0], ['tfdt'])[0];
11536
11537 if (tfhd) {
11538 var parsedTfhd = parseTfhd(tfhd);
11539 trackId = parsedTfhd.trackId;
11540 }
11541
11542 if (tfdt) {
11543 var parsedTfdt = parseTfdt(tfdt);
11544 baseMediaDecodeTime = parsedTfdt.baseMediaDecodeTime;
11545 }
11546
11547 if (trun) {
11548 var parsedTrun = parseTrun(trun);
11549
11550 if (parsedTrun.samples && parsedTrun.samples.length) {
11551 compositionTimeOffset = parsedTrun.samples[0].compositionTimeOffset || 0;
11552 }
11553 }
11554 } // Get timescale for this specific track. Assume a 90kHz clock if no timescale was
11555 // specified.
11556
11557
11558 var timescale = timescales[trackId] || 90e3; // return the composition start time, in seconds
11559
11560 if (typeof baseMediaDecodeTime === 'bigint') {
11561 compositionTimeOffset = window_1.BigInt(compositionTimeOffset);
11562 timescale = window_1.BigInt(timescale);
11563 }
11564
11565 var result = (baseMediaDecodeTime + compositionTimeOffset) / timescale;
11566
11567 if (typeof result === 'bigint' && result < Number.MAX_SAFE_INTEGER) {
11568 result = Number(result);
11569 }
11570
11571 return result;
11572 };
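// Worked example (editor's sketch): with baseMediaDecodeTime = 90000, a
// first-sample compositionTimeOffset of 3000, and a 90kHz timescale, the
// composition start time is (90000 + 3000) / 90000 = 1.0333... seconds.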
11573 /**
11574 * Find the trackIds of the video tracks in this source.
11575 * Found by parsing the Handler Reference and Track Header Boxes:
11576 * moov > trak > mdia > hdlr
11577 * moov > trak > tkhd
11578 *
11579 * @param {Uint8Array} init - The bytes of the init segment for this source
11580 * @return {Number[]} A list of trackIds
11581 *
11582 * @see ISO-BMFF-12/2015, Section 8.4.3
11583 **/
11584
11585
11586 getVideoTrackIds = function getVideoTrackIds(init) {
11587 var traks = findBox_1(init, ['moov', 'trak']);
11588 var videoTrackIds = [];
11589 traks.forEach(function (trak) {
11590 var hdlrs = findBox_1(trak, ['mdia', 'hdlr']);
11591 var tkhds = findBox_1(trak, ['tkhd']);
11592 hdlrs.forEach(function (hdlr, index) {
11593 var handlerType = parseType_1(hdlr.subarray(8, 12));
11594 var tkhd = tkhds[index];
11595 var view;
11596 var version;
11597 var trackId;
11598
11599 if (handlerType === 'vide') {
11600 view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
11601 version = view.getUint8(0);
11602 trackId = version === 0 ? view.getUint32(12) : view.getUint32(20);
11603 videoTrackIds.push(trackId);
11604 }
11605 });
11606 });
11607 return videoTrackIds;
11608 };
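// Example (editor's sketch): for an init segment whose only 'vide' handler
// belongs to track 1:
//
//   getVideoTrackIds(initSegmentBytes); // => [1]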
11609
11610 getTimescaleFromMediaHeader = function getTimescaleFromMediaHeader(mdhd) {
11611 // mdhd is a FullBox, meaning it will have its own version as the first byte
11612 var version = mdhd[0];
11613 var index = version === 0 ? 12 : 20;
11614 return toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
11615 };
11616 /**
11617 * Get all the video, audio, and hint tracks from a non fragmented
11618 * mp4 segment
11619 */
11620
11621
11622 getTracks = function getTracks(init) {
11623 var traks = findBox_1(init, ['moov', 'trak']);
11624 var tracks = [];
11625 traks.forEach(function (trak) {
11626 var track = {};
11627 var tkhd = findBox_1(trak, ['tkhd'])[0];
11628 var view, tkhdVersion; // id
11629
11630 if (tkhd) {
11631 view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
11632 tkhdVersion = view.getUint8(0);
11633 track.id = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
11634 }
11635
11636 var hdlr = findBox_1(trak, ['mdia', 'hdlr'])[0]; // type
11637
11638 if (hdlr) {
11639 var type = parseType_1(hdlr.subarray(8, 12));
11640
11641 if (type === 'vide') {
11642 track.type = 'video';
11643 } else if (type === 'soun') {
11644 track.type = 'audio';
11645 } else {
11646 track.type = type;
11647 }
11648 } // codec
11649
11650
11651 var stsd = findBox_1(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];
11652
11653 if (stsd) {
11654 var sampleDescriptions = stsd.subarray(8); // gives the codec type string
11655
11656 track.codec = parseType_1(sampleDescriptions.subarray(4, 8));
11657 var codecBox = findBox_1(sampleDescriptions, [track.codec])[0];
11658 var codecConfig, codecConfigType;
11659
11660 if (codecBox) {
11661 // https://tools.ietf.org/html/rfc6381#section-3.3
11662 if (/^[asm]vc[1-9]$/i.test(track.codec)) {
11663 // we don't need anything but the "config" parameter of the
11664 // avc1 codecBox
11665 codecConfig = codecBox.subarray(78);
11666 codecConfigType = parseType_1(codecConfig.subarray(4, 8));
11667
11668 if (codecConfigType === 'avcC' && codecConfig.length > 11) {
11669 track.codec += '.'; // left padded with zeroes for single digit hex
11670 // profile idc
11671
11672 track.codec += toHexString(codecConfig[9]); // the byte containing the constraint_set flags
11673
11674 track.codec += toHexString(codecConfig[10]); // level idc
11675
11676 track.codec += toHexString(codecConfig[11]);
11677 } else {
11678 // TODO: show a warning that we couldn't parse the codec
11679 // and are using the default
11680 track.codec = 'avc1.4d400d';
11681 }
11682  } else if (/^mp4[av]$/i.test(track.codec)) {
11683 // we do not need anything but the streamDescriptor of the mp4a codecBox
11684 codecConfig = codecBox.subarray(28);
11685 codecConfigType = parseType_1(codecConfig.subarray(4, 8));
11686
11687 if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {
11688 track.codec += '.' + toHexString(codecConfig[19]); // this value is only a single digit
11689
11690 track.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');
11691 } else {
11692 // TODO: show a warning that we couldn't parse the codec
11693 // and are using the default
11694 track.codec = 'mp4a.40.2';
11695 }
11696 } else {
11697 // flac, opus, etc
11698 track.codec = track.codec.toLowerCase();
11699 }
11700 }
11701 }
11702
11703 var mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
11704
11705 if (mdhd) {
11706 track.timescale = getTimescaleFromMediaHeader(mdhd);
11707 }
11708
11709 tracks.push(track);
11710 });
11711 return tracks;
11712 };
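// Example (editor's sketch): for a typical fMP4 init segment with one AVC
// video track, the probe might return something like:
//
//   getTracks(initSegmentBytes);
//   // => [{ id: 1, type: 'video', codec: 'avc1.64001f', timescale: 90000 }]
//
// where the codec suffix is the profile/constraint/level bytes read from the
// avcC box, as above.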
11713
11714 var probe$2 = {
11715 // export mp4 inspector's findBox and parseType for backwards compatibility
11716 findBox: findBox_1,
11717 parseType: parseType_1,
11718 timescale: timescale,
11719 startTime: startTime,
11720 compositionStartTime: compositionStartTime,
11721 videoTrackIds: getVideoTrackIds,
11722 tracks: getTracks,
11723 getTimescaleFromMediaHeader: getTimescaleFromMediaHeader
11724 };
11725
11726 var parsePid = function parsePid(packet) {
11727 var pid = packet[1] & 0x1f;
11728 pid <<= 8;
11729 pid |= packet[2];
11730 return pid;
11731 };
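// The PID is the low 5 bits of the second header byte followed by all 8 bits
// of the third. For example, a TS header starting [0x47, 0x01, 0x00, ...]
// yields (0x01 & 0x1f) << 8 | 0x00 = 256.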
11732
11733 var parsePayloadUnitStartIndicator = function parsePayloadUnitStartIndicator(packet) {
11734 return !!(packet[1] & 0x40);
11735 };
11736
11737 var parseAdaptionField = function parseAdaptionField(packet) {
11738  var offset = 0; // if an adaptation field is present, its length is specified by the
11739 // fifth byte of the TS packet header. The adaptation field is
11740 // used to add stuffing to PES packets that don't fill a complete
11741 // TS packet, and to specify some forms of timing and control data
11742 // that we do not currently use.
11743
11744 if ((packet[3] & 0x30) >>> 4 > 0x01) {
11745 offset += packet[4] + 1;
11746 }
11747
11748 return offset;
11749 };
11750
11751 var parseType = function parseType(packet, pmtPid) {
11752 var pid = parsePid(packet);
11753
11754 if (pid === 0) {
11755 return 'pat';
11756 } else if (pid === pmtPid) {
11757 return 'pmt';
11758 } else if (pmtPid) {
11759 return 'pes';
11760 }
11761
11762 return null;
11763 };
11764
11765 var parsePat = function parsePat(packet) {
11766 var pusi = parsePayloadUnitStartIndicator(packet);
11767 var offset = 4 + parseAdaptionField(packet);
11768
11769 if (pusi) {
11770 offset += packet[offset] + 1;
11771 }
11772
11773 return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
11774 };
11775
11776 var parsePmt = function parsePmt(packet) {
11777 var programMapTable = {};
11778 var pusi = parsePayloadUnitStartIndicator(packet);
11779 var payloadOffset = 4 + parseAdaptionField(packet);
11780
11781 if (pusi) {
11782 payloadOffset += packet[payloadOffset] + 1;
11783 } // PMTs can be sent ahead of the time when they should actually
11784 // take effect. We don't believe this should ever be the case
11785 // for HLS but we'll ignore "forward" PMT declarations if we see
11786 // them. Future PMT declarations have the current_next_indicator
11787 // set to zero.
11788
11789
11790 if (!(packet[payloadOffset + 5] & 0x01)) {
11791 return;
11792 }
11793
11794 var sectionLength, tableEnd, programInfoLength; // the mapping table ends at the end of the current section
11795
11796 sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
11797 tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
11798 // long the program info descriptors are
11799
11800 programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11]; // advance the offset to the first entry in the mapping table
11801
11802 var offset = 12 + programInfoLength;
11803
11804 while (offset < tableEnd) {
11805 var i = payloadOffset + offset; // add an entry that maps the elementary_pid to the stream_type
11806
11807 programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i]; // move to the next table entry
11808 // skip past the elementary stream descriptors, if present
11809
11810 offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
11811 }
11812
11813 return programMapTable;
11814 };
11815
11816 var parsePesType = function parsePesType(packet, programMapTable) {
11817 var pid = parsePid(packet);
11818 var type = programMapTable[pid];
11819
11820 switch (type) {
11821 case streamTypes.H264_STREAM_TYPE:
11822 return 'video';
11823
11824 case streamTypes.ADTS_STREAM_TYPE:
11825 return 'audio';
11826
11827 case streamTypes.METADATA_STREAM_TYPE:
11828 return 'timed-metadata';
11829
11830 default:
11831 return null;
11832 }
11833 };
11834
11835 var parsePesTime = function parsePesTime(packet) {
11836 var pusi = parsePayloadUnitStartIndicator(packet);
11837
11838 if (!pusi) {
11839 return null;
11840 }
11841
11842 var offset = 4 + parseAdaptionField(packet);
11843
11844 if (offset >= packet.byteLength) {
11845 // From the H 222.0 MPEG-TS spec
11846 // "For transport stream packets carrying PES packets, stuffing is needed when there
11847 // is insufficient PES packet data to completely fill the transport stream packet
11848 // payload bytes. Stuffing is accomplished by defining an adaptation field longer than
11849 // the sum of the lengths of the data elements in it, so that the payload bytes
11850 // remaining after the adaptation field exactly accommodates the available PES packet
11851 // data."
11852 //
11853 // If the offset is >= the length of the packet, then the packet contains no data
11854  // and instead is just adaptation field stuffing bytes
11855 return null;
11856 }
11857
11858 var pes = null;
11859 var ptsDtsFlags; // PES packets may be annotated with a PTS value, or a PTS value
11860 // and a DTS value. Determine what combination of values is
11861 // available to work with.
11862
11863  ptsDtsFlags = packet[offset + 7]; // PTS and DTS are normally stored as a 33-bit number. JavaScript
11864  // performs all bitwise operations on 32-bit integers but JavaScript
11865  // supports a much greater range (53 bits) of integer using standard
11866  // mathematical operations.
11867 // We construct a 31-bit value using bitwise operators over the 31
11868 // most significant bits and then multiply by 4 (equal to a left-shift
11869 // of 2) before we add the final 2 least significant bits of the
11870 // timestamp (equal to an OR.)
11871
11872 if (ptsDtsFlags & 0xC0) {
11873 pes = {}; // the PTS and DTS are not written out directly. For information
11874 // on how they are encoded, see
11875 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
11876
11877 pes.pts = (packet[offset + 9] & 0x0E) << 27 | (packet[offset + 10] & 0xFF) << 20 | (packet[offset + 11] & 0xFE) << 12 | (packet[offset + 12] & 0xFF) << 5 | (packet[offset + 13] & 0xFE) >>> 3;
11878 pes.pts *= 4; // Left shift by 2
11879
11880 pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
11881
11882 pes.dts = pes.pts;
11883
11884 if (ptsDtsFlags & 0x40) {
11885 pes.dts = (packet[offset + 14] & 0x0E) << 27 | (packet[offset + 15] & 0xFF) << 20 | (packet[offset + 16] & 0xFE) << 12 | (packet[offset + 17] & 0xFF) << 5 | (packet[offset + 18] & 0xFE) >>> 3;
11886 pes.dts *= 4; // Left shift by 2
11887
11888 pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
11889 }
11890 }
11891
11892 return pes;
11893 };
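// Worked example (editor's sketch): a 33-bit PTS of 90000 (one second on the
// 90kHz MPEG-TS clock). The bitwise pass reconstructs the 31 most significant
// bits, 90000 >>> 2 = 22500; multiplying by 4 and adding the two least
// significant bits (here 0) restores the full value: 22500 * 4 + 0 = 90000.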
11894
11895 var parseNalUnitType = function parseNalUnitType(type) {
11896 switch (type) {
11897 case 0x05:
11898 return 'slice_layer_without_partitioning_rbsp_idr';
11899
11900 case 0x06:
11901 return 'sei_rbsp';
11902
11903 case 0x07:
11904 return 'seq_parameter_set_rbsp';
11905
11906 case 0x08:
11907 return 'pic_parameter_set_rbsp';
11908
11909 case 0x09:
11910 return 'access_unit_delimiter_rbsp';
11911
11912 default:
11913 return null;
11914 }
11915 };
11916
11917 var videoPacketContainsKeyFrame = function videoPacketContainsKeyFrame(packet) {
11918 var offset = 4 + parseAdaptionField(packet);
11919 var frameBuffer = packet.subarray(offset);
11920 var frameI = 0;
11921 var frameSyncPoint = 0;
11922 var foundKeyFrame = false;
11923 var nalType; // advance the sync point to a NAL start, if necessary
11924
11925 for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
11926 if (frameBuffer[frameSyncPoint + 2] === 1) {
11927 // the sync point is properly aligned
11928 frameI = frameSyncPoint + 5;
11929 break;
11930 }
11931 }
11932
11933 while (frameI < frameBuffer.byteLength) {
11934 // look at the current byte to determine if we've hit the end of
11935 // a NAL unit boundary
11936 switch (frameBuffer[frameI]) {
11937 case 0:
11938 // skip past non-sync sequences
11939 if (frameBuffer[frameI - 1] !== 0) {
11940 frameI += 2;
11941 break;
11942 } else if (frameBuffer[frameI - 2] !== 0) {
11943 frameI++;
11944 break;
11945 }
11946
11947 if (frameSyncPoint + 3 !== frameI - 2) {
11948 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
11949
11950 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
11951 foundKeyFrame = true;
11952 }
11953 } // drop trailing zeroes
11954
11955
11956 do {
11957 frameI++;
11958 } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
11959
11960 frameSyncPoint = frameI - 2;
11961 frameI += 3;
11962 break;
11963
11964 case 1:
11965 // skip past non-sync sequences
11966 if (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {
11967 frameI += 3;
11968 break;
11969 }
11970
11971 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
11972
11973 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
11974 foundKeyFrame = true;
11975 }
11976
11977 frameSyncPoint = frameI - 2;
11978 frameI += 3;
11979 break;
11980
11981 default:
11982 // the current byte isn't a one or zero, so it cannot be part
11983 // of a sync sequence
11984 frameI += 3;
11985 break;
11986 }
11987 }
11988
11989 frameBuffer = frameBuffer.subarray(frameSyncPoint);
11990 frameI -= frameSyncPoint;
11991 frameSyncPoint = 0; // parse the final nal
11992
11993 if (frameBuffer && frameBuffer.byteLength > 3) {
11994 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
11995
11996 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
11997 foundKeyFrame = true;
11998 }
11999 }
12000
12001 return foundKeyFrame;
12002 };
12003
12004 var probe$1 = {
12005 parseType: parseType,
12006 parsePat: parsePat,
12007 parsePmt: parsePmt,
12008 parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
12009 parsePesType: parsePesType,
12010 parsePesTime: parsePesTime,
12011 videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
12012 };
12013 var handleRollover = timestampRolloverStream.handleRollover;
12014 var probe = {};
12015 probe.ts = probe$1;
12016 probe.aac = utils;
12017 var ONE_SECOND_IN_TS = clock.ONE_SECOND_IN_TS;
12018 var MP2T_PACKET_LENGTH = 188,
12019 // bytes
12020 SYNC_BYTE = 0x47;
12021 /**
12022 * walks through segment data looking for pat and pmt packets to parse out
12023 * program map table information
12024 */
12025
12026 var parsePsi_ = function parsePsi_(bytes, pmt) {
12027 var startIndex = 0,
12028 endIndex = MP2T_PACKET_LENGTH,
12029 packet,
12030 type;
12031
12032 while (endIndex < bytes.byteLength) {
12033  // Look for a pair of start and end sync bytes in the data.
12034 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
12035 // We found a packet
12036 packet = bytes.subarray(startIndex, endIndex);
12037 type = probe.ts.parseType(packet, pmt.pid);
12038
12039 switch (type) {
12040 case 'pat':
12041 pmt.pid = probe.ts.parsePat(packet);
12042 break;
12043
12044 case 'pmt':
12045 var table = probe.ts.parsePmt(packet);
12046 pmt.table = pmt.table || {};
12047 Object.keys(table).forEach(function (key) {
12048 pmt.table[key] = table[key];
12049 });
12050 break;
12051 }
12052
12053 startIndex += MP2T_PACKET_LENGTH;
12054 endIndex += MP2T_PACKET_LENGTH;
12055 continue;
12056 } // If we get here, we have somehow become de-synchronized and we need to step
12057 // forward one byte at a time until we find a pair of sync bytes that denote
12058 // a packet
12059
12060
12061 startIndex++;
12062 endIndex++;
12063 }
12064 };
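// Example usage (editor's sketch): parsePsi_ is driven with a shared pmt
// object that accumulates state as packets are scanned:
//
//   var pmt = {};
//   parsePsi_(segmentBytes, pmt);
//   // pmt.pid now holds the PMT PID parsed from the PAT, and pmt.table maps
//   // each elementary PID to its stream_type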
12065 /**
12066 * walks through the segment data from the start and end to get timing information
12067 * for the first and last audio pes packets
12068 */
12069
12070
12071 var parseAudioPes_ = function parseAudioPes_(bytes, pmt, result) {
12072 var startIndex = 0,
12073 endIndex = MP2T_PACKET_LENGTH,
12074 packet,
12075 type,
12076 pesType,
12077 pusi,
12078 parsed;
12079 var endLoop = false; // Start walking from start of segment to get first audio packet
12080
12081 while (endIndex <= bytes.byteLength) {
12082 // Look for a pair of start and end sync bytes in the data.
12083 if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
12084 // We found a packet
12085 packet = bytes.subarray(startIndex, endIndex);
12086 type = probe.ts.parseType(packet, pmt.pid);
12087
12088 switch (type) {
12089 case 'pes':
12090 pesType = probe.ts.parsePesType(packet, pmt.table);
12091 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
12092
12093 if (pesType === 'audio' && pusi) {
12094 parsed = probe.ts.parsePesTime(packet);
12095
12096 if (parsed) {
12097 parsed.type = 'audio';
12098 result.audio.push(parsed);
12099 endLoop = true;
12100 }
12101 }
12102
12103 break;
12104 }
12105
12106 if (endLoop) {
12107 break;
12108 }
12109
12110 startIndex += MP2T_PACKET_LENGTH;
12111 endIndex += MP2T_PACKET_LENGTH;
12112 continue;
12113 } // If we get here, we have somehow become de-synchronized and we need to step
12114 // forward one byte at a time until we find a pair of sync bytes that denote
12115 // a packet
12116
12117
12118 startIndex++;
12119 endIndex++;
12120 } // Start walking from end of segment to get last audio packet
12121
12122
12123 endIndex = bytes.byteLength;
12124 startIndex = endIndex - MP2T_PACKET_LENGTH;
12125 endLoop = false;
12126
12127 while (startIndex >= 0) {
12128 // Look for a pair of start and end sync bytes in the data.
12129 if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
12130 // We found a packet
12131 packet = bytes.subarray(startIndex, endIndex);
12132 type = probe.ts.parseType(packet, pmt.pid);
12133
12134 switch (type) {
12135 case 'pes':
12136 pesType = probe.ts.parsePesType(packet, pmt.table);
12137 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
12138
12139 if (pesType === 'audio' && pusi) {
12140 parsed = probe.ts.parsePesTime(packet);
12141
12142 if (parsed) {
12143 parsed.type = 'audio';
12144 result.audio.push(parsed);
12145 endLoop = true;
12146 }
12147 }
12148
12149 break;
12150 }
12151
12152 if (endLoop) {
12153 break;
12154 }
12155
12156 startIndex -= MP2T_PACKET_LENGTH;
12157 endIndex -= MP2T_PACKET_LENGTH;
12158 continue;
12159 } // If we get here, we have somehow become de-synchronized and we need to step
12160 // backward one byte at a time until we find a pair of sync bytes that denote
12161 // a packet
12162
12163
12164 startIndex--;
12165 endIndex--;
12166 }
12167 };
12168 /**
12169 * walks through the segment data from the start and end to get timing information
12170 * for the first and last video pes packets as well as timing information for the first
12171 * key frame.
12172 */
12173
12174
12175 var parseVideoPes_ = function parseVideoPes_(bytes, pmt, result) {
12176 var startIndex = 0,
12177 endIndex = MP2T_PACKET_LENGTH,
12178 packet,
12179 type,
12180 pesType,
12181 pusi,
12182 parsed,
12183 frame,
12184 i,
12185 pes;
12186 var endLoop = false;
12187 var currentFrame = {
12188 data: [],
12189 size: 0
12190 }; // Start walking from start of segment to get first video packet
12191
12192 while (endIndex < bytes.byteLength) {
12193 // Look for a pair of start and end sync bytes in the data.
12194 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
12195 // We found a packet
12196 packet = bytes.subarray(startIndex, endIndex);
12197 type = probe.ts.parseType(packet, pmt.pid);
12198
12199 switch (type) {
12200 case 'pes':
12201 pesType = probe.ts.parsePesType(packet, pmt.table);
12202 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
12203
12204 if (pesType === 'video') {
12205 if (pusi && !endLoop) {
12206 parsed = probe.ts.parsePesTime(packet);
12207
12208 if (parsed) {
12209 parsed.type = 'video';
12210 result.video.push(parsed);
12211 endLoop = true;
12212 }
12213 }
12214
12215 if (!result.firstKeyFrame) {
12216 if (pusi) {
12217 if (currentFrame.size !== 0) {
12218 frame = new Uint8Array(currentFrame.size);
12219 i = 0;
12220
12221 while (currentFrame.data.length) {
12222 pes = currentFrame.data.shift();
12223 frame.set(pes, i);
12224 i += pes.byteLength;
12225 }
12226
12227 if (probe.ts.videoPacketContainsKeyFrame(frame)) {
12228 var firstKeyFrame = probe.ts.parsePesTime(frame); // PTS/DTS may not be available. Simply *not* setting
12229 // the keyframe seems to work fine with HLS playback
12230 // and is definitely preferable to a crash with a TypeError...
12231
12232 if (firstKeyFrame) {
12233 result.firstKeyFrame = firstKeyFrame;
12234 result.firstKeyFrame.type = 'video';
12235 } else {
12236 // eslint-disable-next-line
12237 console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
12238 }
12239 }
12240
12241 currentFrame.size = 0;
12242 }
12243 }
12244
12245 currentFrame.data.push(packet);
12246 currentFrame.size += packet.byteLength;
12247 }
12248 }
12249
12250 break;
12251 }
12252
12253 if (endLoop && result.firstKeyFrame) {
12254 break;
12255 }
12256
12257 startIndex += MP2T_PACKET_LENGTH;
12258 endIndex += MP2T_PACKET_LENGTH;
12259 continue;
12260 } // If we get here, we have somehow become de-synchronized and we need to step
12261 // forward one byte at a time until we find a pair of sync bytes that denote
12262 // a packet
12263
12264
12265 startIndex++;
12266 endIndex++;
12267 } // Start walking from end of segment to get last video packet
12268
12269
12270 endIndex = bytes.byteLength;
12271 startIndex = endIndex - MP2T_PACKET_LENGTH;
12272 endLoop = false;
12273
12274 while (startIndex >= 0) {
12275 // Look for a pair of start and end sync bytes in the data.
12276 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
12277 // We found a packet
12278 packet = bytes.subarray(startIndex, endIndex);
12279 type = probe.ts.parseType(packet, pmt.pid);
12280
12281 switch (type) {
12282 case 'pes':
12283 pesType = probe.ts.parsePesType(packet, pmt.table);
12284 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
12285
12286 if (pesType === 'video' && pusi) {
12287 parsed = probe.ts.parsePesTime(packet);
12288
12289 if (parsed) {
12290 parsed.type = 'video';
12291 result.video.push(parsed);
12292 endLoop = true;
12293 }
12294 }
12295
12296 break;
12297 }
12298
12299 if (endLoop) {
12300 break;
12301 }
12302
12303 startIndex -= MP2T_PACKET_LENGTH;
12304 endIndex -= MP2T_PACKET_LENGTH;
12305 continue;
12306 } // If we get here, we have somehow become de-synchronized and we need to step
12307 // backward one byte at a time until we find a pair of sync bytes that denote
12308 // a packet
12309
12310
12311 startIndex--;
12312 endIndex--;
12313 }
12314 };
12315 /**
12316 * Adjusts the timestamp information for the segment to account for
12317 * rollover and to convert to seconds based on the PES packet timescale (90kHz clock)
12318 */
12319
12320
12321 var adjustTimestamp_ = function adjustTimestamp_(segmentInfo, baseTimestamp) {
12322 if (segmentInfo.audio && segmentInfo.audio.length) {
12323 var audioBaseTimestamp = baseTimestamp;
12324
12325 if (typeof audioBaseTimestamp === 'undefined' || isNaN(audioBaseTimestamp)) {
12326 audioBaseTimestamp = segmentInfo.audio[0].dts;
12327 }
12328
12329 segmentInfo.audio.forEach(function (info) {
12330 info.dts = handleRollover(info.dts, audioBaseTimestamp);
12331 info.pts = handleRollover(info.pts, audioBaseTimestamp); // time in seconds
12332
12333 info.dtsTime = info.dts / ONE_SECOND_IN_TS;
12334 info.ptsTime = info.pts / ONE_SECOND_IN_TS;
12335 });
12336 }
12337
12338 if (segmentInfo.video && segmentInfo.video.length) {
12339 var videoBaseTimestamp = baseTimestamp;
12340
12341 if (typeof videoBaseTimestamp === 'undefined' || isNaN(videoBaseTimestamp)) {
12342 videoBaseTimestamp = segmentInfo.video[0].dts;
12343 }
12344
12345 segmentInfo.video.forEach(function (info) {
12346 info.dts = handleRollover(info.dts, videoBaseTimestamp);
12347 info.pts = handleRollover(info.pts, videoBaseTimestamp); // time in seconds
12348
12349 info.dtsTime = info.dts / ONE_SECOND_IN_TS;
12350 info.ptsTime = info.pts / ONE_SECOND_IN_TS;
12351 });
12352
12353 if (segmentInfo.firstKeyFrame) {
12354 var frame = segmentInfo.firstKeyFrame;
12355 frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
12356 frame.pts = handleRollover(frame.pts, videoBaseTimestamp); // time in seconds
12357
12358 frame.dtsTime = frame.dts / ONE_SECOND_IN_TS;
12359 frame.ptsTime = frame.pts / ONE_SECOND_IN_TS;
12360 }
12361 }
12362 };
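/*
 * Worked example for the adjustment above (illustrative only): the PES
 * clock runs at 90kHz, so ONE_SECOND_IN_TS is 90000 and a PTS of 180000
 * ticks yields a ptsTime of 180000 / 90000 === 2 seconds. handleRollover
 * first unwraps the 33-bit timestamp against the base timestamp so values
 * that wrapped past 2^33 stay monotonic before the division happens.
 */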
12363 /**
12364 * inspects the aac data stream for start and end time information
12365 */
12366
12367
12368 var inspectAac_ = function inspectAac_(bytes) {
12369 var endLoop = false,
12370 audioCount = 0,
12371 sampleRate = null,
12372 timestamp = null,
12373 frameSize = 0,
12374 byteIndex = 0,
12375 packet;
12376
12377 while (bytes.length - byteIndex >= 3) {
12378 var type = probe.aac.parseType(bytes, byteIndex);
12379
12380 switch (type) {
12381 case 'timed-metadata':
12382 // Exit early because we don't have enough to parse
12383 // the ID3 tag header
12384 if (bytes.length - byteIndex < 10) {
12385 endLoop = true;
12386 break;
12387 }
12388
12389 frameSize = probe.aac.parseId3TagSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
12390 // to emit a full packet
12391
12392 if (frameSize > bytes.length) {
12393 endLoop = true;
12394 break;
12395 }
12396
12397 if (timestamp === null) {
12398 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
12399 timestamp = probe.aac.parseAacTimestamp(packet);
12400 }
12401
12402 byteIndex += frameSize;
12403 break;
12404
12405 case 'audio':
12406 // Exit early because we don't have enough to parse
12407 // the ADTS frame header
12408 if (bytes.length - byteIndex < 7) {
12409 endLoop = true;
12410 break;
12411 }
12412
12413 frameSize = probe.aac.parseAdtsSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
12414 // to emit a full packet
12415
12416 if (frameSize > bytes.length) {
12417 endLoop = true;
12418 break;
12419 }
12420
12421 if (sampleRate === null) {
12422 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
12423 sampleRate = probe.aac.parseSampleRate(packet);
12424 }
12425
12426 audioCount++;
12427 byteIndex += frameSize;
12428 break;
12429
12430 default:
12431 byteIndex++;
12432 break;
12433 }
12434
12435 if (endLoop) {
12436 return null;
12437 }
12438 }
12439
12440 if (sampleRate === null || timestamp === null) {
12441 return null;
12442 }
12443
12444 var audioTimescale = ONE_SECOND_IN_TS / sampleRate;
12445 var result = {
12446 audio: [{
12447 type: 'audio',
12448 dts: timestamp,
12449 pts: timestamp
12450 }, {
12451 type: 'audio',
12452 dts: timestamp + audioCount * 1024 * audioTimescale,
12453 pts: timestamp + audioCount * 1024 * audioTimescale
12454 }]
12455 };
12456 return result;
12457 };
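/*
 * Worked example for the end-time math above (illustrative only): each
 * ADTS frame carries 1024 PCM samples, and audioTimescale is the number of
 * 90kHz ticks per sample. At a 48000Hz sample rate that is 90000 / 48000 =
 * 1.875 ticks per sample, so 100 frames advance the timestamp by
 * 100 * 1024 * 1.875 = 192000 ticks, i.e. 192000 / 90000 ~= 2.13 seconds.
 */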
12458 /**
12459 * inspects the transport stream segment data for start and end time information
12460 * of the audio and video tracks (when present) as well as the first key frame's
12461 * start time.
12462 */
12463
12464
12465 var inspectTs_ = function inspectTs_(bytes) {
12466 var pmt = {
12467 pid: null,
12468 table: null
12469 };
12470 var result = {};
12471 parsePsi_(bytes, pmt);
12472
12473 for (var pid in pmt.table) {
12474 if (pmt.table.hasOwnProperty(pid)) {
12475 var type = pmt.table[pid];
12476
12477 switch (type) {
12478 case streamTypes.H264_STREAM_TYPE:
12479 result.video = [];
12480 parseVideoPes_(bytes, pmt, result);
12481
12482 if (result.video.length === 0) {
12483 delete result.video;
12484 }
12485
12486 break;
12487
12488 case streamTypes.ADTS_STREAM_TYPE:
12489 result.audio = [];
12490 parseAudioPes_(bytes, pmt, result);
12491
12492 if (result.audio.length === 0) {
12493 delete result.audio;
12494 }
12495
12496 break;
12497 }
12498 }
12499 }
12500
12501 return result;
12502 };
12503 /**
12504 * Inspects segment byte data and returns an object with start and end timing information
12505 *
12506 * @param {Uint8Array} bytes The segment byte data
12507 * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
12508 * timestamps for rollover. This value must be in the 90kHz clock.
12509 * @return {Object} Object containing start and end frame timing info of segment.
12510 */
12511
12512
12513 var inspect = function inspect(bytes, baseTimestamp) {
12514 var isAacData = probe.aac.isLikelyAacData(bytes);
12515 var result;
12516
12517 if (isAacData) {
12518 result = inspectAac_(bytes);
12519 } else {
12520 result = inspectTs_(bytes);
12521 }
12522
12523 if (!result || !result.audio && !result.video) {
12524 return null;
12525 }
12526
12527 adjustTimestamp_(result, baseTimestamp);
12528 return result;
12529 };
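/*
 * Usage sketch (illustrative only; `segmentBytes` is a hypothetical
 * Uint8Array): given the raw bytes of a TS or ADTS/AAC segment, inspect()
 * reports per-track start/end timing in both 90kHz ticks and seconds, or
 * null when neither audio nor video could be parsed:
 *
 *   var timeInfo = inspect(segmentBytes, 0); // baseTimestamp in 90kHz ticks
 *   if (timeInfo && timeInfo.video) {
 *     var videoStartSeconds = timeInfo.video[0].ptsTime;
 *   }
 */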
12530
12531 var tsInspector = {
12532 inspect: inspect,
12533 parseAudioPes_: parseAudioPes_
12534 };
12535 /* global self */
12536
12537 /**
12538 * Re-emits transmuxer events by converting them into messages to the
12539 * world outside the worker.
12540 *
12541 * @param {Object} transmuxer the transmuxer to wire events on
12542 * @private
12543 */
12544
12545 var wireTransmuxerEvents = function wireTransmuxerEvents(self, transmuxer) {
12546 transmuxer.on('data', function (segment) {
12547 // transfer ownership of the underlying ArrayBuffer
12548 // instead of doing a copy to save memory
12549 // ArrayBuffers are transferable but generic TypedArrays are not
12550 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
12551 var initArray = segment.initSegment;
12552 segment.initSegment = {
12553 data: initArray.buffer,
12554 byteOffset: initArray.byteOffset,
12555 byteLength: initArray.byteLength
12556 };
12557 var typedArray = segment.data;
12558 segment.data = typedArray.buffer;
12559 self.postMessage({
12560 action: 'data',
12561 segment: segment,
12562 byteOffset: typedArray.byteOffset,
12563 byteLength: typedArray.byteLength
12564 }, [segment.data]);
12565 });
12566 transmuxer.on('done', function (data) {
12567 self.postMessage({
12568 action: 'done'
12569 });
12570 });
12571 transmuxer.on('gopInfo', function (gopInfo) {
12572 self.postMessage({
12573 action: 'gopInfo',
12574 gopInfo: gopInfo
12575 });
12576 });
12577 transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {
12578 var videoSegmentTimingInfo = {
12579 start: {
12580 decode: clock.videoTsToSeconds(timingInfo.start.dts),
12581 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
12582 },
12583 end: {
12584 decode: clock.videoTsToSeconds(timingInfo.end.dts),
12585 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
12586 },
12587 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
12588 };
12589
12590 if (timingInfo.prependedContentDuration) {
12591 videoSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
12592 }
12593
12594 self.postMessage({
12595 action: 'videoSegmentTimingInfo',
12596 videoSegmentTimingInfo: videoSegmentTimingInfo
12597 });
12598 });
12599 transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {
12600 // Note that all times for [audio/video]SegmentTimingInfo events are in video clock
12601 var audioSegmentTimingInfo = {
12602 start: {
12603 decode: clock.videoTsToSeconds(timingInfo.start.dts),
12604 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
12605 },
12606 end: {
12607 decode: clock.videoTsToSeconds(timingInfo.end.dts),
12608 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
12609 },
12610 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
12611 };
12612
12613 if (timingInfo.prependedContentDuration) {
12614 audioSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
12615 }
12616
12617 self.postMessage({
12618 action: 'audioSegmentTimingInfo',
12619 audioSegmentTimingInfo: audioSegmentTimingInfo
12620 });
12621 });
12622 transmuxer.on('id3Frame', function (id3Frame) {
12623 self.postMessage({
12624 action: 'id3Frame',
12625 id3Frame: id3Frame
12626 });
12627 });
12628 transmuxer.on('caption', function (caption) {
12629 self.postMessage({
12630 action: 'caption',
12631 caption: caption
12632 });
12633 });
12634 transmuxer.on('trackinfo', function (trackInfo) {
12635 self.postMessage({
12636 action: 'trackinfo',
12637 trackInfo: trackInfo
12638 });
12639 });
12640 transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
12641 // convert to video TS since we prioritize video time over audio
12642 self.postMessage({
12643 action: 'audioTimingInfo',
12644 audioTimingInfo: {
12645 start: clock.videoTsToSeconds(audioTimingInfo.start),
12646 end: clock.videoTsToSeconds(audioTimingInfo.end)
12647 }
12648 });
12649 });
12650 transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
12651 self.postMessage({
12652 action: 'videoTimingInfo',
12653 videoTimingInfo: {
12654 start: clock.videoTsToSeconds(videoTimingInfo.start),
12655 end: clock.videoTsToSeconds(videoTimingInfo.end)
12656 }
12657 });
12658 });
12659 transmuxer.on('log', function (log) {
12660 self.postMessage({
12661 action: 'log',
12662 log: log
12663 });
12664 });
12665 };
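/*
 * Minimal sketch of the transfer-of-ownership pattern used above
 * (illustrative only; `view` stands for the typed array being shipped):
 * listing an ArrayBuffer in postMessage's transfer list moves it to the
 * receiver instead of structured-cloning it, which is why byteOffset and
 * byteLength travel alongside -- the receiver needs them to rebuild the
 * original typed-array view:
 *
 *   self.postMessage({ action: 'data', segment: segment }, [view.buffer]);
 *   // receiver side:
 *   var bytes = new Uint8Array(e.data.segment.data, byteOffset, byteLength);
 */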
12666 /**
12667 * All incoming messages route through this hash. If no function exists
12668 * to handle an incoming message, then we ignore the message.
12669 *
12670 * @class MessageHandlers
12671 * @param {Object} options the options to initialize with
12672 */
12673
12674
12675 var MessageHandlers = /*#__PURE__*/function () {
12676 function MessageHandlers(self, options) {
12677 this.options = options || {};
12678 this.self = self;
12679 this.init();
12680 }
12681 /**
12682 * initialize our web worker and wire all the events.
12683 */
12684
12685
12686 var _proto = MessageHandlers.prototype;
12687
12688 _proto.init = function init() {
12689 if (this.transmuxer) {
12690 this.transmuxer.dispose();
12691 }
12692
12693 this.transmuxer = new transmuxer.Transmuxer(this.options);
12694 wireTransmuxerEvents(this.self, this.transmuxer);
12695 };
12696
12697 _proto.pushMp4Captions = function pushMp4Captions(data) {
12698 if (!this.captionParser) {
12699 this.captionParser = new captionParser();
12700 this.captionParser.init();
12701 }
12702
12703 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
12704 var parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);
12705 this.self.postMessage({
12706 action: 'mp4Captions',
12707 captions: parsed && parsed.captions || [],
12708 logs: parsed && parsed.logs || [],
12709 data: segment.buffer
12710 }, [segment.buffer]);
12711 };
12712
12713 _proto.probeMp4StartTime = function probeMp4StartTime(_ref) {
12714 var timescales = _ref.timescales,
12715 data = _ref.data;
12716 var startTime = probe$2.startTime(timescales, data);
12717 this.self.postMessage({
12718 action: 'probeMp4StartTime',
12719 startTime: startTime,
12720 data: data
12721 }, [data.buffer]);
12722 };
12723
12724 _proto.probeMp4Tracks = function probeMp4Tracks(_ref2) {
12725 var data = _ref2.data;
12726 var tracks = probe$2.tracks(data);
12727 this.self.postMessage({
12728 action: 'probeMp4Tracks',
12729 tracks: tracks,
12730 data: data
12731 }, [data.buffer]);
12732 }
12733 /**
12734 * Probe an mpeg2-ts segment to determine the start time of the segment in its
12735 * internal "media time," as well as whether it contains video and/or audio.
12736 *
12737 * @private
12738 * @param {Uint8Array} bytes - segment bytes
12739 * @param {number} baseStartTime
12740 * Relative reference timestamp used when adjusting frame timestamps for rollover.
12741 * This value should be in seconds, as it's converted to a 90kHz clock within the
12742 * function body.
12743 * @return {Object} The start time of the current segment in "media time" as well as
12744 * whether it contains video and/or audio
12745 */
12746 ;
12747
12748 _proto.probeTs = function probeTs(_ref3) {
12749 var data = _ref3.data,
12750 baseStartTime = _ref3.baseStartTime;
12751 var tsStartTime = typeof baseStartTime === 'number' && !isNaN(baseStartTime) ? baseStartTime * clock.ONE_SECOND_IN_TS : void 0;
12752 var timeInfo = tsInspector.inspect(data, tsStartTime);
12753 var result = null;
12754
12755 if (timeInfo) {
12756 result = {
12757 // each type's time info comes back as an array of 2 times, start and end
12758 hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
12759 hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
12760 };
12761
12762 if (result.hasVideo) {
12763 result.videoStart = timeInfo.video[0].ptsTime;
12764 }
12765
12766 if (result.hasAudio) {
12767 result.audioStart = timeInfo.audio[0].ptsTime;
12768 }
12769 }
12770
12771 this.self.postMessage({
12772 action: 'probeTs',
12773 result: result,
12774 data: data
12775 }, [data.buffer]);
12776 };
12777
12778 _proto.clearAllMp4Captions = function clearAllMp4Captions() {
12779 if (this.captionParser) {
12780 this.captionParser.clearAllCaptions();
12781 }
12782 };
12783
12784 _proto.clearParsedMp4Captions = function clearParsedMp4Captions() {
12785 if (this.captionParser) {
12786 this.captionParser.clearParsedCaptions();
12787 }
12788 }
12789 /**
12790 * Adds data (a ts segment) to the start of the transmuxer pipeline for
12791 * processing.
12792 *
12793 * @param {ArrayBuffer} data data to push into the muxer
12794 */
12795 ;
12796
12797 _proto.push = function push(data) {
12798 // Cast array buffer to correct type for transmuxer
12799 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
12800 this.transmuxer.push(segment);
12801 }
12802 /**
12803 * Recreate the transmuxer so that the next segment added via `push`
12804 * starts with a fresh transmuxer.
12805 */
12806 ;
12807
12808 _proto.reset = function reset() {
12809 this.transmuxer.reset();
12810 }
12811 /**
12812 * Set the value that will be used as the `baseMediaDecodeTime` for the
12813 * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
12814 * set relative to the first based on the PTS values.
12815 *
12816 * @param {Object} data used to set the timestamp offset in the muxer
12817 */
12818 ;
12819
12820 _proto.setTimestampOffset = function setTimestampOffset(data) {
12821 var timestampOffset = data.timestampOffset || 0;
12822 this.transmuxer.setBaseMediaDecodeTime(Math.round(clock.secondsToVideoTs(timestampOffset)));
12823 };
12824
12825 _proto.setAudioAppendStart = function setAudioAppendStart(data) {
12826 this.transmuxer.setAudioAppendStart(Math.ceil(clock.secondsToVideoTs(data.appendStart)));
12827 };
12828
12829 _proto.setRemux = function setRemux(data) {
12830 this.transmuxer.setRemux(data.remux);
12831 }
12832 /**
12833 * Forces the pipeline to finish processing the last segment and emit its
12834 * results.
12835 *
12836 * @param {Object} data event data, not really used
12837 */
12838 ;
12839
12840 _proto.flush = function flush(data) {
12841 this.transmuxer.flush(); // transmuxed done action is fired after both audio/video pipelines are flushed
12842
12843 self.postMessage({
12844 action: 'done',
12845 type: 'transmuxed'
12846 });
12847 };
12848
12849 _proto.endTimeline = function endTimeline() {
12850 this.transmuxer.endTimeline(); // transmuxed endedtimeline action is fired after both audio/video pipelines end their
12851 // timelines
12852
12853 self.postMessage({
12854 action: 'endedtimeline',
12855 type: 'transmuxed'
12856 });
12857 };
12858
12859 _proto.alignGopsWith = function alignGopsWith(data) {
12860 this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
12861 };
12862
12863 return MessageHandlers;
12864 }();
12865 /**
12866 * Our web worker interface so that things can talk to mux.js
12867 * that will be running in a web worker. The scope is passed to this by
12868 * webworkify.
12869 *
12870 * @param {Object} self the scope for the web worker
12871 */
12872
12873
12874 self.onmessage = function (event) {
12875 if (event.data.action === 'init' && event.data.options) {
12876 this.messageHandlers = new MessageHandlers(self, event.data.options);
12877 return;
12878 }
12879
12880 if (!this.messageHandlers) {
12881 this.messageHandlers = new MessageHandlers(self);
12882 }
12883
12884 if (event.data && event.data.action && event.data.action !== 'init') {
12885 if (this.messageHandlers[event.data.action]) {
12886 this.messageHandlers[event.data.action](event.data);
12887 }
12888 }
12889 };
12890}));
12891var TransmuxWorker = factory(workerCode$1);
12892/* rollup-plugin-worker-factory end for worker!/Users/ddashkevich/projects/vhs-release/src/transmuxer-worker.js */
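/*
 * Protocol sketch for driving the worker above (illustrative only): the
 * first message should be an 'init' carrying transmuxer options; every
 * later message is routed to the MessageHandlers method named by its
 * `action` field, and messages with no matching handler are ignored:
 *
 *   var worker = new TransmuxWorker();
 *   worker.postMessage({ action: 'init', options: {} });
 *   worker.postMessage({ action: 'setTimestampOffset', timestampOffset: 10 });
 */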
12893
12894var handleData_ = function handleData_(event, transmuxedData, callback) {
12895 var _event$data$segment = event.data.segment,
12896 type = _event$data$segment.type,
12897 initSegment = _event$data$segment.initSegment,
12898 captions = _event$data$segment.captions,
12899 captionStreams = _event$data$segment.captionStreams,
12900 metadata = _event$data$segment.metadata,
12901 videoFrameDtsTime = _event$data$segment.videoFrameDtsTime,
12902 videoFramePtsTime = _event$data$segment.videoFramePtsTime;
12903 transmuxedData.buffer.push({
12904 captions: captions,
12905 captionStreams: captionStreams,
12906 metadata: metadata
12907 });
12908 var boxes = event.data.segment.boxes || {
12909 data: event.data.segment.data
12910 };
12911 var result = {
12912 type: type,
12913 // cast ArrayBuffer to TypedArray
12914 data: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),
12915 initSegment: new Uint8Array(initSegment.data, initSegment.byteOffset, initSegment.byteLength)
12916 };
12917
12918 if (typeof videoFrameDtsTime !== 'undefined') {
12919 result.videoFrameDtsTime = videoFrameDtsTime;
12920 }
12921
12922 if (typeof videoFramePtsTime !== 'undefined') {
12923 result.videoFramePtsTime = videoFramePtsTime;
12924 }
12925
12926 callback(result);
12927};
12928var handleDone_ = function handleDone_(_ref) {
12929 var transmuxedData = _ref.transmuxedData,
12930 callback = _ref.callback;
12931 // Previously we only returned data on data events,
12932 // not on done events. Clear out the buffer to keep that consistent.
12933 transmuxedData.buffer = []; // all buffers should have been flushed from the muxer, so start processing anything we
12934 // have received
12935
12936 callback(transmuxedData);
12937};
12938var handleGopInfo_ = function handleGopInfo_(event, transmuxedData) {
12939 transmuxedData.gopInfo = event.data.gopInfo;
12940};
12941var processTransmux = function processTransmux(options) {
12942 var transmuxer = options.transmuxer,
12943 bytes = options.bytes,
12944 audioAppendStart = options.audioAppendStart,
12945 gopsToAlignWith = options.gopsToAlignWith,
12946 remux = options.remux,
12947 onData = options.onData,
12948 onTrackInfo = options.onTrackInfo,
12949 onAudioTimingInfo = options.onAudioTimingInfo,
12950 onVideoTimingInfo = options.onVideoTimingInfo,
12951 onVideoSegmentTimingInfo = options.onVideoSegmentTimingInfo,
12952 onAudioSegmentTimingInfo = options.onAudioSegmentTimingInfo,
12953 onId3 = options.onId3,
12954 onCaptions = options.onCaptions,
12955 onDone = options.onDone,
12956 onEndedTimeline = options.onEndedTimeline,
12957 onTransmuxerLog = options.onTransmuxerLog,
12958 isEndOfTimeline = options.isEndOfTimeline;
12959 var transmuxedData = {
12960 buffer: []
12961 };
12962 var waitForEndedTimelineEvent = isEndOfTimeline;
12963
12964 var handleMessage = function handleMessage(event) {
12965 if (transmuxer.currentTransmux !== options) {
12966 // disposed
12967 return;
12968 }
12969
12970 if (event.data.action === 'data') {
12971 handleData_(event, transmuxedData, onData);
12972 }
12973
12974 if (event.data.action === 'trackinfo') {
12975 onTrackInfo(event.data.trackInfo);
12976 }
12977
12978 if (event.data.action === 'gopInfo') {
12979 handleGopInfo_(event, transmuxedData);
12980 }
12981
12982 if (event.data.action === 'audioTimingInfo') {
12983 onAudioTimingInfo(event.data.audioTimingInfo);
12984 }
12985
12986 if (event.data.action === 'videoTimingInfo') {
12987 onVideoTimingInfo(event.data.videoTimingInfo);
12988 }
12989
12990 if (event.data.action === 'videoSegmentTimingInfo') {
12991 onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
12992 }
12993
12994 if (event.data.action === 'audioSegmentTimingInfo') {
12995 onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
12996 }
12997
12998 if (event.data.action === 'id3Frame') {
12999 onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
13000 }
13001
13002 if (event.data.action === 'caption') {
13003 onCaptions(event.data.caption);
13004 }
13005
13006 if (event.data.action === 'endedtimeline') {
13007 waitForEndedTimelineEvent = false;
13008 onEndedTimeline();
13009 }
13010
13011 if (event.data.action === 'log') {
13012 onTransmuxerLog(event.data.log);
13013 } // wait for the transmuxed event since we may have audio and video
13014
13015
13016 if (event.data.type !== 'transmuxed') {
13017 return;
13018 } // If the "endedtimeline" event has not yet fired, and this segment represents the end
13019 // of a timeline, that means there may still be data events before the segment
13020 // processing can be considered complete. In that case, the final event should be
13021 // an "endedtimeline" event with the type "transmuxed."
13022
13023
13024 if (waitForEndedTimelineEvent) {
13025 return;
13026 }
13027
13028 transmuxer.onmessage = null;
13029 handleDone_({
13030 transmuxedData: transmuxedData,
13031 callback: onDone
13032 });
13033 /* eslint-disable no-use-before-define */
13034
13035 dequeue(transmuxer);
13036 /* eslint-enable */
13037 };
13038
13039 transmuxer.onmessage = handleMessage;
13040
13041 if (audioAppendStart) {
13042 transmuxer.postMessage({
13043 action: 'setAudioAppendStart',
13044 appendStart: audioAppendStart
13045 });
13046 } // allow empty arrays to be passed to clear out GOPs
13047
13048
13049 if (Array.isArray(gopsToAlignWith)) {
13050 transmuxer.postMessage({
13051 action: 'alignGopsWith',
13052 gopsToAlignWith: gopsToAlignWith
13053 });
13054 }
13055
13056 if (typeof remux !== 'undefined') {
13057 transmuxer.postMessage({
13058 action: 'setRemux',
13059 remux: remux
13060 });
13061 }
13062
13063 if (bytes.byteLength) {
13064 var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
13065 var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
13066 transmuxer.postMessage({
13067 action: 'push',
13068 // Send the typed-array of data as an ArrayBuffer so that
13069 // it can be sent as a "Transferable" and avoid the costly
13070 // memory copy
13071 data: buffer,
13072 // To recreate the original typed-array, we need information
13073 // about what portion of the ArrayBuffer it was a view into
13074 byteOffset: byteOffset,
13075 byteLength: bytes.byteLength
13076 }, [buffer]);
13077 }
13078
13079 if (isEndOfTimeline) {
13080 transmuxer.postMessage({
13081 action: 'endTimeline'
13082 });
13083 } // even if we didn't push any bytes, we have to make sure we flush in case we reached
13084 // the end of the segment
13085
13086
13087 transmuxer.postMessage({
13088 action: 'flush'
13089 });
13090};
13091var dequeue = function dequeue(transmuxer) {
13092 transmuxer.currentTransmux = null;
13093
13094 if (transmuxer.transmuxQueue.length) {
13095 transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();
13096
13097 if (typeof transmuxer.currentTransmux === 'function') {
13098 transmuxer.currentTransmux();
13099 } else {
13100 processTransmux(transmuxer.currentTransmux);
13101 }
13102 }
13103};
13104var processAction = function processAction(transmuxer, action) {
13105 transmuxer.postMessage({
13106 action: action
13107 });
13108 dequeue(transmuxer);
13109};
13110var enqueueAction = function enqueueAction(action, transmuxer) {
13111 if (!transmuxer.currentTransmux) {
13112 transmuxer.currentTransmux = action;
13113 processAction(transmuxer, action);
13114 return;
13115 }
13116
13117 transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
13118};
13119var reset = function reset(transmuxer) {
13120 enqueueAction('reset', transmuxer);
13121};
13122var endTimeline = function endTimeline(transmuxer) {
13123 enqueueAction('endTimeline', transmuxer);
13124};
13125var transmux = function transmux(options) {
13126 if (!options.transmuxer.currentTransmux) {
13127 options.transmuxer.currentTransmux = options;
13128 processTransmux(options);
13129 return;
13130 }
13131
13132 options.transmuxer.transmuxQueue.push(options);
13133};
13134var createTransmuxer = function createTransmuxer(options) {
13135 var transmuxer = new TransmuxWorker();
13136 transmuxer.currentTransmux = null;
13137 transmuxer.transmuxQueue = [];
13138 var term = transmuxer.terminate;
13139
13140 transmuxer.terminate = function () {
13141 transmuxer.currentTransmux = null;
13142 transmuxer.transmuxQueue.length = 0;
13143 return term.call(transmuxer);
13144 };
13145
13146 transmuxer.postMessage({
13147 action: 'init',
13148 options: options
13149 });
13150 return transmuxer;
13151};
13152var segmentTransmuxer = {
13153 reset: reset,
13154 endTimeline: endTimeline,
13155 transmux: transmux,
13156 createTransmuxer: createTransmuxer
13157};
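/*
 * Usage sketch (illustrative only; `segmentBytes` is hypothetical and only
 * a subset of the callbacks processTransmux reads is shown): callers
 * create one worker-backed transmuxer and funnel each segment through
 * `transmux`, which queues jobs so only one segment is in flight at a time:
 *
 *   var transmuxer = segmentTransmuxer.createTransmuxer({ remux: true });
 *   segmentTransmuxer.transmux({
 *     transmuxer: transmuxer,
 *     bytes: segmentBytes,
 *     isEndOfTimeline: false,
 *     onData: function (result) {},
 *     onDone: function (result) {}
 *     // ...plus the remaining on* callbacks processTransmux expects
 *   });
 */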
13158
13159var workerCallback = function workerCallback(options) {
13160 var transmuxer = options.transmuxer;
13161 var endAction = options.endAction || options.action;
13162 var callback = options.callback;
13163
13164 var message = _extends__default["default"]({}, options, {
13165 endAction: null,
13166 transmuxer: null,
13167 callback: null
13168 });
13169
13170 var listenForEndEvent = function listenForEndEvent(event) {
13171 if (event.data.action !== endAction) {
13172 return;
13173 }
13174
13175 transmuxer.removeEventListener('message', listenForEndEvent); // transfer ownership of bytes back to us.
13176
13177 if (event.data.data) {
13178 event.data.data = new Uint8Array(event.data.data, options.byteOffset || 0, options.byteLength || event.data.data.byteLength);
13179
13180 if (options.data) {
13181 options.data = event.data.data;
13182 }
13183 }
13184
13185 callback(event.data);
13186 };
13187
13188 transmuxer.addEventListener('message', listenForEndEvent);
13189
13190 if (options.data) {
13191 var isArrayBuffer = options.data instanceof ArrayBuffer;
13192 message.byteOffset = isArrayBuffer ? 0 : options.data.byteOffset;
13193 message.byteLength = options.data.byteLength;
13194 var transfers = [isArrayBuffer ? options.data : options.data.buffer];
13195 transmuxer.postMessage(message, transfers);
13196 } else {
13197 transmuxer.postMessage(message);
13198 }
13199};
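/*
 * Usage sketch (illustrative only; `initSegmentBytes` is hypothetical):
 * workerCallback wraps a single post/reply cycle with the worker. The
 * reply is matched on `endAction` (defaulting to `action`), and any
 * transferred bytes are re-wrapped into a Uint8Array before the callback
 * fires -- mirroring how parseInitSegment below uses it:
 *
 *   workerCallback({
 *     action: 'probeMp4Tracks',
 *     data: initSegmentBytes, // its underlying buffer is transferred
 *     transmuxer: segment.transmuxer,
 *     callback: function (reply) { var tracks = reply.tracks; }
 *   });
 */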
13200
13201var REQUEST_ERRORS = {
13202 FAILURE: 2,
13203 TIMEOUT: -101,
13204 ABORTED: -102
13205};
13206/**
13207 * Abort all requests
13208 *
13209 * @param {Object} activeXhrs - an object that tracks all XHR requests
13210 */
13211
13212var abortAll = function abortAll(activeXhrs) {
13213 activeXhrs.forEach(function (xhr) {
13214 xhr.abort();
13215 });
13216};
13217/**
13218 * Gather important bandwidth stats once a request has completed
13219 *
13220 * @param {Object} request - the XHR request from which to gather stats
13221 */
13222
13223
13224var getRequestStats = function getRequestStats(request) {
13225 return {
13226 bandwidth: request.bandwidth,
13227 bytesReceived: request.bytesReceived || 0,
13228 roundTripTime: request.roundTripTime || 0
13229 };
13230};
13231/**
13232 * If possible gather bandwidth stats as a request is in
13233 * progress
13234 *
13235 * @param {Event} progressEvent - an event object from an XHR's progress event
13236 */
13237
13238
13239var getProgressStats = function getProgressStats(progressEvent) {
13240 var request = progressEvent.target;
13241 var roundTripTime = Date.now() - request.requestTime;
13242 var stats = {
13243 bandwidth: Infinity,
13244 bytesReceived: 0,
13245 roundTripTime: roundTripTime || 0
13246 };
13247 stats.bytesReceived = progressEvent.loaded; // This can result in Infinity if stats.roundTripTime is 0 but that is ok
13248 // because we should only use bandwidth stats on progress to determine when to
13249 // abort a request early due to insufficient bandwidth
13250
13251 stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
13252 return stats;
13253};
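/*
 * Worked example for the estimate above (illustrative only): bandwidth is
 * expressed in bits per second, so bytes are multiplied by 8 and the
 * millisecond round-trip time is scaled by 1000. Receiving 500000 bytes
 * over 2000ms gives Math.floor(500000 / 2000 * 8 * 1000) = 2000000 bits/s.
 */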
13254/**
13255 * Handle all error conditions in one place and return an object
13256 * with all the information
13257 *
13258 * @param {Error|null} error - if non-null signals an error occurred with the XHR
13259 * @param {Object} request - the XHR request that possibly generated the error
13260 */
13261
13262
13263var handleErrors = function handleErrors(error, request) {
13264 if (request.timedout) {
13265 return {
13266 status: request.status,
13267 message: 'HLS request timed-out at URL: ' + request.uri,
13268 code: REQUEST_ERRORS.TIMEOUT,
13269 xhr: request
13270 };
13271 }
13272
13273 if (request.aborted) {
13274 return {
13275 status: request.status,
13276 message: 'HLS request aborted at URL: ' + request.uri,
13277 code: REQUEST_ERRORS.ABORTED,
13278 xhr: request
13279 };
13280 }
13281
13282 if (error) {
13283 return {
13284 status: request.status,
13285 message: 'HLS request errored at URL: ' + request.uri,
13286 code: REQUEST_ERRORS.FAILURE,
13287 xhr: request
13288 };
13289 }
13290
13291 if (request.responseType === 'arraybuffer' && request.response.byteLength === 0) {
13292 return {
13293 status: request.status,
13294 message: 'Empty HLS response at URL: ' + request.uri,
13295 code: REQUEST_ERRORS.FAILURE,
13296 xhr: request
13297 };
13298 }
13299
13300 return null;
13301};
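/*
 * Usage sketch (illustrative only, using the handler names defined below):
 * callers treat a null return as success and otherwise branch on the error
 * code, for example ignoring deliberate aborts while surfacing failures:
 *
 *   var errorObj = handleErrors(error, request);
 *   if (errorObj && errorObj.code !== REQUEST_ERRORS.ABORTED) {
 *     finishProcessingFn(errorObj, segment);
 *   }
 */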
13302/**
13303 * Handle responses for key data and convert the key data to the correct format
13304 * for the decryption step later
13305 *
13306 * @param {Object} segment - a simplified copy of the segmentInfo object
13307 * from SegmentLoader
13308 * @param {Array} objects - objects to add the key bytes to.
13309 * @param {Function} finishProcessingFn - a callback to execute to continue processing
13310 * this request
13311 */
13312
13313
13314var handleKeyResponse = function handleKeyResponse(segment, objects, finishProcessingFn) {
13315 return function (error, request) {
13316 var response = request.response;
13317 var errorObj = handleErrors(error, request);
13318
13319 if (errorObj) {
13320 return finishProcessingFn(errorObj, segment);
13321 }
13322
13323 if (response.byteLength !== 16) {
13324 return finishProcessingFn({
13325 status: request.status,
13326 message: 'Invalid HLS key at URL: ' + request.uri,
13327 code: REQUEST_ERRORS.FAILURE,
13328 xhr: request
13329 }, segment);
13330 }
13331
13332 var view = new DataView(response);
13333 var bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
13334
13335 for (var i = 0; i < objects.length; i++) {
13336 objects[i].bytes = bytes;
13337 }
13338
13339 return finishProcessingFn(null, segment);
13340 };
13341};
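/*
 * Worked example for the key unpacking above (illustrative only): an
 * AES-128 key is exactly 16 bytes, and DataView#getUint32 reads big-endian
 * by default, so key bytes 00 01 02 03 ... 0e 0f become the four 32-bit
 * words [0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f] handed to the
 * decryption worker.
 */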
13342
13343var parseInitSegment = function parseInitSegment(segment, _callback) {
13344 var type = containers.detectContainerForBytes(segment.map.bytes); // TODO: We should also handle ts init segments here, but we
13345 // only know how to parse mp4 init segments at the moment
13346
13347 if (type !== 'mp4') {
13348 var uri = segment.map.resolvedUri || segment.map.uri;
13349 return _callback({
13350 internal: true,
13351 message: "Found unsupported " + (type || 'unknown') + " container for initialization segment at URL: " + uri,
13352 code: REQUEST_ERRORS.FAILURE
13353 });
13354 }
13355
13356 workerCallback({
13357 action: 'probeMp4Tracks',
13358 data: segment.map.bytes,
13359 transmuxer: segment.transmuxer,
13360 callback: function callback(_ref) {
13361 var tracks = _ref.tracks,
13362 data = _ref.data;
13363 // transfer bytes back to us
13364 segment.map.bytes = data;
13365 tracks.forEach(function (track) {
13366 segment.map.tracks = segment.map.tracks || {}; // only support one track of each type for now
13367
13368 if (segment.map.tracks[track.type]) {
13369 return;
13370 }
13371
13372 segment.map.tracks[track.type] = track;
13373
13374 if (typeof track.id === 'number' && track.timescale) {
13375 segment.map.timescales = segment.map.timescales || {};
13376 segment.map.timescales[track.id] = track.timescale;
13377 }
13378 });
13379 return _callback(null);
13380 }
13381 });
13382};
13383/**
13384 * Handle init-segment responses
13385 *
13386 * @param {Object} segment - a simplified copy of the segmentInfo object
13387 * from SegmentLoader
13388 * @param {Function} finishProcessingFn - a callback to execute to continue processing
13389 * this request
13390 */
13391
13392
13393var handleInitSegmentResponse = function handleInitSegmentResponse(_ref2) {
13394 var segment = _ref2.segment,
13395 finishProcessingFn = _ref2.finishProcessingFn;
13396 return function (error, request) {
13397 var errorObj = handleErrors(error, request);
13398
13399 if (errorObj) {
13400 return finishProcessingFn(errorObj, segment);
13401 }
13402
13403 var bytes = new Uint8Array(request.response); // init segment is encrypted, we will have to wait
13404 // until the key request is done to decrypt.
13405
13406 if (segment.map.key) {
13407 segment.map.encryptedBytes = bytes;
13408 return finishProcessingFn(null, segment);
13409 }
13410
13411 segment.map.bytes = bytes;
13412 parseInitSegment(segment, function (parseError) {
13413 if (parseError) {
13414 parseError.xhr = request;
13415 parseError.status = request.status;
13416 return finishProcessingFn(parseError, segment);
13417 }
13418
13419 finishProcessingFn(null, segment);
13420 });
13421 };
13422};
13423/**
13424 * Response handler for segment requests, making sure to set the correct
13425 * property depending on whether the segment is encrypted or not
13426 * Also records and keeps track of stats that are used for ABR purposes
13427 *
13428 * @param {Object} segment - a simplified copy of the segmentInfo object
13429 * from SegmentLoader
13430 * @param {Function} finishProcessingFn - a callback to execute to continue processing
13431 * this request
13432 */
13433
13434
13435var handleSegmentResponse = function handleSegmentResponse(_ref3) {
13436 var segment = _ref3.segment,
13437 finishProcessingFn = _ref3.finishProcessingFn,
13438 responseType = _ref3.responseType;
13439 return function (error, request) {
13440 var errorObj = handleErrors(error, request);
13441
13442 if (errorObj) {
13443 return finishProcessingFn(errorObj, segment);
13444 }
13445
13446 var newBytes = // although responseText "should" exist, this guard serves to prevent an error being
13447 // thrown for two primary cases:
13448 // 1. the mime type override stops working, or is not implemented for a specific
13449 // browser
13450 // 2. when using mock XHR libraries like sinon that do not allow the override behavior
13451 responseType === 'arraybuffer' || !request.responseText ? request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));
13452 segment.stats = getRequestStats(request);
13453
13454 if (segment.key) {
13455 segment.encryptedBytes = new Uint8Array(newBytes);
13456 } else {
13457 segment.bytes = new Uint8Array(newBytes);
13458 }
13459
13460 return finishProcessingFn(null, segment);
13461 };
13462};
13463
13464var transmuxAndNotify = function transmuxAndNotify(_ref4) {
13465 var segment = _ref4.segment,
13466 bytes = _ref4.bytes,
13467 trackInfoFn = _ref4.trackInfoFn,
13468 timingInfoFn = _ref4.timingInfoFn,
13469 videoSegmentTimingInfoFn = _ref4.videoSegmentTimingInfoFn,
13470 audioSegmentTimingInfoFn = _ref4.audioSegmentTimingInfoFn,
13471 id3Fn = _ref4.id3Fn,
13472 captionsFn = _ref4.captionsFn,
13473 isEndOfTimeline = _ref4.isEndOfTimeline,
13474 endedTimelineFn = _ref4.endedTimelineFn,
13475 dataFn = _ref4.dataFn,
13476 doneFn = _ref4.doneFn,
13477 onTransmuxerLog = _ref4.onTransmuxerLog;
13478 var fmp4Tracks = segment.map && segment.map.tracks || {};
13479 var isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.
13480 // One reason for this is that in the case of full segments, we want to trust start
13481 // times from the probe, rather than the transmuxer.
13482
13483 var audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
13484 var audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
13485 var videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
13486 var videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end');
13487
13488 var finish = function finish() {
13489 return transmux({
13490 bytes: bytes,
13491 transmuxer: segment.transmuxer,
13492 audioAppendStart: segment.audioAppendStart,
13493 gopsToAlignWith: segment.gopsToAlignWith,
13494 remux: isMuxed,
13495 onData: function onData(result) {
13496 result.type = result.type === 'combined' ? 'video' : result.type;
13497 dataFn(segment, result);
13498 },
13499 onTrackInfo: function onTrackInfo(trackInfo) {
13500 if (trackInfoFn) {
13501 if (isMuxed) {
13502 trackInfo.isMuxed = true;
13503 }
13504
13505 trackInfoFn(segment, trackInfo);
13506 }
13507 },
13508 onAudioTimingInfo: function onAudioTimingInfo(audioTimingInfo) {
13509 // we only want the first start value we encounter
13510 if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
13511 audioStartFn(audioTimingInfo.start);
13512 audioStartFn = null;
13513 } // we want to continually update the end time
13514
13515
13516 if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
13517 audioEndFn(audioTimingInfo.end);
13518 }
13519 },
13520 onVideoTimingInfo: function onVideoTimingInfo(videoTimingInfo) {
13521 // we only want the first start value we encounter
13522 if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
13523 videoStartFn(videoTimingInfo.start);
13524 videoStartFn = null;
13525 } // we want to continually update the end time
13526
13527
13528 if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
13529 videoEndFn(videoTimingInfo.end);
13530 }
13531 },
13532 onVideoSegmentTimingInfo: function onVideoSegmentTimingInfo(videoSegmentTimingInfo) {
13533 videoSegmentTimingInfoFn(videoSegmentTimingInfo);
13534 },
13535 onAudioSegmentTimingInfo: function onAudioSegmentTimingInfo(audioSegmentTimingInfo) {
13536 audioSegmentTimingInfoFn(audioSegmentTimingInfo);
13537 },
13538 onId3: function onId3(id3Frames, dispatchType) {
13539 id3Fn(segment, id3Frames, dispatchType);
13540 },
13541 onCaptions: function onCaptions(captions) {
13542 captionsFn(segment, [captions]);
13543 },
13544 isEndOfTimeline: isEndOfTimeline,
13545 onEndedTimeline: function onEndedTimeline() {
13546 endedTimelineFn();
13547 },
13548 onTransmuxerLog: onTransmuxerLog,
13549 onDone: function onDone(result) {
13550 if (!doneFn) {
13551 return;
13552 }
13553
13554 result.type = result.type === 'combined' ? 'video' : result.type;
13555 doneFn(null, segment, result);
13556 }
13557 });
13558 }; // In the transmuxer, we don't yet have the ability to extract a "proper" start time.
13559 // That means cached frame data may corrupt our notion of where this segment
13560 // really starts. To get around this, probe for the info needed.
13561
13562
13563 workerCallback({
13564 action: 'probeTs',
13565 transmuxer: segment.transmuxer,
13566 data: bytes,
13567 baseStartTime: segment.baseStartTime,
13568 callback: function callback(data) {
13569 segment.bytes = bytes = data.data;
13570 var probeResult = data.result;
13571
13572 if (probeResult) {
13573 trackInfoFn(segment, {
13574 hasAudio: probeResult.hasAudio,
13575 hasVideo: probeResult.hasVideo,
13576 isMuxed: isMuxed
13577 });
13578 trackInfoFn = null;
13579
13580 if (probeResult.hasAudio && !isMuxed) {
13581 audioStartFn(probeResult.audioStart);
13582 }
13583
13584 if (probeResult.hasVideo) {
13585 videoStartFn(probeResult.videoStart);
13586 }
13587
13588 audioStartFn = null;
13589 videoStartFn = null;
13590 }
13591
13592 finish();
13593 }
13594 });
13595};
13596
13597var handleSegmentBytes = function handleSegmentBytes(_ref5) {
13598 var segment = _ref5.segment,
13599 bytes = _ref5.bytes,
13600 trackInfoFn = _ref5.trackInfoFn,
13601 timingInfoFn = _ref5.timingInfoFn,
13602 videoSegmentTimingInfoFn = _ref5.videoSegmentTimingInfoFn,
13603 audioSegmentTimingInfoFn = _ref5.audioSegmentTimingInfoFn,
13604 id3Fn = _ref5.id3Fn,
13605 captionsFn = _ref5.captionsFn,
13606 isEndOfTimeline = _ref5.isEndOfTimeline,
13607 endedTimelineFn = _ref5.endedTimelineFn,
13608 dataFn = _ref5.dataFn,
13609 doneFn = _ref5.doneFn,
13610 onTransmuxerLog = _ref5.onTransmuxerLog;
13611 var bytesAsUint8Array = new Uint8Array(bytes); // TODO:
13612 // We should have a handler that fetches the number of bytes required
13613 // to check if something is fmp4. This will allow us to save bandwidth
13614 // because we can only blacklist a playlist and abort requests
13615 // by codec after trackinfo triggers.
13616
13617 if (containers.isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
13618 segment.isFmp4 = true;
13619 var tracks = segment.map.tracks;
13620 var trackInfo = {
13621 isFmp4: true,
13622 hasVideo: !!tracks.video,
13623 hasAudio: !!tracks.audio
13624 }; // if we have an audio track, with a codec that is not set to
13625 // encrypted audio
13626
13627 if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
13628 trackInfo.audioCodec = tracks.audio.codec;
13629 } // if we have a video track, with a codec that is not set to
13630 // encrypted video
13631
13632
13633 if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
13634 trackInfo.videoCodec = tracks.video.codec;
13635 }
13636
13637 if (tracks.video && tracks.audio) {
13638 trackInfo.isMuxed = true;
13639 } // since we don't support appending fmp4 data on progress, we know we have the full
13640 // segment here
13641
13642
13643 trackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start
13644 // time. The end time can be roughly calculated by the receiver using the duration.
13645 //
13646 // Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
13647 // that is the true start of the segment (where the playback engine should begin
13648 // decoding).
13649
13650 var finishLoading = function finishLoading(captions) {
13651 // if the track still has audio at this point it is only possible
13652 // for it to be audio only. See `tracks.video && tracks.audio` if statement
13653 // above.
13654 // we make sure to use segment.bytes here, as it stays in sync with
13655 // bytesAsUint8Array whenever the worker transfers the buffer back to us
13655 dataFn(segment, {
13656 data: bytesAsUint8Array,
13657 type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
13658 });
13659
13660 if (captions && captions.length) {
13661 captionsFn(segment, captions);
13662 }
13663
13664 doneFn(null, segment, {});
13665 };
13666
13667 workerCallback({
13668 action: 'probeMp4StartTime',
13669 timescales: segment.map.timescales,
13670 data: bytesAsUint8Array,
13671 transmuxer: segment.transmuxer,
13672 callback: function callback(_ref6) {
13673 var data = _ref6.data,
13674 startTime = _ref6.startTime;
13675 // transfer bytes back to us
13676 bytes = data.buffer;
13677 segment.bytes = bytesAsUint8Array = data;
13678
13679 if (trackInfo.hasAudio && !trackInfo.isMuxed) {
13680 timingInfoFn(segment, 'audio', 'start', startTime);
13681 }
13682
13683 if (trackInfo.hasVideo) {
13684 timingInfoFn(segment, 'video', 'start', startTime);
13685 } // Run through the CaptionParser in case there are captions.
13686 // Initialize CaptionParser if it hasn't been yet
13687
13688
13689 if (!tracks.video || !data.byteLength || !segment.transmuxer) {
13690 finishLoading();
13691 return;
13692 }
13693
13694 workerCallback({
13695 action: 'pushMp4Captions',
13696 endAction: 'mp4Captions',
13697 transmuxer: segment.transmuxer,
13698 data: bytesAsUint8Array,
13699 timescales: segment.map.timescales,
13700 trackIds: [tracks.video.id],
13701 callback: function callback(message) {
13702 // transfer bytes back to us
13703 bytes = message.data.buffer;
13704 segment.bytes = bytesAsUint8Array = message.data;
13705 message.logs.forEach(function (log) {
13706 onTransmuxerLog(videojs__default["default"].mergeOptions(log, {
13707 stream: 'mp4CaptionParser'
13708 }));
13709 });
13710 finishLoading(message.captions);
13711 }
13712 });
13713 }
13714 });
13715 return;
13716 } // VTT or other segments that don't need processing
13717
13718
13719 if (!segment.transmuxer) {
13720 doneFn(null, segment, {});
13721 return;
13722 }
13723
13724 if (typeof segment.container === 'undefined') {
13725 segment.container = containers.detectContainerForBytes(bytesAsUint8Array);
13726 }
13727
13728 if (segment.container !== 'ts' && segment.container !== 'aac') {
13729 trackInfoFn(segment, {
13730 hasAudio: false,
13731 hasVideo: false
13732 });
13733 doneFn(null, segment, {});
13734 return;
13735 } // ts or aac
13736
13737
13738 transmuxAndNotify({
13739 segment: segment,
13740 bytes: bytes,
13741 trackInfoFn: trackInfoFn,
13742 timingInfoFn: timingInfoFn,
13743 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
13744 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
13745 id3Fn: id3Fn,
13746 captionsFn: captionsFn,
13747 isEndOfTimeline: isEndOfTimeline,
13748 endedTimelineFn: endedTimelineFn,
13749 dataFn: dataFn,
13750 doneFn: doneFn,
13751 onTransmuxerLog: onTransmuxerLog
13752 });
13753};
13754
13755var decrypt = function decrypt(_ref7, callback) {
13756 var id = _ref7.id,
13757 key = _ref7.key,
13758 encryptedBytes = _ref7.encryptedBytes,
13759 decryptionWorker = _ref7.decryptionWorker;
13760
13761 var decryptionHandler = function decryptionHandler(event) {
13762 if (event.data.source === id) {
13763 decryptionWorker.removeEventListener('message', decryptionHandler);
13764 var decrypted = event.data.decrypted;
13765 callback(new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength));
13766 }
13767 };
13768
13769 decryptionWorker.addEventListener('message', decryptionHandler);
13770 var keyBytes;
13771
13772 if (key.bytes.slice) {
13773 keyBytes = key.bytes.slice();
13774 } else {
13775 keyBytes = new Uint32Array(Array.prototype.slice.call(key.bytes));
13776 } // incrementally decrypt the bytes
13777
13778
13779 decryptionWorker.postMessage(createTransferableMessage({
13780 source: id,
13781 encrypted: encryptedBytes,
13782 key: keyBytes,
13783 iv: key.iv
13784 }), [encryptedBytes.buffer, keyBytes.buffer]);
13785};
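/*
 * Protocol sketch for the decryption worker above (illustrative only):
 * requests and replies are correlated by the `source` id, so several
 * decryptions can share one worker. key.bytes is copied first because both
 * buffers are posted as transferables, which detaches the sender's copies;
 * the reply is expected to carry the decrypted bytes back the same way:
 *
 *   { source: id, decrypted: { bytes: ArrayBuffer, byteOffset: 0, byteLength: n } }
 */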
13786/**
13787 * Decrypt the segment via the decryption web worker
13788 *
13789 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
13790 * routines
13791 * @param {Object} segment - a simplified copy of the segmentInfo object
13792 * from SegmentLoader
13793 * @param {Function} trackInfoFn - a callback that receives track info
13794 * @param {Function} timingInfoFn - a callback that receives timing info
13795 * @param {Function} videoSegmentTimingInfoFn
13796 * a callback that receives video timing info based on media times and
13797 * any adjustments made by the transmuxer
13798 * @param {Function} audioSegmentTimingInfoFn
13799 * a callback that receives audio timing info based on media times and
13800 * any adjustments made by the transmuxer
13801 * @param {boolean} isEndOfTimeline
13802 * true if this segment represents the last segment in a timeline
13803 * @param {Function} endedTimelineFn
13804 * a callback made when a timeline is ended, will only be called if
13805 * isEndOfTimeline is true
13806 * @param {Function} dataFn - a callback that is executed when segment bytes are available
13807 * and ready to use
13808 * @param {Function} doneFn - a callback that is executed after decryption has completed
13809 */
13810
13811
var decryptSegment = function decryptSegment(_ref8) {
  var decryptionWorker = _ref8.decryptionWorker,
      segment = _ref8.segment,
      trackInfoFn = _ref8.trackInfoFn,
      timingInfoFn = _ref8.timingInfoFn,
      videoSegmentTimingInfoFn = _ref8.videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn = _ref8.audioSegmentTimingInfoFn,
      id3Fn = _ref8.id3Fn,
      captionsFn = _ref8.captionsFn,
      isEndOfTimeline = _ref8.isEndOfTimeline,
      endedTimelineFn = _ref8.endedTimelineFn,
      dataFn = _ref8.dataFn,
      doneFn = _ref8.doneFn,
      onTransmuxerLog = _ref8.onTransmuxerLog;
  decrypt({
    id: segment.requestId,
    key: segment.key,
    encryptedBytes: segment.encryptedBytes,
    decryptionWorker: decryptionWorker
  }, function (decryptedBytes) {
    segment.bytes = decryptedBytes;
    handleSegmentBytes({
      segment: segment,
      bytes: segment.bytes,
      trackInfoFn: trackInfoFn,
      timingInfoFn: timingInfoFn,
      videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
      id3Fn: id3Fn,
      captionsFn: captionsFn,
      isEndOfTimeline: isEndOfTimeline,
      endedTimelineFn: endedTimelineFn,
      dataFn: dataFn,
      doneFn: doneFn,
      onTransmuxerLog: onTransmuxerLog
    });
  });
};
/**
 * This function waits for all XHRs to finish (with either success or failure)
 * before continuing processing via its callback. The function gathers errors
 * from each request into a single errors array so that the error status for
 * each request can be examined later.
 *
 * @param {Object} activeXhrs - an object that tracks all XHR requests
 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
 *        routines
 * @param {Function} trackInfoFn - a callback that receives track info
 * @param {Function} timingInfoFn - a callback that receives timing info
 * @param {Function} videoSegmentTimingInfoFn
 *        a callback that receives video timing info based on media times and
 *        any adjustments made by the transmuxer
 * @param {Function} audioSegmentTimingInfoFn
 *        a callback that receives audio timing info based on media times and
 *        any adjustments made by the transmuxer
 * @param {Function} id3Fn - a callback that receives ID3 metadata
 * @param {Function} captionsFn - a callback that receives captions
 * @param {boolean} isEndOfTimeline
 *        true if this segment represents the last segment in a timeline
 * @param {Function} endedTimelineFn
 *        a callback made when a timeline is ended, will only be called if
 *        isEndOfTimeline is true
 * @param {Function} dataFn - a callback that is executed when segment bytes are available
 *        and ready to use
 * @param {Function} doneFn - a callback that is executed after all resources have been
 *        downloaded and any decryption completed
 */


var waitForCompletion = function waitForCompletion(_ref9) {
  var activeXhrs = _ref9.activeXhrs,
      decryptionWorker = _ref9.decryptionWorker,
      trackInfoFn = _ref9.trackInfoFn,
      timingInfoFn = _ref9.timingInfoFn,
      videoSegmentTimingInfoFn = _ref9.videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn = _ref9.audioSegmentTimingInfoFn,
      id3Fn = _ref9.id3Fn,
      captionsFn = _ref9.captionsFn,
      isEndOfTimeline = _ref9.isEndOfTimeline,
      endedTimelineFn = _ref9.endedTimelineFn,
      dataFn = _ref9.dataFn,
      doneFn = _ref9.doneFn,
      onTransmuxerLog = _ref9.onTransmuxerLog;
  var count = 0;
  var didError = false;
  return function (error, segment) {
    if (didError) {
      return;
    }

    if (error) {
      didError = true; // If there are errors, we have to abort any outstanding requests

      abortAll(activeXhrs); // Even though the requests above are aborted, and in theory we could wait until we
      // handle the aborted events from those requests, there are some cases where we may
      // never get an aborted event. For instance, if the network connection is lost and
      // there were two requests, the first may have triggered an error immediately, while
      // the second request remains unsent. In that case, the aborted algorithm will not
      // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
      //
      // We also can't rely on the ready state of the XHR, since the request that
      // triggered the connection error may also show as a ready state of 0 (unsent).
      // Therefore, we have to finish this group of requests immediately after the first
      // seen error.

      return doneFn(error, segment);
    }

    count += 1;

    if (count === activeXhrs.length) {
      var segmentFinish = function segmentFinish() {
        if (segment.encryptedBytes) {
          return decryptSegment({
            decryptionWorker: decryptionWorker,
            segment: segment,
            trackInfoFn: trackInfoFn,
            timingInfoFn: timingInfoFn,
            videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
            audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
            id3Fn: id3Fn,
            captionsFn: captionsFn,
            isEndOfTimeline: isEndOfTimeline,
            endedTimelineFn: endedTimelineFn,
            dataFn: dataFn,
            doneFn: doneFn,
            onTransmuxerLog: onTransmuxerLog
          });
        } // Otherwise, everything is ready, so just continue


        handleSegmentBytes({
          segment: segment,
          bytes: segment.bytes,
          trackInfoFn: trackInfoFn,
          timingInfoFn: timingInfoFn,
          videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
          audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
          id3Fn: id3Fn,
          captionsFn: captionsFn,
          isEndOfTimeline: isEndOfTimeline,
          endedTimelineFn: endedTimelineFn,
          dataFn: dataFn,
          doneFn: doneFn,
          onTransmuxerLog: onTransmuxerLog
        });
      }; // Keep track of when *all* of the requests have completed


      segment.endOfAllRequests = Date.now();

      if (segment.map && segment.map.encryptedBytes && !segment.map.bytes) {
        return decrypt({
          decryptionWorker: decryptionWorker,
          // add -init to the "id" to differentiate between segment
          // and init segment decryption, just in case they happen
          // at the same time at some point in the future.
          id: segment.requestId + '-init',
          encryptedBytes: segment.map.encryptedBytes,
          key: segment.map.key
        }, function (decryptedBytes) {
          segment.map.bytes = decryptedBytes;
          parseInitSegment(segment, function (parseError) {
            if (parseError) {
              abortAll(activeXhrs);
              return doneFn(parseError, segment);
            }

            segmentFinish();
          });
        });
      }

      segmentFinish();
    }
  };
};
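
// Illustrative only: the completion-counting pattern used by
// `waitForCompletion`, distilled. One shared callback is handed to every
// request in a group; the group finalizes after `expectedCount` successes,
// or immediately on the first error.
var exampleWaitForAll = function exampleWaitForAll(expectedCount, doneFn) {
  var count = 0;
  var didError = false;
  return function (error, result) {
    if (didError) {
      return;
    }

    if (error) {
      didError = true;
      return doneFn(error, result);
    }

    count += 1;

    if (count === expectedCount) {
      doneFn(null, result);
    }
  };
};
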
/**
 * Calls the abort callback if any request within the batch was aborted. Will only call
 * the callback once per batch of requests, even if multiple were aborted.
 *
 * @param {Object} loadendState - state to check to see if the abort function was called
 * @param {Function} abortFn - callback to call for abort
 */


var handleLoadEnd = function handleLoadEnd(_ref10) {
  var loadendState = _ref10.loadendState,
      abortFn = _ref10.abortFn;
  return function (event) {
    var request = event.target;

    if (request.aborted && abortFn && !loadendState.calledAbortFn) {
      abortFn();
      loadendState.calledAbortFn = true;
    }
  };
};
/**
 * Simple progress event callback handler that gathers some stats before
 * executing a provided callback with the `segment` object
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 *        from SegmentLoader
 * @param {Function} progressFn - a callback that is executed each time a progress event
 *        is received
 * @param {Function} trackInfoFn - a callback that receives track info
 * @param {Function} timingInfoFn - a callback that receives timing info
 * @param {Function} videoSegmentTimingInfoFn
 *        a callback that receives video timing info based on media times and
 *        any adjustments made by the transmuxer
 * @param {Function} audioSegmentTimingInfoFn
 *        a callback that receives audio timing info based on media times and
 *        any adjustments made by the transmuxer
 * @param {boolean} isEndOfTimeline
 *        true if this segment represents the last segment in a timeline
 * @param {Function} endedTimelineFn
 *        a callback made when a timeline is ended, will only be called if
 *        isEndOfTimeline is true
 * @param {Function} dataFn - a callback that is executed when segment bytes are available
 *        and ready to use
 * @param {Event} event - the progress event object from XMLHttpRequest
 */


var handleProgress = function handleProgress(_ref11) {
  var segment = _ref11.segment,
      progressFn = _ref11.progressFn; // the remaining options are unused by this handler; these bare property
  // reads are left over from transpiling the unused destructured parameters

  _ref11.trackInfoFn;
  _ref11.timingInfoFn;
  _ref11.videoSegmentTimingInfoFn;
  _ref11.audioSegmentTimingInfoFn;
  _ref11.id3Fn;
  _ref11.captionsFn;
  _ref11.isEndOfTimeline;
  _ref11.endedTimelineFn;
  _ref11.dataFn;
  return function (event) {
    var request = event.target;

    if (request.aborted) {
      return;
    }

    segment.stats = videojs__default["default"].mergeOptions(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of data

    if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
      segment.stats.firstBytesReceivedAt = Date.now();
    }

    return progressFn(event, segment);
  };
};
/**
 * Loads all resources and does any processing necessary for a media-segment
 *
 * Features:
 *   decrypts the media-segment if it has a key uri and an iv
 *   aborts *all* requests if *any* one request fails
 *
 * The segment object, at minimum, has the following format:
 * {
 *   resolvedUri: String,
 *   [transmuxer]: Object,
 *   [byterange]: {
 *     offset: Number,
 *     length: Number
 *   },
 *   [key]: {
 *     resolvedUri: String
 *     [byterange]: {
 *       offset: Number,
 *       length: Number
 *     },
 *     iv: {
 *       bytes: Uint32Array
 *     }
 *   },
 *   [map]: {
 *     resolvedUri: String,
 *     [byterange]: {
 *       offset: Number,
 *       length: Number
 *     },
 *     [bytes]: Uint8Array
 *   }
 * }
 * ...where [name] denotes optional properties
 *
 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
 * @param {Object} xhrOptions - the base options to provide to all xhr requests
 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
 *        decryption routines
 * @param {Object} segment - a simplified copy of the segmentInfo object
 *        from SegmentLoader
 * @param {Function} abortFn - a callback called (only once) if any piece of a request was
 *        aborted
 * @param {Function} progressFn - a callback that receives progress events from the main
 *        segment's xhr request
 * @param {Function} trackInfoFn - a callback that receives track info
 * @param {Function} timingInfoFn - a callback that receives timing info
 * @param {Function} videoSegmentTimingInfoFn
 *        a callback that receives video timing info based on media times and
 *        any adjustments made by the transmuxer
 * @param {Function} audioSegmentTimingInfoFn
 *        a callback that receives audio timing info based on media times and
 *        any adjustments made by the transmuxer
 * @param {Function} id3Fn - a callback that receives ID3 metadata
 * @param {Function} captionsFn - a callback that receives captions
 * @param {boolean} isEndOfTimeline
 *        true if this segment represents the last segment in a timeline
 * @param {Function} endedTimelineFn
 *        a callback made when a timeline is ended, will only be called if
 *        isEndOfTimeline is true
 * @param {Function} dataFn - a callback that receives data from the main segment's xhr
 *        request, transmuxed if needed
 * @param {Function} doneFn - a callback that is executed only once all requests have
 *        succeeded or failed
 * @return {Function} a function that, when invoked, immediately aborts all
 *         outstanding requests
 */


var mediaSegmentRequest = function mediaSegmentRequest(_ref12) {
  var xhr = _ref12.xhr,
      xhrOptions = _ref12.xhrOptions,
      decryptionWorker = _ref12.decryptionWorker,
      segment = _ref12.segment,
      abortFn = _ref12.abortFn,
      progressFn = _ref12.progressFn,
      trackInfoFn = _ref12.trackInfoFn,
      timingInfoFn = _ref12.timingInfoFn,
      videoSegmentTimingInfoFn = _ref12.videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn = _ref12.audioSegmentTimingInfoFn,
      id3Fn = _ref12.id3Fn,
      captionsFn = _ref12.captionsFn,
      isEndOfTimeline = _ref12.isEndOfTimeline,
      endedTimelineFn = _ref12.endedTimelineFn,
      dataFn = _ref12.dataFn,
      doneFn = _ref12.doneFn,
      onTransmuxerLog = _ref12.onTransmuxerLog;
  var activeXhrs = [];
  var finishProcessingFn = waitForCompletion({
    activeXhrs: activeXhrs,
    decryptionWorker: decryptionWorker,
    trackInfoFn: trackInfoFn,
    timingInfoFn: timingInfoFn,
    videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
    audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
    id3Fn: id3Fn,
    captionsFn: captionsFn,
    isEndOfTimeline: isEndOfTimeline,
    endedTimelineFn: endedTimelineFn,
    dataFn: dataFn,
    doneFn: doneFn,
    onTransmuxerLog: onTransmuxerLog
  }); // optionally, request the decryption key

  if (segment.key && !segment.key.bytes) {
    var objects = [segment.key];

    if (segment.map && !segment.map.bytes && segment.map.key && segment.map.key.resolvedUri === segment.key.resolvedUri) {
      objects.push(segment.map.key);
    }

    var keyRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
      uri: segment.key.resolvedUri,
      responseType: 'arraybuffer'
    });
    var keyRequestCallback = handleKeyResponse(segment, objects, finishProcessingFn);
    var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
    activeXhrs.push(keyXhr);
  } // optionally, request the associated media init segment


  if (segment.map && !segment.map.bytes) {
    var differentMapKey = segment.map.key && (!segment.key || segment.key.resolvedUri !== segment.map.key.resolvedUri);

    if (differentMapKey) {
      var mapKeyRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
        uri: segment.map.key.resolvedUri,
        responseType: 'arraybuffer'
      });
      var mapKeyRequestCallback = handleKeyResponse(segment, [segment.map.key], finishProcessingFn);
      var mapKeyXhr = xhr(mapKeyRequestOptions, mapKeyRequestCallback);
      activeXhrs.push(mapKeyXhr);
    }

    var initSegmentOptions = videojs__default["default"].mergeOptions(xhrOptions, {
      uri: segment.map.resolvedUri,
      responseType: 'arraybuffer',
      headers: segmentXhrHeaders(segment.map)
    });
    var initSegmentRequestCallback = handleInitSegmentResponse({
      segment: segment,
      finishProcessingFn: finishProcessingFn
    });
    var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
    activeXhrs.push(initSegmentXhr);
  }

  var segmentRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
    uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,
    responseType: 'arraybuffer',
    headers: segmentXhrHeaders(segment)
  });
  var segmentRequestCallback = handleSegmentResponse({
    segment: segment,
    finishProcessingFn: finishProcessingFn,
    responseType: segmentRequestOptions.responseType
  });
  var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
  segmentXhr.addEventListener('progress', handleProgress({
    segment: segment,
    progressFn: progressFn,
    trackInfoFn: trackInfoFn,
    timingInfoFn: timingInfoFn,
    videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
    audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
    id3Fn: id3Fn,
    captionsFn: captionsFn,
    isEndOfTimeline: isEndOfTimeline,
    endedTimelineFn: endedTimelineFn,
    dataFn: dataFn
  }));
  activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but we should not make
  // callbacks multiple times, provide a shared state object

  var loadendState = {};
  activeXhrs.forEach(function (activeXhr) {
    activeXhr.addEventListener('loadend', handleLoadEnd({
      loadendState: loadendState,
      abortFn: abortFn
    }));
  });
  return function () {
    return abortAll(activeXhrs);
  };
};
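
// A usage sketch (illustrative, not part of the player's real wiring): calling
// `mediaSegmentRequest` with no-op callbacks. The `xhr` wrapper, a decryption
// worker instance, and a fully resolved `segment` object are assumed to be
// supplied by the caller; in practice SegmentLoader owns all of these.
var exampleMediaSegmentRequestUsage = function exampleMediaSegmentRequestUsage(xhr, decryptionWorker, segment) {
  var noop = function noop() {};

  var abort = mediaSegmentRequest({
    xhr: xhr,
    xhrOptions: {
      timeout: 45000 // made-up timeout, in ms
    },
    decryptionWorker: decryptionWorker,
    segment: segment,
    abortFn: noop,
    progressFn: noop,
    trackInfoFn: noop,
    timingInfoFn: noop,
    videoSegmentTimingInfoFn: noop,
    audioSegmentTimingInfoFn: noop,
    id3Fn: noop,
    captionsFn: noop,
    isEndOfTimeline: false,
    endedTimelineFn: noop,
    dataFn: noop,
    doneFn: function doneFn(error, finishedSegment) {
      // on success, finishedSegment.bytes holds the (possibly decrypted) data
    },
    onTransmuxerLog: noop
  }); // the returned function aborts every outstanding request for the segment

  return abort;
};
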

/**
 * @file - codecs.js - Handles tasks regarding codec strings, such as
 * translating codec strings into objects that can be examined.
 */
var logFn$1 = logger('CodecUtils');
/**
 * Returns a set of codec strings parsed from the playlist or the default
 * codec strings if no codecs were specified in the playlist
 *
 * @param {Playlist} media the current media playlist
 * @return {Object} an object with the video and audio codecs
 */

var getCodecs = function getCodecs(media) {
  // if the codecs were explicitly specified, use them instead of the
  // defaults
  var mediaAttributes = media.attributes || {};

  if (mediaAttributes.CODECS) {
    return codecs_js.parseCodecs(mediaAttributes.CODECS);
  }
};

// whether the media playlist is associated with an alternate audio group
// in the master
var isMaat = function isMaat(master, media) {
  var mediaAttributes = media.attributes || {};
  return master && master.mediaGroups && master.mediaGroups.AUDIO && mediaAttributes.AUDIO && master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
};
var isMuxed = function isMuxed(master, media) {
  if (!isMaat(master, media)) {
    return true;
  }

  var mediaAttributes = media.attributes || {};
  var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];

  for (var groupId in audioGroup) {
    // If an audio group has a URI (the case for HLS, as HLS will use external playlists),
    // or there are listed playlists (the case for DASH, as the manifest will have already
    // provided all of the details necessary to generate the audio playlist, as opposed to
    // HLS' externally requested playlists), then the content is demuxed.
    if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
      return true;
    }
  }

  return false;
};
var unwrapCodecList = function unwrapCodecList(codecList) {
  var codecs = {};
  codecList.forEach(function (_ref) {
    var mediaType = _ref.mediaType,
        type = _ref.type,
        details = _ref.details;
    codecs[mediaType] = codecs[mediaType] || [];
    codecs[mediaType].push(codecs_js.translateLegacyCodec("" + type + details));
  });
  Object.keys(codecs).forEach(function (mediaType) {
    if (codecs[mediaType].length > 1) {
      logFn$1("multiple " + mediaType + " codecs found as attributes: " + codecs[mediaType].join(', ') + ". Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.");
      codecs[mediaType] = null;
      return;
    }

    codecs[mediaType] = codecs[mediaType][0];
  });
  return codecs;
};
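
// Illustrative input/output for `unwrapCodecList` (values fabricated):
//
//   unwrapCodecList([
//     { mediaType: 'video', type: 'avc1', details: '.64001f' },
//     { mediaType: 'audio', type: 'mp4a', details: '.40.2' }
//   ])
//   // => { video: 'avc1.64001f', audio: 'mp4a.40.2' }
//
// If two codecs share a mediaType, that mediaType is set to null so that
// mux.js probing determines the real codecs instead.
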
var codecCount = function codecCount(codecObj) {
  var count = 0;

  if (codecObj.audio) {
    count++;
  }

  if (codecObj.video) {
    count++;
  }

  return count;
};
/**
 * Calculates the codec strings for a working configuration of
 * SourceBuffers to play variant streams in a master playlist. If
 * there is no possible working configuration, an empty object will be
 * returned.
 *
 * @param {Object} master the m3u8 object for the master playlist
 * @param {Object} media the m3u8 object for the variant playlist
 * @return {Object} the codec strings.
 *
 * @private
 */

var codecsForPlaylist = function codecsForPlaylist(master, media) {
  var mediaAttributes = media.attributes || {};
  var codecInfo = unwrapCodecList(getCodecs(media) || []); // HLS with multiple-audio tracks must always get an audio codec.
  // Put another way, there is no way to have a video-only multiple-audio HLS!

  if (isMaat(master, media) && !codecInfo.audio) {
    if (!isMuxed(master, media)) {
      // It is possible for codecs to be specified on the audio media group playlist but
      // not on the rendition playlist. This is mostly the case for DASH, where audio and
      // video are always separate (and separately specified).
      var defaultCodecs = unwrapCodecList(codecs_js.codecsFromDefault(master, mediaAttributes.AUDIO) || []);

      if (defaultCodecs.audio) {
        codecInfo.audio = defaultCodecs.audio;
      }
    }
  }

  return codecInfo;
};
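
// Illustrative only: a fabricated master/media pair where the variant
// advertises CODECS="avc1.64001f,mp4a.40.2". With no alternate audio group
// involved, `codecsForPlaylist` simply unwraps the parsed CODECS attribute.
var exampleCodecsForPlaylist = function exampleCodecsForPlaylist() {
  var media = {
    attributes: {
      CODECS: 'avc1.64001f,mp4a.40.2'
    }
  };
  var master = {
    playlists: [media],
    mediaGroups: {}
  };
  return codecsForPlaylist(master, media); // => { video: 'avc1.64001f', audio: 'mp4a.40.2' }
};
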

var logFn = logger('PlaylistSelector');

var representationToString = function representationToString(representation) {
  if (!representation || !representation.playlist) {
    return;
  }

  var playlist = representation.playlist;
  return JSON.stringify({
    id: playlist.id,
    bandwidth: representation.bandwidth,
    width: representation.width,
    height: representation.height,
    codecs: playlist.attributes && playlist.attributes.CODECS || ''
  });
}; // Utilities

/**
 * Returns the CSS value for the specified property on an element
 * using `getComputedStyle`. Firefox has a long-standing issue where
 * getComputedStyle() may return null when running in an iframe with
 * `display: none`.
 *
 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
 * @param {HTMLElement} el the HTMLElement to work on
 * @param {string} property the property to get the style for
 */


var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
  if (!el) {
    return '';
  }

  var result = window__default["default"].getComputedStyle(el);

  if (!result) {
    return '';
  }

  return result[property];
};
/**
 * Reusable stable sort function
 *
 * @param {Playlists} array
 * @param {Function} sortFn Different comparators
 * @function stableSort
 */


var stableSort = function stableSort(array, sortFn) {
  var newArray = array.slice();
  array.sort(function (left, right) {
    var cmp = sortFn(left, right);

    if (cmp === 0) {
      return newArray.indexOf(left) - newArray.indexOf(right);
    }

    return cmp;
  });
};
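
// Illustrative only: `stableSort` sorts in place while preserving the original
// relative order of entries the comparator considers equal. Here 'a' stays
// ahead of 'c' even though both compare equal on width.
var exampleStableSortUsage = function exampleStableSortUsage() {
  var reps = [{
    id: 'a',
    width: 1280
  }, {
    id: 'b',
    width: 640
  }, {
    id: 'c',
    width: 1280
  }];
  stableSort(reps, function (left, right) {
    return left.width - right.width;
  });
  return reps; // => ids in order: 'b', 'a', 'c'
};
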
/**
 * A comparator function to sort two playlist objects by bandwidth.
 *
 * @param {Object} left a media playlist object
 * @param {Object} right a media playlist object
 * @return {number} Greater than zero if the bandwidth attribute of
 *         left is greater than the corresponding attribute of right. Less
 *         than zero if the bandwidth of right is greater than left and
 *         exactly zero if the two are equal.
 */


var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
  var leftBandwidth;
  var rightBandwidth;

  if (left.attributes.BANDWIDTH) {
    leftBandwidth = left.attributes.BANDWIDTH;
  }

  leftBandwidth = leftBandwidth || window__default["default"].Number.MAX_VALUE;

  if (right.attributes.BANDWIDTH) {
    rightBandwidth = right.attributes.BANDWIDTH;
  }

  rightBandwidth = rightBandwidth || window__default["default"].Number.MAX_VALUE;
  return leftBandwidth - rightBandwidth;
};
/**
 * A comparator function to sort two playlist objects by resolution (width).
 *
 * @param {Object} left a media playlist object
 * @param {Object} right a media playlist object
 * @return {number} Greater than zero if the resolution.width attribute of
 *         left is greater than the corresponding attribute of right. Less
 *         than zero if the resolution.width of right is greater than left and
 *         exactly zero if the two are equal.
 */

var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
  var leftWidth;
  var rightWidth;

  if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
    leftWidth = left.attributes.RESOLUTION.width;
  }

  leftWidth = leftWidth || window__default["default"].Number.MAX_VALUE;

  if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
    rightWidth = right.attributes.RESOLUTION.width;
  }

  rightWidth = rightWidth || window__default["default"].Number.MAX_VALUE; // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
  // have the same media dimensions/resolution

  if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
    return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
  }

  return leftWidth - rightWidth;
};
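
// Illustrative only: both comparators above are designed to be handed to
// `stableSort`. A sketch sorting fabricated playlist objects by declared
// BANDWIDTH, lowest first; a playlist without BANDWIDTH sorts as
// Number.MAX_VALUE, i.e. last.
var exampleComparatorUsage = function exampleComparatorUsage() {
  var playlists = [{
    attributes: {
      BANDWIDTH: 4000000
    }
  }, {
    attributes: {
      BANDWIDTH: 800000
    }
  }, {
    attributes: {}
  }];
  stableSort(playlists, comparePlaylistBandwidth);
  return playlists; // => 800000, 4000000, then the BANDWIDTH-less playlist
};
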
/**
 * Chooses the appropriate media playlist based on bandwidth and player size
 *
 * @param {Object} master
 *        Object representation of the master manifest
 * @param {number} playerBandwidth
 *        Current calculated bandwidth of the player
 * @param {number} playerWidth
 *        Current width of the player element (should account for the device pixel ratio)
 * @param {number} playerHeight
 *        Current height of the player element (should account for the device pixel ratio)
 * @param {boolean} limitRenditionByPlayerDimensions
 *        True if the player width and height should be used during the selection, false otherwise
 * @param {Object} masterPlaylistController
 *        the current masterPlaylistController object
 * @return {Playlist} the highest bitrate playlist less than the
 *         currently detected bandwidth, accounting for some amount of
 *         bandwidth variance
 */

var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions, masterPlaylistController) {
  // If we end up getting called before `master` is available, exit early
  if (!master) {
    return;
  }

  var options = {
    bandwidth: playerBandwidth,
    width: playerWidth,
    height: playerHeight,
    limitRenditionByPlayerDimensions: limitRenditionByPlayerDimensions
  };
  var playlists = master.playlists; // if playlist is audio only, select between currently active audio group playlists.

  if (Playlist.isAudioOnly(master)) {
    playlists = masterPlaylistController.getAudioTrackPlaylists_(); // add audioOnly to options so that we log audioOnly: true
    // at the bottom of this function for debugging.

    options.audioOnly = true;
  } // convert the playlists to an intermediary representation to make comparisons easier


  var sortedPlaylistReps = playlists.map(function (playlist) {
    var bandwidth;
    var width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
    var height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
    bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
    bandwidth = bandwidth || window__default["default"].Number.MAX_VALUE;
    return {
      bandwidth: bandwidth,
      width: width,
      height: height,
      playlist: playlist
    };
  });
  stableSort(sortedPlaylistReps, function (left, right) {
    return left.bandwidth - right.bandwidth;
  }); // filter out any playlists that have been excluded due to
  // incompatible configurations

  sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
    return !Playlist.isIncompatible(rep.playlist);
  }); // filter out any playlists that have been disabled manually through the representations
  // api or blacklisted temporarily due to playback errors.

  var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
    return Playlist.isEnabled(rep.playlist);
  });

  if (!enabledPlaylistReps.length) {
    // if there are no enabled playlists, then they have all been blacklisted or disabled
    // by the user through the representations api. In this case, ignore blacklisting and
    // fallback to what the user wants by using playlists the user has not disabled.
    enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
      return !Playlist.isDisabled(rep.playlist);
    });
  } // filter out any variant that has greater effective bitrate
  // than the current estimated bandwidth


  var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
    return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
  });
  var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth
  // and then take the very first element

  var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
    return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
  })[0]; // if we're not going to limit renditions by player size, make an early decision.

  if (limitRenditionByPlayerDimensions === false) {
    var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];

    if (_chosenRep && _chosenRep.playlist) {
      var type = 'sortedPlaylistReps';

      if (bandwidthBestRep) {
        type = 'bandwidthBestRep';
      }

      if (enabledPlaylistReps[0]) {
        type = 'enabledPlaylistReps';
      }

      logFn("choosing " + representationToString(_chosenRep) + " using " + type + " with options", options);
      return _chosenRep.playlist;
    }

    logFn('could not choose a playlist with options', options);
    return null;
  } // filter out playlists without resolution information


  var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
    return rep.width && rep.height;
  }); // sort variants by resolution

  stableSort(haveResolution, function (left, right) {
    return left.width - right.width;
  }); // if we have the exact resolution as the player use it

  var resolutionBestRepList = haveResolution.filter(function (rep) {
    return rep.width === playerWidth && rep.height === playerHeight;
  });
  highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that has the exact resolution

  var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
    return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
  })[0];
  var resolutionPlusOneList;
  var resolutionPlusOneSmallest;
  var resolutionPlusOneRep; // find the smallest variant that is larger than the player
  // if there is no match of exact resolution

  if (!resolutionBestRep) {
    resolutionPlusOneList = haveResolution.filter(function (rep) {
      return rep.width > playerWidth || rep.height > playerHeight;
    }); // find all the variants that have the same smallest resolution

    resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
      return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
    }); // ensure that we also pick the highest bandwidth variant that
    // is just-larger-than the video player

    highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
    resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
      return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
    })[0];
  }

  var leastPixelDiffRep; // If this selector proves to be better than others,
  // resolutionPlusOneRep and resolutionBestRep and all
  // the code involving them should be removed.

  if (masterPlaylistController.experimentalLeastPixelDiffSelector) {
    // find the variant that is closest to the player's pixel size
    var leastPixelDiffList = haveResolution.map(function (rep) {
      rep.pixelDiff = Math.abs(rep.width - playerWidth) + Math.abs(rep.height - playerHeight);
      return rep;
    }); // get the highest bandwidth, closest resolution playlist

    stableSort(leastPixelDiffList, function (left, right) {
      // sort by highest bandwidth if pixelDiff is the same
      if (left.pixelDiff === right.pixelDiff) {
        return right.bandwidth - left.bandwidth;
      }

      return left.pixelDiff - right.pixelDiff;
    });
    leastPixelDiffRep = leastPixelDiffList[0];
  } // fallback chain of variants


  var chosenRep = leastPixelDiffRep || resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];

  if (chosenRep && chosenRep.playlist) {
    var _type = 'sortedPlaylistReps';

    if (leastPixelDiffRep) {
      _type = 'leastPixelDiffRep';
    } else if (resolutionPlusOneRep) {
      _type = 'resolutionPlusOneRep';
    } else if (resolutionBestRep) {
      _type = 'resolutionBestRep';
    } else if (bandwidthBestRep) {
      _type = 'bandwidthBestRep';
    } else if (enabledPlaylistReps[0]) {
      _type = 'enabledPlaylistReps';
    }

    logFn("choosing " + representationToString(chosenRep) + " using " + _type + " with options", options);
    return chosenRep.playlist;
  }

  logFn('could not choose a playlist with options', options);
  return null;
};
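
// A direct-call sketch (illustrative only; the selector wrappers below do this
// for you, pulling bandwidth and element size from the tech). The `master` and
// `masterPlaylistController` arguments are assumed to come from a live player.
var exampleSimpleSelectorUsage = function exampleSimpleSelectorUsage(master, masterPlaylistController) {
  var bandwidthEstimate = 5000000; // fabricated 5 Mbps estimate
  var playerWidth = 1280;
  var playerHeight = 720;
  return simpleSelector(master, bandwidthEstimate, playerWidth, playerHeight, true, masterPlaylistController);
};
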

/**
 * Chooses the appropriate media playlist based on the most recent
 * bandwidth estimate and the player size.
 *
 * Expects to be called within the context of an instance of VhsHandler
 *
 * @return {Playlist} the highest bitrate playlist less than the
 *         currently detected bandwidth, accounting for some amount of
 *         bandwidth variance
 */

var lastBandwidthSelector = function lastBandwidthSelector() {
  var pixelRatio = this.useDevicePixelRatio ? window__default["default"].devicePixelRatio || 1 : 1;
  return simpleSelector(this.playlists.master, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
};
/**
 * Chooses the appropriate media playlist based on an
 * exponential-weighted moving average of the bandwidth after
 * filtering for player size.
 *
 * Expects to be called within the context of an instance of VhsHandler
 *
 * @param {number} decay - a number between 0 and 1. Higher values of
 *        this parameter will cause previous bandwidth estimates to lose
 *        significance more quickly.
 * @return {Function} a function which can be invoked to create a new
 *         playlist selector function.
 * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
 */

var movingAverageBandwidthSelector = function movingAverageBandwidthSelector(decay) {
  var average = -1;
  var lastSystemBandwidth = -1;

  if (decay < 0 || decay > 1) {
    throw new Error('Moving average bandwidth decay must be between 0 and 1.');
  }

  return function () {
    var pixelRatio = this.useDevicePixelRatio ? window__default["default"].devicePixelRatio || 1 : 1;

    if (average < 0) {
      average = this.systemBandwidth;
      lastSystemBandwidth = this.systemBandwidth;
    } // stop the average value from decaying every 250ms
    // when the systemBandwidth is constant
    // and
    // stop the average from being set to a very low value when the
    // systemBandwidth becomes 0 in case of chunk cancellation


    if (this.systemBandwidth > 0 && this.systemBandwidth !== lastSystemBandwidth) {
      average = decay * this.systemBandwidth + (1 - decay) * average;
      lastSystemBandwidth = this.systemBandwidth;
    }

    return simpleSelector(this.playlists.master, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
  };
};
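
// Illustrative only: creating and installing a moving-average selector.
// Overriding `selectPlaylist` on a VHS handler instance is assumed here;
// consult the @videojs/http-streaming documentation for the supported way to
// do this in your version.
var exampleInstallMovingAverageSelector = function exampleInstallMovingAverageSelector(vhsHandler) {
  // decay of 0.55: recent measurements dominate, older ones fade quickly
  vhsHandler.selectPlaylist = movingAverageBandwidthSelector(0.55);
};
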
/**
 * Chooses the appropriate media playlist based on the potential to rebuffer
 *
 * @param {Object} settings
 *        Object of information required to use this selector
 * @param {Object} settings.master
 *        Object representation of the master manifest
 * @param {number} settings.currentTime
 *        The current time of the player
 * @param {number} settings.bandwidth
 *        Current measured bandwidth
 * @param {number} settings.duration
 *        Duration of the media
 * @param {number} settings.segmentDuration
 *        Segment duration to be used in round trip time calculations
 * @param {number} settings.timeUntilRebuffer
 *        Time left in seconds until the player has to rebuffer
 * @param {number} settings.currentTimeline
 *        The current timeline segments are being loaded from
 * @param {SyncController} settings.syncController
 *        SyncController for determining if we have a sync point for a given playlist
 * @return {Object|null}
 *         {Object} return.playlist
 *         The highest bandwidth playlist with the least amount of rebuffering
 *         {Number} return.rebufferingImpact
 *         The amount of time in seconds switching to this playlist will rebuffer. A
 *         negative value means that switching will cause zero rebuffering.
 */

var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
  var master = settings.master,
      currentTime = settings.currentTime,
      bandwidth = settings.bandwidth,
      duration = settings.duration,
      segmentDuration = settings.segmentDuration,
      timeUntilRebuffer = settings.timeUntilRebuffer,
      currentTimeline = settings.currentTimeline,
      syncController = settings.syncController; // filter out any playlists that have been excluded due to
  // incompatible configurations

  var compatiblePlaylists = master.playlists.filter(function (playlist) {
    return !Playlist.isIncompatible(playlist);
  }); // filter out any playlists that have been disabled manually through the representations
  // api or blacklisted temporarily due to playback errors.

  var enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);

  if (!enabledPlaylists.length) {
    // if there are no enabled playlists, then they have all been blacklisted or disabled
    // by the user through the representations api. In this case, ignore blacklisting and
    // fallback to what the user wants by using playlists the user has not disabled.
    enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
      return !Playlist.isDisabled(playlist);
    });
  }

  var bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));
  var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
    var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime); // If there is no sync point for this playlist, switching to it will require a
    // sync request first. This will double the request time

    var numRequests = syncPoint ? 1 : 2;
    var requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
    var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
    return {
      playlist: playlist,
      rebufferingImpact: rebufferingImpact
    };
  });
  var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
    return estimate.rebufferingImpact <= 0;
  }); // Sort by bandwidth DESC

  stableSort(noRebufferingPlaylists, function (a, b) {
    return comparePlaylistBandwidth(b.playlist, a.playlist);
  });

  if (noRebufferingPlaylists.length) {
    return noRebufferingPlaylists[0];
  }

  stableSort(rebufferingEstimates, function (a, b) {
    return a.rebufferingImpact - b.rebufferingImpact;
  });
  return rebufferingEstimates[0] || null;
};
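
// A worked example of the estimate above (numbers fabricated): with a request
// time estimate of 2 seconds, no sync point (so numRequests = 2), and 3
// seconds of buffer left, rebufferingImpact = 2 * 2 - 3 = 1, i.e. switching
// would stall playback for roughly one second. A playlist with an impact of
// zero or less would therefore be preferred if one exists.
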
/**
 * Chooses the appropriate media playlist, which in this case is the lowest bitrate
 * one with video. If no renditions with video exist, return the lowest audio rendition.
 *
 * Expects to be called within the context of an instance of VhsHandler
 *
 * @return {Object|null}
 *         {Object} return.playlist
 *         The lowest bitrate playlist that contains a video codec. If no such rendition
 *         exists pick the lowest audio rendition.
 */

var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
  var _this = this;

  // filter out any playlists that have been excluded due to
  // incompatible configurations or playback errors
  var playlists = this.playlists.master.playlists.filter(Playlist.isEnabled); // Sort ascending by bitrate

  stableSort(playlists, function (a, b) {
    return comparePlaylistBandwidth(a, b);
  }); // Parse and assume that playlists with no video codec have no video
  // (this is not necessarily true, although it is generally true).
  //
  // If an entire manifest has no valid videos everything will get filtered
  // out.

  var playlistsWithVideo = playlists.filter(function (playlist) {
    return !!codecsForPlaylist(_this.playlists.master, playlist).video;
  });
  return playlistsWithVideo[0] || null;
};

/**
 * Combine all segments into a single Uint8Array
 *
 * @param {Object} segmentObj an object with `bytes` (the total byte length)
 *        and `segments` (the array of Uint8Arrays to combine)
 * @return {Uint8Array} concatenated bytes
 * @private
 */
var concatSegments = function concatSegments(segmentObj) {
  var offset = 0;
  var tempBuffer;

  if (segmentObj.bytes) {
    tempBuffer = new Uint8Array(segmentObj.bytes); // combine the individual segments into one large typed-array

    segmentObj.segments.forEach(function (segment) {
      tempBuffer.set(segment, offset);
      offset += segment.byteLength;
    });
  }

  return tempBuffer;
};
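
// Illustrative only: `segmentObj.bytes` carries the total byte count and
// `segments` the pieces to lay end to end.
var exampleConcatSegments = function exampleConcatSegments() {
  return concatSegments({
    bytes: 4,
    segments: [new Uint8Array([1, 2]), new Uint8Array([3, 4])]
  }); // => Uint8Array [1, 2, 3, 4]
};
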

/**
 * @file text-tracks.js
 */
/**
 * Create captions text tracks on video.js if they do not exist
 *
 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
 * @param {Object} tech the video.js tech
 * @param {Object} captionStream the caption stream to create
 * @private
 */

var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStream) {
  if (!inbandTextTracks[captionStream]) {
    tech.trigger({
      type: 'usage',
      name: 'vhs-608'
    });
    tech.trigger({
      type: 'usage',
      name: 'hls-608'
    });
    var instreamId = captionStream; // we need to translate SERVICEn for 708 to how mux.js currently labels them

    if (/^cc708_/.test(captionStream)) {
      instreamId = 'SERVICE' + captionStream.split('_')[1];
    }

    var track = tech.textTracks().getTrackById(instreamId);

    if (track) {
      // Reuse an existing track with a CC# id because this was
      // very likely created by videojs-contrib-hls from information
      // in the m3u8 for us to use
      inbandTextTracks[captionStream] = track;
    } else {
      // This section gets called when we have caption services that aren't specified in the manifest.
      // Manifest level caption services are handled in media-groups.js under CLOSED-CAPTIONS.
      var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
      var label = captionStream;
      var language = captionStream;
      var def = false;
      var captionService = captionServices[instreamId];

      if (captionService) {
        label = captionService.label;
        language = captionService.language;
        def = captionService.default;
      } // Otherwise, create a track with the default `CC#` label and
      // without a language


      inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
        kind: 'captions',
        id: instreamId,
        // TODO: investigate why this doesn't seem to turn the caption on by default
        default: def,
        label: label,
        language: language
      }, false).track;
    }
  }
};
/**
 * Add caption text track data to a source handler given an array of captions
 *
 * @param {Object}
 * @param {Object} inbandTextTracks the inband text tracks
 * @param {number} timestampOffset the timestamp offset of the source buffer
 * @param {Array} captionArray an array of caption data
 * @private
 */

var addCaptionData = function addCaptionData(_ref) {
  var inbandTextTracks = _ref.inbandTextTracks,
      captionArray = _ref.captionArray,
      timestampOffset = _ref.timestampOffset;

  if (!captionArray) {
    return;
  }

  var Cue = window__default["default"].WebKitDataCue || window__default["default"].VTTCue;
  captionArray.forEach(function (caption) {
    var track = caption.stream;
    inbandTextTracks[track].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
  });
};
/**
 * Define properties on a cue for backwards compatibility,
 * but warn the user that the way that they are using it
 * is deprecated and will be removed at a later date.
 *
 * @param {Cue} cue the cue to add the properties on
 * @private
 */

var deprecateOldCue = function deprecateOldCue(cue) {
  Object.defineProperties(cue.frame, {
    id: {
      get: function get() {
        videojs__default["default"].log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
        return cue.value.key;
      }
    },
    value: {
      get: function get() {
        videojs__default["default"].log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
        return cue.value.data;
      }
    },
    privateData: {
      get: function get() {
        videojs__default["default"].log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
        return cue.value.data;
      }
    }
  });
};
/**
 * Add metadata text track data to a source handler given an array of metadata
 *
 * @param {Object}
 * @param {Object} inbandTextTracks the inband text tracks
 * @param {Array} metadataArray an array of meta data
 * @param {number} timestampOffset the timestamp offset of the source buffer
 * @param {number} videoDuration the duration of the video
 * @private
 */


var addMetadata = function addMetadata(_ref2) {
  var inbandTextTracks = _ref2.inbandTextTracks,
      metadataArray = _ref2.metadataArray,
      timestampOffset = _ref2.timestampOffset,
      videoDuration = _ref2.videoDuration;

  if (!metadataArray) {
    return;
  }

  var Cue = window__default["default"].WebKitDataCue || window__default["default"].VTTCue;
  var metadataTrack = inbandTextTracks.metadataTrack_;

  if (!metadataTrack) {
    return;
  }

  metadataArray.forEach(function (metadata) {
    var time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,
    // ignore this bit of metadata.
    // This likely occurs when you have a non-timed ID3 tag like TIT2,
    // which is the "Title/Songname/Content description" frame

    if (typeof time !== 'number' || window__default["default"].isNaN(time) || time < 0 || !(time < Infinity)) {
      return;
    }

    metadata.frames.forEach(function (frame) {
      var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
      cue.frame = frame;
      cue.value = frame;
      deprecateOldCue(cue);
      metadataTrack.addCue(cue);
    });
  });

  if (!metadataTrack.cues || !metadataTrack.cues.length) {
    return;
  } // Updating the metadata cues so that
  // the endTime of each cue is the startTime of the next cue
  // the endTime of last cue is the duration of the video


  var cues = metadataTrack.cues;
  var cuesArray = []; // Create a copy of the TextTrackCueList...
  // ...disregarding cues with a falsey value

  for (var i = 0; i < cues.length; i++) {
    if (cues[i]) {
      cuesArray.push(cues[i]);
    }
  } // Group cues by their startTime value


  var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
    var timeSlot = obj[cue.startTime] || [];
    timeSlot.push(cue);
    obj[cue.startTime] = timeSlot;
    return obj;
  }, {}); // Sort startTimes by ascending order

  var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
    return Number(a) - Number(b);
  }); // Map each cue group's endTime to the next group's startTime

  sortedStartTimes.forEach(function (startTime, idx) {
    var cueGroup = cuesGroupedByStartTime[startTime];
    var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration; // Map each cue's endTime to the next group's startTime

    cueGroup.forEach(function (cue) {
      cue.endTime = nextTime;
    });
  });
};
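
// A worked example of the endTime mapping above (times fabricated): metadata
// cues starting at 0s, 5s, and 12s in a 30-second video receive endTimes of
// 5s, 12s, and 30s respectively, so each cue stays "active" until the next
// one begins.
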
/**
 * Create metadata text track on video.js if it does not exist
 *
 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
 * @param {string} dispatchType the inband metadata track dispatch type
 * @param {Object} tech the video.js tech
 * @private
 */

var createMetadataTrackIfNotExists = function createMetadataTrackIfNotExists(inbandTextTracks, dispatchType, tech) {
  if (inbandTextTracks.metadataTrack_) {
    return;
  }

  inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
    kind: 'metadata',
    label: 'Timed Metadata'
  }, false).track;
  inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
};
/**
 * Remove cues from a track on video.js.
 *
 * @param {Double} start start of where we should remove the cue
 * @param {Double} end end of where we should remove the cue
 * @param {Object} track the text track to remove the cues from
 * @private
 */

var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
  var i;
  var cue;

  if (!track) {
    return;
  }

  if (!track.cues) {
    return;
  }

  i = track.cues.length;

  while (i--) {
    cue = track.cues[i]; // Remove any cue within the provided start and end time

    if (cue.startTime >= start && cue.endTime <= end) {
      track.removeCue(cue);
    }
  }
};
/**
 * Remove duplicate cues from a track on video.js (a cue is considered a
 * duplicate if it has the same time interval and text as another)
 *
 * @param {Object} track the text track to remove the duplicate cues from
 * @private
 */

var removeDuplicateCuesFromTrack = function removeDuplicateCuesFromTrack(track) {
  var cues = track.cues;

  if (!cues) {
    return;
  }

  for (var i = 0; i < cues.length; i++) {
    var duplicates = [];
    var occurrences = 0;

    for (var j = 0; j < cues.length; j++) {
      if (cues[i].startTime === cues[j].startTime && cues[i].endTime === cues[j].endTime && cues[i].text === cues[j].text) {
        occurrences++;

        if (occurrences > 1) {
          duplicates.push(cues[j]);
        }
      }
    }

    if (duplicates.length) {
      duplicates.forEach(function (dupe) {
        return track.removeCue(dupe);
      });
    }
  }
};

/**
 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
 * front of current time.
 *
 * @param {Array} buffer
 *        The current buffer of gop information
 * @param {number} currentTime
 *        The current time
 * @param {Double} mapping
 *        Offset to map display time to stream presentation time
 * @return {Array}
 *         List of gops considered safe to append over
 */

var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
  if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
    return [];
  } // pts value for current time + 3 seconds to give a bit more wiggle room


  var currentTimePts = Math.ceil((currentTime - mapping + 3) * clock.ONE_SECOND_IN_TS);
  var i;

  for (i = 0; i < buffer.length; i++) {
    if (buffer[i].pts > currentTimePts) {
      break;
    }
  }

  return buffer.slice(i);
};
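
// A worked example of the pts cutoff above (values fabricated): with
// currentTime = 10, mapping = -2, and clock.ONE_SECOND_IN_TS = 90000, the
// cutoff is ceil((10 - (-2) + 3) * 90000) = 1350000, so only gops whose pts
// exceeds 1350000 are returned.
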
/**
 * Appends gop information (timing and byteLength) received by the transmuxer for the
 * gops appended in the last call to appendBuffer
 *
 * @param {Array} buffer
 *        The current buffer of gop information
 * @param {Array} gops
 *        List of new gop information
 * @param {boolean} replace
 *        If true, replace the buffer with the new gop information. If false, append the
 *        new gop information to the buffer in the right location of time.
 * @return {Array}
 *         Updated list of gop information
 */

var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
  if (!gops.length) {
    return buffer;
  }

  if (replace) {
    // If we are in safe append mode, then completely overwrite the gop buffer
    // with the most recent appended data. This will make sure that when appending
    // future segments, we only try to align with gops that are both ahead of current
    // time and in the last segment appended.
    return gops.slice();
  }

  var start = gops[0].pts;
  var i = 0;

  for (i; i < buffer.length; i++) {
    if (buffer[i].pts >= start) {
      break;
    }
  }

  return buffer.slice(0, i).concat(gops);
};
/**
 * Removes gop information in buffer that overlaps with provided start and end
 *
 * @param {Array} buffer
 *        The current buffer of gop information
 * @param {Double} start
 *        position to start the remove at
 * @param {Double} end
 *        position to end the remove at
 * @param {Double} mapping
 *        Offset to map display time to stream presentation time
 */

var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
  var startPts = Math.ceil((start - mapping) * clock.ONE_SECOND_IN_TS);
  var endPts = Math.ceil((end - mapping) * clock.ONE_SECOND_IN_TS);
  var updatedBuffer = buffer.slice();
  var i = buffer.length;

  while (i--) {
    if (buffer[i].pts <= endPts) {
      break;
    }
  }

  if (i === -1) {
    // no removal because end of remove range is before start of buffer
    return updatedBuffer;
  }

  var j = i + 1;

  while (j--) {
    if (buffer[j].pts <= startPts) {
      break;
    }
  } // clamp remove range start to 0 index


  j = Math.max(j, 0);
  updatedBuffer.splice(j, i - j + 1);
  return updatedBuffer;
};
15297
15298var shallowEqual = function shallowEqual(a, b) {
15299 // if both are undefined
15300 // or one or the other is undefined
15301 // they are not equal
15302 if (!a && !b || !a && b || a && !b) {
15303 return false;
15304 } // they are the same object and thus, equal
15305
15306
15307 if (a === b) {
15308 return true;
15309 } // sort keys so we can make sure they have
15310 // all the same keys later.
15311
15312
15313 var akeys = Object.keys(a).sort();
15314 var bkeys = Object.keys(b).sort(); // different number of keys, not equal
15315
15316 if (akeys.length !== bkeys.length) {
15317 return false;
15318 }
15319
15320 for (var i = 0; i < akeys.length; i++) {
15321 var key = akeys[i]; // different sorted keys, not equal
15322
15323 if (key !== bkeys[i]) {
15324 return false;
15325 } // different values, not equal
15326
15327
15328 if (a[key] !== b[key]) {
15329 return false;
15330 }
15331 }
15332
15333 return true;
15334};
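// A few illustrative calls showing the shallow, strict-equality semantics:
//
//   shallowEqual({ a: 1 }, { a: 1 });       // => true (same keys and values)
//   shallowEqual({ a: 1 }, { a: 1, b: 2 }); // => false (different key counts)
//   shallowEqual({ a: {} }, { a: {} });     // => false (values compared with ===)
//   shallowEqual(undefined, undefined);     // => false (by design, see above)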
15335
15336// https://www.w3.org/TR/WebIDL-1/#quotaexceedederror
15337var QUOTA_EXCEEDED_ERR = 22;
15338
15339/**
15340 * The segment loader has no recourse except to fetch a segment in the
15341 * current playlist and use the internal timestamps in that segment to
15342 * generate a syncPoint. This function returns a good candidate index
15343 * for that process.
15344 *
15345 * @param {number} currentTimeline - the timeline the loader is currently following
 * @param {Array} segments - the segments array from a playlist
 * @param {number} targetTime - the cumulative duration of same-timeline segments to reach before picking an index
15346 * @return {number} An index of a segment from the playlist to load
15347 */
15348
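// A usage sketch for the function below (hypothetical 4-second segments, all on
// timeline 0). The cumulative duration first exceeds the targetTime of 6 at
// index 1, so that index is returned:
//
//   getSyncSegmentCandidate(0, [
//     { timeline: 0, duration: 4 },
//     { timeline: 0, duration: 4 },
//     { timeline: 0, duration: 4 }
//   ], 6); // => 1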
15349var getSyncSegmentCandidate = function getSyncSegmentCandidate(currentTimeline, segments, targetTime) {
15350 segments = segments || [];
15351 var timelineSegments = [];
15352 var time = 0;
15353
15354 for (var i = 0; i < segments.length; i++) {
15355 var segment = segments[i];
15356
15357 if (currentTimeline === segment.timeline) {
15358 timelineSegments.push(i);
15359 time += segment.duration;
15360
15361 if (time > targetTime) {
15362 return i;
15363 }
15364 }
15365 }
15366
15367 if (timelineSegments.length === 0) {
15368 return 0;
15369 } // default to the last timeline segment
15370
15371
15372 return timelineSegments[timelineSegments.length - 1];
15373}; // In the event of a quota exceeded error, keep at least one second of back buffer. This
15374// number was arbitrarily chosen and may be updated in the future, but seemed reasonable
15375// as a start to prevent any potential issues with removing content too close to the
15376// playhead.
15377
15378var MIN_BACK_BUFFER = 1; // in seconds
15379
15380var CHECK_BUFFER_DELAY = 500; // in ms
15381
15382var finite = function finite(num) {
15383 return typeof num === 'number' && isFinite(num);
15384}; // With most content hovering around 30fps, if a segment has a duration less than a half
15385// frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
15386// not accurately reflect the rest of the content.
15387
15388
15389var MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
15390var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, trackInfo) {
15391 // Although these checks should most likely cover non 'main' types, for now it narrows
15392 // the scope of our checks.
15393 if (loaderType !== 'main' || !startingMedia || !trackInfo) {
15394 return null;
15395 }
15396
15397 if (!trackInfo.hasAudio && !trackInfo.hasVideo) {
15398 return 'Neither audio nor video found in segment.';
15399 }
15400
15401 if (startingMedia.hasVideo && !trackInfo.hasVideo) {
15402 return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
15403 }
15404
15405 if (!startingMedia.hasVideo && trackInfo.hasVideo) {
15406 return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
15407 }
15408
15409 return null;
15410};
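// Illustrative calls (hypothetical track info). Only the 'main' loader is checked:
//
//   illegalMediaSwitch('main', { hasVideo: true, hasAudio: true },
//     { hasVideo: false, hasAudio: true });
//   // => 'Only audio found in segment when we expected video. ...'
//   illegalMediaSwitch('audio', { hasVideo: true, hasAudio: true },
//     { hasVideo: false, hasAudio: true });
//   // => null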
15411/**
15412 * Calculates a time value that is safe to remove from the back buffer without interrupting
15413 * playback.
15414 *
15415 * @param {TimeRange} seekable
15416 * The current seekable range
15417 * @param {number} currentTime
15418 * The current time of the player
15419 * @param {number} targetDuration
15420 * The target duration of the current playlist
15421 * @return {number}
15422 * Time that is safe to remove from the back buffer without interrupting playback
15423 */
15424
15425var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable, currentTime, targetDuration) {
15426 // 30 seconds before the playhead provides a safe default for trimming.
15427 //
15428 // Choosing a reasonable default is particularly important for high bitrate content and
15429 // VOD videos/live streams with large windows, as the buffer may end up overfilled and
15430 // throw an APPEND_BUFFER_ERR.
15431 var trimTime = currentTime - Config.BACK_BUFFER_LENGTH;
15432
15433 if (seekable.length) {
15434 // Some live playlists may have a shorter window of content than the full allowed back
15435 // buffer. For these playlists, don't save content that's no longer within the window.
15436 trimTime = Math.max(trimTime, seekable.start(0));
15437 } // Don't remove within target duration of the current time to avoid the possibility of
15438 // removing the GOP currently being played, as removing it can cause playback stalls.
15439
15440
15441 var maxTrimTime = currentTime - targetDuration;
15442 return Math.min(maxTrimTime, trimTime);
15443};
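// A worked sketch, assuming the default Config.BACK_BUFFER_LENGTH of 30 described
// above. With currentTime 100, seekable [80, 140] and targetDuration 10:
//
//   // trimTime    = 100 - 30 = 70, raised to seekable.start(0) = 80
//   // maxTrimTime = 100 - 10 = 90
//   safeBackBufferTrimTime(videojs.createTimeRanges([[80, 140]]), 100, 10);
//   // => Math.min(90, 80) = 80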
15444var segmentInfoString = function segmentInfoString(segmentInfo) {
15445 var startOfSegment = segmentInfo.startOfSegment,
15446 duration = segmentInfo.duration,
15447 segment = segmentInfo.segment,
15448 part = segmentInfo.part,
15449 _segmentInfo$playlist = segmentInfo.playlist,
15450 seq = _segmentInfo$playlist.mediaSequence,
15451 id = _segmentInfo$playlist.id,
15452 _segmentInfo$playlist2 = _segmentInfo$playlist.segments,
15453 segments = _segmentInfo$playlist2 === void 0 ? [] : _segmentInfo$playlist2,
15454 index = segmentInfo.mediaIndex,
15455 partIndex = segmentInfo.partIndex,
15456 timeline = segmentInfo.timeline;
15457 var segmentLen = segments.length - 1;
15458 var selection = 'mediaIndex/partIndex increment';
15459
15460 if (segmentInfo.getMediaInfoForTime) {
15461 selection = "getMediaInfoForTime (" + segmentInfo.getMediaInfoForTime + ")";
15462 } else if (segmentInfo.isSyncRequest) {
15463 selection = 'getSyncSegmentCandidate (isSyncRequest)';
15464 }
15465
15466 if (segmentInfo.independent) {
15467 selection += " with independent " + segmentInfo.independent;
15468 }
15469
15470 var hasPartIndex = typeof partIndex === 'number';
15471 var name = segmentInfo.segment.uri ? 'segment' : 'pre-segment';
15472 var zeroBasedPartCount = hasPartIndex ? getKnownPartCount({
15473 preloadSegment: segment
15474 }) - 1 : 0;
15475 return name + " [" + (seq + index) + "/" + (seq + segmentLen) + "]" + (hasPartIndex ? " part [" + partIndex + "/" + zeroBasedPartCount + "]" : '') + (" segment start/end [" + segment.start + " => " + segment.end + "]") + (hasPartIndex ? " part start/end [" + part.start + " => " + part.end + "]" : '') + (" startOfSegment [" + startOfSegment + "]") + (" duration [" + duration + "]") + (" timeline [" + timeline + "]") + (" selected by [" + selection + "]") + (" playlist [" + id + "]");
15476};
15477
15478var timingInfoPropertyForMedia = function timingInfoPropertyForMedia(mediaType) {
15479 return mediaType + "TimingInfo";
15480};
15481/**
15482 * Returns the timestamp offset to use for the segment.
15483 *
15484 * @param {number} segmentTimeline
15485 * The timeline of the segment
15486 * @param {number} currentTimeline
15487 * The timeline currently being followed by the loader
15488 * @param {number} startOfSegment
15489 * The estimated segment start
15490 * @param {TimeRange[]} buffered
15491 * The loader's buffer
15492 * @param {boolean} overrideCheck
15493 * If true, skip checking whether the timestamp offset should change and
15494 * compute a new value directly
15495 *
15496 * @return {number|null}
15497 * Either a number representing a new timestamp offset, or null if the segment is
15498 * part of the same timeline
15499 */
15500
15501
15502var timestampOffsetForSegment = function timestampOffsetForSegment(_ref) {
15503 var segmentTimeline = _ref.segmentTimeline,
15504 currentTimeline = _ref.currentTimeline,
15505 startOfSegment = _ref.startOfSegment,
15506 buffered = _ref.buffered,
15507 overrideCheck = _ref.overrideCheck;
15508
15509 // Check to see if we are crossing a discontinuity to see if we need to set the
15510 // timestamp offset on the transmuxer and source buffer.
15511 //
15512 // Previously, we changed the timestampOffset if the start of this segment was less than
15513 // the currently set timestampOffset, but this isn't desirable as it can produce bad
15514 // behavior, especially around long running live streams.
15515 if (!overrideCheck && segmentTimeline === currentTimeline) {
15516 return null;
15517 } // When changing renditions, it's possible to request a segment on an older timeline. For
15518 // instance, given two renditions with the following:
15519 //
15520 // #EXTINF:10
15521 // segment1
15522 // #EXT-X-DISCONTINUITY
15523 // #EXTINF:10
15524 // segment2
15525 // #EXTINF:10
15526 // segment3
15527 //
15528 // And the current player state:
15529 //
15530 // current time: 8
15531 // buffer: 0 => 20
15532 //
15533 // The next segment on the current rendition would be segment3, filling the buffer from
15534 // 20s onwards. However, if a rendition switch happens after segment2 was requested,
15535 // then the next segment to be requested will be segment1 from the new rendition in
15536 // order to fill time 8 and onwards. Using the buffered end would result in repeated
15537 // content (since it would position segment1 of the new rendition starting at 20s). This
15538 // case can be identified when the new segment's timeline is a prior value. Instead of
15539 // using the buffered end, the startOfSegment can be used, which, hopefully, will be
15540 // more accurate to the actual start time of the segment.
15541
15542
15543 if (segmentTimeline < currentTimeline) {
15544 return startOfSegment;
15545 } // segmentInfo.startOfSegment used to be used as the timestamp offset, however, that
15546 // value uses the end of the last segment if it is available. While this value
15547 // should often be correct, it's better to rely on the buffered end, as the new
15548 // content post discontinuity should line up with the buffered end as if it were
15549 // time 0 for the new content.
15550
15551
15552 return buffered.length ? buffered.end(buffered.length - 1) : startOfSegment;
15553};
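// Illustrative calls (hypothetical timelines and buffer):
//
//   // same timeline, no override => keep the current offset
//   timestampOffsetForSegment({ segmentTimeline: 0, currentTimeline: 0,
//     startOfSegment: 20, buffered: videojs.createTimeRanges() }); // => null
//   // crossing forward onto a new timeline => line up with the buffered end
//   timestampOffsetForSegment({ segmentTimeline: 1, currentTimeline: 0,
//     startOfSegment: 22, buffered: videojs.createTimeRanges([[0, 20]]) }); // => 20
//   // switching back to an older timeline => trust the estimated segment start
//   timestampOffsetForSegment({ segmentTimeline: 0, currentTimeline: 1,
//     startOfSegment: 8, buffered: videojs.createTimeRanges([[0, 20]]) }); // => 8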
15554/**
15555 * Returns whether or not the loader should wait for a timeline change from the timeline
15556 * change controller before processing the segment.
15557 *
15558 * Primary timing in VHS goes by video. This is different from most media players, as
15559 * audio is more often used as the primary timing source. For the foreseeable future, VHS
15560 * will continue to use video as the primary timing source, due to the current logic and
15561 * expectations built around it.
15562 *
15563 * Since the timing follows video, in order to maintain sync, the video loader is
15564 * responsible for setting both audio and video source buffer timestamp offsets.
15565 *
15566 * Setting different values for audio and video source buffers could lead to
15567 * desyncing. The following examples demonstrate some of the situations where this
15568 * distinction is important. Note that all of these cases involve demuxed content. When
15569 * content is muxed, the audio and video are packaged together, therefore syncing
15570 * separate media playlists is not an issue.
15571 *
15572 * CASE 1: Audio prepares to load a new timeline before video:
15573 *
15574 * Timeline: 0 1
15575 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15576 * Audio Loader: ^
15577 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15578 * Video Loader ^
15579 *
15580 * In the above example, the audio loader is preparing to load the 6th segment, the first
15581 * after a discontinuity, while the video loader is still loading the 5th segment, before
15582 * the discontinuity.
15583 *
15584 * If the audio loader goes ahead and loads and appends the 6th segment before the video
15585 * loader crosses the discontinuity, then when appended, the 6th audio segment will use
15586 * the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
15587 * the audio loader must provide the audioAppendStart value to trim the content in the
15588 * transmuxer, and that value relies on the audio timestamp offset. Since the audio
15589 * timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
15590 * segment until that value is provided.
15591 *
15592 * CASE 2: Video prepares to load a new timeline before audio:
15593 *
15594 * Timeline: 0 1
15595 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15596 * Audio Loader: ^
15597 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15598 * Video Loader ^
15599 *
15600 * In the above example, the video loader is preparing to load the 6th segment, the first
15601 * after a discontinuity, while the audio loader is still loading the 5th segment, before
15602 * the discontinuity.
15603 *
15604 * If the video loader goes ahead and loads and appends the 6th segment, then once the
15605 * segment is loaded and processed, both the video and audio timestamp offsets will be
15606 * set, since video is used as the primary timing source. This is to ensure content lines
15607 * up appropriately, as any modifications to the video timing are reflected by audio when
15608 * the video loader sets the audio and video timestamp offsets to the same value. However,
15609 * setting the timestamp offset for audio before audio has had a chance to change
15610 * timelines will likely lead to desyncing, as the audio loader will append segment 5 with
15611 * a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
15612 *
15613 * CASE 3: When seeking, audio prepares to load a new timeline before video
15614 *
15615 * Timeline: 0 1
15616 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15617 * Audio Loader: ^
15618 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15619 * Video Loader ^
15620 *
15621 * In the above example, both audio and video loaders are loading segments from timeline
15622 * 0, but imagine that the seek originated from timeline 1.
15623 *
15624 * When seeking to a new timeline, the timestamp offset will be set based on the expected
15625 * segment start of the loaded video segment. In order to maintain sync, the audio loader
15626 * must wait for the video loader to load its segment and update both the audio and video
15627 * timestamp offsets before it may load and append its own segment. This is the case
15628 * whether the seek results in a mismatched segment request (e.g., the audio loader
15629 * chooses to load segment 3 and the video loader chooses to load segment 4) or the
15630 * loaders choose to load the same segment index from each playlist, as the segments may
15631 * not be aligned perfectly, even for matching segment indexes.
15632 *
15633 * @param {Object} timelineChangeController
15634 * @param {number} currentTimeline
15635 * The timeline currently being followed by the loader
15636 * @param {number} segmentTimeline
15637 * The timeline of the segment being loaded
15638 * @param {('main'|'audio')} loaderType
15639 * The loader type
15640 * @param {boolean} audioDisabled
15641 * Whether the audio is disabled for the loader. This should only be true when the
15642 * loader may have muxed audio in its segment, but should not append it, e.g., for
15643 * the main loader when an alternate audio playlist is active.
15644 *
15645 * @return {boolean}
15646 * Whether the loader should wait for a timeline change from the timeline change
15647 * controller before processing the segment
15648 */
15649
15650var shouldWaitForTimelineChange = function shouldWaitForTimelineChange(_ref2) {
15651 var timelineChangeController = _ref2.timelineChangeController,
15652 currentTimeline = _ref2.currentTimeline,
15653 segmentTimeline = _ref2.segmentTimeline,
15654 loaderType = _ref2.loaderType,
15655 audioDisabled = _ref2.audioDisabled;
15656
15657 if (currentTimeline === segmentTimeline) {
15658 return false;
15659 }
15660
15661 if (loaderType === 'audio') {
15662 var lastMainTimelineChange = timelineChangeController.lastTimelineChange({
15663 type: 'main'
15664 }); // Audio loader should wait if:
15665 //
15666 // * main hasn't had a timeline change yet (thus has not loaded its first segment)
15667 // * main hasn't yet changed to the timeline audio is looking to load
15668
15669 return !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;
15670 } // The main loader only needs to wait for timeline changes if there's demuxed audio.
15671 // Otherwise, there's nothing to wait for, since audio would be muxed into the main
15672 // loader's segments (or the content is audio/video only and handled by the main
15673 // loader).
15674
15675
15676 if (loaderType === 'main' && audioDisabled) {
15677 var pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
15678 type: 'audio'
15679 }); // Main loader should wait for the audio loader if audio is not pending a timeline
15680 // change to the current timeline.
15681 //
15682 // Since the main loader is responsible for setting the timestamp offset for both
15683 // audio and video, the main loader must wait for audio to be about to change to its
15684 // timeline before setting the offset, otherwise, if audio is behind in loading,
15685 // segments from the previous timeline would be adjusted by the new timestamp offset.
15686 //
15687 // This requirement means that video will not cross a timeline until the audio is
15688 // about to cross to it, so that way audio and video will always cross the timeline
15689 // together.
15690 //
15691 // In addition to normal timeline changes, these rules also apply to the start of a
15692 // stream (going from a non-existent timeline, -1, to timeline 0). It's important
15693 // that these rules apply to the first timeline change because if they did not, it's
15694 // possible that the main loader will cross two timelines before the audio loader has
15695 // crossed one. Logic may be implemented to handle the startup as a special case, but
15696 // it's easier to simply treat all timeline changes the same.
15697
15698 if (pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline) {
15699 return false;
15700 }
15701
15702 return true;
15703 }
15704
15705 return false;
15706};
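// A minimal sketch with a stubbed timelineChangeController (hypothetical),
// showing the audio-follows-main rule from CASE 1 above:
//
//   var controller = {
//     lastTimelineChange: function () { return { type: 'main', from: 0, to: 1 }; },
//     pendingTimelineChange: function () { return null; }
//   };
//   shouldWaitForTimelineChange({
//     timelineChangeController: controller,
//     currentTimeline: 0,
//     segmentTimeline: 1,
//     loaderType: 'audio'
//   }); // => false, since main has already changed to timeline 1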
15707var mediaDuration = function mediaDuration(timingInfos) {
15708 var maxDuration = 0;
15709 ['video', 'audio'].forEach(function (type) {
15710 var typeTimingInfo = timingInfos[type + "TimingInfo"];
15711
15712 if (!typeTimingInfo) {
15713 return;
15714 }
15715
15716 var start = typeTimingInfo.start,
15717 end = typeTimingInfo.end;
15718 var duration;
15719
15720 if (typeof start === 'bigint' || typeof end === 'bigint') {
15721 duration = window__default["default"].BigInt(end) - window__default["default"].BigInt(start);
15722 } else if (typeof start === 'number' && typeof end === 'number') {
15723 duration = end - start;
15724 }
15725
15726 if (typeof duration !== 'undefined' && duration > maxDuration) {
15727 maxDuration = duration;
15728 }
15729 }); // convert back to a number if it is lower than MAX_SAFE_INTEGER
15730 // as we only need BigInt when we are above that.
15731
15732 if (typeof maxDuration === 'bigint' && maxDuration < Number.MAX_SAFE_INTEGER) {
15733 maxDuration = Number(maxDuration);
15734 }
15735
15736 return maxDuration;
15737};
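// A usage sketch (hypothetical timing info). The longer of the audio and video
// durations wins:
//
//   mediaDuration({
//     videoTimingInfo: { start: 0, end: 6.006 },
//     audioTimingInfo: { start: 0.0212, end: 5.9876 }
//   }); // => 6.006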
15738var segmentTooLong = function segmentTooLong(_ref3) {
15739 var segmentDuration = _ref3.segmentDuration,
15740 maxDuration = _ref3.maxDuration;
15741
15742 // 0 duration segments are most likely due to metadata only segments or a lack of
15743 // information.
15744 if (!segmentDuration) {
15745 return false;
15746 } // For HLS:
15747 //
15748 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1
15749 // The EXTINF duration of each Media Segment in the Playlist
15750 // file, when rounded to the nearest integer, MUST be less than or equal
15751 // to the target duration; longer segments can trigger playback stalls
15752 // or other errors.
15753 //
15754 // For DASH, the mpd-parser uses the largest reported segment duration as the target
15755 // duration. Although that reported duration is occasionally approximate (i.e., not
15756 // exact), a strict check may report that a segment is too long more often in DASH.
15757
15758
15759 return Math.round(segmentDuration) > maxDuration + TIME_FUDGE_FACTOR;
15760};
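// Illustrative calls. Rounding to the nearest integer mirrors the HLS spec
// language quoted above:
//
//   segmentTooLong({ segmentDuration: 10.4, maxDuration: 10 }); // => false (rounds to 10)
//   segmentTooLong({ segmentDuration: 10.6, maxDuration: 10 }); // => true (rounds to 11)
//   segmentTooLong({ segmentDuration: 0, maxDuration: 10 });    // => false (no info)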
15761var getTroublesomeSegmentDurationMessage = function getTroublesomeSegmentDurationMessage(segmentInfo, sourceType) {
15762 // Right now we aren't following DASH's timing model exactly, so only perform
15763 // this check for HLS content.
15764 if (sourceType !== 'hls') {
15765 return null;
15766 }
15767
15768 var segmentDuration = mediaDuration({
15769 audioTimingInfo: segmentInfo.audioTimingInfo,
15770 videoTimingInfo: segmentInfo.videoTimingInfo
15771 }); // Don't report if we lack information.
15772 //
15773 // If the segment has a duration of 0 it is either a lack of information or a
15774 // metadata only segment and shouldn't be reported here.
15775
15776 if (!segmentDuration) {
15777 return null;
15778 }
15779
15780 var targetDuration = segmentInfo.playlist.targetDuration;
15781 var isSegmentWayTooLong = segmentTooLong({
15782 segmentDuration: segmentDuration,
15783 maxDuration: targetDuration * 2
15784 });
15785 var isSegmentSlightlyTooLong = segmentTooLong({
15786 segmentDuration: segmentDuration,
15787 maxDuration: targetDuration
15788 });
15789 var segmentTooLongMessage = "Segment with index " + segmentInfo.mediaIndex + " " + ("from playlist " + segmentInfo.playlist.id + " ") + ("has a duration of " + segmentDuration + " ") + ("when the reported duration is " + segmentInfo.duration + " ") + ("and the target duration is " + targetDuration + ". ") + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1';
15790
15791 if (isSegmentWayTooLong || isSegmentSlightlyTooLong) {
15792 return {
15793 severity: isSegmentWayTooLong ? 'warn' : 'info',
15794 message: segmentTooLongMessage
15795 };
15796 }
15797
15798 return null;
15799};
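// A usage sketch (hypothetical segmentInfo). Severity escalates from 'info'
// (longer than target) to 'warn' (more than double the target):
//
//   getTroublesomeSegmentDurationMessage({
//     mediaIndex: 3,
//     duration: 10,
//     playlist: { id: 'playlist-0', targetDuration: 10 },
//     videoTimingInfo: { start: 0, end: 21 },
//     audioTimingInfo: null
//   }, 'hls').severity; // => 'warn', since 21 is more than 2 * 10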
15800/**
15801 * An object that manages segment loading and appending.
15802 *
15803 * @class SegmentLoader
15804 * @param {Object} options required and optional options
15805 * @extends videojs.EventTarget
15806 */
15807
15808var SegmentLoader = /*#__PURE__*/function (_videojs$EventTarget) {
15809 _inheritsLoose__default["default"](SegmentLoader, _videojs$EventTarget);
15810
15811 function SegmentLoader(settings, options) {
15812 var _this;
15813
15814 _this = _videojs$EventTarget.call(this) || this; // check pre-conditions
15815
15816 if (!settings) {
15817 throw new TypeError('Initialization settings are required');
15818 }
15819
15820 if (typeof settings.currentTime !== 'function') {
15821 throw new TypeError('No currentTime getter specified');
15822 }
15823
15824 if (!settings.mediaSource) {
15825 throw new TypeError('No MediaSource specified');
15826 } // public properties
15827
15828
15829 _this.bandwidth = settings.bandwidth;
15830 _this.throughput = {
15831 rate: 0,
15832 count: 0
15833 };
15834 _this.roundTrip = NaN;
15835
15836 _this.resetStats_();
15837
15838 _this.mediaIndex = null;
15839 _this.partIndex = null; // private settings
15840
15841 _this.hasPlayed_ = settings.hasPlayed;
15842 _this.currentTime_ = settings.currentTime;
15843 _this.seekable_ = settings.seekable;
15844 _this.seeking_ = settings.seeking;
15845 _this.duration_ = settings.duration;
15846 _this.mediaSource_ = settings.mediaSource;
15847 _this.vhs_ = settings.vhs;
15848 _this.loaderType_ = settings.loaderType;
15849 _this.currentMediaInfo_ = void 0;
15850 _this.startingMediaInfo_ = void 0;
15851 _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
15852 _this.goalBufferLength_ = settings.goalBufferLength;
15853 _this.sourceType_ = settings.sourceType;
15854 _this.sourceUpdater_ = settings.sourceUpdater;
15855 _this.inbandTextTracks_ = settings.inbandTextTracks;
15856 _this.state_ = 'INIT';
15857 _this.timelineChangeController_ = settings.timelineChangeController;
15858 _this.shouldSaveSegmentTimingInfo_ = true;
15859 _this.parse708captions_ = settings.parse708captions;
15860 _this.useDtsForTimestampOffset_ = settings.useDtsForTimestampOffset;
15861 _this.captionServices_ = settings.captionServices;
15862 _this.experimentalExactManifestTimings = settings.experimentalExactManifestTimings; // private instance variables
15863
15864 _this.checkBufferTimeout_ = null;
15865 _this.error_ = void 0;
15866 _this.currentTimeline_ = -1;
15867 _this.pendingSegment_ = null;
15868 _this.xhrOptions_ = null;
15869 _this.pendingSegments_ = [];
15870 _this.audioDisabled_ = false;
15871 _this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controller
15872
15873 _this.gopBuffer_ = [];
15874 _this.timeMapping_ = 0;
15875 _this.safeAppend_ = videojs__default["default"].browser.IE_VERSION >= 11;
15876 _this.appendInitSegment_ = {
15877 audio: true,
15878 video: true
15879 };
15880 _this.playlistOfLastInitSegment_ = {
15881 audio: null,
15882 video: null
15883 };
15884 _this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough
15885 // information yet to start the loading process (e.g., if the audio loader wants to
15886 // load a segment from the next timeline but the main loader hasn't yet crossed that
15887 // timeline), then the load call will be added to the queue until it is ready to be
15888 // processed.
15889
15890 _this.loadQueue_ = [];
15891 _this.metadataQueue_ = {
15892 id3: [],
15893 caption: []
15894 };
15895 _this.waitingOnRemove_ = false;
15896 _this.quotaExceededErrorRetryTimeout_ = null; // Fragmented mp4 playback
15897
15898 _this.activeInitSegmentId_ = null;
15899 _this.initSegments_ = {}; // HLSe (encrypted HLS) playback
15900
15901 _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
15902 _this.keyCache_ = {};
15903 _this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings
15904 // between a time in the display time and a segment index within
15905 // a playlist
15906
15907 _this.syncController_ = settings.syncController;
15908 _this.syncPoint_ = {
15909 segmentIndex: 0,
15910 time: 0
15911 };
15912 _this.transmuxer_ = _this.createTransmuxer_();
15913
15914 _this.triggerSyncInfoUpdate_ = function () {
15915 return _this.trigger('syncinfoupdate');
15916 };
15917
15918 _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);
15919
15920 _this.mediaSource_.addEventListener('sourceopen', function () {
15921 if (!_this.isEndOfStream_()) {
15922 _this.ended_ = false;
15923 }
15924 }); // fetchAtBuffer_ below determines the fetch location (buffered end vs. current time)
15925
15926
15927 _this.fetchAtBuffer_ = false;
15928 _this.logger_ = logger("SegmentLoader[" + _this.loaderType_ + "]");
15929 Object.defineProperty(_assertThisInitialized__default["default"](_this), 'state', {
15930 get: function get() {
15931 return this.state_;
15932 },
15933 set: function set(newState) {
15934 if (newState !== this.state_) {
15935 this.logger_(this.state_ + " -> " + newState);
15936 this.state_ = newState;
15937 this.trigger('statechange');
15938 }
15939 }
15940 });
15941
15942 _this.sourceUpdater_.on('ready', function () {
15943 if (_this.hasEnoughInfoToAppend_()) {
15944 _this.processCallQueue_();
15945 }
15946 }); // Only the main loader needs to listen for pending timeline changes, as the main
15947 // loader should wait for audio to be ready to change its timeline so that both main
15948 // and audio timelines change together. For more details, see the
15949 // shouldWaitForTimelineChange function.
15950
15951
15952 if (_this.loaderType_ === 'main') {
15953 _this.timelineChangeController_.on('pendingtimelinechange', function () {
15954 if (_this.hasEnoughInfoToAppend_()) {
15955 _this.processCallQueue_();
15956 }
15957 });
15958 } // The main loader only listens on pending timeline changes, but the audio loader,
15959 // since its loads follow main, needs to listen on timeline changes. For more details,
15960 // see the shouldWaitForTimelineChange function.
15961
15962
15963 if (_this.loaderType_ === 'audio') {
15964 _this.timelineChangeController_.on('timelinechange', function () {
15965 if (_this.hasEnoughInfoToLoad_()) {
15966 _this.processLoadQueue_();
15967 }
15968
15969 if (_this.hasEnoughInfoToAppend_()) {
15970 _this.processCallQueue_();
15971 }
15972 });
15973 }
15974
15975 return _this;
15976 }
15977
15978 var _proto = SegmentLoader.prototype;
15979
15980 _proto.createTransmuxer_ = function createTransmuxer_() {
15981 return segmentTransmuxer.createTransmuxer({
15982 remux: false,
15983 alignGopsAtEnd: this.safeAppend_,
15984 keepOriginalTimestamps: true,
15985 parse708captions: this.parse708captions_,
15986 captionServices: this.captionServices_
15987 });
15988 }
15989 /**
15990 * reset all of our media stats
15991 *
15992 * @private
15993 */
15994 ;
15995
15996 _proto.resetStats_ = function resetStats_() {
15997 this.mediaBytesTransferred = 0;
15998 this.mediaRequests = 0;
15999 this.mediaRequestsAborted = 0;
16000 this.mediaRequestsTimedout = 0;
16001 this.mediaRequestsErrored = 0;
16002 this.mediaTransferDuration = 0;
16003 this.mediaSecondsLoaded = 0;
16004 this.mediaAppends = 0;
16005 }
16006 /**
16007 * dispose of the SegmentLoader and reset to the default state
16008 */
16009 ;
16010
16011 _proto.dispose = function dispose() {
16012 this.trigger('dispose');
16013 this.state = 'DISPOSED';
16014 this.pause();
16015 this.abort_();
16016
16017 if (this.transmuxer_) {
16018 this.transmuxer_.terminate();
16019 }
16020
16021 this.resetStats_();
16022
16023 if (this.checkBufferTimeout_) {
16024 window__default["default"].clearTimeout(this.checkBufferTimeout_);
16025 }
16026
16027 if (this.syncController_ && this.triggerSyncInfoUpdate_) {
16028 this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
16029 }
16030
16031 this.off();
16032 };
16033
16034 _proto.setAudio = function setAudio(enable) {
16035 this.audioDisabled_ = !enable;
16036
16037 if (enable) {
16038 this.appendInitSegment_.audio = true;
16039 } else {
16040 // remove current track audio if it gets disabled
16041 this.sourceUpdater_.removeAudio(0, this.duration_());
16042 }
16043 }
16044 /**
16045 * abort anything that is currently going on with the SegmentLoader
16046 * and reset to a default state
16047 */
16048 ;
16049
16050 _proto.abort = function abort() {
16051 if (this.state !== 'WAITING') {
16052 if (this.pendingSegment_) {
16053 this.pendingSegment_ = null;
16054 }
16055
16056 return;
16057 }
16058
16059 this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY
16060 // since we are no longer "waiting" on any requests. XHR callback is not always run
16061 // when the request is aborted. This will prevent the loader from being stuck in the
16062 // WAITING state indefinitely.
16063
16064 this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the
16065 // next segment
16066
16067 if (!this.paused()) {
16068 this.monitorBuffer_();
16069 }
16070 }
16071 /**
16072 * abort all pending xhr requests and null any pending segments
16073 *
16074 * @private
16075 */
16076 ;
16077
16078 _proto.abort_ = function abort_() {
16079 if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
16080 this.pendingSegment_.abortRequests();
16081 } // clear out the segment being processed
16082
16083
16084 this.pendingSegment_ = null;
16085 this.callQueue_ = [];
16086 this.loadQueue_ = [];
16087 this.metadataQueue_.id3 = [];
16088 this.metadataQueue_.caption = [];
16089 this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
16090 this.waitingOnRemove_ = false;
16091 window__default["default"].clearTimeout(this.quotaExceededErrorRetryTimeout_);
16092 this.quotaExceededErrorRetryTimeout_ = null;
16093 };
16094
16095 _proto.checkForAbort_ = function checkForAbort_(requestId) {
16096 // If the state is APPENDING, then aborts will not modify the state, meaning the first
16097 // callback that happens should reset the state to READY so that loading can continue.
16098 if (this.state === 'APPENDING' && !this.pendingSegment_) {
16099 this.state = 'READY';
16100 return true;
16101 }
16102
16103 if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
16104 return true;
16105 }
16106
16107 return false;
16108 }
16109 /**
16110 * set an error on the segment loader and null out any pending segments
16111 *
16112 * @param {Error} error the error to set on the SegmentLoader
16113 * @return {Error} the error that was set or that is currently set
16114 */
16115 ;
16116
16117 _proto.error = function error(_error) {
16118 if (typeof _error !== 'undefined') {
16119 this.logger_('error occurred:', _error);
16120 this.error_ = _error;
16121 }
16122
16123 this.pendingSegment_ = null;
16124 return this.error_;
16125 };
16126
16127 _proto.endOfStream = function endOfStream() {
16128 this.ended_ = true;
16129
16130 if (this.transmuxer_) {
16131 // need to clear out any cached data to prepare for the new segment
16132 segmentTransmuxer.reset(this.transmuxer_);
16133 }
16134
16135 this.gopBuffer_.length = 0;
16136 this.pause();
16137 this.trigger('ended');
16138 }
16139 /**
16140 * Indicates which time ranges are buffered
16141 *
16142 * @return {TimeRange}
16143 * TimeRange object representing the current buffered ranges
16144 */
16145 ;
16146
16147 _proto.buffered_ = function buffered_() {
16148 var trackInfo = this.getMediaInfo_();
16149
16150 if (!this.sourceUpdater_ || !trackInfo) {
16151 return videojs__default["default"].createTimeRanges();
16152 }
16153
16154 if (this.loaderType_ === 'main') {
16155 var hasAudio = trackInfo.hasAudio,
16156 hasVideo = trackInfo.hasVideo,
16157 isMuxed = trackInfo.isMuxed;
16158
16159 if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
16160 return this.sourceUpdater_.buffered();
16161 }
16162
16163 if (hasVideo) {
16164 return this.sourceUpdater_.videoBuffered();
16165 }
16166 } // One case that can be ignored for now is audio only with alt audio,
16167 // as we don't yet have proper support for that.
16168
16169
16170 return this.sourceUpdater_.audioBuffered();
16171 }
16172 /**
16173 * Gets and sets init segment for the provided map
16174 *
16175 * @param {Object} map
16176 * The map object representing the init segment to get or set
16177 * @param {boolean=} set
16178 * If true, the init segment for the provided map should be saved
16179 * @return {Object}
16180 * map object for desired init segment
16181 */
16182 ;
16183
16184 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
16185 if (set === void 0) {
16186 set = false;
16187 }
16188
16189 if (!map) {
16190 return null;
16191 }
16192
16193 var id = initSegmentId(map);
16194 var storedMap = this.initSegments_[id];
16195
16196 if (set && !storedMap && map.bytes) {
16197 this.initSegments_[id] = storedMap = {
16198 resolvedUri: map.resolvedUri,
16199 byterange: map.byterange,
16200 bytes: map.bytes,
16201 tracks: map.tracks,
16202 timescales: map.timescales
16203 };
16204 }
16205
16206 return storedMap || map;
16207 }
16208 /**
16209 * Gets and sets key for the provided key
16210 *
16211 * @param {Object} key
16212 * The key object representing the key to get or set
16213 * @param {boolean=} set
16214 * If true, the key for the provided key should be saved
16215 * @return {Object}
16216 * Key object for desired key
16217 */
16218 ;
16219
16220 _proto.segmentKey = function segmentKey(key, set) {
16221 if (set === void 0) {
16222 set = false;
16223 }
16224
16225 if (!key) {
16226 return null;
16227 }
16228
16229 var id = segmentKeyId(key);
16230 var storedKey = this.keyCache_[id]; // TODO: We should use the HTTP Expires header to invalidate our cache per
16231 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
16232
16233 if (this.cacheEncryptionKeys_ && set && !storedKey && key.bytes) {
16234 this.keyCache_[id] = storedKey = {
16235 resolvedUri: key.resolvedUri,
16236 bytes: key.bytes
16237 };
16238 }
16239
16240 var result = {
16241 resolvedUri: (storedKey || key).resolvedUri
16242 };
16243
16244 if (storedKey) {
16245 result.bytes = storedKey.bytes;
16246 }
16247
16248 return result;
16249 }
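  // A sketch of the key cache behavior (hypothetical key object and loader
  // instance, with keys looked up by an id derived from the key object). With
  // cacheEncryptionKeys_ enabled, the first call that provides bytes stores
  // them, and later lookups for the same key get the cached bytes back:
  //
  //   loader.segmentKey({ resolvedUri: 'https://example.com/key.php', bytes: keyBytes }, true);
  //   loader.segmentKey({ resolvedUri: 'https://example.com/key.php' });
  //   // => { resolvedUri: 'https://example.com/key.php', bytes: keyBytes }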
16250 /**
16251 * Returns true if all configuration required for loading is present, otherwise false.
16252 *
16253 * @return {boolean} True if all the configuration is ready for loading
16254 * @private
16255 */
16256 ;
16257
16258 _proto.couldBeginLoading_ = function couldBeginLoading_() {
16259 return this.playlist_ && !this.paused();
16260 }
16261 /**
16262 * load a playlist and start to fill the buffer
16263 */
16264 ;
16265
16266 _proto.load = function load() {
16267 // un-pause
16268 this.monitorBuffer_(); // if we don't have a playlist yet, keep waiting for one to be
16269 // specified
16270
16271 if (!this.playlist_) {
16272 return;
16273 } // if all the configuration is ready, initialize and begin loading
16274
16275
16276 if (this.state === 'INIT' && this.couldBeginLoading_()) {
16277 return this.init_();
16278 } // if we're in the middle of processing a segment already, don't
16279 // kick off an additional segment request
16280
16281
16282 if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
16283 return;
16284 }
16285
16286 this.state = 'READY';
16287 }
16288 /**
16289 * Once all the starting parameters have been specified, begin
16290 * operation. This method should only be invoked from the INIT
16291 * state.
16292 *
16293 * @private
16294 */
16295 ;
16296
16297 _proto.init_ = function init_() {
16298 this.state = 'READY'; // if this is the audio segment loader, and it hasn't been inited before, then any old
16299 // audio data from the muxed content should be removed
16300
16301 this.resetEverything();
16302 return this.monitorBuffer_();
16303 }
16304 /**
16305 * set a playlist on the segment loader
16306 *
16307 * @param {PlaylistLoader} media the playlist to set on the segment loader
16308 */
16309 ;
16310
16311 _proto.playlist = function playlist(newPlaylist, options) {
16312 if (options === void 0) {
16313 options = {};
16314 }
16315
16316 if (!newPlaylist) {
16317 return;
16318 }
16319
16320 var oldPlaylist = this.playlist_;
16321 var segmentInfo = this.pendingSegment_;
16322 this.playlist_ = newPlaylist;
16323 this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist
16324 // is always our zero-time so force a sync update each time the playlist
16325 // is refreshed from the server
16326 //
16327 // Use the INIT state to determine if playback has started, as the playlist sync info
16328 // should be fixed once requests begin (as sync points are generated based on sync
16329 // info), but not before then.
16330
16331 if (this.state === 'INIT') {
16332 newPlaylist.syncInfo = {
16333 mediaSequence: newPlaylist.mediaSequence,
16334 time: 0
16335 }; // Setting the date time mapping means mapping the program date time (if available)
16336 // to time 0 on the player's timeline. The playlist's syncInfo serves a similar
16337 // purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
16338 // be updated as the playlist is refreshed before the loader starts loading, the
16339 // program date time mapping needs to be updated as well.
16340 //
16341 // This mapping is only done for the main loader because a program date time should
16342 // map equivalently between playlists.
16343
16344 if (this.loaderType_ === 'main') {
16345 this.syncController_.setDateTimeMappingForStart(newPlaylist);
16346 }
16347 }
16348
16349 var oldId = null;
16350
16351 if (oldPlaylist) {
16352 if (oldPlaylist.id) {
16353 oldId = oldPlaylist.id;
16354 } else if (oldPlaylist.uri) {
16355 oldId = oldPlaylist.uri;
16356 }
16357 }
16358
16359 this.logger_("playlist update [" + oldId + " => " + (newPlaylist.id || newPlaylist.uri) + "]"); // in VOD, this is always a rendition switch (or we updated our syncInfo above)
16360 // in LIVE, we always want to update with new playlists (including refreshes)
16361
16362 this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start
16363 // buffering now
16364
16365 if (this.state === 'INIT' && this.couldBeginLoading_()) {
16366 return this.init_();
16367 }
16368
16369 if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
16370 if (this.mediaIndex !== null) {
16371 // we must reset/resync the segment loader when we switch renditions and
16372 // the segment loader is already synced to the previous rendition
16373 // on playlist changes we want it to be possible to fetch
16374 // at the buffer for vod but not for live. So we use resetLoader
16375 // for live and resyncLoader for vod. We want this because
16376 // if a playlist uses independent and non-independent segments/parts the
16377 // buffer may not accurately reflect the next segment that we should try
16378 // downloading.
16379 if (!newPlaylist.endList) {
16380 this.resetLoader();
16381 } else {
16382 this.resyncLoader();
16383 }
16384 }
16385
16386 this.currentMediaInfo_ = void 0;
16387 this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being defined
16388
16389 return;
16390 } // we reloaded the same playlist so we are in a live scenario
16391 // and we will likely need to adjust the mediaIndex
16392
16393
16394 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
16395 this.logger_("live window shift [" + mediaSequenceDiff + "]"); // update the mediaIndex on the SegmentLoader
16396 // this is important because we can abort a request and this value must be
16397 // equal to the last appended mediaIndex
16398
16399 if (this.mediaIndex !== null) {
16400 this.mediaIndex -= mediaSequenceDiff; // this can happen if we are going to load the first segment, but get a playlist
16401 // update during that. mediaIndex would go from 0 to -1 if mediaSequence in the
16402 // new playlist was incremented by 1.
16403
16404 if (this.mediaIndex < 0) {
16405 this.mediaIndex = null;
16406 this.partIndex = null;
16407 } else {
16408 var segment = this.playlist_.segments[this.mediaIndex]; // partIndex should remain the same for the same segment
16409 // unless parts fell off of the playlist for this segment.
16410 // In that case we need to reset partIndex and resync
16411
16412 if (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {
16413 var mediaIndex = this.mediaIndex;
16414 this.logger_("currently processing part (index " + this.partIndex + ") no longer exists.");
16415 this.resetLoader(); // We want to throw away the partIndex and the data associated with it,
16416 // as the part was dropped from our current playlists segment.
16417 // The mediaIndex will still be valid so keep that around.
16418
16419 this.mediaIndex = mediaIndex;
16420 }
16421 }
16422 } // update the mediaIndex on the SegmentInfo object
16423 // this is important because we will update this.mediaIndex with this value
16424 // in `handleAppendsDone_` after the segment has been successfully appended
16425
16426
16427 if (segmentInfo) {
16428 segmentInfo.mediaIndex -= mediaSequenceDiff;
16429
16430 if (segmentInfo.mediaIndex < 0) {
16431 segmentInfo.mediaIndex = null;
16432 segmentInfo.partIndex = null;
16433 } else {
16434 // we need to update the referenced segment so that timing information is
16435 // saved for the new playlist's segment, however, if the segment fell off the
16436 // playlist, we can leave the old reference and just lose the timing info
16437 if (segmentInfo.mediaIndex >= 0) {
16438 segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
16439 }
16440
16441 if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {
16442 segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];
16443 }
16444 }
16445 }
16446
16447 this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
16448 }
16449 /**
16450 * Prevent the loader from fetching additional segments. If there
16451 * is a segment request outstanding, it will finish processing
16452 * before the loader halts. A segment loader can be unpaused by
16453 * calling load().
16454 */
16455 ;
16456
16457 _proto.pause = function pause() {
16458 if (this.checkBufferTimeout_) {
16459 window__default["default"].clearTimeout(this.checkBufferTimeout_);
16460 this.checkBufferTimeout_ = null;
16461 }
16462 }
16463 /**
16464 * Returns whether the segment loader is fetching additional
16465 * segments when given the opportunity. This property can be
16466 * modified through calls to pause() and load().
16467 */
16468 ;
16469
16470 _proto.paused = function paused() {
16471 return this.checkBufferTimeout_ === null;
16472 }
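  // A sketch of the pause/load contract (hypothetical loader instance):
  //
  //   loader.pause();  // clears the buffer-check timer
  //   loader.paused(); // => true
  //   loader.load();   // re-schedules monitorBuffer_ and, once READY, resumes fetching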
16473 /**
16474 * Delete all the buffered data and reset the SegmentLoader
16475 *
16476 * @param {Function} [done] an optional callback to be executed when the remove
16477 * operation is complete
16478 */
16479 ;
16480
16481 _proto.resetEverything = function resetEverything(done) {
16482 this.ended_ = false;
16483 this.appendInitSegment_ = {
16484 audio: true,
16485 video: true
16486 };
16487 this.resetLoader(); // remove from 0, the earliest point, to Infinity, to signify removal of everything.
16488 // VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
16489 // we then clamp the value to duration if necessary.
16490
16491 this.remove(0, Infinity, done); // clears fmp4 captions
16492
16493 if (this.transmuxer_) {
16494 this.transmuxer_.postMessage({
16495 action: 'clearAllMp4Captions'
16496 }); // reset the cache in the transmuxer
16497
16498 this.transmuxer_.postMessage({
16499 action: 'reset'
16500 });
16501 }
16502 }
16503 /**
16504 * Force the SegmentLoader to resync and start loading around the currentTime instead
16505 * of starting at the end of the buffer
16506 *
16507 * Useful for fast quality changes
16508 */
16509 ;
16510
16511 _proto.resetLoader = function resetLoader() {
16512 this.fetchAtBuffer_ = false;
16513 this.resyncLoader();
16514 }
16515 /**
16516 * Force the SegmentLoader to restart synchronization and make a conservative guess
16517 * before returning to the simple walk-forward method
16518 */
16519 ;
16520
16521 _proto.resyncLoader = function resyncLoader() {
16522 if (this.transmuxer_) {
16523 // need to clear out any cached data to prepare for the new segment
16524 segmentTransmuxer.reset(this.transmuxer_);
16525 }
16526
16527 this.mediaIndex = null;
16528 this.partIndex = null;
16529 this.syncPoint_ = null;
16530 this.isPendingTimestampOffset_ = false;
16531 this.callQueue_ = [];
16532 this.loadQueue_ = [];
16533 this.metadataQueue_.id3 = [];
16534 this.metadataQueue_.caption = [];
16535 this.abort();
16536
16537 if (this.transmuxer_) {
16538 this.transmuxer_.postMessage({
16539 action: 'clearParsedMp4Captions'
16540 });
16541 }
16542 }
16543 /**
16544 * Remove any data in the source buffer between start and end times
16545 *
16546 * @param {number} start - the start time of the region to remove from the buffer
16547 * @param {number} end - the end time of the region to remove from the buffer
16548 * @param {Function} [done] - an optional callback to be executed when the remove
 * operation is complete
16549 * @param {boolean} force - force all remove operations to happen
16551 */
16552 ;
16553
16554 _proto.remove = function remove(start, end, done, force) {
16555 if (done === void 0) {
16556 done = function done() {};
16557 }
16558
16559 if (force === void 0) {
16560 force = false;
16561 }
16562
16563 // clamp end to duration if we need to remove everything.
16564 // This is due to a browser bug that causes issues if we remove to Infinity.
16565 // videojs/videojs-contrib-hls#1225
16566 if (end === Infinity) {
16567 end = this.duration_();
16568 } // skip removes that would throw an error
16569 // commonly happens during a rendition switch at the start of a video
16570 // from start 0 to end 0
16571
16572
16573 if (end <= start) {
16574 this.logger_("skipping remove because end " + end + " is <= start " + start);
16575 return;
16576 }
16577
16578 if (!this.sourceUpdater_ || !this.getMediaInfo_()) {
16579 this.logger_('skipping remove because no source updater or starting media info'); // nothing to remove if we haven't processed any media
16580
16581 return;
16582 } // set it to one to complete this function's removes
16583
16584
16585 var removesRemaining = 1;
16586
16587 var removeFinished = function removeFinished() {
16588 removesRemaining--;
16589
16590 if (removesRemaining === 0) {
16591 done();
16592 }
16593 };
16594
16595 if (force || !this.audioDisabled_) {
16596 removesRemaining++;
16597 this.sourceUpdater_.removeAudio(start, end, removeFinished);
16598 } // While it would be better to only remove video if the main loader has video, this
16599 // should be safe with audio only as removeVideo will call back even if there's no
16600 // video buffer.
16601 //
16602 // In theory we can check to see if there's video before calling the remove, but in
16603 // the event that we're switching between renditions and from video to audio only
16604 // (when we add support for that), we may need to clear the video contents despite
16605 // what the new media will contain.
16606
16607
16608 if (force || this.loaderType_ === 'main') {
16609 this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
16610 removesRemaining++;
16611 this.sourceUpdater_.removeVideo(start, end, removeFinished);
16612 } // remove any captions and ID3 tags
16613
16614
16615 for (var track in this.inbandTextTracks_) {
16616 removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
16617 }
16618
16619 removeCuesFromTrack(start, end, this.segmentMetadataTrack_); // finished this function's removes
16620
16621 removeFinished();
16622 }
16623 /**
16624 * (re-)schedule monitorBufferTick_ to run as soon as possible
16625 *
16626 * @private
16627 */
16628 ;
16629
16630 _proto.monitorBuffer_ = function monitorBuffer_() {
16631 if (this.checkBufferTimeout_) {
16632 window__default["default"].clearTimeout(this.checkBufferTimeout_);
16633 }
16634
16635 this.checkBufferTimeout_ = window__default["default"].setTimeout(this.monitorBufferTick_.bind(this), 1);
16636 }
16637 /**
16638 * As long as the SegmentLoader is in the READY state, periodically
16639 * invoke fillBuffer_().
16640 *
16641 * @private
16642 */
16643 ;
16644
16645 _proto.monitorBufferTick_ = function monitorBufferTick_() {
16646 if (this.state === 'READY') {
16647 this.fillBuffer_();
16648 }
16649
16650 if (this.checkBufferTimeout_) {
16651 window__default["default"].clearTimeout(this.checkBufferTimeout_);
16652 }
16653
16654 this.checkBufferTimeout_ = window__default["default"].setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
16655 }
16656 /**
16657 * fill the buffer with segments unless the sourceBuffers are
16658 * currently updating
16659 *
16660 * Note: this function should only ever be called by monitorBuffer_
16661 * and never directly
16662 *
16663 * @private
16664 */
16665 ;
16666
16667 _proto.fillBuffer_ = function fillBuffer_() {
16668 // TODO since the source buffer maintains a queue, and we shouldn't call this function
16669 // except when we're ready for the next segment, this check can most likely be removed
16670 if (this.sourceUpdater_.updating()) {
16671 return;
16672 } // see if we need to begin loading immediately
16673
16674
16675 var segmentInfo = this.chooseNextRequest_();
16676
16677 if (!segmentInfo) {
16678 return;
16679 }
16680
16681 if (typeof segmentInfo.timestampOffset === 'number') {
16682 this.isPendingTimestampOffset_ = false;
16683 this.timelineChangeController_.pendingTimelineChange({
16684 type: this.loaderType_,
16685 from: this.currentTimeline_,
16686 to: segmentInfo.timeline
16687 });
16688 }
16689
16690 this.loadSegment_(segmentInfo);
16691 }
16692 /**
16693 * Determines if we should call endOfStream on the media source based
16694 * on the state of the buffer or if the appended segment was the final
16695 * segment in the playlist.
16696 *
16697 * @param {number} [mediaIndex] the media index of segment we last appended
16698 * @param {Object} [playlist] a media playlist object
16699 * @return {boolean} do we need to call endOfStream on the MediaSource
16700 */
16701 ;
16702
16703 _proto.isEndOfStream_ = function isEndOfStream_(mediaIndex, playlist, partIndex) {
16704 if (mediaIndex === void 0) {
16705 mediaIndex = this.mediaIndex;
16706 }
16707
16708 if (playlist === void 0) {
16709 playlist = this.playlist_;
16710 }
16711
16712 if (partIndex === void 0) {
16713 partIndex = this.partIndex;
16714 }
16715
16716 if (!playlist || !this.mediaSource_) {
16717 return false;
16718 }
16719
16720 var segment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex]; // mediaIndex is zero based but length is 1 based
16721
16722 var appendedLastSegment = mediaIndex + 1 === playlist.segments.length; // true if there are no parts, or this is the last part.
16723
16724 var appendedLastPart = !segment || !segment.parts || partIndex + 1 === segment.parts.length; // if we've buffered to the end of the video, we need to call endOfStream
16725 // so that the MediaSource can trigger the `ended` event when it runs out of
16726 // buffered data instead of waiting for more
16727
16728 return playlist.endList && this.mediaSource_.readyState === 'open' && appendedLastSegment && appendedLastPart;
16729 }
16730 /**
16731 * Determines what request should be made given current segment loader state.
16732 *
16733 * @return {Object} a request object that describes the segment/part to load
16734 */
16735 ;
16736
16737 _proto.chooseNextRequest_ = function chooseNextRequest_() {
16738 var buffered = this.buffered_();
16739 var bufferedEnd = lastBufferedEnd(buffered) || 0;
16740 var bufferedTime = timeAheadOf(buffered, this.currentTime_());
16741 var preloaded = !this.hasPlayed_() && bufferedTime >= 1;
16742 var haveEnoughBuffer = bufferedTime >= this.goalBufferLength_();
16743 var segments = this.playlist_.segments; // return no segment if:
16744 // 1. we don't have segments
16745 // 2. The video has not yet played and we already downloaded a segment
16746 // 3. we already have enough buffered time
16747
16748 if (!segments.length || preloaded || haveEnoughBuffer) {
16749 return null;
16750 }
16751
16752 this.syncPoint_ = this.syncPoint_ || this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
16753 var next = {
16754 partIndex: null,
16755 mediaIndex: null,
16756 startOfSegment: null,
16757 playlist: this.playlist_,
16758 isSyncRequest: Boolean(!this.syncPoint_)
16759 };
16760
16761 if (next.isSyncRequest) {
16762 next.mediaIndex = getSyncSegmentCandidate(this.currentTimeline_, segments, bufferedEnd);
16763 } else if (this.mediaIndex !== null) {
16764 var segment = segments[this.mediaIndex];
16765 var partIndex = typeof this.partIndex === 'number' ? this.partIndex : -1;
16766 next.startOfSegment = segment.end ? segment.end : bufferedEnd;
16767
16768 if (segment.parts && segment.parts[partIndex + 1]) {
16769 next.mediaIndex = this.mediaIndex;
16770 next.partIndex = partIndex + 1;
16771 } else {
16772 next.mediaIndex = this.mediaIndex + 1;
16773 }
16774 } else {
16775 // Find the segment containing the end of the buffer or current time.
16776 var _Playlist$getMediaInf = Playlist.getMediaInfoForTime({
16777 experimentalExactManifestTimings: this.experimentalExactManifestTimings,
16778 playlist: this.playlist_,
16779 currentTime: this.fetchAtBuffer_ ? bufferedEnd : this.currentTime_(),
16780 startingPartIndex: this.syncPoint_.partIndex,
16781 startingSegmentIndex: this.syncPoint_.segmentIndex,
16782 startTime: this.syncPoint_.time
16783 }),
16784 segmentIndex = _Playlist$getMediaInf.segmentIndex,
16785 startTime = _Playlist$getMediaInf.startTime,
16786 _partIndex = _Playlist$getMediaInf.partIndex;
16787
16788 next.getMediaInfoForTime = this.fetchAtBuffer_ ? "bufferedEnd " + bufferedEnd : "currentTime " + this.currentTime_();
16789 next.mediaIndex = segmentIndex;
16790 next.startOfSegment = startTime;
16791 next.partIndex = _partIndex;
16792 }
16793
16794 var nextSegment = segments[next.mediaIndex];
16795 var nextPart = nextSegment && typeof next.partIndex === 'number' && nextSegment.parts && nextSegment.parts[next.partIndex]; // if the next segment index is invalid or
16796 // the next partIndex is invalid do not choose a next segment.
16797
16798 if (!nextSegment || typeof next.partIndex === 'number' && !nextPart) {
16799 return null;
16800 } // if the next segment has parts and we don't have a partIndex,
16801 // set partIndex to 0
16802
16803
16804 if (typeof next.partIndex !== 'number' && nextSegment.parts) {
16805 next.partIndex = 0;
16806 nextPart = nextSegment.parts[0];
16807 } // if we have no buffered data then we need to make sure
16808 // that the next part we append is "independent" if possible.
16809 // So we check if the previous part is independent, and request
16810 // it if it is.
16811
16812
16813 if (!bufferedTime && nextPart && !nextPart.independent) {
16814 if (next.partIndex === 0) {
16815 var lastSegment = segments[next.mediaIndex - 1];
16816 var lastSegmentLastPart = lastSegment.parts && lastSegment.parts.length && lastSegment.parts[lastSegment.parts.length - 1];
16817
16818 if (lastSegmentLastPart && lastSegmentLastPart.independent) {
16819 next.mediaIndex -= 1;
16820 next.partIndex = lastSegment.parts.length - 1;
16821 next.independent = 'previous segment';
16822 }
16823 } else if (nextSegment.parts[next.partIndex - 1].independent) {
16824 next.partIndex -= 1;
16825 next.independent = 'previous part';
16826 }
16827 }
16828
16829 var ended = this.mediaSource_ && this.mediaSource_.readyState === 'ended'; // do not choose a next segment if all of the following:
16830 // 1. this is the last segment in the playlist
16831 // 2. end of stream has been called on the media source already
16832 // 3. the player is not seeking
16833
16834 if (next.mediaIndex >= segments.length - 1 && ended && !this.seeking_()) {
16835 return null;
16836 }
16837
16838 return this.generateSegmentInfo_(next);
16839 };
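// A minimal, hypothetical sketch (not part of VHS) of the advancement rule
// chooseNextRequest_ applies above when a position is already established:
// prefer the next part within the current segment, otherwise move on to the
// next segment. All names below are illustrative only.
var exampleAdvancePosition = function exampleAdvancePosition(segments, mediaIndex, partIndex) {
  var segment = segments[mediaIndex];
  var nextPartIndex = typeof partIndex === 'number' ? partIndex + 1 : 0;

  if (segment && segment.parts && segment.parts[nextPartIndex]) {
    // another part exists within the current segment, request it next
    return {
      mediaIndex: mediaIndex,
      partIndex: nextPartIndex
    };
  }

  // otherwise request the start of the following segment
  return {
    mediaIndex: mediaIndex + 1,
    partIndex: null
  };
};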
16840
16841 _proto.generateSegmentInfo_ = function generateSegmentInfo_(options) {
16842 var independent = options.independent,
16843 playlist = options.playlist,
16844 mediaIndex = options.mediaIndex,
16845 startOfSegment = options.startOfSegment,
16846 isSyncRequest = options.isSyncRequest,
16847 partIndex = options.partIndex,
16848 forceTimestampOffset = options.forceTimestampOffset,
16849 getMediaInfoForTime = options.getMediaInfoForTime;
16850 var segment = playlist.segments[mediaIndex];
16851 var part = typeof partIndex === 'number' && segment.parts[partIndex];
16852 var segmentInfo = {
16853 requestId: 'segment-loader-' + Math.random(),
16854 // resolve the segment URL relative to the playlist
16855 uri: part && part.resolvedUri || segment.resolvedUri,
16856 // the segment's mediaIndex at the time it was requested
16857 mediaIndex: mediaIndex,
16858 partIndex: part ? partIndex : null,
16859 // whether or not to update the SegmentLoader's state with this
16860 // segment's mediaIndex
16861 isSyncRequest: isSyncRequest,
16862 startOfSegment: startOfSegment,
16863 // the segment's playlist
16864 playlist: playlist,
16865 // unencrypted bytes of the segment
16866 bytes: null,
16867 // when a key is defined for this segment, the encrypted bytes
16868 encryptedBytes: null,
16869 // The target timestampOffset for this segment when we append it
16870 // to the source buffer
16871 timestampOffset: null,
16872 // The timeline that the segment is in
16873 timeline: segment.timeline,
16874 // The expected duration of the segment in seconds
16875 duration: part && part.duration || segment.duration,
16876 // retain the segment in case the playlist updates while doing an async process
16877 segment: segment,
16878 part: part,
16879 byteLength: 0,
16880 transmuxer: this.transmuxer_,
16881 // type of getMediaInfoForTime that was used to get this segment
16882 getMediaInfoForTime: getMediaInfoForTime,
16883 independent: independent
16884 };
16885 var overrideCheck = typeof forceTimestampOffset !== 'undefined' ? forceTimestampOffset : this.isPendingTimestampOffset_;
16886 segmentInfo.timestampOffset = this.timestampOffsetForSegment_({
16887 segmentTimeline: segment.timeline,
16888 currentTimeline: this.currentTimeline_,
16889 startOfSegment: startOfSegment,
16890 buffered: this.buffered_(),
16891 overrideCheck: overrideCheck
16892 });
16893 var audioBufferedEnd = lastBufferedEnd(this.sourceUpdater_.audioBuffered());
16894
16895 if (typeof audioBufferedEnd === 'number') {
16896 // since the transmuxer is using the actual timing values, but the buffer is
16897 // adjusted by the timestamp offset, we must adjust the value here
16898 segmentInfo.audioAppendStart = audioBufferedEnd - this.sourceUpdater_.audioTimestampOffset();
16899 }
16900
16901 if (this.sourceUpdater_.videoBuffered().length) {
16902 segmentInfo.gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_, // since the transmuxer is using the actual timing values, but the time is
16903 // adjusted by the timestamp offset, we must adjust the value here
16904 this.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);
16905 }
16906
16907 return segmentInfo;
16908 } // get the timestampoffset for a segment,
16909 // added so that vtt segment loader can override and prevent
16910 // adding timestamp offsets.
16911 ;
16912
16913 _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_(options) {
16914 return timestampOffsetForSegment(options);
16915 }
16916 /**
16917 * Determines if the network has enough bandwidth to complete the current segment
16918 * request in a timely manner. If not, the request will be aborted early and bandwidth
16919 * updated to trigger a playlist switch.
16920 *
16921 * @param {Object} stats
16922 * Object containing stats about the request timing and size
16923 * @private
16924 */
16925 ;
16926
16927 _proto.earlyAbortWhenNeeded_ = function earlyAbortWhenNeeded_(stats) {
16928 if (this.vhs_.tech_.paused() || // Don't abort if the current playlist is on the lowestEnabledRendition
16929 // TODO: Replace using timeout with a boolean indicating whether this playlist is
16930 // the lowestEnabledRendition.
16931 !this.xhrOptions_.timeout || // Don't abort if we have no bandwidth information to estimate segment sizes
16932 !this.playlist_.attributes.BANDWIDTH) {
16933 return;
16934 } // Wait at least 1 second since the first byte of data has been received before
16935 // using the calculated bandwidth from the progress event to allow the bitrate
16936 // to stabilize
16937
16938
16939 if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
16940 return;
16941 }
16942
16943 var currentTime = this.currentTime_();
16944 var measuredBandwidth = stats.bandwidth;
16945 var segmentDuration = this.pendingSegment_.duration;
16946 var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
16947 // if we are only left with less than 1 second when the request completes.
16948 // A negative timeUntilRebuffering indicates we are already rebuffering
16949
16950 var timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download
16951 // is larger than the estimated time until the player runs out of forward buffer
16952
16953 if (requestTimeRemaining <= timeUntilRebuffer$1) {
16954 return;
16955 }
16956
16957 var switchCandidate = minRebufferMaxBandwidthSelector({
16958 master: this.vhs_.playlists.master,
16959 currentTime: currentTime,
16960 bandwidth: measuredBandwidth,
16961 duration: this.duration_(),
16962 segmentDuration: segmentDuration,
16963 timeUntilRebuffer: timeUntilRebuffer$1,
16964 currentTimeline: this.currentTimeline_,
16965 syncController: this.syncController_
16966 });
16967
16968 if (!switchCandidate) {
16969 return;
16970 }
16971
16972 var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;
16973 var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
16974 var minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the
16975 // potential round trip time of the new request so that we are not too aggressive
16976 // with switching to a playlist that might save us a fraction of a second.
16977
16978 if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {
16979 minimumTimeSaving = 1;
16980 }
16981
16982 if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
16983 return;
16984 } // set the bandwidth to that of the desired playlist being sure to scale by
16985 // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
16986 // don't trigger a bandwidthupdate as the bandwidth is artificial
16987
16988
16989 this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
16990 this.trigger('earlyabort');
16991 };
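// A simplified, hypothetical restatement (not part of VHS) of the early-abort
// decision above: abort and switch renditions only when the estimated time to
// finish the download exceeds the time left before rebuffering, and the
// candidate rendition saves at least the minimum threshold.
var exampleShouldAbortEarly = function exampleShouldAbortEarly(requestTimeRemaining, timeUntilRebuffer, candidateRebufferingImpact, minimumTimeSaving) {
  if (requestTimeRemaining <= timeUntilRebuffer) {
    // the download should finish before playback stalls, so keep going
    return false;
  }

  var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer;
  var timeSavedBySwitching = rebufferingImpact - candidateRebufferingImpact;

  return timeSavedBySwitching >= minimumTimeSaving;
};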
16992
16993 _proto.handleAbort_ = function handleAbort_(segmentInfo) {
16994 this.logger_("Aborting " + segmentInfoString(segmentInfo));
16995 this.mediaRequestsAborted += 1;
16996 }
16997 /**
16998 * XHR `progress` event handler
16999 *
17000 * @param {Event} event
17001 * The XHR `progress` event
17002 * @param {Object} simpleSegment
17003 * A simplified segment object copy
17004 * @private
17005 */
17006 ;
17007
17008 _proto.handleProgress_ = function handleProgress_(event, simpleSegment) {
17009 this.earlyAbortWhenNeeded_(simpleSegment.stats);
17010
17011 if (this.checkForAbort_(simpleSegment.requestId)) {
17012 return;
17013 }
17014
17015 this.trigger('progress');
17016 };
17017
17018 _proto.handleTrackInfo_ = function handleTrackInfo_(simpleSegment, trackInfo) {
17019 this.earlyAbortWhenNeeded_(simpleSegment.stats);
17020
17021 if (this.checkForAbort_(simpleSegment.requestId)) {
17022 return;
17023 }
17024
17025 if (this.checkForIllegalMediaSwitch(trackInfo)) {
17026 return;
17027 }
17028
17029 trackInfo = trackInfo || {}; // When we have track info, determine what media types this loader is dealing with.
17030 // Guard against cases where we're not getting track info at all until we are
17031 // certain that all streams will provide it.
17032
17033 if (!shallowEqual(this.currentMediaInfo_, trackInfo)) {
17034 this.appendInitSegment_ = {
17035 audio: true,
17036 video: true
17037 };
17038 this.startingMediaInfo_ = trackInfo;
17039 this.currentMediaInfo_ = trackInfo;
17040 this.logger_('trackinfo update', trackInfo);
17041 this.trigger('trackinfo');
17042 } // trackinfo may cause an abort if the trackinfo
17043 // causes a codec change to an unsupported codec.
17044
17045
17046 if (this.checkForAbort_(simpleSegment.requestId)) {
17047 return;
17048 } // set trackinfo on the pending segment so that
17049 // it can append.
17050
17051
17052 this.pendingSegment_.trackInfo = trackInfo; // check if any calls were waiting on the track info
17053
17054 if (this.hasEnoughInfoToAppend_()) {
17055 this.processCallQueue_();
17056 }
17057 };
17058
17059 _proto.handleTimingInfo_ = function handleTimingInfo_(simpleSegment, mediaType, timeType, time) {
17060 this.earlyAbortWhenNeeded_(simpleSegment.stats);
17061
17062 if (this.checkForAbort_(simpleSegment.requestId)) {
17063 return;
17064 }
17065
17066 var segmentInfo = this.pendingSegment_;
17067 var timingInfoProperty = timingInfoPropertyForMedia(mediaType);
17068 segmentInfo[timingInfoProperty] = segmentInfo[timingInfoProperty] || {};
17069 segmentInfo[timingInfoProperty][timeType] = time;
17070 this.logger_("timinginfo: " + mediaType + " - " + timeType + " - " + time); // check if any calls were waiting on the timing info
17071
17072 if (this.hasEnoughInfoToAppend_()) {
17073 this.processCallQueue_();
17074 }
17075 };
17076
17077 _proto.handleCaptions_ = function handleCaptions_(simpleSegment, captionData) {
17078 var _this2 = this;
17079
17080 this.earlyAbortWhenNeeded_(simpleSegment.stats);
17081
17082 if (this.checkForAbort_(simpleSegment.requestId)) {
17083 return;
17084 } // This could only happen with fmp4 segments, but
17085 // even then should not happen in general
17086
17087
17088 if (captionData.length === 0) {
17089 this.logger_('SegmentLoader received no captions from a caption event');
17090 return;
17091 }
17092
17093 var segmentInfo = this.pendingSegment_; // Wait until we have some video data so that caption timing
17094 // can be adjusted by the timestamp offset
17095
17096 if (!segmentInfo.hasAppendedData_) {
17097 this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));
17098 return;
17099 }
17100
17101 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();
17102 var captionTracks = {}; // get total start/end and captions for each track/stream
17103
17104 captionData.forEach(function (caption) {
17105 // caption.stream is actually a track name...
17106 // set to the existing values in tracks or default values
17107 captionTracks[caption.stream] = captionTracks[caption.stream] || {
17108 // Infinity, as any other value will be less than this
17109 startTime: Infinity,
17110 captions: [],
17111 // 0, as any other value will be more than this
17112 endTime: 0
17113 };
17114 var captionTrack = captionTracks[caption.stream];
17115 captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);
17116 captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);
17117 captionTrack.captions.push(caption);
17118 });
17119 Object.keys(captionTracks).forEach(function (trackName) {
17120 var _captionTracks$trackN = captionTracks[trackName],
17121 startTime = _captionTracks$trackN.startTime,
17122 endTime = _captionTracks$trackN.endTime,
17123 captions = _captionTracks$trackN.captions;
17124 var inbandTextTracks = _this2.inbandTextTracks_;
17125
17126 _this2.logger_("adding cues from " + startTime + " -> " + endTime + " for " + trackName);
17127
17128 createCaptionsTrackIfNotExists(inbandTextTracks, _this2.vhs_.tech_, trackName); // clear out any cues that start and end at the same time period for the same track.
17129 // We do this because a rendition change that also changes the timescale for captions
17130 // will result in captions being re-parsed for certain segments. If we add them again
17131 // without clearing we will have two of the same captions visible.
17132
17133 removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);
17134 addCaptionData({
17135 captionArray: captions,
17136 inbandTextTracks: inbandTextTracks,
17137 timestampOffset: timestampOffset
17138 });
17139 }); // Reset stored captions since we added parsed
17140 // captions to a text track at this point
17141
17142 if (this.transmuxer_) {
17143 this.transmuxer_.postMessage({
17144 action: 'clearParsedMp4Captions'
17145 });
17146 }
17147 };
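// A standalone, hypothetical sketch (not part of VHS) of the grouping done in
// handleCaptions_ above: captions are bucketed by track name while tracking
// the minimum start and maximum end, so one removeCuesFromTrack call can clear
// the whole affected window before the cues are re-added.
var exampleGroupCaptionsByTrack = function exampleGroupCaptionsByTrack(captionData, timestampOffset) {
  var tracks = {};

  captionData.forEach(function (caption) {
    tracks[caption.stream] = tracks[caption.stream] || {
      startTime: Infinity,
      endTime: 0,
      captions: []
    };
    var track = tracks[caption.stream];

    track.startTime = Math.min(track.startTime, caption.startTime + timestampOffset);
    track.endTime = Math.max(track.endTime, caption.endTime + timestampOffset);
    track.captions.push(caption);
  });

  return tracks;
};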
17148
17149 _proto.handleId3_ = function handleId3_(simpleSegment, id3Frames, dispatchType) {
17150 this.earlyAbortWhenNeeded_(simpleSegment.stats);
17151
17152 if (this.checkForAbort_(simpleSegment.requestId)) {
17153 return;
17154 }
17155
17156 var segmentInfo = this.pendingSegment_; // we need to have appended data in order for the timestamp offset to be set
17157
17158 if (!segmentInfo.hasAppendedData_) {
17159 this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));
17160 return;
17161 }
17162
17163 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset(); // There's potentially an issue where we could double add metadata if there's a muxed
17164 // audio/video source with a metadata track, and an alt audio with a metadata track.
17165 // However, this probably won't happen, and if it does it can be handled then.
17166
17167 createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.vhs_.tech_);
17168 addMetadata({
17169 inbandTextTracks: this.inbandTextTracks_,
17170 metadataArray: id3Frames,
17171 timestampOffset: timestampOffset,
17172 videoDuration: this.duration_()
17173 });
17174 };
17175
17176 _proto.processMetadataQueue_ = function processMetadataQueue_() {
17177 this.metadataQueue_.id3.forEach(function (fn) {
17178 return fn();
17179 });
17180 this.metadataQueue_.caption.forEach(function (fn) {
17181 return fn();
17182 });
17183 this.metadataQueue_.id3 = [];
17184 this.metadataQueue_.caption = [];
17185 };
17186
17187 _proto.processCallQueue_ = function processCallQueue_() {
17188 var callQueue = this.callQueue_; // Clear out the queue before the queued functions are run, since some of the
17189 // functions may check the length of the load queue and default to pushing themselves
17190 // back onto the queue.
17191
17192 this.callQueue_ = [];
17193 callQueue.forEach(function (fun) {
17194 return fun();
17195 });
17196 };
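// The queue-draining pattern used by processCallQueue_ above (and
// processLoadQueue_ below), as a standalone hypothetical sketch (not part of
// VHS): swap the queue out before running it, so callbacks that re-enqueue
// themselves land on the fresh queue rather than re-running in this pass.
var exampleDrainQueue = function exampleDrainQueue(owner, queueName) {
  var queue = owner[queueName];

  // clear before running so re-enqueued callbacks are deferred, not re-run
  owner[queueName] = [];
  queue.forEach(function (fn) {
    return fn();
  });
};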
17197
17198 _proto.processLoadQueue_ = function processLoadQueue_() {
17199 var loadQueue = this.loadQueue_; // Clear out the queue before the queued functions are run, since some of the
17200 // functions may check the length of the load queue and default to pushing themselves
17201 // back onto the queue.
17202
17203 this.loadQueue_ = [];
17204 loadQueue.forEach(function (fun) {
17205 return fun();
17206 });
17207 }
17208 /**
17209 * Determines whether the loader has enough info to load the next segment.
17210 *
17211 * @return {boolean}
17212 * Whether or not the loader has enough info to load the next segment
17213 */
17214 ;
17215
17216 _proto.hasEnoughInfoToLoad_ = function hasEnoughInfoToLoad_() {
17217 // Since primary timing goes by video, only the audio loader potentially needs to wait
17218 // to load.
17219 if (this.loaderType_ !== 'audio') {
17220 return true;
17221 }
17222
17223 var segmentInfo = this.pendingSegment_; // A fill buffer must have already run to establish a pending segment before there's
17224 // enough info to load.
17225
17226 if (!segmentInfo) {
17227 return false;
17228 } // The first segment can and should be loaded immediately so that source buffers are
17229 // created together (before appending). Source buffer creation uses the presence of
17230 // audio and video data to determine whether to create audio/video source buffers, and
17231 // uses processed (transmuxed or parsed) media to determine the types required.
17232
17233
17234 if (!this.getCurrentMediaInfo_()) {
17235 return true;
17236 }
17237
17238 if ( // Technically, instead of waiting to load a segment on timeline changes, a segment
17239 // can be requested and downloaded and only wait before it is transmuxed or parsed.
17240 // But in practice, there are a few reasons why it is better to wait until a loader
17241 // is ready to append that segment before requesting and downloading:
17242 //
17243 // 1. Because audio and main loaders cross discontinuities together, if this loader
17244 // is waiting for the other to catch up, then instead of requesting another
17245 // segment and using up more bandwidth, by not yet loading, more bandwidth is
17246 // allotted to the loader currently behind.
17247 // 2. media-segment-request doesn't have to have logic to consider whether a segment
17248 // is ready to be processed or not, isolating the queueing behavior to the loader.
17249 // 3. The audio loader bases some of its segment properties on timing information
17250 // provided by the main loader, meaning that, if the logic for waiting on
17251 // processing was in media-segment-request, then it would also need to know how
17252 // to re-generate the segment information after the main loader caught up.
17253 shouldWaitForTimelineChange({
17254 timelineChangeController: this.timelineChangeController_,
17255 currentTimeline: this.currentTimeline_,
17256 segmentTimeline: segmentInfo.timeline,
17257 loaderType: this.loaderType_,
17258 audioDisabled: this.audioDisabled_
17259 })) {
17260 return false;
17261 }
17262
17263 return true;
17264 };
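// A hypothetical illustration (not the actual shouldWaitForTimelineChange
// implementation) of the rule reasoned through above: an audio loader defers
// loading a segment on a new timeline until the main loader has committed to
// that timeline, keeping the two loaders crossing discontinuities together.
var exampleAudioLoaderShouldWait = function exampleAudioLoaderShouldWait(mainLoaderTimeline, segmentTimeline) {
  return mainLoaderTimeline !== segmentTimeline;
};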
17265
17266 _proto.getCurrentMediaInfo_ = function getCurrentMediaInfo_(segmentInfo) {
17267 if (segmentInfo === void 0) {
17268 segmentInfo = this.pendingSegment_;
17269 }
17270
17271 return segmentInfo && segmentInfo.trackInfo || this.currentMediaInfo_;
17272 };
17273
17274 _proto.getMediaInfo_ = function getMediaInfo_(segmentInfo) {
17275 if (segmentInfo === void 0) {
17276 segmentInfo = this.pendingSegment_;
17277 }
17278
17279 return this.getCurrentMediaInfo_(segmentInfo) || this.startingMediaInfo_;
17280 };
17281
17282 _proto.hasEnoughInfoToAppend_ = function hasEnoughInfoToAppend_() {
17283 if (!this.sourceUpdater_.ready()) {
17284 return false;
17285 } // If content needs to be removed or the loader is waiting on an append reattempt,
17286 // then no additional content should be appended until the prior append is resolved.
17287
17288
17289 if (this.waitingOnRemove_ || this.quotaExceededErrorRetryTimeout_) {
17290 return false;
17291 }
17292
17293 var segmentInfo = this.pendingSegment_;
17294 var trackInfo = this.getCurrentMediaInfo_(); // no segment to append any data for or
17295 // we do not have information on this specific
17296 // segment yet
17297
17298 if (!segmentInfo || !trackInfo) {
17299 return false;
17300 }
17301
17302 var hasAudio = trackInfo.hasAudio,
17303 hasVideo = trackInfo.hasVideo,
17304 isMuxed = trackInfo.isMuxed;
17305
17306 if (hasVideo && !segmentInfo.videoTimingInfo) {
17307 return false;
17308 } // muxed content only relies on video timing information for now.
17309
17310
17311 if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {
17312 return false;
17313 }
17314
17315 if (shouldWaitForTimelineChange({
17316 timelineChangeController: this.timelineChangeController_,
17317 currentTimeline: this.currentTimeline_,
17318 segmentTimeline: segmentInfo.timeline,
17319 loaderType: this.loaderType_,
17320 audioDisabled: this.audioDisabled_
17321 })) {
17322 return false;
17323 }
17324
17325 return true;
17326 };
17327
17328 _proto.handleData_ = function handleData_(simpleSegment, result) {
17329 this.earlyAbortWhenNeeded_(simpleSegment.stats);
17330
17331 if (this.checkForAbort_(simpleSegment.requestId)) {
17332 return;
17333 } // If there's anything in the call queue, then this data came later and should be
17334 // executed after the calls currently queued.
17335
17336
17337 if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {
17338 this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));
17339 return;
17340 }
17341
17342 var segmentInfo = this.pendingSegment_; // update the time mapping so we can translate from display time to media time
17343
17344 this.setTimeMapping_(segmentInfo.timeline); // for tracking overall stats
17345
17346 this.updateMediaSecondsLoaded_(segmentInfo.part || segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort
17347 // logic may change behavior depending on the state, and changing state too early may
17348 // inflate our estimates of bandwidth. In the future this should be re-examined to
17349 // note more granular states.
17350 // don't process and append data if the mediaSource is closed
17351
17352 if (this.mediaSource_.readyState === 'closed') {
17353 return;
17354 } // if this request included an initialization segment, save that data
17355 // to the initSegment cache
17356
17357
17358 if (simpleSegment.map) {
17359 simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // move over init segment properties to media request
17360
17361 segmentInfo.segment.map = simpleSegment.map;
17362 } // if this request included a segment key, save that data in the cache
17363
17364
17365 if (simpleSegment.key) {
17366 this.segmentKey(simpleSegment.key, true);
17367 }
17368
17369 segmentInfo.isFmp4 = simpleSegment.isFmp4;
17370 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
17371
17372 if (segmentInfo.isFmp4) {
17373 this.trigger('fmp4');
17374 segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;
17375 } else {
17376 var trackInfo = this.getCurrentMediaInfo_();
17377 var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
17378 var firstVideoFrameTimeForData;
17379
17380 if (useVideoTimingInfo) {
17381 firstVideoFrameTimeForData = segmentInfo.videoTimingInfo.start;
17382 } // Segment loader knows more about segment timing than the transmuxer (in certain
17383 // aspects), so make any changes required for a more accurate start time.
17384 // Don't set the end time yet, as the segment may not be finished processing.
17385
17386
17387 segmentInfo.timingInfo.start = this.trueSegmentStart_({
17388 currentStart: segmentInfo.timingInfo.start,
17389 playlist: segmentInfo.playlist,
17390 mediaIndex: segmentInfo.mediaIndex,
17391 currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),
17392 useVideoTimingInfo: useVideoTimingInfo,
17393 firstVideoFrameTimeForData: firstVideoFrameTimeForData,
17394 videoTimingInfo: segmentInfo.videoTimingInfo,
17395 audioTimingInfo: segmentInfo.audioTimingInfo
17396 });
17397 } // Init segments for audio and video only need to be appended in certain cases. Now
17398 // that data is about to be appended, we can check the final cases to determine
17399 // whether we should append an init segment.
17400
17401
17402 this.updateAppendInitSegmentStatus(segmentInfo, result.type); // Timestamp offset should be updated once we get new data and have its timing info,
17403 // as we use the start of the segment to offset the best guess (playlist provided)
17404 // timestamp offset.
17405
17406 this.updateSourceBufferTimestampOffset_(segmentInfo); // if this is a sync request we need to determine whether it should
17407 // be appended or not.
17408
17409 if (segmentInfo.isSyncRequest) {
17410 // first save/update our timing info for this segment.
17411 // this is what allows us to choose an accurate segment
17412 // and the main reason we make a sync request.
17413 this.updateTimingInfoEnd_(segmentInfo);
17414 this.syncController_.saveSegmentTimingInfo({
17415 segmentInfo: segmentInfo,
17416 shouldSaveTimelineMapping: this.loaderType_ === 'main'
17417 });
17418 var next = this.chooseNextRequest_(); // If the sync request isn't the segment that would be requested next
17419 // after taking into account its timing info, do not append it.
17420
17421 if (next.mediaIndex !== segmentInfo.mediaIndex || next.partIndex !== segmentInfo.partIndex) {
17422 this.logger_('sync segment was incorrect, not appending');
17423 return;
17424 } // otherwise append it like any other segment as our guess was correct.
17425
17426
17427 this.logger_('sync segment was correct, appending');
17428 } // Save some state so that in the future anything waiting on first append (and/or
17429 // timestamp offset(s)) can process immediately. While the extra state isn't optimal,
17430 // we need some notion of whether the timestamp offset or other relevant information
17431 // has had a chance to be set.
17432
17433
17434 segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.
17435
17436 this.processMetadataQueue_();
17437 this.appendData_(segmentInfo, result);
17438 };
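// A condensed, hypothetical sketch (not part of VHS) of the sync-request check
// in handleData_ above: after the probed timing info is saved, the downloaded
// segment is only appended if it is still the one chooseNextRequest_ would
// pick with that information available.
var exampleSyncRequestIsAppendable = function exampleSyncRequestIsAppendable(next, pending) {
  return Boolean(next) && next.mediaIndex === pending.mediaIndex && next.partIndex === pending.partIndex;
};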
17439
17440 _proto.updateAppendInitSegmentStatus = function updateAppendInitSegmentStatus(segmentInfo, type) {
17441 // alt audio doesn't manage timestamp offset
17442 if (this.loaderType_ === 'main' && typeof segmentInfo.timestampOffset === 'number' && // in the case that we're handling partial data, we don't want to append an init
17443 // segment for each chunk
17444 !segmentInfo.changedTimestampOffset) {
17445 // if the timestamp offset changed, the timeline may have changed, so we have to re-
17446 // append init segments
17447 this.appendInitSegment_ = {
17448 audio: true,
17449 video: true
17450 };
17451 }
17452
17453 if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {
17454 // make sure we append init segment on playlist changes, in case the media config
17455 // changed
17456 this.appendInitSegment_[type] = true;
17457 }
17458 };
17459
17460 _proto.getInitSegmentAndUpdateState_ = function getInitSegmentAndUpdateState_(_ref4) {
17461 var type = _ref4.type,
17462 initSegment = _ref4.initSegment,
17463 map = _ref4.map,
17464 playlist = _ref4.playlist;
17465
17466 // "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section
17467 // (Section 3) required to parse the applicable Media Segments. It applies to every
17468 // Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag
17469 // or until the end of the playlist."
17470 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5
17471 if (map) {
17472 var id = initSegmentId(map);
17473
17474 if (this.activeInitSegmentId_ === id) {
17475 // don't need to re-append the init segment if the ID matches
17476 return null;
17477 } // a map-specified init segment takes priority over any transmuxed (or otherwise
17478 // obtained) init segment
17479 //
17480 // this also caches the init segment for later use
17481
17482
17483 initSegment = this.initSegmentForMap(map, true).bytes;
17484 this.activeInitSegmentId_ = id;
17485 } // We used to always prepend init segments for video, however, that shouldn't be
17486 // necessary. Instead, we should only append on changes, similar to what we've always
17487 // done for audio. This is more important (though may not be that important) for
17488 // frame-by-frame appending for LHLS, simply because of the increased quantity of
17489 // appends.
17490
17491
17492 if (initSegment && this.appendInitSegment_[type]) {
17493 // Make sure we track the playlist that we last used for the init segment, so that
17494 // we can re-append the init segment in the event that we get data from a new
17495 // playlist. Discontinuities and track changes are handled in other sections.
17496 this.playlistOfLastInitSegment_[type] = playlist; // Disable future init segment appends for this type. Until a change is necessary.
17497
17498 this.appendInitSegment_[type] = false; // we need to clear out the fmp4 active init segment id, since
17499 // we are appending the muxer init segment
17500
17501 this.activeInitSegmentId_ = null;
17502 return initSegment;
17503 }
17504
17505 return null;
17506 };
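// A hypothetical summary (not part of VHS) of the gating performed by
// getInitSegmentAndUpdateState_ above: a map-specified init segment whose id
// matches the active one never needs a re-append; anything else is appended
// only when an init-segment append is currently flagged for that type.
var exampleNeedsInitSegmentAppend = function exampleNeedsInitSegmentAppend(activeInitSegmentId, mapId, appendFlaggedForType) {
  if (mapId && mapId === activeInitSegmentId) {
    // the same EXT-X-MAP init segment is already active
    return false;
  }

  return Boolean(appendFlaggedForType);
};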
17507
17508 _proto.handleQuotaExceededError_ = function handleQuotaExceededError_(_ref5, error) {
17509 var _this3 = this;
17510
17511 var segmentInfo = _ref5.segmentInfo,
17512 type = _ref5.type,
17513 bytes = _ref5.bytes;
17514 var audioBuffered = this.sourceUpdater_.audioBuffered();
17515 var videoBuffered = this.sourceUpdater_.videoBuffered(); // For now we're ignoring any notion of gaps in the buffer, but they, in theory,
17516 // should be cleared out during the buffer removals. However, log in case it helps
17517 // debug.
17518
17519 if (audioBuffered.length > 1) {
17520 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the audio buffer: ' + timeRangesToArray(audioBuffered).join(', '));
17521 }
17522
17523 if (videoBuffered.length > 1) {
17524 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the video buffer: ' + timeRangesToArray(videoBuffered).join(', '));
17525 }
17526
17527 var audioBufferStart = audioBuffered.length ? audioBuffered.start(0) : 0;
17528 var audioBufferEnd = audioBuffered.length ? audioBuffered.end(audioBuffered.length - 1) : 0;
17529 var videoBufferStart = videoBuffered.length ? videoBuffered.start(0) : 0;
17530 var videoBufferEnd = videoBuffered.length ? videoBuffered.end(videoBuffered.length - 1) : 0;
17531
17532 if (audioBufferEnd - audioBufferStart <= MIN_BACK_BUFFER && videoBufferEnd - videoBufferStart <= MIN_BACK_BUFFER) {
17533 // Can't remove enough buffer to make room for new segment (or the browser doesn't
17534 // allow for appends of segments this size). In the future, it may be possible to
17535 // split up the segment and append in pieces, but for now, error out this playlist
17536 // in an attempt to switch to a more manageable rendition.
17537 this.logger_('On QUOTA_EXCEEDED_ERR, single segment too large to append to ' + 'buffer, triggering an error. ' + ("Appended byte length: " + bytes.byteLength + ", ") + ("audio buffer: " + timeRangesToArray(audioBuffered).join(', ') + ", ") + ("video buffer: " + timeRangesToArray(videoBuffered).join(', ') + ", "));
17538 this.error({
17539 message: 'Quota exceeded error with append of a single segment of content',
17540 excludeUntil: Infinity
17541 });
17542 this.trigger('error');
17543 return;
17544 } // To try to resolve the quota exceeded error, clear back buffer and retry. This means
17545 // that the segment-loader should block on future events until this one is handled, so
17546 // that it doesn't keep moving onto further segments. Adding the call to the call
17547 // queue will prevent further appends until waitingOnRemove_ and
17548 // quotaExceededErrorRetryTimeout_ are cleared.
17549 //
17550 // Note that this will only block the current loader. In the case of demuxed content,
17551 // the other load may keep filling as fast as possible. In practice, this should be
17552 // OK, as it is a rare case when either audio has a high enough bitrate to fill up a
17553 // source buffer, or video fills without enough room for audio to append (and without
17554 // the availability of clearing out seconds of back buffer to make room for audio).
17555 // But it might still be good to handle this case in the future as a TODO.
17556
17557
17558 this.waitingOnRemove_ = true;
17559 this.callQueue_.push(this.appendToSourceBuffer_.bind(this, {
17560 segmentInfo: segmentInfo,
17561 type: type,
17562 bytes: bytes
17563 }));
17564 var currentTime = this.currentTime_(); // Try to remove as much audio and video as possible to make room for new content
17565 // before retrying.
17566
17567 var timeToRemoveUntil = currentTime - MIN_BACK_BUFFER;
17568 this.logger_("On QUOTA_EXCEEDED_ERR, removing audio/video from 0 to " + timeToRemoveUntil);
17569 this.remove(0, timeToRemoveUntil, function () {
17570 _this3.logger_("On QUOTA_EXCEEDED_ERR, retrying append in " + MIN_BACK_BUFFER + "s");
17571
17572 _this3.waitingOnRemove_ = false; // wait the length of time allotted in the back buffer to prevent wasted
17573 // attempts (since we can't clear less than the minimum)
17574
17575 _this3.quotaExceededErrorRetryTimeout_ = window__default["default"].setTimeout(function () {
17576 _this3.logger_('On QUOTA_EXCEEDED_ERR, re-processing call queue');
17577
17578 _this3.quotaExceededErrorRetryTimeout_ = null;
17579
17580 _this3.processCallQueue_();
17581 }, MIN_BACK_BUFFER * 1000);
17582 }, true);
17583 };
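// The recovery plan above, restated as a hypothetical sketch (not part of
// VHS): remove everything from 0 up to MIN_BACK_BUFFER seconds behind the
// playhead, then wait MIN_BACK_BUFFER seconds before re-processing the call
// queue, since anything sooner couldn't clear more buffer than the minimum.
var exampleQuotaRecoveryPlan = function exampleQuotaRecoveryPlan(currentTime, minBackBuffer) {
  return {
    removeStart: 0,
    removeEnd: currentTime - minBackBuffer,
    retryDelayMs: minBackBuffer * 1000
  };
};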
17584
17585 _proto.handleAppendError_ = function handleAppendError_(_ref6, error) {
17586 var segmentInfo = _ref6.segmentInfo,
17587 type = _ref6.type,
17588 bytes = _ref6.bytes;
17589
17590 // if there's no error, nothing to do
17591 if (!error) {
17592 return;
17593 }
17594
17595 if (error.code === QUOTA_EXCEEDED_ERR) {
17596 this.handleQuotaExceededError_({
17597 segmentInfo: segmentInfo,
17598 type: type,
17599 bytes: bytes
17600 }); // A quota exceeded error should be recoverable with a future re-append, so no need
17601 // to trigger an append error.
17602
17603 return;
17604 }
17605
17606 this.logger_('Received non QUOTA_EXCEEDED_ERR on append', error);
17607 this.error(type + " append of " + bytes.length + "b failed for segment " + ("#" + segmentInfo.mediaIndex + " in playlist " + segmentInfo.playlist.id)); // If an append errors, we often can't recover.
17608 // (see https://w3c.github.io/media-source/#sourcebuffer-append-error).
17609 //
17610 // Trigger a special error so that it can be handled separately from normal,
17611 // recoverable errors.
17612
17613 this.trigger('appenderror');
17614 };
17615
17616 _proto.appendToSourceBuffer_ = function appendToSourceBuffer_(_ref7) {
17617 var segmentInfo = _ref7.segmentInfo,
17618 type = _ref7.type,
17619 initSegment = _ref7.initSegment,
17620 data = _ref7.data,
17621 bytes = _ref7.bytes;
17622
17623 // If this is a re-append, bytes were already created and don't need to be recreated
17624 if (!bytes) {
17625 var segments = [data];
17626 var byteLength = data.byteLength;
17627
17628 if (initSegment) {
17629 // if the media initialization segment is changing, append it before the content
17630 // segment
17631 segments.unshift(initSegment);
17632 byteLength += initSegment.byteLength;
17633 } // Technically we should be OK appending the init segment separately, however, we
17634 // haven't yet tested that, and prepending is how we have always done things.
17635
17636
17637 bytes = concatSegments({
17638 bytes: byteLength,
17639 segments: segments
17640 });
17641 }
17642
17643 this.sourceUpdater_.appendBuffer({
17644 segmentInfo: segmentInfo,
17645 type: type,
17646 bytes: bytes
17647 }, this.handleAppendError_.bind(this, {
17648 segmentInfo: segmentInfo,
17649 type: type,
17650 bytes: bytes
17651 }));
17652 };
17653
17654 _proto.handleSegmentTimingInfo_ = function handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {
17655 if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
17656 return;
17657 }
17658
17659 var segment = this.pendingSegment_.segment;
17660 var timingInfoProperty = type + "TimingInfo";
17661
17662 if (!segment[timingInfoProperty]) {
17663 segment[timingInfoProperty] = {};
17664 }
17665
17666 segment[timingInfoProperty].transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;
17667 segment[timingInfoProperty].transmuxedPresentationStart = segmentTimingInfo.start.presentation;
17668 segment[timingInfoProperty].transmuxedDecodeStart = segmentTimingInfo.start.decode;
17669 segment[timingInfoProperty].transmuxedPresentationEnd = segmentTimingInfo.end.presentation;
17670 segment[timingInfoProperty].transmuxedDecodeEnd = segmentTimingInfo.end.decode; // mainly used as a reference for debugging
17671
17672 segment[timingInfoProperty].baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;
17673 };
17674
17675 _proto.appendData_ = function appendData_(segmentInfo, result) {
17676 var type = result.type,
17677 data = result.data;
17678
17679 if (!data || !data.byteLength) {
17680 return;
17681 }
17682
17683 if (type === 'audio' && this.audioDisabled_) {
17684 return;
17685 }
17686
17687 var initSegment = this.getInitSegmentAndUpdateState_({
17688 type: type,
17689 initSegment: result.initSegment,
17690 playlist: segmentInfo.playlist,
17691 map: segmentInfo.isFmp4 ? segmentInfo.segment.map : null
17692 });
17693 this.appendToSourceBuffer_({
17694 segmentInfo: segmentInfo,
17695 type: type,
17696 initSegment: initSegment,
17697 data: data
17698 });
17699 }
17700 /**
17701 * load a specific segment from a request into the buffer
17702 *
17703 * @private
17704 */
17705 ;
17706
17707 _proto.loadSegment_ = function loadSegment_(segmentInfo) {
17708 var _this4 = this;
17709
17710 this.state = 'WAITING';
17711 this.pendingSegment_ = segmentInfo;
17712 this.trimBackBuffer_(segmentInfo);
17713
17714 if (typeof segmentInfo.timestampOffset === 'number') {
17715 if (this.transmuxer_) {
17716 this.transmuxer_.postMessage({
17717 action: 'clearAllMp4Captions'
17718 });
17719 }
17720 }
17721
17722 if (!this.hasEnoughInfoToLoad_()) {
17723 this.loadQueue_.push(function () {
17724 // regenerate the audioAppendStart, timestampOffset, etc as they
17725 // may have changed since this function was added to the queue.
17726 var options = _extends__default["default"]({}, segmentInfo, {
17727 forceTimestampOffset: true
17728 });
17729
17730 _extends__default["default"](segmentInfo, _this4.generateSegmentInfo_(options));
17731
17732 _this4.isPendingTimestampOffset_ = false;
17733
17734 _this4.updateTransmuxerAndRequestSegment_(segmentInfo);
17735 });
17736 return;
17737 }
17738
17739 this.updateTransmuxerAndRequestSegment_(segmentInfo);
17740 };
17741
17742 _proto.updateTransmuxerAndRequestSegment_ = function updateTransmuxerAndRequestSegment_(segmentInfo) {
17743 var _this5 = this;
17744
17745 // We'll update the source buffer's timestamp offset once we have transmuxed data, but
17746 // the transmuxer still needs to be updated before then.
17747 //
17748 // Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp
17749 // offset must be passed to the transmuxer for stream correcting adjustments.
17750 if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {
17751 this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was cleared
17752
17753 segmentInfo.gopsToAlignWith = [];
17754 this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start fresh
17755
17756 this.transmuxer_.postMessage({
17757 action: 'reset'
17758 });
17759 this.transmuxer_.postMessage({
17760 action: 'setTimestampOffset',
17761 timestampOffset: segmentInfo.timestampOffset
17762 });
17763 }
17764
17765 var simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);
17766 var isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex);
17767 var isWalkingForward = this.mediaIndex !== null;
17768 var isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && // currentTimeline starts at -1, so we shouldn't end the timeline switching to 0,
17769 // the first timeline
17770 segmentInfo.timeline > 0;
17771 var isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;
17772 this.logger_("Requesting " + segmentInfoString(segmentInfo)); // If there's an init segment associated with this segment, but it is not cached (identified by a lack of bytes),
17773 // then this init segment has never been seen before and should be appended.
17774 //
17775 // At this point the content type (audio/video or both) is not yet known, but it should be safe to set
17776 // both to true and leave the decision of whether to append the init segment to append time.
17777
17778 if (simpleSegment.map && !simpleSegment.map.bytes) {
17779 this.logger_('going to request init segment.');
17780 this.appendInitSegment_ = {
17781 video: true,
17782 audio: true
17783 };
17784 }
17785
17786 segmentInfo.abortRequests = mediaSegmentRequest({
17787 xhr: this.vhs_.xhr,
17788 xhrOptions: this.xhrOptions_,
17789 decryptionWorker: this.decrypter_,
17790 segment: simpleSegment,
17791 abortFn: this.handleAbort_.bind(this, segmentInfo),
17792 progressFn: this.handleProgress_.bind(this),
17793 trackInfoFn: this.handleTrackInfo_.bind(this),
17794 timingInfoFn: this.handleTimingInfo_.bind(this),
17795 videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),
17796 audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),
17797 captionsFn: this.handleCaptions_.bind(this),
17798 isEndOfTimeline: isEndOfTimeline,
17799 endedTimelineFn: function endedTimelineFn() {
17800 _this5.logger_('received endedtimeline callback');
17801 },
17802 id3Fn: this.handleId3_.bind(this),
17803 dataFn: this.handleData_.bind(this),
17804 doneFn: this.segmentRequestFinished_.bind(this),
17805 onTransmuxerLog: function onTransmuxerLog(_ref8) {
17806 var message = _ref8.message,
17807 level = _ref8.level,
17808 stream = _ref8.stream;
17809
17810 _this5.logger_(segmentInfoString(segmentInfo) + " logged from transmuxer stream " + stream + " as a " + level + ": " + message);
17811 }
17812 });
17813 }
17814 /**
17815 * trim the back buffer so that we don't have too much data
17816 * in the source buffer
17817 *
17818 * @private
17819 *
17820 * @param {Object} segmentInfo - the current segment
17821 */
17822 ;
17823
17824 _proto.trimBackBuffer_ = function trimBackBuffer_(segmentInfo) {
17825 var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10); // Chrome has a hard limit of 150MB of
17826 // buffer and a very conservative "garbage collector"
17827 // We manually clear out the old buffer to ensure
17828 // we don't trigger the QuotaExceeded error
17829 // on the source buffer during subsequent appends
17830
17831 if (removeToTime > 0) {
17832 this.remove(0, removeToTime);
17833 }
17834 }
17835 /**
17836 * created a simplified copy of the segment object with just the
17837 * information necessary to perform the XHR and decryption
17838 *
17839 * @private
17840 *
17841 * @param {Object} segmentInfo - the current segment
17842 * @return {Object} a simplified segment object copy
17843 */
17844 ;
17845
17846 _proto.createSimplifiedSegmentObj_ = function createSimplifiedSegmentObj_(segmentInfo) {
17847 var segment = segmentInfo.segment;
17848 var part = segmentInfo.part;
17849 var simpleSegment = {
17850 resolvedUri: part ? part.resolvedUri : segment.resolvedUri,
17851 byterange: part ? part.byterange : segment.byterange,
17852 requestId: segmentInfo.requestId,
17853 transmuxer: segmentInfo.transmuxer,
17854 audioAppendStart: segmentInfo.audioAppendStart,
17855 gopsToAlignWith: segmentInfo.gopsToAlignWith,
17856 part: segmentInfo.part
17857 };
17858 var previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];
17859
17860 if (previousSegment && previousSegment.timeline === segment.timeline) {
17861 // The baseStartTime of a segment is used to handle rollover when probing the TS
17862 // segment to retrieve timing information. Since the probe only looks at the media's
17863 // times (e.g., PTS and DTS values of the segment), and doesn't consider the
17864 // player's time (e.g., player.currentTime()), baseStartTime should reflect the
17865 // media time as well. transmuxedDecodeEnd represents the end time of a segment, in
17866 // seconds of media time, so should be used here. The previous segment is used since
17867 // the end of the previous segment should represent the beginning of the current
17868 // segment, so long as they are on the same timeline.
17869 if (previousSegment.videoTimingInfo) {
17870 simpleSegment.baseStartTime = previousSegment.videoTimingInfo.transmuxedDecodeEnd;
17871 } else if (previousSegment.audioTimingInfo) {
17872 simpleSegment.baseStartTime = previousSegment.audioTimingInfo.transmuxedDecodeEnd;
17873 }
17874 }
17875
17876 if (segment.key) {
17877 // if the media sequence is greater than 2^32, the IV will be incorrect
17878 // assuming 10s segments, that would be about 1300 years
17879 var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
17880 simpleSegment.key = this.segmentKey(segment.key);
17881 simpleSegment.key.iv = iv;
17882 }
17883
17884 if (segment.map) {
17885 simpleSegment.map = this.initSegmentForMap(segment.map);
17886 }
17887
17888 return simpleSegment;
17889 };
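// A hypothetical sketch (not part of VHS) of the default AES-128 IV used in
// createSimplifiedSegmentObj_ above when the key carries no explicit IV: per
// the HLS spec, the default IV is the segment's media sequence number as a
// 128-bit big-endian integer, which the code above keeps in the last 32-bit
// word of a Uint32Array.
var exampleDefaultSegmentIv = function exampleDefaultSegmentIv(mediaIndex, mediaSequence) {
  return new Uint32Array([0, 0, 0, mediaIndex + mediaSequence]);
};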
17890
17891 _proto.saveTransferStats_ = function saveTransferStats_(stats) {
17892 // every request counts as a media request even if it has been aborted
17893 // or canceled due to a timeout
17894 this.mediaRequests += 1;
17895
17896 if (stats) {
17897 this.mediaBytesTransferred += stats.bytesReceived;
17898 this.mediaTransferDuration += stats.roundTripTime;
17899 }
17900 };
17901
17902 _proto.saveBandwidthRelatedStats_ = function saveBandwidthRelatedStats_(duration, stats) {
17903 // byteLength will be used for throughput, and should be based on bytes received,
17904 // which we only know at the end of the request and should reflect total bytes
17905 // downloaded rather than just bytes processed from components of the segment
17906 this.pendingSegment_.byteLength = stats.bytesReceived;
17907
17908 if (duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
17909 this.logger_("Ignoring segment's bandwidth because its duration of " + duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
17910 return;
17911 }
17912
17913 this.bandwidth = stats.bandwidth;
17914 this.roundTrip = stats.roundTripTime;
17915 };
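// A hypothetical restatement (not part of VHS) of the recording rule above,
// plus one common way such a bandwidth sample can be derived; very short
// segments produce noisy samples, so they are skipped.
var exampleBandwidthSample = function exampleBandwidthSample(bytesReceived, transferDurationMs, segmentDuration, minDurationToSave) {
  if (segmentDuration < minDurationToSave) {
    // too short to be a trustworthy measurement
    return null;
  }

  // bits per second, the same unit as playlist BANDWIDTH attributes
  return bytesReceived * 8 * 1000 / transferDurationMs;
};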
17916
17917 _proto.handleTimeout_ = function handleTimeout_() {
17918 // although the VTT segment loader bandwidth isn't really used, it's good to
17919 // maintain functionality between segment loaders
17920 this.mediaRequestsTimedout += 1;
17921 this.bandwidth = 1;
17922 this.roundTrip = NaN;
17923 this.trigger('bandwidthupdate');
17924 this.trigger('timeout');
17925 }
17926 /**
17927 * Handle the callback from the segmentRequest function and set the
17928 * associated SegmentLoader state and errors if necessary
17929 *
17930 * @private
17931 */
17932 ;
17933
17934 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
17935 // TODO handle special cases, e.g., muxed audio/video but only audio in the segment
17936 // check the call queue directly since this function doesn't need to deal with any
17937 // data, and can continue even if the source buffers are not set up and we didn't get
17938 // any data from the segment
17939 if (this.callQueue_.length) {
17940 this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
17941 return;
17942 }
17943
17944 this.saveTransferStats_(simpleSegment.stats); // The request was aborted and the SegmentLoader has already been reset
17945
17946 if (!this.pendingSegment_) {
17947 return;
17948 } // the request was aborted and the SegmentLoader has already started
17949 // another request. this can happen when the timeout for an aborted
17950 // request triggers due to a limitation in the XHR library
17951 // do not count this as any sort of request or we risk double-counting
17952
17953
17954 if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
17955 return;
17956 } // an error occurred from the active pendingSegment_ so reset everything
17957
17958
17959 if (error) {
17960 this.pendingSegment_ = null;
17961 this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be done
17962
17963 if (error.code === REQUEST_ERRORS.ABORTED) {
17964 return;
17965 }
17966
17967 this.pause(); // the error is really just that at least one of the requests timed-out
17968 // set the bandwidth to a very low value and trigger an ABR switch to
17969 // take emergency action
17970
17971 if (error.code === REQUEST_ERRORS.TIMEOUT) {
17972 this.handleTimeout_();
17973 return;
17974 } // if control-flow has arrived here, then the error is real
17975 // emit an error event to blacklist the current playlist
17976
17977
17978 this.mediaRequestsErrored += 1;
17979 this.error(error);
17980 this.trigger('error');
17981 return;
17982 }
17983
17984 var segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request
17985 // generated for ABR purposes
17986
17987 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
17988 segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
17989
17990 if (result.gopInfo) {
17991 this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);
17992 } // Although we may have already started appending on progress, we shouldn't switch the
17993 // state away from loading until we are officially done loading the segment data.
17994
17995
17996 this.state = 'APPENDING'; // used for testing
17997
17998 this.trigger('appending');
17999 this.waitForAppendsToComplete_(segmentInfo);
18000 };
18001
18002 _proto.setTimeMapping_ = function setTimeMapping_(timeline) {
18003 var timelineMapping = this.syncController_.mappingForTimeline(timeline);
18004
18005 if (timelineMapping !== null) {
18006 this.timeMapping_ = timelineMapping;
18007 }
18008 };
18009
18010 _proto.updateMediaSecondsLoaded_ = function updateMediaSecondsLoaded_(segment) {
18011 if (typeof segment.start === 'number' && typeof segment.end === 'number') {
18012 this.mediaSecondsLoaded += segment.end - segment.start;
18013 } else {
18014 this.mediaSecondsLoaded += segment.duration;
18015 }
18016 };
18017
18018 _proto.shouldUpdateTransmuxerTimestampOffset_ = function shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {
18019 if (timestampOffset === null) {
18020 return false;
18021 } // note that we're potentially using the same timestamp offset for both video and
18022 // audio
18023
18024
18025 if (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
18026 return true;
18027 }
18028
18029 if (!this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
18030 return true;
18031 }
18032
18033 return false;
18034 };
18035
18036 _proto.trueSegmentStart_ = function trueSegmentStart_(_ref9) {
18037 var currentStart = _ref9.currentStart,
18038 playlist = _ref9.playlist,
18039 mediaIndex = _ref9.mediaIndex,
18040 firstVideoFrameTimeForData = _ref9.firstVideoFrameTimeForData,
18041 currentVideoTimestampOffset = _ref9.currentVideoTimestampOffset,
18042 useVideoTimingInfo = _ref9.useVideoTimingInfo,
18043 videoTimingInfo = _ref9.videoTimingInfo,
18044 audioTimingInfo = _ref9.audioTimingInfo;
18045
18046 if (typeof currentStart !== 'undefined') {
18047 // if start was set once, keep using it
18048 return currentStart;
18049 }
18050
18051 if (!useVideoTimingInfo) {
18052 return audioTimingInfo.start;
18053 }
18054
18055 var previousSegment = playlist.segments[mediaIndex - 1]; // The start of a segment should be the start of the first full frame contained
18056 // within that segment. Since the transmuxer maintains a cache of incomplete data
18057 // from, and/or including, the last frame seen, the start time may reflect a frame that starts
18058 // in the previous segment. Check for that case and ensure the start time is
18059 // accurate for the segment.
18060
18061 if (mediaIndex === 0 || !previousSegment || typeof previousSegment.start === 'undefined' || previousSegment.end !== firstVideoFrameTimeForData + currentVideoTimestampOffset) {
18062 return firstVideoFrameTimeForData;
18063 }
18064
18065 return videoTimingInfo.start;
18066 };
18067
18068 _proto.waitForAppendsToComplete_ = function waitForAppendsToComplete_(segmentInfo) {
18069 var trackInfo = this.getCurrentMediaInfo_(segmentInfo);
18070
18071 if (!trackInfo) {
18072 this.error({
18073 message: 'No starting media returned, likely due to an unsupported media format.',
18074 blacklistDuration: Infinity
18075 });
18076 this.trigger('error');
18077 return;
18078 } // Although transmuxing is done, appends may not yet be finished. Throw a marker
18079 // on each queue this loader is responsible for to ensure that the appends are
18080 // complete.
18081
18082
18083 var hasAudio = trackInfo.hasAudio,
18084 hasVideo = trackInfo.hasVideo,
18085 isMuxed = trackInfo.isMuxed;
18086 var waitForVideo = this.loaderType_ === 'main' && hasVideo;
18087 var waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;
18088 segmentInfo.waitingOnAppends = 0; // segments with no data
18089
18090 if (!segmentInfo.hasAppendedData_) {
18091 if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {
18092 // When there's no audio or video data in the segment, there's no audio or video
18093 // timing information.
18094 //
18095 // If there's no audio or video timing information, then the timestamp offset
18096 // can't be adjusted to the appropriate value for the transmuxer and source
18097 // buffers.
18098 //
18099 // Therefore, the next segment should be used to set the timestamp offset.
18100 this.isPendingTimestampOffset_ = true;
18101 } // override settings for metadata only segments
18102
18103
18104 segmentInfo.timingInfo = {
18105 start: 0
18106 };
18107 segmentInfo.waitingOnAppends++;
18108
18109 if (!this.isPendingTimestampOffset_) {
18110 // update the timestampoffset
18111 this.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have
18112 // no video/audio data.
18113
18114 this.processMetadataQueue_();
18115 } // append is "done" instantly with no data.
18116
18117
18118 this.checkAppendsDone_(segmentInfo);
18119 return;
18120 } // Since source updater could call back synchronously, do the increments first.
18121
18122
18123 if (waitForVideo) {
18124 segmentInfo.waitingOnAppends++;
18125 }
18126
18127 if (waitForAudio) {
18128 segmentInfo.waitingOnAppends++;
18129 }
18130
18131 if (waitForVideo) {
18132 this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
18133 }
18134
18135 if (waitForAudio) {
18136 this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
18137 }
18138 };
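// A hypothetical sketch (not part of VHS) of the bookkeeping above: one
// expected append per source buffer this loader is responsible for;
// checkAppendsDone_ below decrements the counter and calls handleAppendsDone_
// once it drains to zero.
var exampleCountExpectedAppends = function exampleCountExpectedAppends(trackInfo, loaderType, audioDisabled) {
  var waitForVideo = loaderType === 'main' && trackInfo.hasVideo;
  var waitForAudio = !audioDisabled && trackInfo.hasAudio && !trackInfo.isMuxed;

  return (waitForVideo ? 1 : 0) + (waitForAudio ? 1 : 0);
};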
18139
18140 _proto.checkAppendsDone_ = function checkAppendsDone_(segmentInfo) {
18141 if (this.checkForAbort_(segmentInfo.requestId)) {
18142 return;
18143 }
18144
18145 segmentInfo.waitingOnAppends--;
18146
18147 if (segmentInfo.waitingOnAppends === 0) {
18148 this.handleAppendsDone_();
18149 }
18150 };
18151
18152 _proto.checkForIllegalMediaSwitch = function checkForIllegalMediaSwitch(trackInfo) {
18153 var illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.getCurrentMediaInfo_(), trackInfo);
18154
18155 if (illegalMediaSwitchError) {
18156 this.error({
18157 message: illegalMediaSwitchError,
18158 blacklistDuration: Infinity
18159 });
18160 this.trigger('error');
18161 return true;
18162 }
18163
18164 return false;
18165 };
18166
18167 _proto.updateSourceBufferTimestampOffset_ = function updateSourceBufferTimestampOffset_(segmentInfo) {
18168 if (segmentInfo.timestampOffset === null || // we don't yet have the start for whatever media type (video or audio) has
18169 // priority, timing-wise, so we must wait
18170 typeof segmentInfo.timingInfo.start !== 'number' || // already updated the timestamp offset for this segment
18171 segmentInfo.changedTimestampOffset || // the alt audio loader should not be responsible for setting the timestamp offset
18172 this.loaderType_ !== 'main') {
18173 return;
18174 }
18175
18176 var didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that
18177 // the timing info here comes from video. In the event that the audio is longer than
18178 // the video, this will trim the start of the audio.
18179 // This also trims any offset from 0 at the beginning of the media
18180
18181 segmentInfo.timestampOffset -= this.getSegmentStartTimeForTimestampOffsetCalculation_({
18182 videoTimingInfo: segmentInfo.segment.videoTimingInfo,
18183 audioTimingInfo: segmentInfo.segment.audioTimingInfo,
18184 timingInfo: segmentInfo.timingInfo
18185 }); // In the event that there are part segment downloads, each will try to update the
18186 // timestamp offset. Retaining this bit of state prevents us from updating in the
18187 // future (within the same segment), however, there may be a better way to handle it.
18188
18189 segmentInfo.changedTimestampOffset = true;
18190
18191 if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
18192 this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);
18193 didChange = true;
18194 }
18195
18196 if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
18197 this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);
18198 didChange = true;
18199 }
18200
18201 if (didChange) {
18202 this.trigger('timestampoffset');
18203 }
18204 };
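// A worked example of the offset arithmetic above, with illustrative numbers.
// MSE buffers a sample at (media timestamp + timestampOffset), so subtracting
// the segment's internal start time re-bases it to the intended position:
//
//   segmentInfo.timestampOffset = 10; // segment should start at 10s of buffered time
//   var segmentStart = 95;            // but its frames are stamped from 95s
//   segmentInfo.timestampOffset -= segmentStart; // 10 - 95 = -85
//   // a frame stamped 95s is buffered at 95 + (-85) = 10s, as intended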
18205
18206 _proto.getSegmentStartTimeForTimestampOffsetCalculation_ = function getSegmentStartTimeForTimestampOffsetCalculation_(_ref10) {
18207 var videoTimingInfo = _ref10.videoTimingInfo,
18208 audioTimingInfo = _ref10.audioTimingInfo,
18209 timingInfo = _ref10.timingInfo;
18210
18211 if (!this.useDtsForTimestampOffset_) {
18212 return timingInfo.start;
18213 }
18214
18215 if (videoTimingInfo && typeof videoTimingInfo.transmuxedDecodeStart === 'number') {
18216 return videoTimingInfo.transmuxedDecodeStart;
18217 } // handle audio only
18218
18219
18220 if (audioTimingInfo && typeof audioTimingInfo.transmuxedDecodeStart === 'number') {
18221 return audioTimingInfo.transmuxedDecodeStart;
18222 } // handle content not transmuxed (e.g., MP4)
18223
18224
18225 return timingInfo.start;
18226 };
18227
18228 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_(segmentInfo) {
18229 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
18230 var trackInfo = this.getMediaInfo_();
18231 var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
18232 var prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;
18233
18234 if (!prioritizedTimingInfo) {
18235 return;
18236 }
18237
18238 segmentInfo.timingInfo.end = typeof prioritizedTimingInfo.end === 'number' ? // End time may not exist in a case where we aren't parsing the full segment (one
18239 // current example is the case of fmp4), so use the rough duration to calculate an
18240 // end time.
18241 prioritizedTimingInfo.end : prioritizedTimingInfo.start + segmentInfo.duration;
18242 }
18243 /**
18244 * callback to run when appendBuffer is finished. detects if we are
18245 * in a good state to do things with the data we got, or if we need
18246 * to wait for more
18247 *
18248 * @private
18249 */
18250 ;
18251
18252 _proto.handleAppendsDone_ = function handleAppendsDone_() {
18253 // appendsdone can cause an abort
18254 if (this.pendingSegment_) {
18255 this.trigger('appendsdone');
18256 }
18257
18258 if (!this.pendingSegment_) {
18259 this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in
18260 // all appending cases?
18261
18262 if (!this.paused()) {
18263 this.monitorBuffer_();
18264 }
18265
18266 return;
18267 }
18268
18269 var segmentInfo = this.pendingSegment_; // Now that the end of the segment has been reached, we can set the end time. It's
18270 // best to wait until all appends are done so we're sure that the primary media is
18271 // finished (and we have its end time).
18272
18273 this.updateTimingInfoEnd_(segmentInfo);
18274
18275 if (this.shouldSaveSegmentTimingInfo_) {
18276 // Timeline mappings should only be saved for the main loader. This is for multiple
18277 // reasons:
18278 //
18279 // 1) Only one mapping is saved per timeline, meaning that if both the audio loader
18280 // and the main loader try to save the timeline mapping, whichever comes later
18281 // will overwrite the first. In theory this is OK, as the mappings should be the
18282 // same; however, it breaks down in case (2)
18283 // 2) In the event of a live stream, the initial live point will make for a somewhat
18284 // arbitrary mapping. If audio and video streams are not perfectly in-sync, then
18285 // the mapping will be off for one of the streams, dependent on which one was
18286 // first saved (see (1)).
18287 // 3) Primary timing goes by video in VHS, so the mapping should be video.
18288 //
18289 // Since the audio loader will wait for the main loader to load the first segment,
18290 // the main loader will save the first timeline mapping, and ensure that there won't
18291 // be a case where audio loads two segments without saving a mapping (thus leading
18292 // to missing segment timing info).
18293 this.syncController_.saveSegmentTimingInfo({
18294 segmentInfo: segmentInfo,
18295 shouldSaveTimelineMapping: this.loaderType_ === 'main'
18296 });
18297 }
18298
18299 var segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);
18300
18301 if (segmentDurationMessage) {
18302 if (segmentDurationMessage.severity === 'warn') {
18303 videojs__default["default"].log.warn(segmentDurationMessage.message);
18304 } else {
18305 this.logger_(segmentDurationMessage.message);
18306 }
18307 }
18308
18309 this.recordThroughput_(segmentInfo);
18310 this.pendingSegment_ = null;
18311 this.state = 'READY';
18312
18313 if (segmentInfo.isSyncRequest) {
18314 this.trigger('syncinfoupdate'); // if the sync request was not appended
18315 // then it was not the correct segment.
18316 // throw it away and use the data it gave us
18317 // to get the correct one.
18318
18319 if (!segmentInfo.hasAppendedData_) {
18320 this.logger_("Throwing away un-appended sync request " + segmentInfoString(segmentInfo));
18321 return;
18322 }
18323 }
18324
18325 this.logger_("Appended " + segmentInfoString(segmentInfo));
18326 this.addSegmentMetadataCue_(segmentInfo);
18327 this.fetchAtBuffer_ = true;
18328
18329 if (this.currentTimeline_ !== segmentInfo.timeline) {
18330 this.timelineChangeController_.lastTimelineChange({
18331 type: this.loaderType_,
18332 from: this.currentTimeline_,
18333 to: segmentInfo.timeline
18334 }); // If audio is not disabled, the main segment loader is responsible for updating
18335 // the audio timeline as well. If the content is video only, this won't have any
18336 // impact.
18337
18338 if (this.loaderType_ === 'main' && !this.audioDisabled_) {
18339 this.timelineChangeController_.lastTimelineChange({
18340 type: 'audio',
18341 from: this.currentTimeline_,
18342 to: segmentInfo.timeline
18343 });
18344 }
18345 }
18346
18347 this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before
18348 // the following conditional otherwise it may consider this a bad "guess"
18349 // and attempt to resync when the post-update seekable window and live
18350 // point would mean that this was the perfect segment to fetch
18351
18352 this.trigger('syncinfoupdate');
18353 var segment = segmentInfo.segment;
18354 var part = segmentInfo.part;
18355 var badSegmentGuess = segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3;
18356 var badPartGuess = part && part.end && this.currentTime_() - part.end > segmentInfo.playlist.partTargetDuration * 3; // If we previously appended a segment/part that ends more than 3 part/targetDurations before
18357 // the currentTime_, it means that our guess was too conservative.
18358 // In that case, reset the loader state so that we try to use any information gained
18359 // from the previous request to create a new, more accurate, sync-point.
18360
18361 if (badSegmentGuess || badPartGuess) {
18362 this.logger_("bad " + (badSegmentGuess ? 'segment' : 'part') + " " + segmentInfoString(segmentInfo));
18363 this.resetEverything();
18364 return;
18365 }
18366
18367 var isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we have enough time to get a sync segment
18368 // and conservatively guess
18369
18370 if (isWalkingForward) {
18371 this.trigger('bandwidthupdate');
18372 }
18373
18374 this.trigger('progress');
18375 this.mediaIndex = segmentInfo.mediaIndex;
18376 this.partIndex = segmentInfo.partIndex; // any time an update finishes and the last segment is in the
18377 // buffer, end the stream. this ensures the "ended" event will
18378 // fire if playback reaches that point.
18379
18380 if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex)) {
18381 this.endOfStream();
18382 } // used for testing
18383
18384
18385 this.trigger('appended');
18386
18387 if (segmentInfo.hasAppendedData_) {
18388 this.mediaAppends++;
18389 }
18390
18391 if (!this.paused()) {
18392 this.monitorBuffer_();
18393 }
18394 }
18395 /**
18396 * Records the current throughput of the decrypt, transmux, and append
18397 * portion of the segment pipeline. `throughput.rate` is the cumulative
18398 * moving average of the throughput. `throughput.count` is the number of
18399 * data points in the average.
18400 *
18401 * @private
18402 * @param {Object} segmentInfo the object returned by loadSegment
18403 */
18404 ;
18405
18406 _proto.recordThroughput_ = function recordThroughput_(segmentInfo) {
18407 if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
18408 this.logger_("Ignoring segment's throughput because its duration of " + segmentInfo.duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
18409 return;
18410 }
18411
18412 var rate = this.throughput.rate; // Add one to the time to ensure that we don't accidentally attempt to divide
18413 // by zero in the case where the throughput is ridiculously high
18414
18415 var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1; // Multiply by 8000 to convert from bytes/millisecond to bits/second
18416
18417 var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000); // This is just a cumulative moving average calculation:
18418 // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
18419
18420 this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
18421 }
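// A worked example of the cumulative moving average above, with illustrative
// numbers. Given oldAvg = 4e6 bits/s over count = 3 samples and a new sample
// of 8e6 bits/s:
//
//   var newAvg = 4e6 + (8e6 - 4e6) / (3 + 1); // 5e6 bits/s
//   // identical to the plain mean: (3 * 4e6 + 8e6) / 4 === 5e6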
18422 /**
18423 * Adds a cue to the segment-metadata track with some metadata information about the
18424 * segment
18425 *
18426 * @private
18427 * @param {Object} segmentInfo
18428 * the object returned by loadSegment
18429 * @method addSegmentMetadataCue_
18430 */
18431 ;
18432
18433 _proto.addSegmentMetadataCue_ = function addSegmentMetadataCue_(segmentInfo) {
18434 if (!this.segmentMetadataTrack_) {
18435 return;
18436 }
18437
18438 var segment = segmentInfo.segment;
18439 var start = segment.start;
18440 var end = segment.end; // Do not try adding the cue if the start and end times are invalid.
18441
18442 if (!finite(start) || !finite(end)) {
18443 return;
18444 }
18445
18446 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
18447 var Cue = window__default["default"].WebKitDataCue || window__default["default"].VTTCue;
18448 var value = {
18449 custom: segment.custom,
18450 dateTimeObject: segment.dateTimeObject,
18451 dateTimeString: segment.dateTimeString,
18452 bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
18453 resolution: segmentInfo.playlist.attributes.RESOLUTION,
18454 codecs: segmentInfo.playlist.attributes.CODECS,
18455 byteLength: segmentInfo.byteLength,
18456 uri: segmentInfo.uri,
18457 timeline: segmentInfo.timeline,
18458 playlist: segmentInfo.playlist.id,
18459 start: start,
18460 end: end
18461 };
18462 var data = JSON.stringify(value);
18463 var cue = new Cue(start, end, data); // Attach the metadata to the value property of the cue to keep consistency between
18464 // WebKitDataCue in Safari and VTTCue in other browsers
18465
18466 cue.value = value;
18467 this.segmentMetadataTrack_.addCue(cue);
18468 };
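// A sketch of how a player integration might consume the cues written above
// (assumes a video.js `player` with VHS active and a metadata track created
// with the label 'segment-metadata'):
//
//   var tracks = player.textTracks();
//   for (var i = 0; i < tracks.length; i++) {
//     if (tracks[i].label === 'segment-metadata') {
//       tracks[i].addEventListener('cuechange', function () {
//         var cue = this.activeCues[0];
//         if (cue) {
//           // cue.value carries playlist, bandwidth, resolution, codecs, etc.
//           console.log(cue.value.playlist, cue.value.bandwidth);
//         }
//       });
//     }
//   }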
18469
18470 return SegmentLoader;
18471}(videojs__default["default"].EventTarget);
18472
18473function noop() {}
18474
18475var toTitleCase = function toTitleCase(string) {
18476 if (typeof string !== 'string') {
18477 return string;
18478 }
18479
18480 return string.replace(/./, function (w) {
18481 return w.toUpperCase();
18482 });
18483};
18484
18485var bufferTypes = ['video', 'audio'];
18486
18487var _updating = function updating(type, sourceUpdater) {
18488 var sourceBuffer = sourceUpdater[type + "Buffer"];
18489 return sourceBuffer && sourceBuffer.updating || sourceUpdater.queuePending[type];
18490};
18491
18492var nextQueueIndexOfType = function nextQueueIndexOfType(type, queue) {
18493 for (var i = 0; i < queue.length; i++) {
18494 var queueEntry = queue[i];
18495
18496 if (queueEntry.type === 'mediaSource') {
18497 // If the next entry is a media source entry (uses multiple source buffers), block
18498 // processing to allow it to go through first.
18499 return null;
18500 }
18501
18502 if (queueEntry.type === type) {
18503 return i;
18504 }
18505 }
18506
18507 return null;
18508};
18509
18510var shiftQueue = function shiftQueue(type, sourceUpdater) {
18511 if (sourceUpdater.queue.length === 0) {
18512 return;
18513 }
18514
18515 var queueIndex = 0;
18516 var queueEntry = sourceUpdater.queue[queueIndex];
18517
18518 if (queueEntry.type === 'mediaSource') {
18519 if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
18520 sourceUpdater.queue.shift();
18521 queueEntry.action(sourceUpdater);
18522
18523 if (queueEntry.doneFn) {
18524 queueEntry.doneFn();
18525 } // Only specific source buffer actions must wait for async updateend events. Media
18526 // Source actions process synchronously. Therefore, both audio and video source
18527 // buffers are now clear to process the next queue entries.
18528
18529
18530 shiftQueue('audio', sourceUpdater);
18531 shiftQueue('video', sourceUpdater);
18532 } // Media Source actions require both source buffers, so if the media source action
18533 // couldn't process yet (because one or both source buffers are busy), block other
18534 // queue actions until both are available and the media source action can process.
18535
18536
18537 return;
18538 }
18539
18540 if (type === 'mediaSource') {
18541 // If the queue was shifted by a media source action (this happens when pushing a
18542 // media source action onto the queue), then it wasn't from an updateend event from an
18543 // audio or video source buffer, so there's no change from previous state, and no
18544 // processing should be done.
18545 return;
18546 } // Media source queue entries don't need to consider whether the source updater is
18547 // started (i.e., source buffers are created) as they don't need the source buffers, but
18548 // source buffer queue entries do.
18549
18550
18551 if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || _updating(type, sourceUpdater)) {
18552 return;
18553 }
18554
18555 if (queueEntry.type !== type) {
18556 queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);
18557
18558 if (queueIndex === null) {
18559 // Either there's no queue entry that uses this source buffer type in the queue, or
18560 // there's a media source queue entry before the next entry of this type, in which
18561 // case wait for that action to process first.
18562 return;
18563 }
18564
18565 queueEntry = sourceUpdater.queue[queueIndex];
18566 }
18567
18568 sourceUpdater.queue.splice(queueIndex, 1); // Keep a record that this source buffer type is in use.
18569 //
18570 // The queue pending operation must be set before the action is performed in the event
18571 // that the action results in a synchronous event that is acted upon. For instance, if
18572 // an exception is thrown that can be handled, it's possible that new actions will be
18573 // appended to an empty queue and immediately executed, but would not have the correct
18574 // pending information if this property was set after the action was performed.
18575
18576 sourceUpdater.queuePending[type] = queueEntry;
18577 queueEntry.action(type, sourceUpdater);
18578
18579 if (!queueEntry.doneFn) {
18580 // synchronous operation, process next entry
18581 sourceUpdater.queuePending[type] = null;
18582 shiftQueue(type, sourceUpdater);
18583 return;
18584 }
18585};
18586
18587var cleanupBuffer = function cleanupBuffer(type, sourceUpdater) {
18588 var buffer = sourceUpdater[type + "Buffer"];
18589 var titleType = toTitleCase(type);
18590
18591 if (!buffer) {
18592 return;
18593 }
18594
18595 buffer.removeEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
18596 buffer.removeEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
18597 sourceUpdater.codecs[type] = null;
18598 sourceUpdater[type + "Buffer"] = null;
18599};
18600
18601var inSourceBuffers = function inSourceBuffers(mediaSource, sourceBuffer) {
18602 return mediaSource && sourceBuffer && Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
18603};
18604
18605var actions = {
18606 appendBuffer: function appendBuffer(bytes, segmentInfo, onError) {
18607 return function (type, sourceUpdater) {
18608 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
18609 // or the media source does not contain this source buffer.
18610
18611 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18612 return;
18613 }
18614
18615 sourceUpdater.logger_("Appending segment " + segmentInfo.mediaIndex + "'s " + bytes.length + " bytes to " + type + "Buffer");
18616
18617 try {
18618 sourceBuffer.appendBuffer(bytes);
18619 } catch (e) {
18620 sourceUpdater.logger_("Error with code " + e.code + " " + (e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') + ("when appending segment " + segmentInfo.mediaIndex + " to " + type + "Buffer"));
18621 sourceUpdater.queuePending[type] = null;
18622 onError(e);
18623 }
18624 };
18625 },
18626 remove: function remove(start, end) {
18627 return function (type, sourceUpdater) {
18628 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
18629 // or the media source does not contain this source buffer.
18630
18631 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18632 return;
18633 }
18634
18635 sourceUpdater.logger_("Removing " + start + " to " + end + " from " + type + "Buffer");
18636
18637 try {
18638 sourceBuffer.remove(start, end);
18639 } catch (e) {
18640 sourceUpdater.logger_("Remove " + start + " to " + end + " from " + type + "Buffer failed");
18641 }
18642 };
18643 },
18644 timestampOffset: function timestampOffset(offset) {
18645 return function (type, sourceUpdater) {
18646 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
18647 // or the media source does not contain this source buffer.
18648
18649 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18650 return;
18651 }
18652
18653 sourceUpdater.logger_("Setting " + type + "timestampOffset to " + offset);
18654 sourceBuffer.timestampOffset = offset;
18655 };
18656 },
18657 callback: function callback(_callback) {
18658 return function (type, sourceUpdater) {
18659 _callback();
18660 };
18661 },
18662 endOfStream: function endOfStream(error) {
18663 return function (sourceUpdater) {
18664 if (sourceUpdater.mediaSource.readyState !== 'open') {
18665 return;
18666 }
18667
18668 sourceUpdater.logger_("Calling mediaSource endOfStream(" + (error || '') + ")");
18669
18670 try {
18671 sourceUpdater.mediaSource.endOfStream(error);
18672 } catch (e) {
18673 videojs__default["default"].log.warn('Failed to call media source endOfStream', e);
18674 }
18675 };
18676 },
18677 duration: function duration(_duration) {
18678 return function (sourceUpdater) {
18679 sourceUpdater.logger_("Setting mediaSource duration to " + _duration);
18680
18681 try {
18682 sourceUpdater.mediaSource.duration = _duration;
18683 } catch (e) {
18684 videojs__default["default"].log.warn('Failed to set media source duration', e);
18685 }
18686 };
18687 },
18688 abort: function abort() {
18689 return function (type, sourceUpdater) {
18690 if (sourceUpdater.mediaSource.readyState !== 'open') {
18691 return;
18692 }
18693
18694 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
18695 // or the media source does not contain this source buffer.
18696
18697 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18698 return;
18699 }
18700
18701 sourceUpdater.logger_("calling abort on " + type + "Buffer");
18702
18703 try {
18704 sourceBuffer.abort();
18705 } catch (e) {
18706 videojs__default["default"].log.warn("Failed to abort on " + type + "Buffer", e);
18707 }
18708 };
18709 },
18710 addSourceBuffer: function addSourceBuffer(type, codec) {
18711 return function (sourceUpdater) {
18712 var titleType = toTitleCase(type);
18713 var mime = codecs_js.getMimeForCodec(codec);
18714 sourceUpdater.logger_("Adding " + type + "Buffer with codec " + codec + " to mediaSource");
18715 var sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
18716 sourceBuffer.addEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
18717 sourceBuffer.addEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
18718 sourceUpdater.codecs[type] = codec;
18719 sourceUpdater[type + "Buffer"] = sourceBuffer;
18720 };
18721 },
18722 removeSourceBuffer: function removeSourceBuffer(type) {
18723 return function (sourceUpdater) {
18724 var sourceBuffer = sourceUpdater[type + "Buffer"];
18725 cleanupBuffer(type, sourceUpdater); // can't do anything if the media source / source buffer is null
18726 // or the media source does not contain this source buffer.
18727
18728 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18729 return;
18730 }
18731
18732 sourceUpdater.logger_("Removing " + type + "Buffer with codec " + sourceUpdater.codecs[type] + " from mediaSource");
18733
18734 try {
18735 sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
18736 } catch (e) {
18737 videojs__default["default"].log.warn("Failed to removeSourceBuffer " + type + "Buffer", e);
18738 }
18739 };
18740 },
18741 changeType: function changeType(codec) {
18742 return function (type, sourceUpdater) {
18743 var sourceBuffer = sourceUpdater[type + "Buffer"];
18744 var mime = codecs_js.getMimeForCodec(codec); // can't do anything if the media source / source buffer is null
18745 // or the media source does not contain this source buffer.
18746
18747 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18748 return;
18749 } // do not update codec if we don't need to.
18750
18751
18752 if (sourceUpdater.codecs[type] === codec) {
18753 return;
18754 }
18755
18756 sourceUpdater.logger_("changing " + type + "Buffer codec from " + sourceUpdater.codecs[type] + " to " + codec);
18757 sourceBuffer.changeType(mime);
18758 sourceUpdater.codecs[type] = codec;
18759 };
18760 }
18761};
18762
18763var pushQueue = function pushQueue(_ref) {
18764 var type = _ref.type,
18765 sourceUpdater = _ref.sourceUpdater,
18766 action = _ref.action,
18767 doneFn = _ref.doneFn,
18768 name = _ref.name;
18769 sourceUpdater.queue.push({
18770 type: type,
18771 action: action,
18772 doneFn: doneFn,
18773 name: name
18774 });
18775 shiftQueue(type, sourceUpdater);
18776};
18777
18778var onUpdateend = function onUpdateend(type, sourceUpdater) {
18779 return function (e) {
18780 // Although there should, in theory, be a pending action for any updateend received,
18781 // some actions may trigger updateend events that are not clearly defined in
18782 // the w3c spec. For instance, setting the duration on the media source may trigger
18783 // updateend events on source buffers. This does not appear to be in the spec. As such,
18784 // if we encounter an updateend without a corresponding pending action from our queue
18785 // for that source buffer type, process the next action.
18786 if (sourceUpdater.queuePending[type]) {
18787 var doneFn = sourceUpdater.queuePending[type].doneFn;
18788 sourceUpdater.queuePending[type] = null;
18789
18790 if (doneFn) {
18791 // if there's an error, report it
18792 doneFn(sourceUpdater[type + "Error_"]);
18793 }
18794 }
18795
18796 shiftQueue(type, sourceUpdater);
18797 };
18798};
18799/**
18800 * A queue of callbacks to be serialized and applied when a
18801 * MediaSource and its associated SourceBuffers are not in the
18802 * updating state. It is used by the segment loader to update the
18803 * underlying SourceBuffers when new data is loaded, for instance.
18804 *
18805 * @class SourceUpdater
18806 * @param {MediaSource} mediaSource the MediaSource to create SourceBuffers from
18808 */
18809
18810
18811var SourceUpdater = /*#__PURE__*/function (_videojs$EventTarget) {
18812 _inheritsLoose__default["default"](SourceUpdater, _videojs$EventTarget);
18813
18814 function SourceUpdater(mediaSource) {
18815 var _this;
18816
18817 _this = _videojs$EventTarget.call(this) || this;
18818 _this.mediaSource = mediaSource;
18819
18820 _this.sourceopenListener_ = function () {
18821 return shiftQueue('mediaSource', _assertThisInitialized__default["default"](_this));
18822 };
18823
18824 _this.mediaSource.addEventListener('sourceopen', _this.sourceopenListener_);
18825
18826 _this.logger_ = logger('SourceUpdater'); // initial timestamp offset is 0
18827
18828 _this.audioTimestampOffset_ = 0;
18829 _this.videoTimestampOffset_ = 0;
18830 _this.queue = [];
18831 _this.queuePending = {
18832 audio: null,
18833 video: null
18834 };
18835 _this.delayedAudioAppendQueue_ = [];
18836 _this.videoAppendQueued_ = false;
18837 _this.codecs = {};
18838 _this.onVideoUpdateEnd_ = onUpdateend('video', _assertThisInitialized__default["default"](_this));
18839 _this.onAudioUpdateEnd_ = onUpdateend('audio', _assertThisInitialized__default["default"](_this));
18840
18841 _this.onVideoError_ = function (e) {
18842 // used for debugging
18843 _this.videoError_ = e;
18844 };
18845
18846 _this.onAudioError_ = function (e) {
18847 // used for debugging
18848 _this.audioError_ = e;
18849 };
18850
18851 _this.createdSourceBuffers_ = false;
18852 _this.initializedEme_ = false;
18853 _this.triggeredReady_ = false;
18854 return _this;
18855 }
18856
18857 var _proto = SourceUpdater.prototype;
18858
18859 _proto.initializedEme = function initializedEme() {
18860 this.initializedEme_ = true;
18861 this.triggerReady();
18862 };
18863
18864 _proto.hasCreatedSourceBuffers = function hasCreatedSourceBuffers() {
18865 // if false, likely waiting on one of the segment loaders to get enough data to create
18866 // source buffers
18867 return this.createdSourceBuffers_;
18868 };
18869
18870 _proto.hasInitializedAnyEme = function hasInitializedAnyEme() {
18871 return this.initializedEme_;
18872 };
18873
18874 _proto.ready = function ready() {
18875 return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
18876 };
18877
18878 _proto.createSourceBuffers = function createSourceBuffers(codecs) {
18879 if (this.hasCreatedSourceBuffers()) {
18880 // already created them before
18881 return;
18882 } // the initial call to addOrChangeSourceBuffers will always
18883 // add two source buffers.
18884
18885
18886 this.addOrChangeSourceBuffers(codecs);
18887 this.createdSourceBuffers_ = true;
18888 this.trigger('createdsourcebuffers');
18889 this.triggerReady();
18890 };
18891
18892 _proto.triggerReady = function triggerReady() {
18893 // only allow ready to be triggered once, this prevents the case
18894 // where:
18895 // 1. we trigger createdsourcebuffers
18896 // 2. IE 11 synchronously initializes EME
18897 // 3. the synchronous initialization causes us to trigger ready
18898 // 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
18899 if (this.ready() && !this.triggeredReady_) {
18900 this.triggeredReady_ = true;
18901 this.trigger('ready');
18902 }
18903 }
18904 /**
18905 * Add a type of source buffer to the media source.
18906 *
18907 * @param {string} type
18908 * The type of source buffer to add.
18909 *
18910 * @param {string} codec
18911 * The codec to add the source buffer with.
18912 */
18913 ;
18914
18915 _proto.addSourceBuffer = function addSourceBuffer(type, codec) {
18916 pushQueue({
18917 type: 'mediaSource',
18918 sourceUpdater: this,
18919 action: actions.addSourceBuffer(type, codec),
18920 name: 'addSourceBuffer'
18921 });
18922 }
18923 /**
18924 * call abort on a source buffer.
18925 *
18926 * @param {string} type
18927 * The type of source buffer to call abort on.
18928 */
18929 ;
18930
18931 _proto.abort = function abort(type) {
18932 pushQueue({
18933 type: type,
18934 sourceUpdater: this,
18935 action: actions.abort(type),
18936 name: 'abort'
18937 });
18938 }
18939 /**
18940 * Call removeSourceBuffer and remove a specific type
18941 * of source buffer on the mediaSource.
18942 *
18943 * @param {string} type
18944 * The type of source buffer to remove.
18945 */
18946 ;
18947
18948 _proto.removeSourceBuffer = function removeSourceBuffer(type) {
18949 if (!this.canRemoveSourceBuffer()) {
18950 videojs__default["default"].log.error('removeSourceBuffer is not supported!');
18951 return;
18952 }
18953
18954 pushQueue({
18955 type: 'mediaSource',
18956 sourceUpdater: this,
18957 action: actions.removeSourceBuffer(type),
18958 name: 'removeSourceBuffer'
18959 });
18960 }
18961 /**
18962 * Whether or not the removeSourceBuffer function is supported
18963 * on the mediaSource.
18964 *
18965 * @return {boolean}
18966 * if removeSourceBuffer can be called.
18967 */
18968 ;
18969
18970 _proto.canRemoveSourceBuffer = function canRemoveSourceBuffer() {
18971 // IE reports that it supports removeSourceBuffer, but often throws
18972 // errors when attempting to use the function. So we report that it
18973 // does not support removeSourceBuffer. As of Firefox 83 removeSourceBuffer
18974 // throws errors, so we report that it does not support this as well.
18975 return !videojs__default["default"].browser.IE_VERSION && !videojs__default["default"].browser.IS_FIREFOX && window__default["default"].MediaSource && window__default["default"].MediaSource.prototype && typeof window__default["default"].MediaSource.prototype.removeSourceBuffer === 'function';
18976 }
18977 /**
18978 * Whether or not the changeType function is supported
18979 * on our SourceBuffers.
18980 *
18981 * @return {boolean}
18982 * if changeType can be called.
18983 */
18984 ;
18985
18986 SourceUpdater.canChangeType = function canChangeType() {
18987 return window__default["default"].SourceBuffer && window__default["default"].SourceBuffer.prototype && typeof window__default["default"].SourceBuffer.prototype.changeType === 'function';
18988 }
18989 /**
18990 * Whether or not the changeType function is supported
18991 * on our SourceBuffers.
18992 *
18993 * @return {boolean}
18994 * if changeType can be called.
18995 */
18996 ;
18997
18998 _proto.canChangeType = function canChangeType() {
18999 return this.constructor.canChangeType();
19000 }
19001 /**
19002 * Call the changeType function on a source buffer, given the code and type.
19003 *
19004 * @param {string} type
19005 * The type of source buffer to call changeType on.
19006 *
19007 * @param {string} codec
19008 * The codec string to change type with on the source buffer.
19009 */
19010 ;
19011
19012 _proto.changeType = function changeType(type, codec) {
19013 if (!this.canChangeType()) {
19014 videojs__default["default"].log.error('changeType is not supported!');
19015 return;
19016 }
19017
19018 pushQueue({
19019 type: type,
19020 sourceUpdater: this,
19021 action: actions.changeType(codec),
19022 name: 'changeType'
19023 });
19024 }
19025 /**
19026 * Add source buffers with a codec or, if they have already been created,
19027 * call changeType to switch them to the new codec.
19028 *
19029 * @param {Object} codecs
19030 * Codecs to switch to
19031 */
19032 ;
19033
19034 _proto.addOrChangeSourceBuffers = function addOrChangeSourceBuffers(codecs) {
19035 var _this2 = this;
19036
19037 if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
19038 throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
19039 }
19040
19041 Object.keys(codecs).forEach(function (type) {
19042 var codec = codecs[type];
19043
19044 if (!_this2.hasCreatedSourceBuffers()) {
19045 return _this2.addSourceBuffer(type, codec);
19046 }
19047
19048 if (_this2.canChangeType()) {
19049 _this2.changeType(type, codec);
19050 }
19051 });
19052 }
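// A usage sketch with illustrative codec strings:
//
//   sourceUpdater.addOrChangeSourceBuffers({
//     video: 'avc1.4d400d',
//     audio: 'mp4a.40.2'
//   });
//   // first call: queues an addSourceBuffer per type; subsequent calls queue
//   // changeType instead, when the browser supports it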
19053 /**
19054 * Queue an update to append an ArrayBuffer.
19055 *
19056 * @param {Object} options object containing the type, bytes, and segmentInfo to append
19057 * @param {Function} doneFn the function to call when done
19058 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
19059 */
19060 ;
19061
19062 _proto.appendBuffer = function appendBuffer(options, doneFn) {
19063 var _this3 = this;
19064
19065 var segmentInfo = options.segmentInfo,
19066 type = options.type,
19067 bytes = options.bytes;
19068 this.processedAppend_ = true;
19069
19070 if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
19071 this.delayedAudioAppendQueue_.push([options, doneFn]);
19072 this.logger_("delayed audio append of " + bytes.length + " until video append");
19073 return;
19074 } // In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
19075 // not be fired. This means that the queue will be blocked until the next action
19076 // taken by the segment-loader. Provide a mechanism for segment-loader to handle
19077 // these errors by calling the doneFn with the specific error.
19078
19079
19080 var onError = doneFn;
19081 pushQueue({
19082 type: type,
19083 sourceUpdater: this,
19084 action: actions.appendBuffer(bytes, segmentInfo || {
19085 mediaIndex: -1
19086 }, onError),
19087 doneFn: doneFn,
19088 name: 'appendBuffer'
19089 });
19090
19091 if (type === 'video') {
19092 this.videoAppendQueued_ = true;
19093
19094 if (!this.delayedAudioAppendQueue_.length) {
19095 return;
19096 }
19097
19098 var queue = this.delayedAudioAppendQueue_.slice();
19099 this.logger_("queuing delayed audio " + queue.length + " appendBuffers");
19100 this.delayedAudioAppendQueue_.length = 0;
19101 queue.forEach(function (que) {
19102 _this3.appendBuffer.apply(_this3, que);
19103 });
19104 }
19105 }
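// A usage sketch (illustrative values). Note the delayed-audio behavior above:
// audio appends queue up until the first video append has been queued.
//
//   sourceUpdater.appendBuffer({
//     type: 'video',
//     bytes: videoSegmentBytes, // a Uint8Array of fmp4 data (hypothetical)
//     segmentInfo: { mediaIndex: 0 }
//   }, function (error) {
//     if (error) {
//       // e.g. QUOTA_EXCEEDED_ERR, surfaced through the onError path above
//     }
//   });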
19106 /**
19107 * Get the audio buffer's buffered timerange.
19108 *
19109 * @return {TimeRange}
19110 * The audio buffer's buffered time range
19111 */
19112 ;
19113
19114 _proto.audioBuffered = function audioBuffered() {
19115 // no media source/source buffer or it isn't in the media sources
19116 // source buffer list
19117 if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
19118 return videojs__default["default"].createTimeRange();
19119 }
19120
19121 return this.audioBuffer.buffered ? this.audioBuffer.buffered : videojs__default["default"].createTimeRange();
19122 }
19123 /**
19124 * Get the video buffer's buffered timerange.
19125 *
19126 * @return {TimeRange}
19127 * The video buffer's buffered time range
19128 */
19129 ;
19130
19131 _proto.videoBuffered = function videoBuffered() {
19132 // no media source/source buffer or it isn't in the media sources
19133 // source buffer list
19134 if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
19135 return videojs__default["default"].createTimeRange();
19136 }
19137
19138 return this.videoBuffer.buffered ? this.videoBuffer.buffered : videojs__default["default"].createTimeRange();
19139 }
19140 /**
19141 * Get a combined video/audio buffer's buffered timerange.
19142 *
19143 * @return {TimeRange}
19144 * the combined time range
19145 */
19146 ;
19147
19148 _proto.buffered = function buffered() {
19149 var video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
19150 var audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;
19151
19152 if (audio && !video) {
19153 return this.audioBuffered();
19154 }
19155
19156 if (video && !audio) {
19157 return this.videoBuffered();
19158 }
19159
19160 return bufferIntersection(this.audioBuffered(), this.videoBuffered());
19161 }
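// A worked example of the combined range, with illustrative numbers:
//
//   videoBuffered(): [[0, 10]]
//   audioBuffered(): [[2, 12]]
//   buffered():      [[2, 10]]  // only the span playable with both tracks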
19162 /**
19163 * Add a callback to the queue that will set duration on the mediaSource.
19164 *
19165 * @param {number} duration
19166 * The duration to set
19167 *
19168 * @param {Function} [doneFn]
19169 * function to run after duration has been set.
19170 */
19171 ;
19172
19173 _proto.setDuration = function setDuration(duration, doneFn) {
19174 if (doneFn === void 0) {
19175 doneFn = noop;
19176 }
19177
19178 // In order to set the duration on the media source, it's necessary to wait for all
19179 // source buffers to no longer be updating. "If the updating attribute equals true on
19180 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
19181 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
19182 pushQueue({
19183 type: 'mediaSource',
19184 sourceUpdater: this,
19185 action: actions.duration(duration),
19186 name: 'duration',
19187 doneFn: doneFn
19188 });
19189 }
19190 /**
19191 * Add a mediaSource endOfStream call to the queue
19192 *
19193 * @param {Error} [error]
19194 * Call endOfStream with an error
19195 *
19196 * @param {Function} [doneFn]
19197 * A function that should be called when the
19198 * endOfStream call has finished.
19199 */
19200 ;
19201
19202 _proto.endOfStream = function endOfStream(error, doneFn) {
19203 if (error === void 0) {
19204 error = null;
19205 }
19206
19207 if (doneFn === void 0) {
19208 doneFn = noop;
19209 }
19210
19211 if (typeof error !== 'string') {
19212 error = undefined;
19213 } // In order to call endOfStream on the media source, it's necessary to wait for all
19214 // source buffers to no longer be updating. "If the updating attribute equals true on
19215 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
19216 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
19217
19218
19219 pushQueue({
19220 type: 'mediaSource',
19221 sourceUpdater: this,
19222 action: actions.endOfStream(error),
19223 name: 'endOfStream',
19224 doneFn: doneFn
19225 });
19226 }
19227 /**
19228 * Queue an update to remove a time range from the buffer.
19229 *
19230 * @param {number} start where to start the removal
19231 * @param {number} end where to end the removal
19232 * @param {Function} [done=noop] optional callback to be executed when the remove
19233 * operation is complete
19234 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
19235 */
19236 ;
19237
19238 _proto.removeAudio = function removeAudio(start, end, done) {
19239 if (done === void 0) {
19240 done = noop;
19241 }
19242
19243 if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
19244 done();
19245 return;
19246 }
19247
19248 pushQueue({
19249 type: 'audio',
19250 sourceUpdater: this,
19251 action: actions.remove(start, end),
19252 doneFn: done,
19253 name: 'remove'
19254 });
19255 }
19256 /**
19257 * Queue an update to remove a time range from the buffer.
19258 *
19259 * @param {number} start where to start the removal
19260 * @param {number} end where to end the removal
19261 * @param {Function} [done=noop] optional callback to be executed when the remove
19262 * operation is complete
19263 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
19264 */
19265 ;
19266
19267 _proto.removeVideo = function removeVideo(start, end, done) {
19268 if (done === void 0) {
19269 done = noop;
19270 }
19271
19272 if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
19273 done();
19274 return;
19275 }
19276
19277 pushQueue({
19278 type: 'video',
19279 sourceUpdater: this,
19280 action: actions.remove(start, end),
19281 doneFn: done,
19282 name: 'remove'
19283 });
19284 }
19285 /**
19286 * Whether the underlying sourceBuffer is updating or not
19287 *
19288 * @return {boolean} the updating status of the SourceBuffer
19289 */
19290 ;
19291
19292 _proto.updating = function updating() {
19293 // the audio/video source buffer is updating
19294 if (_updating('audio', this) || _updating('video', this)) {
19295 return true;
19296 }
19297
19298 return false;
19299 }
19300 /**
19301 * Set/get the timestampoffset on the audio SourceBuffer
19302 *
19303 * @return {number} the timestamp offset
19304 */
19305 ;
19306
19307 _proto.audioTimestampOffset = function audioTimestampOffset(offset) {
19308 if (typeof offset !== 'undefined' && this.audioBuffer && // no point in updating if it's the same
19309 this.audioTimestampOffset_ !== offset) {
19310 pushQueue({
19311 type: 'audio',
19312 sourceUpdater: this,
19313 action: actions.timestampOffset(offset),
19314 name: 'timestampOffset'
19315 });
19316 this.audioTimestampOffset_ = offset;
19317 }
19318
19319 return this.audioTimestampOffset_;
19320 }
19321 /**
19322 * Set/get the timestampoffset on the video SourceBuffer
19323 *
19324 * @return {number} the timestamp offset
19325 */
19326 ;
19327
19328 _proto.videoTimestampOffset = function videoTimestampOffset(offset) {
19329 if (typeof offset !== 'undefined' && this.videoBuffer && // no point in updating if it's the same
19330 this.videoTimestampOffset_ !== offset) {
19331 pushQueue({
19332 type: 'video',
19333 sourceUpdater: this,
19334 action: actions.timestampOffset(offset),
19335 name: 'timestampOffset'
19336 });
19337 this.videoTimestampOffset_ = offset;
19338 }
19339
19340 return this.videoTimestampOffset_;
19341 }
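// A usage sketch of the combined getter/setter above:
//
//   sourceUpdater.videoTimestampOffset(-85); // queues the update, returns -85
//   sourceUpdater.videoTimestampOffset();    // no argument: returns -85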
19342 /**
19343 * Add a function to the queue that will be called
19344 * when it is its turn to run in the audio queue.
19345 *
19346 * @param {Function} callback
19347 * The callback to queue.
19348 */
19349 ;
19350
19351 _proto.audioQueueCallback = function audioQueueCallback(callback) {
19352 if (!this.audioBuffer) {
19353 return;
19354 }
19355
19356 pushQueue({
19357 type: 'audio',
19358 sourceUpdater: this,
19359 action: actions.callback(callback),
19360 name: 'callback'
19361 });
19362 }
19363 /**
19364 * Add a function to the queue that will be called
19365 * when it is its turn to run in the video queue.
19366 *
19367 * @param {Function} callback
19368 * The callback to queue.
19369 */
19370 ;
19371
19372 _proto.videoQueueCallback = function videoQueueCallback(callback) {
19373 if (!this.videoBuffer) {
19374 return;
19375 }
19376
19377 pushQueue({
19378 type: 'video',
19379 sourceUpdater: this,
19380 action: actions.callback(callback),
19381 name: 'callback'
19382 });
19383 }
19384 /**
19385 * dispose of the source updater and the underlying sourceBuffer
19386 */
19387 ;
19388
19389 _proto.dispose = function dispose() {
19390 var _this4 = this;
19391
19392 this.trigger('dispose');
19393 bufferTypes.forEach(function (type) {
19394 _this4.abort(type);
19395
19396 if (_this4.canRemoveSourceBuffer()) {
19397 _this4.removeSourceBuffer(type);
19398 } else {
19399 _this4[type + "QueueCallback"](function () {
19400 return cleanupBuffer(type, _this4);
19401 });
19402 }
19403 });
19404 this.videoAppendQueued_ = false;
19405 this.delayedAudioAppendQueue_.length = 0;
19406
19407 if (this.sourceopenListener_) {
19408 this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
19409 }
19410
19411 this.off();
19412 };
19413
19414 return SourceUpdater;
19415}(videojs__default["default"].EventTarget);
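
// A minimal end-to-end sketch of SourceUpdater (assumes MSE support and that
// `videoEl` and `segmentBytes` exist; codec strings are illustrative):
//
//   var mediaSource = new window.MediaSource();
//   videoEl.src = window.URL.createObjectURL(mediaSource);
//   var sourceUpdater = new SourceUpdater(mediaSource);
//   sourceUpdater.on('ready', function () {
//     sourceUpdater.appendBuffer({ type: 'video', bytes: segmentBytes }, function () {});
//   });
//   sourceUpdater.initializedEme(); // no DRM in this sketch
//   sourceUpdater.createSourceBuffers({ video: 'avc1.4d400d', audio: 'mp4a.40.2' }); // triggers 'ready'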
19416
19417var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
19418 return decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));
19419};
19420
19421var VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(function (char) {
19422 return char.charCodeAt(0);
19423}));
19424
19425var NoVttJsError = /*#__PURE__*/function (_Error) {
19426 _inheritsLoose__default["default"](NoVttJsError, _Error);
19427
19428 function NoVttJsError() {
19429 return _Error.call(this, 'Trying to parse received VTT cues, but there is no WebVTT. Make sure vtt.js is loaded.') || this;
19430 }
19431
19432 return NoVttJsError;
19433}( /*#__PURE__*/_wrapNativeSuper__default["default"](Error));
19434/**
19435 * An object that manages segment loading and appending.
19436 *
19437 * @class VTTSegmentLoader
19438 * @param {Object} options required and optional options
19439 * @extends videojs.EventTarget
19440 */
19441
19442
19443var VTTSegmentLoader = /*#__PURE__*/function (_SegmentLoader) {
19444 _inheritsLoose__default["default"](VTTSegmentLoader, _SegmentLoader);
19445
19446 function VTTSegmentLoader(settings, options) {
19447 var _this;
19448
19449 if (options === void 0) {
19450 options = {};
19451 }
19452
19453 _this = _SegmentLoader.call(this, settings, options) || this; // SegmentLoader requires a MediaSource be specified or it will throw an error;
19454 // however, VTTSegmentLoader has no need of a media source, so delete the reference
19455
19456 _this.mediaSource_ = null;
19457 _this.subtitlesTrack_ = null;
19458 _this.loaderType_ = 'subtitle';
19459 _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks;
19460 _this.loadVttJs = settings.loadVttJs; // The VTT segment will have its own time mappings. Saving VTT segment timing info in
19461 // the sync controller leads to improper behavior.
19462
19463 _this.shouldSaveSegmentTimingInfo_ = false;
19464 return _this;
19465 }
19466
19467 var _proto = VTTSegmentLoader.prototype;
19468
19469 _proto.createTransmuxer_ = function createTransmuxer_() {
19470 // don't need to transmux any subtitles
19471 return null;
19472 }
19473 /**
19474 * Indicates which time ranges are buffered
19475 *
19476 * @return {TimeRange}
19477 * TimeRange object representing the current buffered ranges
19478 */
19479 ;
19480
19481 _proto.buffered_ = function buffered_() {
19482 if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
19483 return videojs__default["default"].createTimeRanges();
19484 }
19485
19486 var cues = this.subtitlesTrack_.cues;
19487 var start = cues[0].startTime;
19488 var end = cues[cues.length - 1].startTime;
19489 return videojs__default["default"].createTimeRanges([[start, end]]);
19490 }
19491 /**
19492 * Gets and sets init segment for the provided map
19493 *
19494 * @param {Object} map
19495 * The map object representing the init segment to get or set
19496 * @param {boolean=} set
19497 * If true, the init segment for the provided map should be saved
19498 * @return {Object}
19499 * map object for desired init segment
19500 */
19501 ;
19502
19503 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
19504 if (set === void 0) {
19505 set = false;
19506 }
19507
19508 if (!map) {
19509 return null;
19510 }
19511
19512 var id = initSegmentId(map);
19513 var storedMap = this.initSegments_[id];
19514
19515 if (set && !storedMap && map.bytes) {
19516 // append WebVTT line terminators to the media initialization segment if it exists
19517 // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
19518 // requires two or more WebVTT line terminators between the WebVTT header and the
19519 // rest of the file
19520 var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
19521 var combinedSegment = new Uint8Array(combinedByteLength);
19522 combinedSegment.set(map.bytes);
19523 combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
19524 this.initSegments_[id] = storedMap = {
19525 resolvedUri: map.resolvedUri,
19526 byterange: map.byterange,
19527 bytes: combinedSegment
19528 };
19529 }
19530
19531 return storedMap || map;
19532 }
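// A worked example of the byte layout above: if the init segment (map) is the
// WebVTT header "WEBVTT\n", the stored bytes become "WEBVTT\n" + "\n\n",
// guaranteeing the blank line the spec requires before the cues of whatever
// segment is appended after it.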
19533 /**
19534 * Returns true if all configuration required for loading is present, otherwise false.
19535 *
19536 * @return {boolean} True if all the configuration is ready for loading
19537 * @private
19538 */
19539 ;
19540
19541 _proto.couldBeginLoading_ = function couldBeginLoading_() {
19542 return this.playlist_ && this.subtitlesTrack_ && !this.paused();
19543 }
19544 /**
19545 * Once all the starting parameters have been specified, begin
19546 * operation. This method should only be invoked from the INIT
19547 * state.
19548 *
19549 * @private
19550 */
19551 ;
19552
19553 _proto.init_ = function init_() {
19554 this.state = 'READY';
19555 this.resetEverything();
19556 return this.monitorBuffer_();
19557 }
19558 /**
19559 * Set a subtitle track on the segment loader to add subtitles to
19560 *
19561 * @param {TextTrack=} track
19562 * The text track to add loaded subtitles to
19563 * @return {TextTrack}
19564 * Returns the subtitles track
19565 */
19566 ;
19567
19568 _proto.track = function track(_track) {
19569 if (typeof _track === 'undefined') {
19570 return this.subtitlesTrack_;
19571 }
19572
19573 this.subtitlesTrack_ = _track; // if we were unpaused but waiting for a sourceUpdater, start
19574 // buffering now
19575
19576 if (this.state === 'INIT' && this.couldBeginLoading_()) {
19577 this.init_();
19578 }
19579
19580 return this.subtitlesTrack_;
19581 }
19582 /**
19583 * Remove any data in the source buffer between start and end times
19584 *
19585 * @param {number} start - the start time of the region to remove from the buffer
19586 * @param {number} end - the end time of the region to remove from the buffer
19587 */
19588 ;
19589
19590 _proto.remove = function remove(start, end) {
19591 removeCuesFromTrack(start, end, this.subtitlesTrack_);
19592 }
19593 /**
19594 * fill the buffer with segments unless the sourceBuffers are
19595 * currently updating
19596 *
19597 * Note: this function should only ever be called by monitorBuffer_
19598 * and never directly
19599 *
19600 * @private
19601 */
19602 ;
19603
19604 _proto.fillBuffer_ = function fillBuffer_() {
19605 var _this2 = this;
19606
19607 // see if we need to begin loading immediately
19608 var segmentInfo = this.chooseNextRequest_();
19609
19610 if (!segmentInfo) {
19611 return;
19612 }
19613
19614 if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
19615 // We don't have the timestamp offset that we need to sync subtitles.
19616 // Rerun on a timestamp offset or user interaction.
19617 var checkTimestampOffset = function checkTimestampOffset() {
19618 _this2.state = 'READY';
19619
19620 if (!_this2.paused()) {
19621 // if not paused, queue a buffer check as soon as possible
19622 _this2.monitorBuffer_();
19623 }
19624 };
19625
19626 this.syncController_.one('timestampoffset', checkTimestampOffset);
19627 this.state = 'WAITING_ON_TIMELINE';
19628 return;
19629 }
19630
19631 this.loadSegment_(segmentInfo);
19632 } // never set a timestamp offset for vtt segments.
19633 ;
19634
19635 _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_() {
19636 return null;
19637 };
19638
19639 _proto.chooseNextRequest_ = function chooseNextRequest_() {
19640 return this.skipEmptySegments_(_SegmentLoader.prototype.chooseNextRequest_.call(this));
19641 }
19642 /**
19643 * Prevents the segment loader from requesting segments we know contain no subtitles
19644 * by walking forward until we find the next segment that is not known to be
19645 * empty.
19646 *
19647 * @param {Object} segmentInfo
19648 * a segment info object that describes the current segment
19649 * @return {Object}
19650 * a segment info object that describes the current segment
19651 */
19652 ;
19653
19654 _proto.skipEmptySegments_ = function skipEmptySegments_(segmentInfo) {
19655 while (segmentInfo && segmentInfo.segment.empty) {
19656 // stop at the last possible segmentInfo
19657 if (segmentInfo.mediaIndex + 1 >= segmentInfo.playlist.segments.length) {
19658 segmentInfo = null;
19659 break;
19660 }
19661
19662 segmentInfo = this.generateSegmentInfo_({
19663 playlist: segmentInfo.playlist,
19664 mediaIndex: segmentInfo.mediaIndex + 1,
19665 startOfSegment: segmentInfo.startOfSegment + segmentInfo.duration,
19666 isSyncRequest: segmentInfo.isSyncRequest
19667 });
19668 }
19669
19670 return segmentInfo;
19671 };
19672
19673 _proto.stopForError = function stopForError(error) {
19674 this.error(error);
19675 this.state = 'READY';
19676 this.pause();
19677 this.trigger('error');
19678 }
19679 /**
19680 * append a decrypted segment to the SourceBuffer through a SourceUpdater
19681 *
19682 * @private
19683 */
19684 ;
19685
19686 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
19687 var _this3 = this;
19688
19689 if (!this.subtitlesTrack_) {
19690 this.state = 'READY';
19691 return;
19692 }
19693
19694 this.saveTransferStats_(simpleSegment.stats); // the request was aborted
19695
19696 if (!this.pendingSegment_) {
19697 this.state = 'READY';
19698 this.mediaRequestsAborted += 1;
19699 return;
19700 }
19701
19702 if (error) {
19703 if (error.code === REQUEST_ERRORS.TIMEOUT) {
19704 this.handleTimeout_();
19705 }
19706
19707 if (error.code === REQUEST_ERRORS.ABORTED) {
19708 this.mediaRequestsAborted += 1;
19709 } else {
19710 this.mediaRequestsErrored += 1;
19711 }
19712
19713 this.stopForError(error);
19714 return;
19715 }
19716
19717 var segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to
19718 // maintain functionality between segment loaders
19719
19720 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats); // if this request included a segment key, save that data in the cache
19721
19722 if (simpleSegment.key) {
19723 this.segmentKey(simpleSegment.key, true);
19724 }
19725
19726 this.state = 'APPENDING'; // used for tests
19727
19728 this.trigger('appending');
19729 var segment = segmentInfo.segment;
19730
19731 if (segment.map) {
19732 segment.map.bytes = simpleSegment.map.bytes;
19733 }
19734
19735 segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded; otherwise, load it and wait until it finishes loading
19736
19737 if (typeof window__default["default"].WebVTT !== 'function' && typeof this.loadVttJs === 'function') {
19738 this.state = 'WAITING_ON_VTTJS'; // should be fine to call multiple times
19739 // script will be loaded once but multiple listeners will be added to the queue, which is expected.
19740
19741 this.loadVttJs().then(function () {
19742 return _this3.segmentRequestFinished_(error, simpleSegment, result);
19743 }, function () {
19744 return _this3.stopForError({
19745 message: 'Error loading vtt.js'
19746 });
19747 });
19748 return;
19749 }
19750
19751 segment.requested = true;
19752
19753 try {
19754 this.parseVTTCues_(segmentInfo);
19755 } catch (e) {
19756 this.stopForError({
19757 message: e.message
19758 });
19759 return;
19760 }
19761
19762 this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);
19763
19764 if (segmentInfo.cues.length) {
19765 segmentInfo.timingInfo = {
19766 start: segmentInfo.cues[0].startTime,
19767 end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
19768 };
19769 } else {
19770 segmentInfo.timingInfo = {
19771 start: segmentInfo.startOfSegment,
19772 end: segmentInfo.startOfSegment + segmentInfo.duration
19773 };
19774 }
19775
19776 if (segmentInfo.isSyncRequest) {
19777 this.trigger('syncinfoupdate');
19778 this.pendingSegment_ = null;
19779 this.state = 'READY';
19780 return;
19781 }
19782
19783 segmentInfo.byteLength = segmentInfo.bytes.byteLength;
19784 this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to
19785 // the subtitle track
19786
19787 segmentInfo.cues.forEach(function (cue) {
19788 _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window__default["default"].VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
19789 }); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows
19790 // cues to have identical time-intervals, but if the text is also identical
19791 // we can safely assume it is a duplicate that can be removed (ex. when a cue
19792 // "overlaps" VTT segments)
19793
19794 removeDuplicateCuesFromTrack(this.subtitlesTrack_);
19795 this.handleAppendsDone_();
19796 };
19797
19798 _proto.handleData_ = function handleData_() {// noop, as we shouldn't be getting video/audio data or captions,
19799 // which we do not support here.
19800 };
19801
19802 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_() {// noop
19803 }
19804 /**
19805 * Uses the WebVTT parser to parse the segment response
19806 *
19807 * @throws NoVttJsError
19808 *
19809 * @param {Object} segmentInfo
19810 * a segment info object that describes the current segment
19811 * @private
19812 */
19813 ;
19814
19815 _proto.parseVTTCues_ = function parseVTTCues_(segmentInfo) {
19816 var decoder;
19817 var decodeBytesToString = false;
19818
19819 if (typeof window__default["default"].WebVTT !== 'function') {
19820 // caller is responsible for exception handling.
19821 throw new NoVttJsError();
19822 }
19823
19824 if (typeof window__default["default"].TextDecoder === 'function') {
19825 decoder = new window__default["default"].TextDecoder('utf8');
19826 } else {
19827 decoder = window__default["default"].WebVTT.StringDecoder();
19828 decodeBytesToString = true;
19829 }
19830
19831 var parser = new window__default["default"].WebVTT.Parser(window__default["default"], window__default["default"].vttjs, decoder);
19832 segmentInfo.cues = [];
19833 segmentInfo.timestampmap = {
19834 MPEGTS: 0,
19835 LOCAL: 0
19836 };
19837 parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
19838
19839 parser.ontimestampmap = function (map) {
19840 segmentInfo.timestampmap = map;
19841 };
19842
19843 parser.onparsingerror = function (error) {
19844 videojs__default["default"].log.warn('Error encountered when parsing cues: ' + error.message);
19845 };
19846
19847 if (segmentInfo.segment.map) {
19848 var mapData = segmentInfo.segment.map.bytes;
19849
19850 if (decodeBytesToString) {
19851 mapData = uint8ToUtf8(mapData);
19852 }
19853
19854 parser.parse(mapData);
19855 }
19856
19857 var segmentData = segmentInfo.bytes;
19858
19859 if (decodeBytesToString) {
19860 segmentData = uint8ToUtf8(segmentData);
19861 }
19862
19863 parser.parse(segmentData);
19864 parser.flush();
19865 }
19866 /**
19867 * Updates the start and end times of any cues parsed by the WebVTT parser using
19868 * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
19869 * from the SyncController
19870 *
19871 * @param {Object} segmentInfo
19872 * a segment info object that describes the current segment
19873 * @param {Object} mappingObj
19874 * object containing a mapping from TS to media time
19875 * @param {Object} playlist
19876 * the playlist object containing the segment
19877 * @private
19878 */
19879 ;
19880
19881 _proto.updateTimeMapping_ = function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
19882 var segment = segmentInfo.segment;
19883
19884 if (!mappingObj) {
19885 // If the sync controller does not have a mapping of TS to Media Time for the
19886 // timeline, then we don't have enough information to update the cue
19887 // start/end times
19888 return;
19889 }
19890
19891 if (!segmentInfo.cues.length) {
19892 // If there are no cues, we also do not have enough information to figure out
19893 // segment timing. Mark that the segment contains no cues so we don't re-request
19894 // an empty segment.
19895 segment.empty = true;
19896 return;
19897 }
19898
19899 var timestampmap = segmentInfo.timestampmap;
19900 var diff = timestampmap.MPEGTS / clock.ONE_SECOND_IN_TS - timestampmap.LOCAL + mappingObj.mapping;
19901 segmentInfo.cues.forEach(function (cue) {
19902 // First convert cue time to TS time using the timestamp-map provided within the vtt
19903 cue.startTime += diff;
19904 cue.endTime += diff;
19905 });
19906
19907 if (!playlist.syncInfo) {
19908 var firstStart = segmentInfo.cues[0].startTime;
19909 var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
19910 playlist.syncInfo = {
19911 mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
19912 time: Math.min(firstStart, lastStart - segment.duration)
19913 };
19914 }
19915 };
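// Worked example (editor's illustration, not part of the library): assuming the
// 90kHz MPEG-TS clock that mux.js exposes as clock.ONE_SECOND_IN_TS, an
// X-TIMESTAMP-MAP of MPEGTS=900000, LOCAL=0 and a timeline mapping of -2 yields:
//
//   var diff = 900000 / 90000 - 0 + (-2); // => 8
//
// so a cue authored at 0-5s within the VTT segment is shifted to 8-13s of
// display time before it is handed to the subtitle track.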
19916
19917 return VTTSegmentLoader;
19918}(SegmentLoader);
19919
19920/**
19921 * @file ad-cue-tags.js
19922 */
19923/**
19924 * Searches for an ad cue that overlaps with the given mediaTime
19925 *
19926 * @param {Object} track
19927 * the track to find the cue for
19928 *
19929 * @param {number} mediaTime
19930 * the time to find the cue at
19931 *
19932 * @return {Object|null}
19933 * the found cue or null
19934 */
19935
19936var findAdCue = function findAdCue(track, mediaTime) {
19937 var cues = track.cues;
19938
19939 for (var i = 0; i < cues.length; i++) {
19940 var cue = cues[i];
19941
19942 if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
19943 return cue;
19944 }
19945 }
19946
19947 return null;
19948};
19949var updateAdCues = function updateAdCues(media, track, offset) {
19950 if (offset === void 0) {
19951 offset = 0;
19952 }
19953
19954 if (!media.segments) {
19955 return;
19956 }
19957
19958 var mediaTime = offset;
19959 var cue;
19960
19961 for (var i = 0; i < media.segments.length; i++) {
19962 var segment = media.segments[i];
19963
19964 if (!cue) {
19965 // Since the cues will span for at least the segment duration, adding a fudge
19966 // factor of half segment duration will prevent duplicate cues from being
19967 // created when timing info is not exact (e.g. cue start time initialized
19968 // at 10.006677, but next call mediaTime is 10.003332 )
19969 cue = findAdCue(track, mediaTime + segment.duration / 2);
19970 }
19971
19972 if (cue) {
19973 if ('cueIn' in segment) {
19974 // Found a CUE-IN so end the cue
19975 cue.endTime = mediaTime;
19976 cue.adEndTime = mediaTime;
19977 mediaTime += segment.duration;
19978 cue = null;
19979 continue;
19980 }
19981
19982 if (mediaTime < cue.endTime) {
19983 // Already processed this mediaTime for this cue
19984 mediaTime += segment.duration;
19985 continue;
19986 } // otherwise extend cue until a CUE-IN is found
19987
19988
19989 cue.endTime += segment.duration;
19990 } else {
19991 if ('cueOut' in segment) {
19992 cue = new window__default["default"].VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
19993 cue.adStartTime = mediaTime; // Assumes tag format to be
19994 // #EXT-X-CUE-OUT:30
19995
19996 cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
19997 track.addCue(cue);
19998 }
19999
20000 if ('cueOutCont' in segment) {
20001 // Entered into the middle of an ad cue
20002 // Assumes tag format to be
20003 // #EXT-X-CUE-OUT-CONT:10/30
20004 var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat),
20005 adOffset = _segment$cueOutCont$s[0],
20006 adTotal = _segment$cueOutCont$s[1];
20007
20008 cue = new window__default["default"].VTTCue(mediaTime, mediaTime + segment.duration, '');
20009 cue.adStartTime = mediaTime - adOffset;
20010 cue.adEndTime = cue.adStartTime + adTotal;
20011 track.addCue(cue);
20012 }
20013 }
20014
20015 mediaTime += segment.duration;
20016 }
20017};
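// Illustrative sketch (editor's example): `track` is assumed to be a TextTrack
// with an addCue method, and the segment properties mirror parsed
// #EXT-X-CUE-OUT / #EXT-X-CUE-IN tags:
//
//   var media = {
//     segments: [
//       { duration: 10, cueOut: '30' }, // opens a 30 second ad cue at t=0
//       { duration: 10 },               // extends the cue to t=20
//       { duration: 10 },               // extends the cue to t=30
//       { duration: 10, cueIn: '' }     // closes the cue at t=30
//     ]
//   };
//   updateAdCues(media, track);
//   // => track gains one cue with adStartTime 0 and adEndTime 30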
20018
20019// synchronize expired playlist segments.
20020// the max media sequence diff is 48 hours of live stream
20021// content with two second segments. Anything larger than that
20022// will likely be invalid.
20023
20024var MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC = 86400;
20025var syncPointStrategies = [// Strategy "VOD": Handle the VOD-case where the sync-point is *always*
20026// the equivalence display-time 0 === segment-index 0
20027{
20028 name: 'VOD',
20029 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
20030 if (duration !== Infinity) {
20031 var syncPoint = {
20032 time: 0,
20033 segmentIndex: 0,
20034 partIndex: null
20035 };
20036 return syncPoint;
20037 }
20038
20039 return null;
20040 }
20041}, // Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
20042{
20043 name: 'ProgramDateTime',
20044 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
20045 if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
20046 return null;
20047 }
20048
20049 var syncPoint = null;
20050 var lastDistance = null;
20051 var partsAndSegments = getPartsAndSegments(playlist);
20052 currentTime = currentTime || 0;
20053
20054 for (var i = 0; i < partsAndSegments.length; i++) {
20055 // start from the end and loop backwards for live
20056 // or start from the front and loop forwards for non-live
20057 var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
20058 var partAndSegment = partsAndSegments[index];
20059 var segment = partAndSegment.segment;
20060 var datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];
20061
20062 if (!datetimeMapping || !segment.dateTimeObject) {
20063 continue;
20064 }
20065
20066 var segmentTime = segment.dateTimeObject.getTime() / 1000;
20067 var start = segmentTime + datetimeMapping; // take part duration into account.
20068
20069 if (segment.parts && typeof partAndSegment.partIndex === 'number') {
20070 for (var z = 0; z < partAndSegment.partIndex; z++) {
20071 start += segment.parts[z].duration;
20072 }
20073 }
20074
20075 var distance = Math.abs(currentTime - start); // Once the distance begins to increase, or if distance is 0, we have passed
20076 // currentTime and can stop looking for better candidates
20077
20078 if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
20079 break;
20080 }
20081
20082 lastDistance = distance;
20083 syncPoint = {
20084 time: start,
20085 segmentIndex: partAndSegment.segmentIndex,
20086 partIndex: partAndSegment.partIndex
20087 };
20088 }
20089
20090 return syncPoint;
20091 }
20092}, // Strategy "Segment": We have a known time mapping for a timeline and a
20093// segment in the current timeline with timing data
20094{
20095 name: 'Segment',
20096 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
20097 var syncPoint = null;
20098 var lastDistance = null;
20099 currentTime = currentTime || 0;
20100 var partsAndSegments = getPartsAndSegments(playlist);
20101
20102 for (var i = 0; i < partsAndSegments.length; i++) {
20103 // start from the end and loop backwards for live
20104 // or start from the front and loop forwards for non-live
20105 var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
20106 var partAndSegment = partsAndSegments[index];
20107 var segment = partAndSegment.segment;
20108 var start = partAndSegment.part && partAndSegment.part.start || segment && segment.start;
20109
20110 if (segment.timeline === currentTimeline && typeof start !== 'undefined') {
20111 var distance = Math.abs(currentTime - start); // Once the distance begins to increase, we have passed
20112 // currentTime and can stop looking for better candidates
20113
20114 if (lastDistance !== null && lastDistance < distance) {
20115 break;
20116 }
20117
20118 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
20119 lastDistance = distance;
20120 syncPoint = {
20121 time: start,
20122 segmentIndex: partAndSegment.segmentIndex,
20123 partIndex: partAndSegment.partIndex
20124 };
20125 }
20126 }
20127 }
20128
20129 return syncPoint;
20130 }
20131}, // Strategy "Discontinuity": We have a discontinuity with a known
20132// display-time
20133{
20134 name: 'Discontinuity',
20135 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
20136 var syncPoint = null;
20137 currentTime = currentTime || 0;
20138
20139 if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
20140 var lastDistance = null;
20141
20142 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
20143 var segmentIndex = playlist.discontinuityStarts[i];
20144 var discontinuity = playlist.discontinuitySequence + i + 1;
20145 var discontinuitySync = syncController.discontinuities[discontinuity];
20146
20147 if (discontinuitySync) {
20148 var distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed
20149 // currentTime and can stop looking for better candidates
20150
20151 if (lastDistance !== null && lastDistance < distance) {
20152 break;
20153 }
20154
20155 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
20156 lastDistance = distance;
20157 syncPoint = {
20158 time: discontinuitySync.time,
20159 segmentIndex: segmentIndex,
20160 partIndex: null
20161 };
20162 }
20163 }
20164 }
20165 }
20166
20167 return syncPoint;
20168 }
20169}, // Strategy "Playlist": We have a playlist with a known mapping of
20170// segment index to display time
20171{
20172 name: 'Playlist',
20173 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
20174 if (playlist.syncInfo) {
20175 var syncPoint = {
20176 time: playlist.syncInfo.time,
20177 segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence,
20178 partIndex: null
20179 };
20180 return syncPoint;
20181 }
20182
20183 return null;
20184 }
20185}];
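// For reference (editor's note): each strategy's run() returns either null or a
// sync-point object; runStrategies_ below tags it with the strategy name, e.g.:
//
//   { time: 42.5, segmentIndex: 3, partIndex: null, strategy: 'Segment' }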
20186
20187var SyncController = /*#__PURE__*/function (_videojs$EventTarget) {
20188 _inheritsLoose__default["default"](SyncController, _videojs$EventTarget);
20189
20190 function SyncController(options) {
20191 var _this;
20192
20193 _this = _videojs$EventTarget.call(this) || this; // ...for synching across variants
20194
20195 _this.timelines = [];
20196 _this.discontinuities = [];
20197 _this.timelineToDatetimeMappings = {};
20198 _this.logger_ = logger('SyncController');
20199 return _this;
20200 }
20201 /**
20202 * Find a sync-point for the playlist specified
20203 *
20204 * A sync-point is defined as a known mapping from display-time to
20205 * a segment-index in the current playlist.
20206 *
20207 * @param {Playlist} playlist
20208 * The playlist that needs a sync-point
20209 * @param {number} duration
20210 * Duration of the MediaSource (Infinity if playing a live source)
20211 * @param {number} currentTimeline
20212 * The last timeline from which a segment was loaded
20213 * @return {Object}
20214 * A sync-point object
20215 */
20216
20217
20218 var _proto = SyncController.prototype;
20219
20220 _proto.getSyncPoint = function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
20221 var syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime);
20222
20223 if (!syncPoints.length) {
20224 // Signal that we need to attempt to get a sync-point manually
20225 // by fetching a segment in the playlist and constructing
20226 // a sync-point from that information
20227 return null;
20228 } // Now find the sync-point that is closest to the currentTime because
20229 // that should result in the most accurate guess about which segment
20230 // to fetch
20231
20232
20233 return this.selectSyncPoint_(syncPoints, {
20234 key: 'time',
20235 value: currentTime
20236 });
20237 }
20238 /**
20239 * Calculate the amount of time that has expired off the playlist during playback
20240 *
20241 * @param {Playlist} playlist
20242 * Playlist object to calculate expired from
20243 * @param {number} duration
20244 * Duration of the MediaSource (Infinity if playing a live source)
20245 * @return {number|null}
20246 * The amount of time that has expired off the playlist during playback. Null
20247 * if no sync-points for the playlist can be found.
20248 */
20249 ;
20250
20251 _proto.getExpiredTime = function getExpiredTime(playlist, duration) {
20252 if (!playlist || !playlist.segments) {
20253 return null;
20254 }
20255
20256 var syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0); // Without sync-points, there is not enough information to determine the expired time
20257
20258 if (!syncPoints.length) {
20259 return null;
20260 }
20261
20262 var syncPoint = this.selectSyncPoint_(syncPoints, {
20263 key: 'segmentIndex',
20264 value: 0
20265 }); // If the sync-point is beyond the start of the playlist, we want to subtract the
20266 // duration from index 0 to syncPoint.segmentIndex instead of adding.
20267
20268 if (syncPoint.segmentIndex > 0) {
20269 syncPoint.time *= -1;
20270 }
20271
20272 return Math.abs(syncPoint.time + sumDurations({
20273 defaultDuration: playlist.targetDuration,
20274 durationList: playlist.segments,
20275 startIndex: syncPoint.segmentIndex,
20276 endIndex: 0
20277 }));
20278 }
20279 /**
20280 * Runs each sync-point strategy and returns a list of sync-points returned by the
20281 * strategies
20282 *
20283 * @private
20284 * @param {Playlist} playlist
20285 * The playlist that needs a sync-point
20286 * @param {number} duration
20287 * Duration of the MediaSource (Infinity if playing a live source)
20288 * @param {number} currentTimeline
20289 * The last timeline from which a segment was loaded
20290 * @return {Array}
20291 * A list of sync-point objects
20292 */
20293 ;
20294
20295 _proto.runStrategies_ = function runStrategies_(playlist, duration, currentTimeline, currentTime) {
20296 var syncPoints = []; // Try to find a sync-point by utilizing various strategies...
20297
20298 for (var i = 0; i < syncPointStrategies.length; i++) {
20299 var strategy = syncPointStrategies[i];
20300 var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);
20301
20302 if (syncPoint) {
20303 syncPoint.strategy = strategy.name;
20304 syncPoints.push({
20305 strategy: strategy.name,
20306 syncPoint: syncPoint
20307 });
20308 }
20309 }
20310
20311 return syncPoints;
20312 }
20313 /**
20314 * Selects the sync-point nearest the specified target
20315 *
20316 * @private
20317 * @param {Array} syncPoints
20318 * List of sync-points to select from
20319 * @param {Object} target
20320 * Object specifying the property and value we are targeting
20321 * @param {string} target.key
20322 * Specifies the property to target. Must be either 'time' or 'segmentIndex'
20323 * @param {number} target.value
20324 * The value to target for the specified key.
20325 * @return {Object}
20326 * The sync-point nearest the target
20327 */
20328 ;
20329
20330 _proto.selectSyncPoint_ = function selectSyncPoint_(syncPoints, target) {
20331 var bestSyncPoint = syncPoints[0].syncPoint;
20332 var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
20333 var bestStrategy = syncPoints[0].strategy;
20334
20335 for (var i = 1; i < syncPoints.length; i++) {
20336 var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
20337
20338 if (newDistance < bestDistance) {
20339 bestDistance = newDistance;
20340 bestSyncPoint = syncPoints[i].syncPoint;
20341 bestStrategy = syncPoints[i].strategy;
20342 }
20343 }
20344
20345 this.logger_("syncPoint for [" + target.key + ": " + target.value + "] chosen with strategy" + (" [" + bestStrategy + "]: [time:" + bestSyncPoint.time + ",") + (" segmentIndex:" + bestSyncPoint.segmentIndex) + (typeof bestSyncPoint.partIndex === 'number' ? ",partIndex:" + bestSyncPoint.partIndex : '') + ']');
20346 return bestSyncPoint;
20347 }
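// Worked example (editor's illustration): selecting with
// target = { key: 'time', value: 28 } between a VOD sync-point at time 0 and a
// Segment sync-point at time 30 compares |0 - 28| = 28 with |30 - 28| = 2 and
// returns the Segment sync-point.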
20348 /**
20349 * Save any meta-data present on the segments when segments leave
20350 * the live window to the playlist to allow for synchronization at the
20351 * playlist level later.
20352 *
20353 * @param {Playlist} oldPlaylist - The previous active playlist
20354 * @param {Playlist} newPlaylist - The updated and most current playlist
20355 */
20356 ;
20357
20358 _proto.saveExpiredSegmentInfo = function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
20359 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence; // Ignore large media sequence gaps
20360
20361 if (mediaSequenceDiff > MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC) {
20362 videojs__default["default"].log.warn("Not saving expired segment info. Media sequence gap " + mediaSequenceDiff + " is too large.");
20363 return;
20364 } // When a segment expires from the playlist and it has a start time
20365 // save that information as a possible sync-point reference in future
20366
20367
20368 for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
20369 var lastRemovedSegment = oldPlaylist.segments[i];
20370
20371 if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
20372 newPlaylist.syncInfo = {
20373 mediaSequence: oldPlaylist.mediaSequence + i,
20374 time: lastRemovedSegment.start
20375 };
20376 this.logger_("playlist refresh sync: [time:" + newPlaylist.syncInfo.time + "," + (" mediaSequence: " + newPlaylist.syncInfo.mediaSequence + "]"));
20377 this.trigger('syncinfoupdate');
20378 break;
20379 }
20380 }
20381 }
20382 /**
20383 * Save the mapping from the playlist's ProgramDateTime to display time. This should only happen
20384 * before segments start to load.
20385 *
20386 * @param {Playlist} playlist - The currently active playlist
20387 */
20388 ;
20389
20390 _proto.setDateTimeMappingForStart = function setDateTimeMappingForStart(playlist) {
20391 // It's possible for the playlist to be updated before playback starts, meaning time
20392 // zero is not yet set. If, during these playlist refreshes, a discontinuity is
20393 // crossed, then the old time zero mapping (for the prior timeline) would be retained
20394 // unless the mappings are cleared.
20395 this.timelineToDatetimeMappings = {};
20396
20397 if (playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
20398 var firstSegment = playlist.segments[0];
20399 var playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
20400 this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
20401 }
20402 }
20403 /**
20404 * Calculates and saves timeline mappings, playlist sync info, and segment timing values
20405 * based on the latest timing information.
20406 *
20407 * @param {Object} options
20408 * Options object
20409 * @param {SegmentInfo} options.segmentInfo
20410 * The current active request information
20411 * @param {boolean} options.shouldSaveTimelineMapping
20412 * If there's a timeline change, determines whether the mapping should be
20413 * saved for timeline and program date time mappings.
20414 */
20415 ;
20416
20417 _proto.saveSegmentTimingInfo = function saveSegmentTimingInfo(_ref) {
20418 var segmentInfo = _ref.segmentInfo,
20419 shouldSaveTimelineMapping = _ref.shouldSaveTimelineMapping;
20420 var didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);
20421 var segment = segmentInfo.segment;
20422
20423 if (didCalculateSegmentTimeMapping) {
20424 this.saveDiscontinuitySyncInfo_(segmentInfo); // If the playlist does not have sync information yet, record that information
20425 // now with segment timing information
20426
20427 if (!segmentInfo.playlist.syncInfo) {
20428 segmentInfo.playlist.syncInfo = {
20429 mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
20430 time: segment.start
20431 };
20432 }
20433 }
20434
20435 var dateTime = segment.dateTimeObject;
20436
20437 if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
20438 this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
20439 }
20440 };
20441
20442 _proto.timestampOffsetForTimeline = function timestampOffsetForTimeline(timeline) {
20443 if (typeof this.timelines[timeline] === 'undefined') {
20444 return null;
20445 }
20446
20447 return this.timelines[timeline].time;
20448 };
20449
20450 _proto.mappingForTimeline = function mappingForTimeline(timeline) {
20451 if (typeof this.timelines[timeline] === 'undefined') {
20452 return null;
20453 }
20454
20455 return this.timelines[timeline].mapping;
20456 }
20457 /**
20458 * Use the "media time" for a segment to generate a mapping to "display time" and
20459 * save that display time to the segment.
20460 *
20461 * @private
20462 * @param {SegmentInfo} segmentInfo
20463 * The current active request information
20464 * @param {Object} timingInfo
20465 * The start and end time of the current segment in "media time"
20466 * @param {boolean} shouldSaveTimelineMapping
20467 * If there's a timeline change, determines if the timeline mapping should be
20468 * saved in timelines.
20469 * @return {boolean}
20470 * Returns false if segment time mapping could not be calculated
20471 */
20472 ;
20473
20474 _proto.calculateSegmentTimeMapping_ = function calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
20475 // TODO: remove side effects
20476 var segment = segmentInfo.segment;
20477 var part = segmentInfo.part;
20478 var mappingObj = this.timelines[segmentInfo.timeline];
20479 var start;
20480 var end;
20481
20482 if (typeof segmentInfo.timestampOffset === 'number') {
20483 mappingObj = {
20484 time: segmentInfo.startOfSegment,
20485 mapping: segmentInfo.startOfSegment - timingInfo.start
20486 };
20487
20488 if (shouldSaveTimelineMapping) {
20489 this.timelines[segmentInfo.timeline] = mappingObj;
20490 this.trigger('timestampoffset');
20491 this.logger_("time mapping for timeline " + segmentInfo.timeline + ": " + ("[time: " + mappingObj.time + "] [mapping: " + mappingObj.mapping + "]"));
20492 }
20493
20494 start = segmentInfo.startOfSegment;
20495 end = timingInfo.end + mappingObj.mapping;
20496 } else if (mappingObj) {
20497 start = timingInfo.start + mappingObj.mapping;
20498 end = timingInfo.end + mappingObj.mapping;
20499 } else {
20500 return false;
20501 }
20502
20503 if (part) {
20504 part.start = start;
20505 part.end = end;
20506 } // If we don't have a segment start yet or the start value we got
20507 // is less than our current segment.start value, save a new start value.
20508 // We have to do this because parts will have segment timing info saved
20509 // multiple times and we want segment start to be the earliest part start
20510 // value for that segment.
20511
20512
20513 if (!segment.start || start < segment.start) {
20514 segment.start = start;
20515 }
20516
20517 segment.end = end;
20518 return true;
20519 }
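// Worked example (editor's illustration): for the first segment after a
// timestampOffset change, with startOfSegment = 20 and
// timingInfo = { start: 100, end: 106 }, the first branch above derives
// mapping = 20 - 100 = -80, giving display times start = 20 and
// end = 106 + (-80) = 26; later segments on the same timeline reuse that
// mapping through the `else if (mappingObj)` branch.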
20520 /**
20521 * Each time we have a discontinuity in the playlist, attempt to calculate the
20522 * display-time location of the start of the discontinuity and save that. We also save
20523 * an accuracy value so that we keep the values with the most accuracy (closest to 0).
20524 *
20525 * @private
20526 * @param {SegmentInfo} segmentInfo - The current active request information
20527 */
20528 ;
20529
20530 _proto.saveDiscontinuitySyncInfo_ = function saveDiscontinuitySyncInfo_(segmentInfo) {
20531 var playlist = segmentInfo.playlist;
20532 var segment = segmentInfo.segment; // If the current segment is a discontinuity then we know exactly where
20533 // the range starts and its accuracy is 0 (greater accuracy values
20534 // mean more approximation)
20535
20536 if (segment.discontinuity) {
20537 this.discontinuities[segment.timeline] = {
20538 time: segment.start,
20539 accuracy: 0
20540 };
20541 } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
20542 // Search for future discontinuities that we can provide better timing
20543 // information for and save that information for sync purposes
20544 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
20545 var segmentIndex = playlist.discontinuityStarts[i];
20546 var discontinuity = playlist.discontinuitySequence + i + 1;
20547 var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
20548 var accuracy = Math.abs(mediaIndexDiff);
20549
20550 if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
20551 var time = void 0;
20552
20553 if (mediaIndexDiff < 0) {
20554 time = segment.start - sumDurations({
20555 defaultDuration: playlist.targetDuration,
20556 durationList: playlist.segments,
20557 startIndex: segmentInfo.mediaIndex,
20558 endIndex: segmentIndex
20559 });
20560 } else {
20561 time = segment.end + sumDurations({
20562 defaultDuration: playlist.targetDuration,
20563 durationList: playlist.segments,
20564 startIndex: segmentInfo.mediaIndex + 1,
20565 endIndex: segmentIndex
20566 });
20567 }
20568
20569 this.discontinuities[discontinuity] = {
20570 time: time,
20571 accuracy: accuracy
20572 };
20573 }
20574 }
20575 }
20576 };
20577
20578 _proto.dispose = function dispose() {
20579 this.trigger('dispose');
20580 this.off();
20581 };
20582
20583 return SyncController;
20584}(videojs__default["default"].EventTarget);
20585
20586/**
20587 * The TimelineChangeController acts as a source for segment loaders to listen for and
20588 * keep track of latest and pending timeline changes. This is useful to ensure proper
20589 * sync, as each loader may need to make a consideration for what timeline the other
20590 * loader is on before making changes which could impact the other loader's media.
20591 *
20592 * @class TimelineChangeController
20593 * @extends videojs.EventTarget
20594 */
20595
20596var TimelineChangeController = /*#__PURE__*/function (_videojs$EventTarget) {
20597 _inheritsLoose__default["default"](TimelineChangeController, _videojs$EventTarget);
20598
20599 function TimelineChangeController() {
20600 var _this;
20601
20602 _this = _videojs$EventTarget.call(this) || this;
20603 _this.pendingTimelineChanges_ = {};
20604 _this.lastTimelineChanges_ = {};
20605 return _this;
20606 }
20607
20608 var _proto = TimelineChangeController.prototype;
20609
20610 _proto.clearPendingTimelineChange = function clearPendingTimelineChange(type) {
20611 this.pendingTimelineChanges_[type] = null;
20612 this.trigger('pendingtimelinechange');
20613 };
20614
20615 _proto.pendingTimelineChange = function pendingTimelineChange(_ref) {
20616 var type = _ref.type,
20617 from = _ref.from,
20618 to = _ref.to;
20619
20620 if (typeof from === 'number' && typeof to === 'number') {
20621 this.pendingTimelineChanges_[type] = {
20622 type: type,
20623 from: from,
20624 to: to
20625 };
20626 this.trigger('pendingtimelinechange');
20627 }
20628
20629 return this.pendingTimelineChanges_[type];
20630 };
20631
20632 _proto.lastTimelineChange = function lastTimelineChange(_ref2) {
20633 var type = _ref2.type,
20634 from = _ref2.from,
20635 to = _ref2.to;
20636
20637 if (typeof from === 'number' && typeof to === 'number') {
20638 this.lastTimelineChanges_[type] = {
20639 type: type,
20640 from: from,
20641 to: to
20642 };
20643 delete this.pendingTimelineChanges_[type];
20644 this.trigger('timelinechange');
20645 }
20646
20647 return this.lastTimelineChanges_[type];
20648 };
20649
20650 _proto.dispose = function dispose() {
20651 this.trigger('dispose');
20652 this.pendingTimelineChanges_ = {};
20653 this.lastTimelineChanges_ = {};
20654 this.off();
20655 };
20656
20657 return TimelineChangeController;
20658}(videojs__default["default"].EventTarget);
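// Minimal usage sketch (editor's example; `on` comes from videojs.EventTarget):
//
//   var controller = new TimelineChangeController();
//   controller.on('timelinechange', function () { /* react to the change */ });
//   controller.pendingTimelineChange({ type: 'main', from: 0, to: 1 });
//   controller.lastTimelineChange({ type: 'main', from: 0, to: 1 });
//   // => { type: 'main', from: 0, to: 1 }; the pending change is deleted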
20659
20660/* rollup-plugin-worker-factory start for worker!/Users/ddashkevich/projects/vhs-release/src/decrypter-worker.js */
20661var workerCode = transform(getWorkerString(function () {
20662
20663 var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
20664
20665 function createCommonjsModule(fn, basedir, module) {
20666 return module = {
20667 path: basedir,
20668 exports: {},
20669 require: function require(path, base) {
20670 return commonjsRequire(path, base === undefined || base === null ? module.path : base);
20671 }
20672 }, fn(module, module.exports), module.exports;
20673 }
20674
20675 function commonjsRequire() {
20676 throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
20677 }
20678
20679 var createClass = createCommonjsModule(function (module) {
20680 function _defineProperties(target, props) {
20681 for (var i = 0; i < props.length; i++) {
20682 var descriptor = props[i];
20683 descriptor.enumerable = descriptor.enumerable || false;
20684 descriptor.configurable = true;
20685 if ("value" in descriptor) descriptor.writable = true;
20686 Object.defineProperty(target, descriptor.key, descriptor);
20687 }
20688 }
20689
20690 function _createClass(Constructor, protoProps, staticProps) {
20691 if (protoProps) _defineProperties(Constructor.prototype, protoProps);
20692 if (staticProps) _defineProperties(Constructor, staticProps);
20693 return Constructor;
20694 }
20695
20696 module.exports = _createClass;
20697 module.exports["default"] = module.exports, module.exports.__esModule = true;
20698 });
20699 var setPrototypeOf = createCommonjsModule(function (module) {
20700 function _setPrototypeOf(o, p) {
20701 module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
20702 o.__proto__ = p;
20703 return o;
20704 };
20705
20706 module.exports["default"] = module.exports, module.exports.__esModule = true;
20707 return _setPrototypeOf(o, p);
20708 }
20709
20710 module.exports = _setPrototypeOf;
20711 module.exports["default"] = module.exports, module.exports.__esModule = true;
20712 });
20713 var inheritsLoose = createCommonjsModule(function (module) {
20714 function _inheritsLoose(subClass, superClass) {
20715 subClass.prototype = Object.create(superClass.prototype);
20716 subClass.prototype.constructor = subClass;
20717 setPrototypeOf(subClass, superClass);
20718 }
20719
20720 module.exports = _inheritsLoose;
20721 module.exports["default"] = module.exports, module.exports.__esModule = true;
20722 });
20723 /**
20724 * @file stream.js
20725 */
20726
20727 /**
20728 * A lightweight readable stream implementation that handles event dispatching.
20729 *
20730 * @class Stream
20731 */
20732
20733 var Stream = /*#__PURE__*/function () {
20734 function Stream() {
20735 this.listeners = {};
20736 }
20737 /**
20738 * Add a listener for a specified event type.
20739 *
20740 * @param {string} type the event name
20741 * @param {Function} listener the callback to be invoked when an event of
20742 * the specified type occurs
20743 */
20744
20745
20746 var _proto = Stream.prototype;
20747
20748 _proto.on = function on(type, listener) {
20749 if (!this.listeners[type]) {
20750 this.listeners[type] = [];
20751 }
20752
20753 this.listeners[type].push(listener);
20754 }
20755 /**
20756 * Remove a listener for a specified event type.
20757 *
20758 * @param {string} type the event name
20759 * @param {Function} listener a function previously registered for this
20760 * type of event through `on`
20761 * @return {boolean} whether the listener was found and removed
20762 */
20763 ;
20764
20765 _proto.off = function off(type, listener) {
20766 if (!this.listeners[type]) {
20767 return false;
20768 }
20769
20770 var index = this.listeners[type].indexOf(listener); // TODO: which is better?
20771 // In Video.js we slice listener functions
20772 // on trigger so that it does not mess up the order
20773 // while we loop through.
20774 //
20775 // Here we slice on off so that the loop in trigger
20776 // can continue using its old reference to loop without
20777 // messing up the order.
20778
20779 this.listeners[type] = this.listeners[type].slice(0);
20780 this.listeners[type].splice(index, 1);
20781 return index > -1;
20782 }
20783 /**
20784 * Trigger an event of the specified type on this stream. Any additional
20785 * arguments to this function are passed as parameters to event listeners.
20786 *
20787 * @param {string} type the event name
20788 */
20789 ;
20790
20791 _proto.trigger = function trigger(type) {
20792 var callbacks = this.listeners[type];
20793
20794 if (!callbacks) {
20795 return;
20796 } // Slicing the arguments on every invocation of this method
20797 // can add a significant amount of overhead. Avoid the
20798 // intermediate object creation for the common case of a
20799 // single callback argument
20800
20801
20802 if (arguments.length === 2) {
20803 var length = callbacks.length;
20804
20805 for (var i = 0; i < length; ++i) {
20806 callbacks[i].call(this, arguments[1]);
20807 }
20808 } else {
20809 var args = Array.prototype.slice.call(arguments, 1);
20810 var _length = callbacks.length;
20811
20812 for (var _i = 0; _i < _length; ++_i) {
20813 callbacks[_i].apply(this, args);
20814 }
20815 }
20816 }
20817 /**
20818 * Destroys the stream and cleans up.
20819 */
20820 ;
20821
20822 _proto.dispose = function dispose() {
20823 this.listeners = {};
20824 }
20825 /**
20826 * Forwards all `data` events on this stream to the destination stream. The
20827 * destination stream should provide a method `push` to receive the data
20828 * events as they arrive.
20829 *
20830 * @param {Stream} destination the stream that will receive all `data` events
20831 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
20832 */
20833 ;
20834
20835 _proto.pipe = function pipe(destination) {
20836 this.on('data', function (data) {
20837 destination.push(data);
20838 });
20839 };
20840
20841 return Stream;
20842 }();
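// Minimal usage sketch (editor's example):
//
//   var stream = new Stream();
//   stream.on('data', function (chunk) {
//     console.log('received', chunk);
//   });
//   stream.trigger('data', 42); // logs: received 42
//   stream.dispose();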
20843 /*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
20844
20845 /**
20846 * Returns the subarray of a Uint8Array without PKCS#7 padding.
20847 *
20848 * @param padded {Uint8Array} unencrypted bytes that have been padded
20849 * @return {Uint8Array} the unpadded bytes
20850 * @see http://tools.ietf.org/html/rfc5652
20851 */
20852
20853
20854 function unpad(padded) {
20855 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
20856 }
20857 /*! @name aes-decrypter @version 3.1.3 @license Apache-2.0 */
20858
20859 /**
20860 * @file aes.js
20861 *
20862 * This file contains an adaptation of the AES decryption algorithm
20863 * from the Stanford JavaScript Cryptography Library. That work is
20864 * covered by the following copyright and permissions notice:
20865 *
20866 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
20867 * All rights reserved.
20868 *
20869 * Redistribution and use in source and binary forms, with or without
20870 * modification, are permitted provided that the following conditions are
20871 * met:
20872 *
20873 * 1. Redistributions of source code must retain the above copyright
20874 * notice, this list of conditions and the following disclaimer.
20875 *
20876 * 2. Redistributions in binary form must reproduce the above
20877 * copyright notice, this list of conditions and the following
20878 * disclaimer in the documentation and/or other materials provided
20879 * with the distribution.
20880 *
20881 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
20882 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
20883 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20884 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
20885 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
20886 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
20887 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
20888 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
20889 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
20890 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
20891 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
20892 *
20893 * The views and conclusions contained in the software and documentation
20894 * are those of the authors and should not be interpreted as representing
20895 * official policies, either expressed or implied, of the authors.
20896 */
20897
20898 /**
20899 * Expand the S-box tables.
20900 *
20901 * @private
20902 */
20903
20904
20905 var precompute = function precompute() {
20906 var tables = [[[], [], [], [], []], [[], [], [], [], []]];
20907 var encTable = tables[0];
20908 var decTable = tables[1];
20909 var sbox = encTable[4];
20910 var sboxInv = decTable[4];
20911 var i;
20912 var x;
20913 var xInv;
20914 var d = [];
20915 var th = [];
20916 var x2;
20917 var x4;
20918 var x8;
20919 var s;
20920 var tEnc;
20921 var tDec; // Compute double and third tables
20922
20923 for (i = 0; i < 256; i++) {
20924 th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
20925 }
20926
20927 for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
20928 // Compute sbox
20929 s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
20930 s = s >> 8 ^ s & 255 ^ 99;
20931 sbox[x] = s;
20932 sboxInv[s] = x; // Compute MixColumns
20933
20934 x8 = d[x4 = d[x2 = d[x]]];
20935 tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
20936 tEnc = d[s] * 0x101 ^ s * 0x1010100;
20937
20938 for (i = 0; i < 4; i++) {
20939 encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
20940 decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
20941 }
20942 } // Compactify. Considerable speedup on Firefox.
20943
20944
20945 for (i = 0; i < 5; i++) {
20946 encTable[i] = encTable[i].slice(0);
20947 decTable[i] = decTable[i].slice(0);
20948 }
20949
20950 return tables;
20951 };
20952
20953 var aesTables = null;
20954 /**
20955 * Schedule out an AES key for both encryption and decryption. This
20956 * is a low-level class. Use a cipher mode to do bulk encryption.
20957 *
20958 * @class AES
20959 * @param key {Array} The key as an array of 4, 6 or 8 words.
20960 */
20961
20962 var AES = /*#__PURE__*/function () {
20963 function AES(key) {
20964 /**
20965 * The expanded S-box and inverse S-box tables. These will be computed
20966 * on the client so that we don't have to send them down the wire.
20967 *
20968 * There are two tables, _tables[0] is for encryption and
20969 * _tables[1] is for decryption.
20970 *
20971 * The first 4 sub-tables are the expanded S-box with MixColumns. The
20972 * last (_tables[01][4]) is the S-box itself.
20973 *
20974 * @private
20975 */
20976 // if we have yet to precompute the S-box tables
20977 // do so now
20978 if (!aesTables) {
20979 aesTables = precompute();
20980 } // then make a copy of that object for use
20981
20982
20983 this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
20984 var i;
20985 var j;
20986 var tmp;
20987 var sbox = this._tables[0][4];
20988 var decTable = this._tables[1];
20989 var keyLen = key.length;
20990 var rcon = 1;
20991
20992 if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
20993 throw new Error('Invalid aes key size');
20994 }
20995
20996 var encKey = key.slice(0);
20997 var decKey = [];
20998 this._key = [encKey, decKey]; // schedule encryption keys
20999
21000 for (i = keyLen; i < 4 * keyLen + 28; i++) {
21001 tmp = encKey[i - 1]; // apply sbox
21002
21003 if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
21004 tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon
21005
21006 if (i % keyLen === 0) {
21007 tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
21008 rcon = rcon << 1 ^ (rcon >> 7) * 283;
21009 }
21010 }
21011
21012 encKey[i] = encKey[i - keyLen] ^ tmp;
21013 } // schedule decryption keys
21014
21015
21016 for (j = 0; i; j++, i--) {
21017 tmp = encKey[j & 3 ? i : i - 4];
21018
21019 if (i <= 4 || j < 4) {
21020 decKey[j] = tmp;
21021 } else {
21022 decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
21023 }
21024 }
21025 }
21026 /**
21027 * Decrypt 16 bytes, specified as four 32-bit words.
21028 *
21029 * @param {number} encrypted0 the first word to decrypt
21030 * @param {number} encrypted1 the second word to decrypt
21031 * @param {number} encrypted2 the third word to decrypt
21032 * @param {number} encrypted3 the fourth word to decrypt
21033 * @param {Int32Array} out the array to write the decrypted words
21034 * into
21035 * @param {number} offset the offset into the output array to start
21036 * writing results
21037 * @return {Array} The plaintext.
21038 */
21039
21040
21041 var _proto = AES.prototype;
21042
21043 _proto.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
21044 var key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data
21045
21046 var a = encrypted0 ^ key[0];
21047 var b = encrypted3 ^ key[1];
21048 var c = encrypted2 ^ key[2];
21049 var d = encrypted1 ^ key[3];
21050 var a2;
21051 var b2;
21052 var c2; // key.length === 2 ?
21053
21054 var nInnerRounds = key.length / 4 - 2;
21055 var i;
21056 var kIndex = 4;
21057 var table = this._tables[1]; // load up the tables
21058
21059 var table0 = table[0];
21060 var table1 = table[1];
21061 var table2 = table[2];
21062 var table3 = table[3];
21063 var sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.
21064
21065 for (i = 0; i < nInnerRounds; i++) {
21066 a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
21067 b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
21068 c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
21069 d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
21070 kIndex += 4;
21071 a = a2;
21072 b = b2;
21073 c = c2;
21074 } // Last round.
21075
21076
21077 for (i = 0; i < 4; i++) {
21078 out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
21079 a2 = a;
21080 a = b;
21081 b = c;
21082 c = d;
21083 d = a2;
21084 }
21085 };
21086
21087 return AES;
21088 }();
21089 /**
21090 * A wrapper around the Stream class to use setTimeout
21091 * and run stream "jobs" Asynchronously
21092 *
21093 * @class AsyncStream
21094 * @extends Stream
21095 */
21096
21097
21098 var AsyncStream = /*#__PURE__*/function (_Stream) {
21099 inheritsLoose(AsyncStream, _Stream);
21100
21101 function AsyncStream() {
21102 var _this;
21103
21104 _this = _Stream.call(this, Stream) || this;
21105 _this.jobs = [];
21106 _this.delay = 1;
21107 _this.timeout_ = null;
21108 return _this;
21109 }
21110 /**
21111 * process an async job
21112 *
21113 * @private
21114 */
21115
21116
21117 var _proto = AsyncStream.prototype;
21118
21119 _proto.processJob_ = function processJob_() {
21120 this.jobs.shift()();
21121
21122 if (this.jobs.length) {
21123 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
21124 } else {
21125 this.timeout_ = null;
21126 }
21127 }
21128 /**
21129 * push a job into the stream
21130 *
21131 * @param {Function} job the job to push into the stream
21132 */
21133 ;
21134
21135 _proto.push = function push(job) {
21136 this.jobs.push(job);
21137
21138 if (!this.timeout_) {
21139 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
21140 }
21141 };
21142
21143 return AsyncStream;
21144 }(Stream);
21145 /**
21146 * Convert network-order (big-endian) bytes into their little-endian
21147 * representation.
21148 */
21149
21150
21151 var ntoh = function ntoh(word) {
21152 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
21153 };
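// Worked example (editor's illustration):
//
//   ntoh(0x11223344); // => 0x44332211
//
// i.e. the byte order of the 32-bit word is reversed, converting big-endian
// (network order) input into the little-endian order used by the typed-array
// math below.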
21154 /**
21155 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
21156 *
21157 * @param {Uint8Array} encrypted the encrypted bytes
21158 * @param {Uint32Array} key the bytes of the decryption key
21159 * @param {Uint32Array} initVector the initialization vector (IV) to
21160 * use for the first round of CBC.
21161 * @return {Uint8Array} the decrypted bytes
21162 *
21163 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
21164 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
21165 * @see https://tools.ietf.org/html/rfc2315
21166 */
21167
21168
21169 var decrypt = function decrypt(encrypted, key, initVector) {
21170 // word-level access to the encrypted bytes
21171 var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
21172 var decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output
21173
21174 var decrypted = new Uint8Array(encrypted.byteLength);
21175 var decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
21176 // decrypted data
21177
21178 var init0;
21179 var init1;
21180 var init2;
21181 var init3;
21182 var encrypted0;
21183 var encrypted1;
21184 var encrypted2;
21185 var encrypted3; // iteration variable
21186
21187 var wordIx; // pull out the words of the IV to ensure we don't modify the
21188 // passed-in reference and for easier access
21189
21190 init0 = initVector[0];
21191 init1 = initVector[1];
21192 init2 = initVector[2];
21193 init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
21194 // to each decrypted block
21195
21196 for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
21197 // convert big-endian (network order) words into little-endian
21198 // (javascript order)
21199 encrypted0 = ntoh(encrypted32[wordIx]);
21200 encrypted1 = ntoh(encrypted32[wordIx + 1]);
21201 encrypted2 = ntoh(encrypted32[wordIx + 2]);
21202 encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block
21203
21204 decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
21205 // plaintext
21206
21207 decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
21208 decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
21209 decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
21210 decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round
21211
21212 init0 = encrypted0;
21213 init1 = encrypted1;
21214 init2 = encrypted2;
21215 init3 = encrypted3;
21216 }
21217
21218 return decrypted;
21219 };
21220 /**
21221 * The `Decrypter` class that manages decryption of AES
21222 * data through `AsyncStream` objects and the `decrypt`
21223 * function
21224 *
21225 * @param {Uint8Array} encrypted the encrypted bytes
21226 * @param {Uint32Array} key the bytes of the decryption key
21227 * @param {Uint32Array} initVector the initialization vector (IV) to
21228 * @param {Function} done the function to run when done
21229 * @class Decrypter
21230 */
21231
21232
21233 var Decrypter = /*#__PURE__*/function () {
21234 function Decrypter(encrypted, key, initVector, done) {
21235 var step = Decrypter.STEP;
21236 var encrypted32 = new Int32Array(encrypted.buffer);
21237 var decrypted = new Uint8Array(encrypted.byteLength);
21238 var i = 0;
21239 this.asyncStream_ = new AsyncStream(); // split up the decryption job and do the individual chunks asynchronously
21240
21241 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
21242
21243 for (i = step; i < encrypted32.length; i += step) {
21244 initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
21245 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
21246 } // invoke the done() callback when everything is finished
21247
21248
21249 this.asyncStream_.push(function () {
21250 // remove pkcs#7 padding from the decrypted bytes
21251 done(null, unpad(decrypted));
21252 });
21253 }
21254 /**
21255 * A getter for step, the maximum number of bytes to process at one time
21256 *
21257 * @return {number} the value of step, 32000
21258 */
21259
21260
21261 var _proto = Decrypter.prototype;
21262 /**
21263 * @private
21264 */
21265
21266 _proto.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
21267 return function () {
21268 var bytes = decrypt(encrypted, key, initVector);
21269 decrypted.set(bytes, encrypted.byteOffset);
21270 };
21271 };
21272
21273 createClass(Decrypter, null, [{
21274 key: "STEP",
21275 get: function get() {
21276 // 4 * 8000;
21277 return 32000;
21278 }
21279 }]);
21280 return Decrypter;
21281 }();
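// Minimal usage sketch (editor's example; `handlePlaintext` is a hypothetical
// consumer, and the typed-array shapes match the decrypt() signature above):
//
//   var decrypter = new Decrypter(
//     encryptedBytes, // Uint8Array of AES-128 CBC ciphertext
//     keyWords,       // Uint32Array of 4 words
//     ivWords,        // Uint32Array of 4 words
//     function (err, bytes) {
//       handlePlaintext(bytes); // bytes is the unpadded plaintext
//     }
//   );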
21282
21283 var win;
21284
21285 if (typeof window !== "undefined") {
21286 win = window;
21287 } else if (typeof commonjsGlobal !== "undefined") {
21288 win = commonjsGlobal;
21289 } else if (typeof self !== "undefined") {
21290 win = self;
21291 } else {
21292 win = {};
21293 }
21294
21295 var window_1 = win;
21296
21297 var isArrayBufferView = function isArrayBufferView(obj) {
21298 if (typeof ArrayBuffer.isView === 'function') {
21299 return ArrayBuffer.isView(obj);
21300 }
21301
21302 return obj && obj.buffer instanceof ArrayBuffer;
21303 };
21304
21305 var BigInt = window_1.BigInt || Number;
21306 [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
21307
21308 (function () {
21309 var a = new Uint16Array([0xFFCC]);
21310 var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);
21311
21312 if (b[0] === 0xFF) {
21313 return 'big';
21314 }
21315
21316 if (b[0] === 0xCC) {
21317 return 'little';
21318 }
21319
21320 return 'unknown';
21321 })();
21322 /**
21323 * Creates an object for sending to a web worker modifying properties that are TypedArrays
21324 * into a new object with separated properties for the buffer, byteOffset, and byteLength.
21325 *
21326 * @param {Object} message
21327 * Object of properties and values to send to the web worker
21328 * @return {Object}
21329 * Modified message with TypedArray values expanded
21330 * @function createTransferableMessage
21331 */
21332
21333
21334 var createTransferableMessage = function createTransferableMessage(message) {
21335 var transferable = {};
21336 Object.keys(message).forEach(function (key) {
21337 var value = message[key];
21338
21339 if (isArrayBufferView(value)) {
21340 transferable[key] = {
21341 bytes: value.buffer,
21342 byteOffset: value.byteOffset,
21343 byteLength: value.byteLength
21344 };
21345 } else {
21346 transferable[key] = value;
21347 }
21348 });
21349 return transferable;
21350 };
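// Worked example (editor's illustration): a typed-array property is expanded so
// its underlying ArrayBuffer can be listed as a transferable in postMessage:
//
//   createTransferableMessage({ source: 'a', encrypted: new Uint8Array(16) });
//   // => { source: 'a',
//   //      encrypted: { bytes: ArrayBuffer(16), byteOffset: 0, byteLength: 16 } }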
21351 /* global self */
21352
21353 /**
21354 * Our web worker interface so that things can talk to aes-decrypter
21355 * that will be running in a web worker. The scope is passed to this by
21356 * webworkify.
21357 */
21358
21359
21360 self.onmessage = function (event) {
21361 var data = event.data;
21362 var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
21363 var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
21364 var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
21365 /* eslint-disable no-new, handle-callback-err */
21366
21367 new Decrypter(encrypted, key, iv, function (err, bytes) {
21368 self.postMessage(createTransferableMessage({
21369 source: data.source,
21370 decrypted: bytes
21371 }), [bytes.buffer]);
21372 });
21373 /* eslint-enable */
21374 };
21375}));
21376var Decrypter = factory(workerCode);
21377/* rollup-plugin-worker-factory end for worker!/Users/ddashkevich/projects/vhs-release/src/decrypter-worker.js */
21378
21379/**
21380 * Convert the properties of an HLS track into an audioTrackKind.
21381 *
21382 * @private
21383 */
21384
21385var audioTrackKind_ = function audioTrackKind_(properties) {
21386 var kind = properties.default ? 'main' : 'alternative';
21387
21388 if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
21389 kind = 'main-desc';
21390 }
21391
21392 return kind;
21393};
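// Worked example (editor's illustration):
//
//   audioTrackKind_({ default: true });  // => 'main'
//   audioTrackKind_({ default: false }); // => 'alternative'
//   audioTrackKind_({
//     default: true,
//     characteristics: 'public.accessibility.describes-video'
//   }); // => 'main-desc'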
21394/**
21395 * Pause provided segment loader and playlist loader if active
21396 *
21397 * @param {SegmentLoader} segmentLoader
21398 * SegmentLoader to pause
21399 * @param {Object} mediaType
21400 * Active media type
21401 * @function stopLoaders
21402 */
21403
21404
21405var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
21406 segmentLoader.abort();
21407 segmentLoader.pause();
21408
21409 if (mediaType && mediaType.activePlaylistLoader) {
21410 mediaType.activePlaylistLoader.pause();
21411 mediaType.activePlaylistLoader = null;
21412 }
21413};
21414/**
21415 * Start loading provided segment loader and playlist loader
21416 *
21417 * @param {PlaylistLoader} playlistLoader
21418 * PlaylistLoader to start loading
21419 * @param {Object} mediaType
21420 * Active media type
21421 * @function startLoaders
21422 */
21423
21424var startLoaders = function startLoaders(playlistLoader, mediaType) {
21425 // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
21426 // playlist loader
21427 mediaType.activePlaylistLoader = playlistLoader;
21428 playlistLoader.load();
21429};
21430/**
21431 * Returns a function to be called when the media group changes. It performs a
21432 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
21433 * change of group is merely a rendition switch of the same content at another encoding,
21434 * rather than a change of content, such as switching audio from English to Spanish.
21435 *
21436 * @param {string} type
21437 * MediaGroup type
21438 * @param {Object} settings
21439 * Object containing required information for media groups
21440 * @return {Function}
21441 * Handler for a non-destructive resync of SegmentLoader when the active media
21442 * group changes.
21443 * @function onGroupChanged
21444 */
21445
21446var onGroupChanged = function onGroupChanged(type, settings) {
21447 return function () {
21448 var _settings$segmentLoad = settings.segmentLoaders,
21449 segmentLoader = _settings$segmentLoad[type],
21450 mainSegmentLoader = _settings$segmentLoad.main,
21451 mediaType = settings.mediaTypes[type];
21452 var activeTrack = mediaType.activeTrack();
21453 var activeGroup = mediaType.getActiveGroup();
21454 var previousActiveLoader = mediaType.activePlaylistLoader;
21455 var lastGroup = mediaType.lastGroup_; // the group did not change, do nothing
21456
21457 if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
21458 return;
21459 }
21460
21461 mediaType.lastGroup_ = activeGroup;
21462 mediaType.lastTrack_ = activeTrack;
21463 stopLoaders(segmentLoader, mediaType);
21464
21465 if (!activeGroup || activeGroup.isMasterPlaylist) {
21466 // there is no active group, or the active group is a master playlist and won't change
21467 return;
21468 }
21469
21470 if (!activeGroup.playlistLoader) {
21471 if (previousActiveLoader) {
21472 // The previous group had a playlist loader but the new active group does not.
21473 // This means we are switching from demuxed to muxed audio. In this case we want to
21474 // do a destructive reset of the main segment loader and not restart the audio
21475 // loaders.
21476 mainSegmentLoader.resetEverything();
21477 }
21478
21479 return;
21480 } // Non-destructive resync
21481
21482
21483 segmentLoader.resyncLoader();
21484 startLoaders(activeGroup.playlistLoader, mediaType);
21485 };
21486};
21487var onGroupChanging = function onGroupChanging(type, settings) {
21488 return function () {
21489 var segmentLoader = settings.segmentLoaders[type],
21490 mediaType = settings.mediaTypes[type];
21491 mediaType.lastGroup_ = null;
21492 segmentLoader.abort();
21493 segmentLoader.pause();
21494 };
21495};
21496/**
21497 * Returns a function to be called when the media track changes. It performs a
21498 * destructive reset of the SegmentLoader to ensure we start loading as close to
21499 * currentTime as possible.
21500 *
21501 * @param {string} type
21502 * MediaGroup type
21503 * @param {Object} settings
21504 * Object containing required information for media groups
21505 * @return {Function}
21506 * Handler for a destructive reset of SegmentLoader when the active media
21507 * track changes.
21508 * @function onTrackChanged
21509 */
21510
21511var onTrackChanged = function onTrackChanged(type, settings) {
21512 return function () {
21513 var masterPlaylistLoader = settings.masterPlaylistLoader,
21514 _settings$segmentLoad2 = settings.segmentLoaders,
21515 segmentLoader = _settings$segmentLoad2[type],
21516 mainSegmentLoader = _settings$segmentLoad2.main,
21517 mediaType = settings.mediaTypes[type];
21518 var activeTrack = mediaType.activeTrack();
21519 var activeGroup = mediaType.getActiveGroup();
21520 var previousActiveLoader = mediaType.activePlaylistLoader;
21521 var lastTrack = mediaType.lastTrack_; // track did not change, do nothing
21522
21523 if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
21524 return;
21525 }
21526
21527 mediaType.lastGroup_ = activeGroup;
21528 mediaType.lastTrack_ = activeTrack;
21529 stopLoaders(segmentLoader, mediaType);
21530
21531 if (!activeGroup) {
21532 // there is no group active so we do not want to restart loaders
21533 return;
21534 }
21535
21536 if (activeGroup.isMasterPlaylist) {
21537 // track did not change, do nothing
21538 if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
21539 return;
21540 }
21541
21542 var mpc = settings.vhs.masterPlaylistController_;
21543 var newPlaylist = mpc.selectPlaylist(); // media will not change, do nothing
21544
21545 if (mpc.media() === newPlaylist) {
21546 return;
21547 }
21548
21549 mediaType.logger_("track change. Switching master audio from " + lastTrack.id + " to " + activeTrack.id);
21550 masterPlaylistLoader.pause();
21551 mainSegmentLoader.resetEverything();
21552 mpc.fastQualityChange_(newPlaylist);
21553 return;
21554 }
21555
21556 if (type === 'AUDIO') {
21557 if (!activeGroup.playlistLoader) {
21558 // when switching from demuxed audio/video to muxed audio/video (noted by no
21559 // playlist loader for the audio group), we want to do a destructive reset of the
21560 // main segment loader and not restart the audio loaders
21561 mainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since
21562 // it should be stopped
21563
21564 mainSegmentLoader.resetEverything();
21565 return;
21566 } // although the segment loader is an audio segment loader, call the setAudio
21567 // function to ensure it is prepared to re-append the init segment (or handle other
21568 // config changes)
21569
21570
21571 segmentLoader.setAudio(true);
21572 mainSegmentLoader.setAudio(false);
21573 }
21574
21575 if (previousActiveLoader === activeGroup.playlistLoader) {
21576 // Nothing has actually changed. This can happen because track change events can fire
21577 // multiple times for a "single" change. One for enabling the new active track, and
21578 // one for disabling the track that was active
21579 startLoaders(activeGroup.playlistLoader, mediaType);
21580 return;
21581 }
21582
21583 if (segmentLoader.track) {
21584 // For WebVTT, set the new text track in the segmentloader
21585 segmentLoader.track(activeTrack);
21586 } // destructive reset
21587
21588
21589 segmentLoader.resetEverything();
21590 startLoaders(activeGroup.playlistLoader, mediaType);
21591 };
21592};
21593var onError = {
21594 /**
21595 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
21596 * an error.
21597 *
21598 * @param {string} type
21599 * MediaGroup type
21600 * @param {Object} settings
21601 * Object containing required information for media groups
21602 * @return {Function}
21603 * Error handler. Logs warning (or error if the playlist is blacklisted) to
21604 * console and switches back to default audio track.
21605 * @function onError.AUDIO
21606 */
21607 AUDIO: function AUDIO(type, settings) {
21608 return function () {
21609 var segmentLoader = settings.segmentLoaders[type],
21610 mediaType = settings.mediaTypes[type],
21611 blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;
21612 stopLoaders(segmentLoader, mediaType); // switch back to default audio track
21613
21614 var activeTrack = mediaType.activeTrack();
21615 var activeGroup = mediaType.activeGroup();
21616 var id = (activeGroup.filter(function (group) {
21617 return group.default;
21618 })[0] || activeGroup[0]).id;
21619 var defaultTrack = mediaType.tracks[id];
21620
21621 if (activeTrack === defaultTrack) {
21622 // Default track encountered an error. All we can do now is blacklist the current
21623 // rendition and hope another will switch audio groups
21624 blacklistCurrentPlaylist({
21625 message: 'Problem encountered loading the default audio track.'
21626 });
21627 return;
21628 }
21629
21630 videojs__default["default"].log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');
21631
21632 for (var trackId in mediaType.tracks) {
21633 mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
21634 }
21635
21636 mediaType.onTrackChanged();
21637 };
21638 },
21639
21640 /**
21641 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
21642 * an error.
21643 *
21644 * @param {string} type
21645 * MediaGroup type
21646 * @param {Object} settings
21647 * Object containing required information for media groups
21648 * @return {Function}
21649 * Error handler. Logs warning to console and disables the active subtitle track
21650 * @function onError.SUBTITLES
21651 */
21652 SUBTITLES: function SUBTITLES(type, settings) {
21653 return function () {
21654 var segmentLoader = settings.segmentLoaders[type],
21655 mediaType = settings.mediaTypes[type];
21656 videojs__default["default"].log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');
21657 stopLoaders(segmentLoader, mediaType);
21658 var track = mediaType.activeTrack();
21659
21660 if (track) {
21661 track.mode = 'disabled';
21662 }
21663
21664 mediaType.onTrackChanged();
21665 };
21666 }
21667};
21668var setupListeners = {
21669 /**
21670 * Setup event listeners for audio playlist loader
21671 *
21672 * @param {string} type
21673 * MediaGroup type
21674 * @param {PlaylistLoader|null} playlistLoader
21675 * PlaylistLoader to register listeners on
21676 * @param {Object} settings
21677 * Object containing required information for media groups
21678 * @function setupListeners.AUDIO
21679 */
21680 AUDIO: function AUDIO(type, playlistLoader, settings) {
21681 if (!playlistLoader) {
21682 // no playlist loader means audio will be muxed with the video
21683 return;
21684 }
21685
21686 var tech = settings.tech,
21687 requestOptions = settings.requestOptions,
21688 segmentLoader = settings.segmentLoaders[type];
21689 playlistLoader.on('loadedmetadata', function () {
21690 var media = playlistLoader.media();
21691 segmentLoader.playlist(media, requestOptions); // if the video is already playing, or if this isn't a live video and preload
21692 // permits, start downloading segments
21693
21694 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
21695 segmentLoader.load();
21696 }
21697 });
21698 playlistLoader.on('loadedplaylist', function () {
21699 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
21700
21701 if (!tech.paused()) {
21702 segmentLoader.load();
21703 }
21704 });
21705 playlistLoader.on('error', onError[type](type, settings));
21706 },
21707
21708 /**
21709 * Setup event listeners for subtitle playlist loader
21710 *
21711 * @param {string} type
21712 * MediaGroup type
21713 * @param {PlaylistLoader|null} playlistLoader
21714 * PlaylistLoader to register listeners on
21715 * @param {Object} settings
21716 * Object containing required information for media groups
21717 * @function setupListeners.SUBTITLES
21718 */
21719 SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
21720 var tech = settings.tech,
21721 requestOptions = settings.requestOptions,
21722 segmentLoader = settings.segmentLoaders[type],
21723 mediaType = settings.mediaTypes[type];
21724 playlistLoader.on('loadedmetadata', function () {
21725 var media = playlistLoader.media();
21726 segmentLoader.playlist(media, requestOptions);
21727 segmentLoader.track(mediaType.activeTrack()); // if the video is already playing, or if this isn't a live video and preload
21728 // permits, start downloading segments
21729
21730 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
21731 segmentLoader.load();
21732 }
21733 });
21734 playlistLoader.on('loadedplaylist', function () {
21735 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
21736
21737 if (!tech.paused()) {
21738 segmentLoader.load();
21739 }
21740 });
21741 playlistLoader.on('error', onError[type](type, settings));
21742 }
21743};
21744var initialize = {
21745 /**
21746 * Setup PlaylistLoaders and AudioTracks for the audio groups
21747 *
21748 * @param {string} type
21749 * MediaGroup type
21750 * @param {Object} settings
21751 * Object containing required information for media groups
21752 * @function initialize.AUDIO
21753 */
21754 'AUDIO': function AUDIO(type, settings) {
21755 var vhs = settings.vhs,
21756 sourceType = settings.sourceType,
21757 segmentLoader = settings.segmentLoaders[type],
21758 requestOptions = settings.requestOptions,
21759 mediaGroups = settings.master.mediaGroups,
21760 _settings$mediaTypes$ = settings.mediaTypes[type],
21761 groups = _settings$mediaTypes$.groups,
21762 tracks = _settings$mediaTypes$.tracks,
21763 logger_ = _settings$mediaTypes$.logger_,
21764 masterPlaylistLoader = settings.masterPlaylistLoader;
21765 var audioOnlyMaster = isAudioOnly(masterPlaylistLoader.master); // force a default if we have none
21766
21767 if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
21768 mediaGroups[type] = {
21769 main: {
21770 default: {
21771 default: true
21772 }
21773 }
21774 };
21775
21776 if (audioOnlyMaster) {
21777 mediaGroups[type].main.default.playlists = masterPlaylistLoader.master.playlists;
21778 }
21779 }
21780
21781 for (var groupId in mediaGroups[type]) {
21782 if (!groups[groupId]) {
21783 groups[groupId] = [];
21784 }
21785
21786 for (var variantLabel in mediaGroups[type][groupId]) {
21787 var properties = mediaGroups[type][groupId][variantLabel];
21788 var playlistLoader = void 0;
21789
21790 if (audioOnlyMaster) {
21791 logger_("AUDIO group '" + groupId + "' label '" + variantLabel + "' is a master playlist");
21792 properties.isMasterPlaylist = true;
21793 playlistLoader = null; // if vhs-json was provided as the source, and the media playlist was resolved,
21794 // use the resolved media playlist object
21795 } else if (sourceType === 'vhs-json' && properties.playlists) {
21796 playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);
21797 } else if (properties.resolvedUri) {
21798 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions); // TODO: dash isn't the only type with properties.playlists
21799 // should we even have properties.playlists in this check?
21800 } else if (properties.playlists && sourceType === 'dash') {
21801 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
21802 } else {
21803 // no resolvedUri means the audio is muxed with the video when using this
21804 // audio track
21805 playlistLoader = null;
21806 }
21807
21808 properties = videojs__default["default"].mergeOptions({
21809 id: variantLabel,
21810 playlistLoader: playlistLoader
21811 }, properties);
21812 setupListeners[type](type, properties.playlistLoader, settings);
21813 groups[groupId].push(properties);
21814
21815 if (typeof tracks[variantLabel] === 'undefined') {
21816 var track = new videojs__default["default"].AudioTrack({
21817 id: variantLabel,
21818 kind: audioTrackKind_(properties),
21819 enabled: false,
21820 language: properties.language,
21821 default: properties.default,
21822 label: variantLabel
21823 });
21824 tracks[variantLabel] = track;
21825 }
21826 }
21827 } // setup single error event handler for the segment loader
21828
21829
21830 segmentLoader.on('error', onError[type](type, settings));
21831 },
21832
21833 /**
21834 * Setup PlaylistLoaders and TextTracks for the subtitle groups
21835 *
21836 * @param {string} type
21837 * MediaGroup type
21838 * @param {Object} settings
21839 * Object containing required information for media groups
21840 * @function initialize.SUBTITLES
21841 */
21842 'SUBTITLES': function SUBTITLES(type, settings) {
21843 var tech = settings.tech,
21844 vhs = settings.vhs,
21845 sourceType = settings.sourceType,
21846 segmentLoader = settings.segmentLoaders[type],
21847 requestOptions = settings.requestOptions,
21848 mediaGroups = settings.master.mediaGroups,
21849 _settings$mediaTypes$2 = settings.mediaTypes[type],
21850 groups = _settings$mediaTypes$2.groups,
21851 tracks = _settings$mediaTypes$2.tracks,
21852 masterPlaylistLoader = settings.masterPlaylistLoader;
21853
21854 for (var groupId in mediaGroups[type]) {
21855 if (!groups[groupId]) {
21856 groups[groupId] = [];
21857 }
21858
21859 for (var variantLabel in mediaGroups[type][groupId]) {
21860 if (mediaGroups[type][groupId][variantLabel].forced) {
21861 // Subtitle playlists with the forced attribute are not selectable in Safari.
21862 // According to Apple's HLS Authoring Specification:
21863 // If content has forced subtitles and regular subtitles in a given language,
21864 // the regular subtitles track in that language MUST contain both the forced
21865 // subtitles and the regular subtitles for that language.
21866 // Because of this requirement and that Safari does not add forced subtitles,
21867 // forced subtitles are skipped here to maintain consistent experience across
21868 // all platforms
21869 continue;
21870 }
21871
21872 var properties = mediaGroups[type][groupId][variantLabel];
21873 var playlistLoader = void 0;
21874
21875 if (sourceType === 'hls') {
21876 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
21877 } else if (sourceType === 'dash') {
21878 var playlists = properties.playlists.filter(function (p) {
21879 return p.excludeUntil !== Infinity;
21880 });
21881
21882 if (!playlists.length) {
21883 return;
21884 }
21885
21886 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
21887 } else if (sourceType === 'vhs-json') {
21888 playlistLoader = new PlaylistLoader( // if the vhs-json object included the media playlist, use the media playlist
21889 // as provided, otherwise use the resolved URI to load the playlist
21890 properties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);
21891 }
21892
21893 properties = videojs__default["default"].mergeOptions({
21894 id: variantLabel,
21895 playlistLoader: playlistLoader
21896 }, properties);
21897 setupListeners[type](type, properties.playlistLoader, settings);
21898 groups[groupId].push(properties);
21899
21900 if (typeof tracks[variantLabel] === 'undefined') {
21901 var track = tech.addRemoteTextTrack({
21902 id: variantLabel,
21903 kind: 'subtitles',
21904 default: properties.default && properties.autoselect,
21905 language: properties.language,
21906 label: variantLabel
21907 }, false).track;
21908 tracks[variantLabel] = track;
21909 }
21910 }
21911 } // setup single error event handler for the segment loader
21912
21913
21914 segmentLoader.on('error', onError[type](type, settings));
21915 },
21916
21917 /**
21918 * Setup TextTracks for the closed-caption groups
21919 *
21920 * @param {String} type
21921 * MediaGroup type
21922 * @param {Object} settings
21923 * Object containing required information for media groups
21924 * @function initialize['CLOSED-CAPTIONS']
21925 */
21926 'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
21927 var tech = settings.tech,
21928 mediaGroups = settings.master.mediaGroups,
21929 _settings$mediaTypes$3 = settings.mediaTypes[type],
21930 groups = _settings$mediaTypes$3.groups,
21931 tracks = _settings$mediaTypes$3.tracks;
21932
21933 for (var groupId in mediaGroups[type]) {
21934 if (!groups[groupId]) {
21935 groups[groupId] = [];
21936 }
21937
21938 for (var variantLabel in mediaGroups[type][groupId]) {
21939 var properties = mediaGroups[type][groupId][variantLabel]; // Look for either 608 (CCn) or 708 (SERVICEn) caption services
21940
21941 if (!/^(?:CC|SERVICE)/.test(properties.instreamId)) {
21942 continue;
21943 }
21944
21945 var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
21946 var newProps = {
21947 label: variantLabel,
21948 language: properties.language,
21949 instreamId: properties.instreamId,
21950 default: properties.default && properties.autoselect
21951 };
21952
21953 if (captionServices[newProps.instreamId]) {
21954 newProps = videojs__default["default"].mergeOptions(newProps, captionServices[newProps.instreamId]);
21955 }
21956
21957 if (newProps.default === undefined) {
21958 delete newProps.default;
21959 } // No PlaylistLoader is required for Closed-Captions because the captions are
21960 // embedded within the video stream
21961
21962
21963 groups[groupId].push(videojs__default["default"].mergeOptions({
21964 id: variantLabel
21965 }, properties));
21966
21967 if (typeof tracks[variantLabel] === 'undefined') {
21968 var track = tech.addRemoteTextTrack({
21969 id: newProps.instreamId,
21970 kind: 'captions',
21971 default: newProps.default,
21972 language: newProps.language,
21973 label: newProps.label
21974 }, false).track;
21975 tracks[variantLabel] = track;
21976 }
21977 }
21978 }
21979 }
21980};
21981
21982var groupMatch = function groupMatch(list, media) {
21983 for (var i = 0; i < list.length; i++) {
21984 if (playlistMatch(media, list[i])) {
21985 return true;
21986 }
21987
21988 if (list[i].playlists && groupMatch(list[i].playlists, media)) {
21989 return true;
21990 }
21991 }
21992
21993 return false;
21994};
21995/**
21996 * Returns a function used to get the active group of the provided type
21997 *
21998 * @param {string} type
21999 * MediaGroup type
22000 * @param {Object} settings
22001 * Object containing required information for media groups
22002 * @return {Function}
22003 * Function that returns the active media group for the provided type. Takes an
22004 * optional parameter {TextTrack} track. If no track is provided, a list of all
22005 * variants in the group, otherwise the variant corresponding to the provided
22006 * track is returned.
22007 * @function activeGroup
22008 */
22009
22010
22011var activeGroup = function activeGroup(type, settings) {
22012 return function (track) {
22013 var masterPlaylistLoader = settings.masterPlaylistLoader,
22014 groups = settings.mediaTypes[type].groups;
22015 var media = masterPlaylistLoader.media();
22016
22017 if (!media) {
22018 return null;
22019 }
22020
22021 var variants = null; // set variants to the main media's active group
22022
22023 if (media.attributes[type]) {
22024 variants = groups[media.attributes[type]];
22025 }
22026
22027 var groupKeys = Object.keys(groups);
22028
22029 if (!variants) {
22030 // find the masterPlaylistLoader media
22031 // that is in a media group if we are dealing
22032 // with audio only
22033 if (type === 'AUDIO' && groupKeys.length > 1 && isAudioOnly(settings.master)) {
22034 for (var i = 0; i < groupKeys.length; i++) {
22035 var groupPropertyList = groups[groupKeys[i]];
22036
22037 if (groupMatch(groupPropertyList, media)) {
22038 variants = groupPropertyList;
22039 break;
22040 }
22041 } // use the main group if it exists
22042
22043 } else if (groups.main) {
22044 variants = groups.main; // only one group, use that one
22045 } else if (groupKeys.length === 1) {
22046 variants = groups[groupKeys[0]];
22047 }
22048 }
22049
22050 if (typeof track === 'undefined') {
22051 return variants;
22052 }
22053
22054 if (track === null || !variants) {
22055 // An active track was specified so a corresponding group is expected. track === null
22056 // means no track is currently active so there is no corresponding group
22057 return null;
22058 }
22059
22060 return variants.filter(function (props) {
22061 return props.id === track.id;
22062 })[0] || null;
22063 };
22064};
22065var activeTrack = {
22066 /**
22067 * Returns a function used to get the active track of type provided
22068 *
22069 * @param {string} type
22070 * MediaGroup type
22071 * @param {Object} settings
22072 * Object containing required information for media groups
22073 * @return {Function}
22074 * Function that returns the active media track for the provided type. Returns
22075 * null if no track is active
22076 * @function activeTrack.AUDIO
22077 */
22078 AUDIO: function AUDIO(type, settings) {
22079 return function () {
22080 var tracks = settings.mediaTypes[type].tracks;
22081
22082 for (var id in tracks) {
22083 if (tracks[id].enabled) {
22084 return tracks[id];
22085 }
22086 }
22087
22088 return null;
22089 };
22090 },
22091
22092 /**
22093 * Returns a function used to get the active track of type provided
22094 *
22095 * @param {string} type
22096 * MediaGroup type
22097 * @param {Object} settings
22098 * Object containing required information for media groups
22099 * @return {Function}
22100 * Function that returns the active media track for the provided type. Returns
22101 * null if no track is active
22102 * @function activeTrack.SUBTITLES
22103 */
22104 SUBTITLES: function SUBTITLES(type, settings) {
22105 return function () {
22106 var tracks = settings.mediaTypes[type].tracks;
22107
22108 for (var id in tracks) {
22109 if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {
22110 return tracks[id];
22111 }
22112 }
22113
22114 return null;
22115 };
22116 }
22117};
22118var getActiveGroup = function getActiveGroup(type, _ref) {
22119 var mediaTypes = _ref.mediaTypes;
22120 return function () {
22121 var activeTrack_ = mediaTypes[type].activeTrack();
22122
22123 if (!activeTrack_) {
22124 return null;
22125 }
22126
22127 return mediaTypes[type].activeGroup(activeTrack_);
22128 };
22129};
22130/**
22131 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
22132 * Closed-Captions) specified in the master manifest.
22133 *
22134 * @param {Object} settings
22135 * Object containing required information for setting up the media groups
22136 * @param {Tech} settings.tech
22137 * The tech of the player
22138 * @param {Object} settings.requestOptions
22139 * XHR request options used by the segment loaders
22140 * @param {PlaylistLoader} settings.masterPlaylistLoader
22141 * PlaylistLoader for the master source
22142 * @param {VhsHandler} settings.vhs
22143 * VHS SourceHandler
22144 * @param {Object} settings.master
22145 * The parsed master manifest
22146 * @param {Object} settings.mediaTypes
22147 * Object to store the loaders, tracks, and utility methods for each media type
22148 * @param {Function} settings.blacklistCurrentPlaylist
22149 * Blacklists the current rendition and forces a rendition switch.
22150 * @function setupMediaGroups
22151 */
22152
22153var setupMediaGroups = function setupMediaGroups(settings) {
22154 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
22155 initialize[type](type, settings);
22156 });
22157 var mediaTypes = settings.mediaTypes,
22158 masterPlaylistLoader = settings.masterPlaylistLoader,
22159 tech = settings.tech,
22160 vhs = settings.vhs,
22161 _settings$segmentLoad3 = settings.segmentLoaders,
22162 audioSegmentLoader = _settings$segmentLoad3['AUDIO'],
22163 mainSegmentLoader = _settings$segmentLoad3.main; // setup active group and track getters and change event handlers
22164
22165 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
22166 mediaTypes[type].activeGroup = activeGroup(type, settings);
22167 mediaTypes[type].activeTrack = activeTrack[type](type, settings);
22168 mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
22169 mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
22170 mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
22171 mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
22172 }); // DO NOT enable the default subtitle or caption track.
22173 // DO enable the default audio track
22174
22175 var audioGroup = mediaTypes.AUDIO.activeGroup();
22176
22177 if (audioGroup) {
22178 var groupId = (audioGroup.filter(function (group) {
22179 return group.default;
22180 })[0] || audioGroup[0]).id;
22181 mediaTypes.AUDIO.tracks[groupId].enabled = true;
22182 mediaTypes.AUDIO.onGroupChanged();
22183 mediaTypes.AUDIO.onTrackChanged();
22184 var activeAudioGroup = mediaTypes.AUDIO.getActiveGroup(); // a similar check for handling setAudio on each loader is run again each time the
22185 // track is changed, but needs to be handled here since the track may not be considered
22186 // changed on the first call to onTrackChanged
22187
22188 if (!activeAudioGroup.playlistLoader) {
22189 // either audio is muxed with video or the stream is audio only
22190 mainSegmentLoader.setAudio(true);
22191 } else {
22192 // audio is demuxed
22193 mainSegmentLoader.setAudio(false);
22194 audioSegmentLoader.setAudio(true);
22195 }
22196 }
22197
22198 masterPlaylistLoader.on('mediachange', function () {
22199 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
22200 return mediaTypes[type].onGroupChanged();
22201 });
22202 });
22203 masterPlaylistLoader.on('mediachanging', function () {
22204 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
22205 return mediaTypes[type].onGroupChanging();
22206 });
22207 }); // custom audio track change event handler for usage event
22208
22209 var onAudioTrackChanged = function onAudioTrackChanged() {
22210 mediaTypes.AUDIO.onTrackChanged();
22211 tech.trigger({
22212 type: 'usage',
22213 name: 'vhs-audio-change'
22214 });
22215 tech.trigger({
22216 type: 'usage',
22217 name: 'hls-audio-change'
22218 });
22219 };
22220
22221 tech.audioTracks().addEventListener('change', onAudioTrackChanged);
22222 tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
22223 vhs.on('dispose', function () {
22224 tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
22225 tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
22226 }); // clear existing audio tracks and add the ones we just created
22227
22228 tech.clearTracks('audio');
22229
22230 for (var id in mediaTypes.AUDIO.tracks) {
22231 tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
22232 }
22233};
22234/**
22235 * Creates a skeleton object used to store the loaders, tracks, and utility methods for each
22236 * media type
22237 *
22238 * @return {Object}
22239 * Object to store the loaders, tracks, and utility methods for each media type
22240 * @function createMediaTypes
22241 */
22242
22243var createMediaTypes = function createMediaTypes() {
22244 var mediaTypes = {};
22245 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
22246 mediaTypes[type] = {
22247 groups: {},
22248 tracks: {},
22249 activePlaylistLoader: null,
22250 activeGroup: noop,
22251 activeTrack: noop,
22252 getActiveGroup: noop,
22253 onGroupChanged: noop,
22254 onTrackChanged: noop,
22255 lastTrack_: null,
22256 logger_: logger("MediaGroups[" + type + "]")
22257 };
22258 });
22259 return mediaTypes;
22260};
22261
22262var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;
22263var Vhs$1; // SegmentLoader stats that need to have each loader's
22264// values summed to calculate the final value
22265
22266var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred', 'mediaAppends'];
22267
22268var sumLoaderStat = function sumLoaderStat(stat) {
22269 return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
22270};
22271
22272var shouldSwitchToMedia = function shouldSwitchToMedia(_ref) {
22273 var currentPlaylist = _ref.currentPlaylist,
22274 buffered = _ref.buffered,
22275 currentTime = _ref.currentTime,
22276 nextPlaylist = _ref.nextPlaylist,
22277 bufferLowWaterLine = _ref.bufferLowWaterLine,
22278 bufferHighWaterLine = _ref.bufferHighWaterLine,
22279 duration = _ref.duration,
22280 experimentalBufferBasedABR = _ref.experimentalBufferBasedABR,
22281 log = _ref.log;
22282
22283 // we have no other playlist to switch to
22284 if (!nextPlaylist) {
22285 videojs__default["default"].log.warn('We received no playlist to switch to. Please check your stream.');
22286 return false;
22287 }
22288
22289 var sharedLogLine = "allowing switch " + (currentPlaylist && currentPlaylist.id || 'null') + " -> " + nextPlaylist.id;
22290
22291 if (!currentPlaylist) {
22292 log(sharedLogLine + " as current playlist is not set");
22293 return true;
22294 } // no need to switch if playlist is the same
22295
22296
22297 if (nextPlaylist.id === currentPlaylist.id) {
22298 return false;
22299 } // determine if current time is in a buffered range.
22300
22301
22302 var isBuffered = Boolean(findRange(buffered, currentTime).length); // If the playlist is live, then we don't want to take the low water line into account.
22303 // This is because in LIVE, the player plays 3 segments from the end of the
22304 // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration available
22305 // in those segments, a viewer will never experience a rendition upswitch.
22306
22307 if (!currentPlaylist.endList) {
22308 // For LLHLS live streams, don't switch renditions before playback has started, as it almost
22309 // doubles the time to first playback.
22310 if (!isBuffered && typeof currentPlaylist.partTargetDuration === 'number') {
22311 log("not " + sharedLogLine + " as current playlist is live llhls, but currentTime isn't in buffered.");
22312 return false;
22313 }
22314
22315 log(sharedLogLine + " as current playlist is live");
22316 return true;
22317 }
22318
22319 var forwardBuffer = timeAheadOf(buffered, currentTime);
22320 var maxBufferLowWaterLine = experimentalBufferBasedABR ? Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE; // For the same reason as LIVE, we ignore the low water line when the VOD
22321 // duration is below the max potential low water line
22322
22323 if (duration < maxBufferLowWaterLine) {
22324 log(sharedLogLine + " as duration < max low water line (" + duration + " < " + maxBufferLowWaterLine + ")");
22325 return true;
22326 }
22327
22328 var nextBandwidth = nextPlaylist.attributes.BANDWIDTH;
22329 var currBandwidth = currentPlaylist.attributes.BANDWIDTH; // when switching down, if our buffer is lower than the high water line,
22330 // we can switch down
22331
22332 if (nextBandwidth < currBandwidth && (!experimentalBufferBasedABR || forwardBuffer < bufferHighWaterLine)) {
22333 var logLine = sharedLogLine + " as next bandwidth < current bandwidth (" + nextBandwidth + " < " + currBandwidth + ")";
22334
22335 if (experimentalBufferBasedABR) {
22336 logLine += " and forwardBuffer < bufferHighWaterLine (" + forwardBuffer + " < " + bufferHighWaterLine + ")";
22337 }
22338
22339 log(logLine);
22340 return true;
22341 } // and if our buffer is higher than the low water line,
22342 // we can switch up
22343
22344
22345 if ((!experimentalBufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {
22346 var _logLine = sharedLogLine + " as forwardBuffer >= bufferLowWaterLine (" + forwardBuffer + " >= " + bufferLowWaterLine + ")";
22347
22348 if (experimentalBufferBasedABR) {
22349 _logLine += " and next bandwidth > current bandwidth (" + nextBandwidth + " > " + currBandwidth + ")";
22350 }
22351
22352 log(_logLine);
22353 return true;
22354 }
22355
22356 log("not " + sharedLogLine + " as no switching criteria met");
22357 return false;
22358};
22359/**
22360 * The master playlist controller controls all interactions
22361 * between playlists and segment loaders. At this time this mainly
22362 * involves a master playlist and a series of audio playlists
22363 * if they are available
22364 *
22365 * @class MasterPlaylistController
22366 * @extends videojs.EventTarget
22367 */
22368
22369
22370var MasterPlaylistController = /*#__PURE__*/function (_videojs$EventTarget) {
22371 _inheritsLoose__default["default"](MasterPlaylistController, _videojs$EventTarget);
22372
22373 function MasterPlaylistController(options) {
22374 var _this;
22375
22376 _this = _videojs$EventTarget.call(this) || this;
22377 var src = options.src,
22378 handleManifestRedirects = options.handleManifestRedirects,
22379 withCredentials = options.withCredentials,
22380 tech = options.tech,
22381 bandwidth = options.bandwidth,
22382 externVhs = options.externVhs,
22383 useCueTags = options.useCueTags,
22384 blacklistDuration = options.blacklistDuration,
22385 enableLowInitialPlaylist = options.enableLowInitialPlaylist,
22386 sourceType = options.sourceType,
22387 cacheEncryptionKeys = options.cacheEncryptionKeys,
22388 experimentalBufferBasedABR = options.experimentalBufferBasedABR,
22389 experimentalLeastPixelDiffSelector = options.experimentalLeastPixelDiffSelector,
22390 captionServices = options.captionServices;
22391
22392 if (!src) {
22393 throw new Error('A non-empty playlist URL or JSON manifest string is required');
22394 }
22395
22396 var maxPlaylistRetries = options.maxPlaylistRetries;
22397
22398 if (maxPlaylistRetries === null || typeof maxPlaylistRetries === 'undefined') {
22399 maxPlaylistRetries = Infinity;
22400 }
22401
22402 Vhs$1 = externVhs;
22403 _this.experimentalBufferBasedABR = Boolean(experimentalBufferBasedABR);
22404 _this.experimentalLeastPixelDiffSelector = Boolean(experimentalLeastPixelDiffSelector);
22405 _this.withCredentials = withCredentials;
22406 _this.tech_ = tech;
22407 _this.vhs_ = tech.vhs;
22408 _this.sourceType_ = sourceType;
22409 _this.useCueTags_ = useCueTags;
22410 _this.blacklistDuration = blacklistDuration;
22411 _this.maxPlaylistRetries = maxPlaylistRetries;
22412 _this.enableLowInitialPlaylist = enableLowInitialPlaylist;
22413
22414 if (_this.useCueTags_) {
22415 _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
22416 _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
22417 }
22418
22419 _this.requestOptions_ = {
22420 withCredentials: withCredentials,
22421 handleManifestRedirects: handleManifestRedirects,
22422 maxPlaylistRetries: maxPlaylistRetries,
22423 timeout: null
22424 };
22425
22426 _this.on('error', _this.pauseLoading);
22427
22428 _this.mediaTypes_ = createMediaTypes();
22429 _this.mediaSource = new window__default["default"].MediaSource();
22430 _this.handleDurationChange_ = _this.handleDurationChange_.bind(_assertThisInitialized__default["default"](_this));
22431 _this.handleSourceOpen_ = _this.handleSourceOpen_.bind(_assertThisInitialized__default["default"](_this));
22432 _this.handleSourceEnded_ = _this.handleSourceEnded_.bind(_assertThisInitialized__default["default"](_this));
22433
22434 _this.mediaSource.addEventListener('durationchange', _this.handleDurationChange_); // load the media source into the player
22435
22436
22437 _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_);
22438
22439 _this.mediaSource.addEventListener('sourceended', _this.handleSourceEnded_); // we don't have to handle sourceclose since dispose will handle termination of
22440 // everything, and the MediaSource should not be detached without a proper disposal
22441
22442
22443 _this.seekable_ = videojs__default["default"].createTimeRanges();
22444 _this.hasPlayed_ = false;
22445 _this.syncController_ = new SyncController(options);
22446 _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
22447 kind: 'metadata',
22448 label: 'segment-metadata'
22449 }, false).track;
22450 _this.decrypter_ = new Decrypter();
22451 _this.sourceUpdater_ = new SourceUpdater(_this.mediaSource);
22452 _this.inbandTextTracks_ = {};
22453 _this.timelineChangeController_ = new TimelineChangeController();
22454 var segmentLoaderSettings = {
22455 vhs: _this.vhs_,
22456 parse708captions: options.parse708captions,
22457 useDtsForTimestampOffset: options.useDtsForTimestampOffset,
22458 captionServices: captionServices,
22459 mediaSource: _this.mediaSource,
22460 currentTime: _this.tech_.currentTime.bind(_this.tech_),
22461 seekable: function seekable() {
22462 return _this.seekable();
22463 },
22464 seeking: function seeking() {
22465 return _this.tech_.seeking();
22466 },
22467 duration: function duration() {
22468 return _this.duration();
22469 },
22470 hasPlayed: function hasPlayed() {
22471 return _this.hasPlayed_;
22472 },
22473 goalBufferLength: function goalBufferLength() {
22474 return _this.goalBufferLength();
22475 },
22476 bandwidth: bandwidth,
22477 syncController: _this.syncController_,
22478 decrypter: _this.decrypter_,
22479 sourceType: _this.sourceType_,
22480 inbandTextTracks: _this.inbandTextTracks_,
22481 cacheEncryptionKeys: cacheEncryptionKeys,
22482 sourceUpdater: _this.sourceUpdater_,
22483 timelineChangeController: _this.timelineChangeController_,
22484 experimentalExactManifestTimings: options.experimentalExactManifestTimings
22485 }; // The source type check not only determines whether a special DASH playlist loader
22486 // should be used, but also covers the case where the provided src is a vhs-json
22487 // manifest object (instead of a URL). In the case of vhs-json, the default
22488 // PlaylistLoader should be used.
22489
22490 _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, _this.vhs_, _this.requestOptions_) : new PlaylistLoader(src, _this.vhs_, _this.requestOptions_);
22491
22492 _this.setupMasterPlaylistLoaderListeners_(); // setup segment loaders
22493 // combined audio/video or just video when alternate audio track is selected
22494
22495
22496 _this.mainSegmentLoader_ = new SegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
22497 segmentMetadataTrack: _this.segmentMetadataTrack_,
22498 loaderType: 'main'
22499 }), options); // alternate audio track
22500
22501 _this.audioSegmentLoader_ = new SegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
22502 loaderType: 'audio'
22503 }), options);
22504 _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
22505 loaderType: 'vtt',
22506 featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks,
22507 loadVttJs: function loadVttJs() {
22508 return new Promise(function (resolve, reject) {
22509 function onLoad() {
22510 tech.off('vttjserror', onError);
22511 resolve();
22512 }
22513
22514 function onError() {
22515 tech.off('vttjsloaded', onLoad);
22516 reject();
22517 }
22518
22519 tech.one('vttjsloaded', onLoad);
22520 tech.one('vttjserror', onError); // safe to call multiple times, script will be loaded only once:
22521
22522 tech.addWebVttScript_();
22523 });
22524 }
22525 }), options);
22526
22527 _this.setupSegmentLoaderListeners_();
22528
22529 if (_this.experimentalBufferBasedABR) {
22530 _this.masterPlaylistLoader_.one('loadedplaylist', function () {
22531 return _this.startABRTimer_();
22532 });
22533
22534 _this.tech_.on('pause', function () {
22535 return _this.stopABRTimer_();
22536 });
22537
22538 _this.tech_.on('play', function () {
22539 return _this.startABRTimer_();
22540 });
22541 } // Create SegmentLoader stat-getters
22542 // mediaRequests_
22543 // mediaRequestsAborted_
22544 // mediaRequestsTimedout_
22545 // mediaRequestsErrored_
22546 // mediaTransferDuration_
22547 // mediaBytesTransferred_
22548 // mediaAppends_
22549
22550
22551 loaderStats.forEach(function (stat) {
22552 _this[stat + '_'] = sumLoaderStat.bind(_assertThisInitialized__default["default"](_this), stat);
22553 });
22554 _this.logger_ = logger('MPC');
22555 _this.triggeredFmp4Usage = false;
22556
22557 if (_this.tech_.preload() === 'none') {
22558 _this.loadOnPlay_ = function () {
22559 _this.loadOnPlay_ = null;
22560
22561 _this.masterPlaylistLoader_.load();
22562 };
22563
22564 _this.tech_.one('play', _this.loadOnPlay_);
22565 } else {
22566 _this.masterPlaylistLoader_.load();
22567 }
22568
22569 _this.timeToLoadedData__ = -1;
22570 _this.mainAppendsToLoadedData__ = -1;
22571 _this.audioAppendsToLoadedData__ = -1;
22572 var event = _this.tech_.preload() === 'none' ? 'play' : 'loadstart'; // start the first frame timer on loadstart or play (for preload none)
22573
22574 _this.tech_.one(event, function () {
22575 var timeToLoadedDataStart = Date.now();
22576
22577 _this.tech_.one('loadeddata', function () {
22578 _this.timeToLoadedData__ = Date.now() - timeToLoadedDataStart;
22579 _this.mainAppendsToLoadedData__ = _this.mainSegmentLoader_.mediaAppends;
22580 _this.audioAppendsToLoadedData__ = _this.audioSegmentLoader_.mediaAppends;
22581 });
22582 });
22583
22584 return _this;
22585 }
22586
22587 var _proto = MasterPlaylistController.prototype;
22588
22589 _proto.mainAppendsToLoadedData_ = function mainAppendsToLoadedData_() {
22590 return this.mainAppendsToLoadedData__;
22591 };
22592
22593 _proto.audioAppendsToLoadedData_ = function audioAppendsToLoadedData_() {
22594 return this.audioAppendsToLoadedData__;
22595 };
22596
22597 _proto.appendsToLoadedData_ = function appendsToLoadedData_() {
22598 var main = this.mainAppendsToLoadedData_();
22599 var audio = this.audioAppendsToLoadedData_();
22600
22601 if (main === -1 || audio === -1) {
22602 return -1;
22603 }
22604
22605 return main + audio;
22606 };
22607
22608 _proto.timeToLoadedData_ = function timeToLoadedData_() {
22609 return this.timeToLoadedData__;
22610 }
22611 /**
22612 * Run selectPlaylist and switch to the new playlist if we should
22613 *
22614 * @param {string} [reason=abr] a reason for why the ABR check is made
22615 * @private
22616 */
22617 ;
22618
22619 _proto.checkABR_ = function checkABR_(reason) {
22620 if (reason === void 0) {
22621 reason = 'abr';
22622 }
22623
22624 var nextPlaylist = this.selectPlaylist();
22625
22626 if (nextPlaylist && this.shouldSwitchToMedia_(nextPlaylist)) {
22627 this.switchMedia_(nextPlaylist, reason);
22628 }
22629 };
22630
22631 _proto.switchMedia_ = function switchMedia_(playlist, cause, delay) {
22632 var oldMedia = this.media();
22633 var oldId = oldMedia && (oldMedia.id || oldMedia.uri);
22634 var newId = playlist.id || playlist.uri;
22635
22636 if (oldId && oldId !== newId) {
22637 this.logger_("switch media " + oldId + " -> " + newId + " from " + cause);
22638 this.tech_.trigger({
22639 type: 'usage',
22640 name: "vhs-rendition-change-" + cause
22641 });
22642 }
22643
22644 this.masterPlaylistLoader_.media(playlist, delay);
22645 }
22646 /**
22647 * Start a timer that periodically calls checkABR_
22648 *
22649 * @private
22650 */
22651 ;
22652
22653 _proto.startABRTimer_ = function startABRTimer_() {
22654 var _this2 = this;
22655
22656 this.stopABRTimer_();
22657 this.abrTimer_ = window__default["default"].setInterval(function () {
22658 return _this2.checkABR_();
22659 }, 250);
22660 }
22661 /**
22662 * Stop the timer that periodically calls checkABR_
22663 *
22664 * @private
22665 */
22666 ;
22667
22668 _proto.stopABRTimer_ = function stopABRTimer_() {
22669 // if we're scrubbing, we don't need to pause.
22670 // This getter will be added to Video.js in version 7.11.
22671 if (this.tech_.scrubbing && this.tech_.scrubbing()) {
22672 return;
22673 }
22674
22675 window__default["default"].clearInterval(this.abrTimer_);
22676 this.abrTimer_ = null;
22677 }
22678 /**
22679 * Get a list of playlists for the currently selected audio playlist
22680 *
22681 * @return {Array} the array of audio playlists
22682 */
22683 ;
22684
22685 _proto.getAudioTrackPlaylists_ = function getAudioTrackPlaylists_() {
22686 var master = this.master();
22687 var defaultPlaylists = master && master.playlists || []; // if we don't have any audio groups then we can only
22688 // assume that the audio tracks are contained in the master's
22689 // playlist array, use that or an empty array.
22690
22691 if (!master || !master.mediaGroups || !master.mediaGroups.AUDIO) {
22692 return defaultPlaylists;
22693 }
22694
22695 var AUDIO = master.mediaGroups.AUDIO;
22696 var groupKeys = Object.keys(AUDIO);
22697 var track; // get the current active track
22698
22699 if (Object.keys(this.mediaTypes_.AUDIO.groups).length) {
22700 track = this.mediaTypes_.AUDIO.activeTrack(); // or get the default track from master if mediaTypes_ isn't setup yet
22701 } else {
22702 // default group is `main` or just the first group.
22703 var defaultGroup = AUDIO.main || groupKeys.length && AUDIO[groupKeys[0]];
22704
22705 for (var label in defaultGroup) {
22706 if (defaultGroup[label].default) {
22707 track = {
22708 label: label
22709 };
22710 break;
22711 }
22712 }
22713 } // no active track, no playlists.
22714
22715
22716 if (!track) {
22717 return defaultPlaylists;
22718 }
22719
22720 var playlists = []; // get all of the playlists that are possible for the
22721 // active track.
22722
22723 for (var group in AUDIO) {
22724 if (AUDIO[group][track.label]) {
22725 var properties = AUDIO[group][track.label];
22726
22727 if (properties.playlists && properties.playlists.length) {
22728 playlists.push.apply(playlists, properties.playlists);
22729 } else if (properties.uri) {
22730 playlists.push(properties);
22731 } else if (master.playlists.length) {
22732 // if an audio group does not have a uri,
22733 // see if we have main playlists that use it as a group.
22734 // If we do, then add those to the playlists list.
22735 for (var i = 0; i < master.playlists.length; i++) {
22736 var playlist = master.playlists[i];
22737
22738 if (playlist.attributes && playlist.attributes.AUDIO && playlist.attributes.AUDIO === group) {
22739 playlists.push(playlist);
22740 }
22741 }
22742 }
22743 }
22744 }
22745
22746 if (!playlists.length) {
22747 return defaultPlaylists;
22748 }
22749
22750 return playlists;
22751 }
22752 /**
22753 * Register event handlers on the master playlist loader. A helper
22754 * function for construction time.
22755 *
22756 * @private
22757 */
22758 ;
22759
22760 _proto.setupMasterPlaylistLoaderListeners_ = function setupMasterPlaylistLoaderListeners_() {
22761 var _this3 = this;
22762
22763 this.masterPlaylistLoader_.on('loadedmetadata', function () {
22764 var media = _this3.masterPlaylistLoader_.media();
22765
22766 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
22767 // timeout the request.
22768
22769 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
22770 _this3.requestOptions_.timeout = 0;
22771 } else {
22772 _this3.requestOptions_.timeout = requestTimeout;
22773 } // if this isn't a live video and preload permits, start
22774 // downloading segments
22775
22776
22777 if (media.endList && _this3.tech_.preload() !== 'none') {
22778 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
22779
22780 _this3.mainSegmentLoader_.load();
22781 }
22782
22783 setupMediaGroups({
22784 sourceType: _this3.sourceType_,
22785 segmentLoaders: {
22786 AUDIO: _this3.audioSegmentLoader_,
22787 SUBTITLES: _this3.subtitleSegmentLoader_,
22788 main: _this3.mainSegmentLoader_
22789 },
22790 tech: _this3.tech_,
22791 requestOptions: _this3.requestOptions_,
22792 masterPlaylistLoader: _this3.masterPlaylistLoader_,
22793 vhs: _this3.vhs_,
22794 master: _this3.master(),
22795 mediaTypes: _this3.mediaTypes_,
22796 blacklistCurrentPlaylist: _this3.blacklistCurrentPlaylist.bind(_this3)
22797 });
22798
22799 _this3.triggerPresenceUsage_(_this3.master(), media);
22800
22801 _this3.setupFirstPlay();
22802
22803 if (!_this3.mediaTypes_.AUDIO.activePlaylistLoader || _this3.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
22804 _this3.trigger('selectedinitialmedia');
22805 } else {
22806 // We must wait for the active audio playlist loader to
22807 // finish setting up before triggering this event so the
22808 // representations API and EME setup is correct
22809 _this3.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
22810 _this3.trigger('selectedinitialmedia');
22811 });
22812 }
22813 });
22814 this.masterPlaylistLoader_.on('loadedplaylist', function () {
22815 if (_this3.loadOnPlay_) {
22816 _this3.tech_.off('play', _this3.loadOnPlay_);
22817 }
22818
22819 var updatedPlaylist = _this3.masterPlaylistLoader_.media();
22820
22821 if (!updatedPlaylist) {
22822 // exclude any variants that are not supported by the browser before selecting
22823 // an initial media as the playlist selectors do not consider browser support
22824 _this3.excludeUnsupportedVariants_();
22825
22826 var selectedMedia;
22827
22828 if (_this3.enableLowInitialPlaylist) {
22829 selectedMedia = _this3.selectInitialPlaylist();
22830 }
22831
22832 if (!selectedMedia) {
22833 selectedMedia = _this3.selectPlaylist();
22834 }
22835
22836 if (!selectedMedia || !_this3.shouldSwitchToMedia_(selectedMedia)) {
22837 return;
22838 }
22839
22840 _this3.initialMedia_ = selectedMedia;
22841
22842 _this3.switchMedia_(_this3.initialMedia_, 'initial'); // Under the standard case where a source URL is provided, loadedplaylist will
22843 // fire again since the playlist will be requested. In the case of vhs-json
22844 // (where the manifest object is provided as the source), when the media
22845 // playlist's `segments` list is already available, a media playlist won't be
22846 // requested, and loadedplaylist won't fire again, so the playlist handler must be
22847 // called on its own here.
22848
22849
22850 var haveJsonSource = _this3.sourceType_ === 'vhs-json' && _this3.initialMedia_.segments;
22851
22852 if (!haveJsonSource) {
22853 return;
22854 }
22855
22856 updatedPlaylist = _this3.initialMedia_;
22857 }
22858
22859 _this3.handleUpdatedMediaPlaylist(updatedPlaylist);
22860 });
22861 this.masterPlaylistLoader_.on('error', function () {
22862 _this3.blacklistCurrentPlaylist(_this3.masterPlaylistLoader_.error);
22863 });
22864 this.masterPlaylistLoader_.on('mediachanging', function () {
22865 _this3.mainSegmentLoader_.abort();
22866
22867 _this3.mainSegmentLoader_.pause();
22868 });
22869 this.masterPlaylistLoader_.on('mediachange', function () {
22870 var media = _this3.masterPlaylistLoader_.media();
22871
22872 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
22873 // timeout the request.
22874
22875 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
22876 _this3.requestOptions_.timeout = 0;
22877 } else {
22878 _this3.requestOptions_.timeout = requestTimeout;
22879 }
22880
22881 _this3.masterPlaylistLoader_.load(); // TODO: Create a new event on the PlaylistLoader that signals
22882 // that the segments have changed in some way and use that to
22883 // update the SegmentLoader instead of doing it twice here and
22884 // on `loadedplaylist`
22885
22886
22887 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
22888
22889 _this3.mainSegmentLoader_.load();
22890
22891 _this3.tech_.trigger({
22892 type: 'mediachange',
22893 bubbles: true
22894 });
22895 });
22896 this.masterPlaylistLoader_.on('playlistunchanged', function () {
22897 var updatedPlaylist = _this3.masterPlaylistLoader_.media(); // ignore unchanged playlists that have already been
22898 // excluded for not-changing. We likely just have a really slowly updating
22899 // playlist.
22900
22901
22902 if (updatedPlaylist.lastExcludeReason_ === 'playlist-unchanged') {
22903 return;
22904 }
22905
22906 var playlistOutdated = _this3.stuckAtPlaylistEnd_(updatedPlaylist);
22907
22908 if (playlistOutdated) {
22909 // Playlist has stopped updating and we're stuck at its end. Try to
22910 // blacklist it and switch to another playlist in the hope that that
22911 // one is updating (and give the player a chance to re-adjust to the
22912 // safe live point).
22913 _this3.blacklistCurrentPlaylist({
22914 message: 'Playlist no longer updating.',
22915 reason: 'playlist-unchanged'
22916 }); // useful for monitoring QoS
22917
22918
22919 _this3.tech_.trigger('playliststuck');
22920 }
22921 });
22922 this.masterPlaylistLoader_.on('renditiondisabled', function () {
22923 _this3.tech_.trigger({
22924 type: 'usage',
22925 name: 'vhs-rendition-disabled'
22926 });
22927
22928 _this3.tech_.trigger({
22929 type: 'usage',
22930 name: 'hls-rendition-disabled'
22931 });
22932 });
22933 this.masterPlaylistLoader_.on('renditionenabled', function () {
22934 _this3.tech_.trigger({
22935 type: 'usage',
22936 name: 'vhs-rendition-enabled'
22937 });
22938
22939 _this3.tech_.trigger({
22940 type: 'usage',
22941 name: 'hls-rendition-enabled'
22942 });
22943 });
22944 }
22945 /**
22946 * Given an updated media playlist (whether it was loaded for the first time, or
22947 * refreshed for live playlists), update any relevant properties and state to reflect
22948 * changes in the media that should be accounted for (e.g., cues and duration).
22949 *
22950 * @param {Object} updatedPlaylist the updated media playlist object
22951 *
22952 * @private
22953 */
22954 ;
22955
22956 _proto.handleUpdatedMediaPlaylist = function handleUpdatedMediaPlaylist(updatedPlaylist) {
22957 if (this.useCueTags_) {
22958 this.updateAdCues_(updatedPlaylist);
22959 } // TODO: Create a new event on the PlaylistLoader that signals
22960 // that the segments have changed in some way and use that to
22961 // update the SegmentLoader instead of doing it twice here and
22962 // on `mediachange`
22963
22964
22965 this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);
22966 this.updateDuration(!updatedPlaylist.endList); // If the player isn't paused, ensure that the segment loader is running,
22967 // as it is possible that it was temporarily stopped while waiting for
22968 // a playlist (e.g., in case the playlist errored and we re-requested it).
22969
22970 if (!this.tech_.paused()) {
22971 this.mainSegmentLoader_.load();
22972
22973 if (this.audioSegmentLoader_) {
22974 this.audioSegmentLoader_.load();
22975 }
22976 }
22977 }
22978 /**
22979 * A helper function for triggering presence usage events once per source
22980 *
22981 * @private
22982 */
22983 ;
22984
22985 _proto.triggerPresenceUsage_ = function triggerPresenceUsage_(master, media) {
22986 var mediaGroups = master.mediaGroups || {};
22987 var defaultDemuxed = true;
22988 var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
22989
22990 for (var mediaGroup in mediaGroups.AUDIO) {
22991 for (var label in mediaGroups.AUDIO[mediaGroup]) {
22992 var properties = mediaGroups.AUDIO[mediaGroup][label];
22993
22994 if (!properties.uri) {
22995 defaultDemuxed = false;
22996 }
22997 }
22998 }
22999
23000 if (defaultDemuxed) {
23001 this.tech_.trigger({
23002 type: 'usage',
23003 name: 'vhs-demuxed'
23004 });
23005 this.tech_.trigger({
23006 type: 'usage',
23007 name: 'hls-demuxed'
23008 });
23009 }
23010
23011 if (Object.keys(mediaGroups.SUBTITLES).length) {
23012 this.tech_.trigger({
23013 type: 'usage',
23014 name: 'vhs-webvtt'
23015 });
23016 this.tech_.trigger({
23017 type: 'usage',
23018 name: 'hls-webvtt'
23019 });
23020 }
23021
23022 if (Vhs$1.Playlist.isAes(media)) {
23023 this.tech_.trigger({
23024 type: 'usage',
23025 name: 'vhs-aes'
23026 });
23027 this.tech_.trigger({
23028 type: 'usage',
23029 name: 'hls-aes'
23030 });
23031 }
23032
23033 if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
23034 this.tech_.trigger({
23035 type: 'usage',
23036 name: 'vhs-alternate-audio'
23037 });
23038 this.tech_.trigger({
23039 type: 'usage',
23040 name: 'hls-alternate-audio'
23041 });
23042 }
23043
23044 if (this.useCueTags_) {
23045 this.tech_.trigger({
23046 type: 'usage',
23047 name: 'vhs-playlist-cue-tags'
23048 });
23049 this.tech_.trigger({
23050 type: 'usage',
23051 name: 'hls-playlist-cue-tags'
23052 });
23053 }
23054 };
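// Hypothetical example of observing these one-shot usage events from
// application code (the `player` handle is assumed, not defined in this file):
//
//   player.tech(true).on('usage', (event) => {
//     if (event.name === 'vhs-demuxed') {
//       console.log('this source uses demuxed audio and video');
//     }
//   });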
23055
23056 _proto.shouldSwitchToMedia_ = function shouldSwitchToMedia_(nextPlaylist) {
23057 var currentPlaylist = this.masterPlaylistLoader_.media() || this.masterPlaylistLoader_.pendingMedia_;
23058 var currentTime = this.tech_.currentTime();
23059 var bufferLowWaterLine = this.bufferLowWaterLine();
23060 var bufferHighWaterLine = this.bufferHighWaterLine();
23061 var buffered = this.tech_.buffered();
23062 return shouldSwitchToMedia({
23063 buffered: buffered,
23064 currentTime: currentTime,
23065 currentPlaylist: currentPlaylist,
23066 nextPlaylist: nextPlaylist,
23067 bufferLowWaterLine: bufferLowWaterLine,
23068 bufferHighWaterLine: bufferHighWaterLine,
23069 duration: this.duration(),
23070 experimentalBufferBasedABR: this.experimentalBufferBasedABR,
23071 log: this.logger_
23072 });
23073 }
23074 /**
23075 * Register event handlers on the segment loaders. A helper function
23076 * for construction time.
23077 *
23078 * @private
23079 */
23080 ;
23081
23082 _proto.setupSegmentLoaderListeners_ = function setupSegmentLoaderListeners_() {
23083 var _this4 = this;
23084
23085 this.mainSegmentLoader_.on('bandwidthupdate', function () {
23086 // Whether buffer-based ABR or another ABR strategy is in use, on a bandwidth change it's
23087 // useful to check whether a rendition switch should be made.
23088 _this4.checkABR_('bandwidthupdate');
23089
23090 _this4.tech_.trigger('bandwidthupdate');
23091 });
23092 this.mainSegmentLoader_.on('timeout', function () {
23093 if (_this4.experimentalBufferBasedABR) {
23094 // If a rendition change is needed, then it would've been done on `bandwidthupdate`.
23095 // Here the only consideration is that for buffer based ABR there's no guarantee
23096 // of an immediate switch (since the bandwidth is averaged with a timeout
23097 // bandwidth value of 1), so force a load on the segment loader to keep it going.
23098 _this4.mainSegmentLoader_.load();
23099 }
23100 }); // `progress` events are not a reliable enough bandwidth measure to trigger buffer-
23101 // based ABR.
23102
23103 if (!this.experimentalBufferBasedABR) {
23104 this.mainSegmentLoader_.on('progress', function () {
23105 _this4.trigger('progress');
23106 });
23107 }
23108
23109 this.mainSegmentLoader_.on('error', function () {
23110 _this4.blacklistCurrentPlaylist(_this4.mainSegmentLoader_.error());
23111 });
23112 this.mainSegmentLoader_.on('appenderror', function () {
23113 _this4.error = _this4.mainSegmentLoader_.error_;
23114
23115 _this4.trigger('error');
23116 });
23117 this.mainSegmentLoader_.on('syncinfoupdate', function () {
23118 _this4.onSyncInfoUpdate_();
23119 });
23120 this.mainSegmentLoader_.on('timestampoffset', function () {
23121 _this4.tech_.trigger({
23122 type: 'usage',
23123 name: 'vhs-timestamp-offset'
23124 });
23125
23126 _this4.tech_.trigger({
23127 type: 'usage',
23128 name: 'hls-timestamp-offset'
23129 });
23130 });
23131 this.audioSegmentLoader_.on('syncinfoupdate', function () {
23132 _this4.onSyncInfoUpdate_();
23133 });
23134 this.audioSegmentLoader_.on('appenderror', function () {
23135 _this4.error = _this4.audioSegmentLoader_.error_;
23136
23137 _this4.trigger('error');
23138 });
23139 this.mainSegmentLoader_.on('ended', function () {
23140 _this4.logger_('main segment loader ended');
23141
23142 _this4.onEndOfStream();
23143 });
23144 this.mainSegmentLoader_.on('earlyabort', function (event) {
23145 // never try to early abort with the new ABR algorithm
23146 if (_this4.experimentalBufferBasedABR) {
23147 return;
23148 }
23149
23150 _this4.delegateLoaders_('all', ['abort']);
23151
23152 _this4.blacklistCurrentPlaylist({
23153 message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
23154 }, ABORT_EARLY_BLACKLIST_SECONDS);
23155 });
23156
23157 var updateCodecs = function updateCodecs() {
23158 if (!_this4.sourceUpdater_.hasCreatedSourceBuffers()) {
23159 return _this4.tryToCreateSourceBuffers_();
23160 }
23161
23162 var codecs = _this4.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
23163
23164
23165 if (!codecs) {
23166 return;
23167 }
23168
23169 _this4.sourceUpdater_.addOrChangeSourceBuffers(codecs);
23170 };
23171
23172 this.mainSegmentLoader_.on('trackinfo', updateCodecs);
23173 this.audioSegmentLoader_.on('trackinfo', updateCodecs);
23174 this.mainSegmentLoader_.on('fmp4', function () {
23175 if (!_this4.triggeredFmp4Usage) {
23176 _this4.tech_.trigger({
23177 type: 'usage',
23178 name: 'vhs-fmp4'
23179 });
23180
23181 _this4.tech_.trigger({
23182 type: 'usage',
23183 name: 'hls-fmp4'
23184 });
23185
23186 _this4.triggeredFmp4Usage = true;
23187 }
23188 });
23189 this.audioSegmentLoader_.on('fmp4', function () {
23190 if (!_this4.triggeredFmp4Usage) {
23191 _this4.tech_.trigger({
23192 type: 'usage',
23193 name: 'vhs-fmp4'
23194 });
23195
23196 _this4.tech_.trigger({
23197 type: 'usage',
23198 name: 'hls-fmp4'
23199 });
23200
23201 _this4.triggeredFmp4Usage = true;
23202 }
23203 });
23204 this.audioSegmentLoader_.on('ended', function () {
23205 _this4.logger_('audioSegmentLoader ended');
23206
23207 _this4.onEndOfStream();
23208 });
23209 };
23210
23211 _proto.mediaSecondsLoaded_ = function mediaSecondsLoaded_() {
23212 return this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded;
23213 }
23214 /**
23215 * Call load on our SegmentLoaders
23216 */
23217 ;
23218
23219 _proto.load = function load() {
23220 this.mainSegmentLoader_.load();
23221
23222 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
23223 this.audioSegmentLoader_.load();
23224 }
23225
23226 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
23227 this.subtitleSegmentLoader_.load();
23228 }
23229 }
23230 /**
23231 * Re-tune playback quality level for the current player
23232 * conditions without performing destructive actions, like
23233 * removing already buffered content
23234 *
23235 * @private
23236 * @deprecated
23237 */
23238 ;
23239
23240 _proto.smoothQualityChange_ = function smoothQualityChange_(media) {
23241 if (media === void 0) {
23242 media = this.selectPlaylist();
23243 }
23244
23245 this.fastQualityChange_(media);
23246 }
23247 /**
23248 * Re-tune playback quality level for the current player
23249 * conditions. This method will perform destructive actions like removing
23250 * already buffered content in order to readjust the currently active
23251 * playlist quickly. This is good for manual quality changes
23252 *
23253 * @private
23254 */
23255 ;
23256
23257 _proto.fastQualityChange_ = function fastQualityChange_(media) {
23258 var _this5 = this;
23259
23260 if (media === void 0) {
23261 media = this.selectPlaylist();
23262 }
23263
23264 if (media === this.masterPlaylistLoader_.media()) {
23265 this.logger_('skipping fastQualityChange because new media is same as old');
23266 return;
23267 }
23268
23269 this.switchMedia_(media, 'fast-quality'); // Delete all buffered data to allow an immediate quality switch, then seek to give
23270 // the browser a kick to remove any cached frames from the previous rendition (.04 seconds
23271 // ahead is roughly the minimum that will accomplish this across a variety of content
23272 // in IE and Edge, but seeking in place is sufficient on all other browsers)
23273 // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
23274 // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904
23275
23276 this.mainSegmentLoader_.resetEverything(function () {
23277 // Since this is not a typical seek, we avoid the seekTo method which can cause segments
23278 // from the previously enabled rendition to load before the new playlist has finished loading
23279 if (videojs__default["default"].browser.IE_VERSION || videojs__default["default"].browser.IS_EDGE) {
23280 _this5.tech_.setCurrentTime(_this5.tech_.currentTime() + 0.04);
23281 } else {
23282 _this5.tech_.setCurrentTime(_this5.tech_.currentTime());
23283 }
23284 }); // don't need to reset audio as it is reset when media changes
23285 }
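// Illustrative sketch of the nudge above (currentTime assumed to be 10s):
// IE/Edge resume from 10.04s so cached frames from the old rendition are
// flushed; other browsers simply re-seek to 10s in place.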
23286 /**
23287 * Begin playback.
23288 */
23289 ;
23290
23291 _proto.play = function play() {
23292 if (this.setupFirstPlay()) {
23293 return;
23294 }
23295
23296 if (this.tech_.ended()) {
23297 this.tech_.setCurrentTime(0);
23298 }
23299
23300 if (this.hasPlayed_) {
23301 this.load();
23302 }
23303
23304 var seekable = this.tech_.seekable(); // if the viewer has paused and we fell out of the live window,
23305 // seek forward to the live point
23306
23307 if (this.tech_.duration() === Infinity) {
23308 if (this.tech_.currentTime() < seekable.start(0)) {
23309 return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
23310 }
23311 }
23312 }
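// Worked example (seekable range assumed): with a live window of [120, 300]
// and a paused viewer at currentTime 90, play() seeks to the last seekable
// end (300) because 90 < seekable.start(0) === 120.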
23313 /**
23314 * Seek to the latest media position if this is a live video and the
23315 * player and video are loaded and initialized.
23316 */
23317 ;
23318
23319 _proto.setupFirstPlay = function setupFirstPlay() {
23320 var _this6 = this;
23321
23322 var media = this.masterPlaylistLoader_.media(); // Check that everything is ready to begin buffering for the first call to play
23323 // If 1) there is no active media
23324 // 2) the player is paused
23325 // 3) the first play has already been set up
23326 // then exit early
23327
23328 if (!media || this.tech_.paused() || this.hasPlayed_) {
23329 return false;
23330 } // when the video is a live stream
23331
23332
23333 if (!media.endList) {
23334 var seekable = this.seekable();
23335
23336 if (!seekable.length) {
23337 // without a seekable range, the player cannot seek to begin buffering at the live
23338 // point
23339 return false;
23340 }
23341
23342 if (videojs__default["default"].browser.IE_VERSION && this.tech_.readyState() === 0) {
23343 // IE11 throws an InvalidStateError if you try to set currentTime while the
23344 // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
23345 this.tech_.one('loadedmetadata', function () {
23346 _this6.trigger('firstplay');
23347
23348 _this6.tech_.setCurrentTime(seekable.end(0));
23349
23350 _this6.hasPlayed_ = true;
23351 });
23352 return false;
23353 } // trigger firstplay to inform the source handler to ignore the next seek event
23354
23355
23356 this.trigger('firstplay'); // seek to the live point
23357
23358 this.tech_.setCurrentTime(seekable.end(0));
23359 }
23360
23361 this.hasPlayed_ = true; // we can begin loading now that everything is ready
23362
23363 this.load();
23364 return true;
23365 }
23366 /**
23367 * handle the sourceopen event on the MediaSource
23368 *
23369 * @private
23370 */
23371 ;
23372
23373 _proto.handleSourceOpen_ = function handleSourceOpen_() {
23374 // Only attempt to create the source buffer if none already exist.
23375 // handleSourceOpen is also called when we are "re-opening" a source buffer
23376 // after `endOfStream` has been called (in response to a seek for instance)
23377 this.tryToCreateSourceBuffers_(); // if autoplay is enabled, begin playback. This is duplicative of
23378 // code in video.js but is required because play() must be invoked
23379 // *after* the media source has opened.
23380
23381 if (this.tech_.autoplay()) {
23382 var playPromise = this.tech_.play(); // Catch/silence error when a pause interrupts a play request
23383 // on browsers which return a promise
23384
23385 if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
23386 playPromise.then(null, function (e) {});
23387 }
23388 }
23389
23390 this.trigger('sourceopen');
23391 }
23392 /**
23393 * handle the sourceended event on the MediaSource
23394 *
23395 * @private
23396 */
23397 ;
23398
23399 _proto.handleSourceEnded_ = function handleSourceEnded_() {
23400 if (!this.inbandTextTracks_.metadataTrack_) {
23401 return;
23402 }
23403
23404 var cues = this.inbandTextTracks_.metadataTrack_.cues;
23405
23406 if (!cues || !cues.length) {
23407 return;
23408 }
23409
23410 var duration = this.duration();
23411 cues[cues.length - 1].endTime = isNaN(duration) || Math.abs(duration) === Infinity ? Number.MAX_VALUE : duration;
23412 }
23413 /**
23414 * handle the durationchange event on the MediaSource
23415 *
23416 * @private
23417 */
23418 ;
23419
23420 _proto.handleDurationChange_ = function handleDurationChange_() {
23421 this.tech_.trigger('durationchange');
23422 }
23423 /**
23424 * Calls endOfStream on the media source when all active stream types have called
23425 * endOfStream
23426 *
23429 * @private
23430 */
23431 ;
23432
23433 _proto.onEndOfStream = function onEndOfStream() {
23434 var isEndOfStream = this.mainSegmentLoader_.ended_;
23435
23436 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
23437 var mainMediaInfo = this.mainSegmentLoader_.getCurrentMediaInfo_(); // if the audio playlist loader exists, then alternate audio is active
23438
23439 if (!mainMediaInfo || mainMediaInfo.hasVideo) {
23440 // if we do not know if the main segment loader contains video yet or if we
23441 // definitively know the main segment loader contains video, then we need to wait
23442 // for both main and audio segment loaders to call endOfStream
23443 isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
23444 } else {
23445 // otherwise just rely on the audio loader
23446 isEndOfStream = this.audioSegmentLoader_.ended_;
23447 }
23448 }
23449
23450 if (!isEndOfStream) {
23451 return;
23452 }
23453
23454 this.stopABRTimer_();
23455 this.sourceUpdater_.endOfStream();
23456 }
23457 /**
23458 * Check if a playlist has stopped being updated
23459 *
23460 * @param {Object} playlist the media playlist object
23461 * @return {boolean} whether the playlist has stopped being updated or not
23462 */
23463 ;
23464
23465 _proto.stuckAtPlaylistEnd_ = function stuckAtPlaylistEnd_(playlist) {
23466 var seekable = this.seekable();
23467
23468 if (!seekable.length) {
23469 // playlist doesn't have enough information to determine whether we are stuck
23470 return false;
23471 }
23472
23473 var expired = this.syncController_.getExpiredTime(playlist, this.duration());
23474
23475 if (expired === null) {
23476 return false;
23477 } // does not use the safe live end to calculate playlist end, since we
23478 // don't want to say we are stuck while there is still content
23479
23480
23481 var absolutePlaylistEnd = Vhs$1.Playlist.playlistEnd(playlist, expired);
23482 var currentTime = this.tech_.currentTime();
23483 var buffered = this.tech_.buffered();
23484
23485 if (!buffered.length) {
23486 // return true if the playhead reached the absolute end of the playlist
23487 return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
23488 }
23489
23490 var bufferedEnd = buffered.end(buffered.length - 1); // return true if there is too little buffer left and buffer has reached absolute
23491 // end of playlist
23492
23493 return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
23494 }
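// Worked example (values assumed; SAFE_TIME_DELTA is a small constant,
// roughly 0.1s): with an absolutePlaylistEnd of 100.05, a bufferedEnd of 100
// and a currentTime of 99.95, both deltas fall within SAFE_TIME_DELTA, so
// the playlist is reported as stuck.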
23495 /**
23496 * Blacklists a playlist for a set amount of time when an error occurs,
23497 * making it unavailable for selection by the rendition selection algorithm,
23498 * and then forces a new playlist (rendition) selection.
23499 *
23500 * @param {Object=} error an optional error that may include the playlist
23501 * to blacklist
23502 * @param {number=} blacklistDuration an optional number of seconds to blacklist the
23503 * playlist
23504 */
23505 ;
23506
23507 _proto.blacklistCurrentPlaylist = function blacklistCurrentPlaylist(error, blacklistDuration) {
23508 if (error === void 0) {
23509 error = {};
23510 }
23511
23512 // If the `error` was generated by the playlist loader, it will contain
23513 // the playlist we were trying to load (but failed) and that should be
23514 // blacklisted instead of the currently selected playlist which is likely
23515 // out-of-date in this scenario
23516 var currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();
23517 blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration; // If there is no current playlist, then an error occurred while we were
23518 // trying to load the master OR while we were disposing of the tech
23519
23520 if (!currentPlaylist) {
23521 this.error = error;
23522
23523 if (this.mediaSource.readyState !== 'open') {
23524 this.trigger('error');
23525 } else {
23526 this.sourceUpdater_.endOfStream('network');
23527 }
23528
23529 return;
23530 }
23531
23532 currentPlaylist.playlistErrors_++;
23533 var playlists = this.masterPlaylistLoader_.master.playlists;
23534 var enabledPlaylists = playlists.filter(isEnabled);
23535 var isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === currentPlaylist; // Don't blacklist the only playlist unless it was blacklisted
23536 // forever
23537
23538 if (playlists.length === 1 && blacklistDuration !== Infinity) {
23539 videojs__default["default"].log.warn("Problem encountered with playlist " + currentPlaylist.id + ". " + 'Trying again since it is the only playlist.');
23540 this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delay
23541
23542 return this.masterPlaylistLoader_.load(isFinalRendition);
23543 }
23544
23545 if (isFinalRendition) {
23546 // Since we're on the final non-blacklisted playlist, and we're about to blacklist
23547 // it, instead of erring the player or retrying this playlist, clear out the current
23548 // blacklist. This allows other playlists to be attempted in case any have been
23549 // fixed.
23550 var reincluded = false;
23551 playlists.forEach(function (playlist) {
23552 // skip current playlist which is about to be blacklisted
23553 if (playlist === currentPlaylist) {
23554 return;
23555 }
23556
23557 var excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.
23558
23559 if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {
23560 reincluded = true;
23561 delete playlist.excludeUntil;
23562 }
23563 });
23564
23565 if (reincluded) {
23566 videojs__default["default"].log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are simply retrying a previous
23567 // playlist. This is needed for users relying on the retryplaylist event to catch a
23568 // case where the player might be stuck and looping through "dead" playlists.
23569
23570 this.tech_.trigger('retryplaylist');
23571 }
23572 } // Blacklist this playlist
23573
23574
23575 var excludeUntil;
23576
23577 if (currentPlaylist.playlistErrors_ > this.maxPlaylistRetries) {
23578 excludeUntil = Infinity;
23579 } else {
23580 excludeUntil = Date.now() + blacklistDuration * 1000;
23581 }
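// Illustrative arithmetic (values assumed): a blacklistDuration of 300
// seconds gives excludeUntil = Date.now() + 300000, so selectPlaylist()
// skips this rendition for five minutes; exceeding maxPlaylistRetries pins
// excludeUntil to Infinity (excluded forever).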
23582
23583 currentPlaylist.excludeUntil = excludeUntil;
23584
23585 if (error.reason) {
23586 currentPlaylist.lastExcludeReason_ = error.reason;
23587 }
23588
23589 this.tech_.trigger('blacklistplaylist');
23590 this.tech_.trigger({
23591 type: 'usage',
23592 name: 'vhs-rendition-blacklisted'
23593 });
23594 this.tech_.trigger({
23595 type: 'usage',
23596 name: 'hls-rendition-blacklisted'
23597 }); // TODO: should we select a new playlist if this blacklist wasn't for the currentPlaylist?
23598 // Would be something like media().id !== currentPlaylist.id and we would need something
23599 // like `pendingMedia` in playlist loaders to check against that too. This will prevent us
23600 // from loading a new playlist on any blacklist.
23601 // Select a new playlist
23602
23603 var nextPlaylist = this.selectPlaylist();
23604
23605 if (!nextPlaylist) {
23606 this.error = 'Playback cannot continue. No available working or supported playlists.';
23607 this.trigger('error');
23608 return;
23609 }
23610
23611 var logFn = error.internal ? this.logger_ : videojs__default["default"].log.warn;
23612 var errorMessage = error.message ? ' ' + error.message : '';
23613 logFn((error.internal ? 'Internal problem' : 'Problem') + " encountered with playlist " + currentPlaylist.id + "." + (errorMessage + " Switching to playlist " + nextPlaylist.id + ".")); // if audio group changed reset audio loaders
23614
23615 if (nextPlaylist.attributes.AUDIO !== currentPlaylist.attributes.AUDIO) {
23616 this.delegateLoaders_('audio', ['abort', 'pause']);
23617 } // if subtitle group changed reset subtitle loaders
23618
23619
23620 if (nextPlaylist.attributes.SUBTITLES !== currentPlaylist.attributes.SUBTITLES) {
23621 this.delegateLoaders_('subtitle', ['abort', 'pause']);
23622 }
23623
23624 this.delegateLoaders_('main', ['abort', 'pause']);
23625 var delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;
23626 var shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay if it's a final rendition or if the last refresh is sooner than half targetDuration
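// Worked example (values assumed): a targetDuration of 10s gives a
// delayDuration of 5000ms, so a playlist last requested 3000ms ago sets
// shouldDelay and the switch below is deferred.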
23627
23628 return this.switchMedia_(nextPlaylist, 'exclude', isFinalRendition || shouldDelay);
23629 }
23630 /**
23631 * Pause all segment/playlist loaders
23632 */
23633 ;
23634
23635 _proto.pauseLoading = function pauseLoading() {
23636 this.delegateLoaders_('all', ['abort', 'pause']);
23637 this.stopABRTimer_();
23638 }
23639 /**
23640 * Call a set of functions in order on playlist loaders, segment loaders,
23641 * or both types of loaders.
23642 *
23643 * @param {string} filter
23644 * Filter loaders that should call fnNames using a string. Can be:
23645 * * all - run on all loaders
23646 * * audio - run on all audio loaders
23647 * * subtitle - run on all subtitle loaders
23648 * * main - run on the main/master loaders
23649 *
23650 * @param {Array|string} fnNames
23651 * A string or array of function names to call.
23652 */
23653 ;
23654
23655 _proto.delegateLoaders_ = function delegateLoaders_(filter, fnNames) {
23656 var _this7 = this;
23657
23658 var loaders = [];
23659 var dontFilterPlaylist = filter === 'all';
23660
23661 if (dontFilterPlaylist || filter === 'main') {
23662 loaders.push(this.masterPlaylistLoader_);
23663 }
23664
23665 var mediaTypes = [];
23666
23667 if (dontFilterPlaylist || filter === 'audio') {
23668 mediaTypes.push('AUDIO');
23669 }
23670
23671 if (dontFilterPlaylist || filter === 'subtitle') {
23672 mediaTypes.push('CLOSED-CAPTIONS');
23673 mediaTypes.push('SUBTITLES');
23674 }
23675
23676 mediaTypes.forEach(function (mediaType) {
23677 var loader = _this7.mediaTypes_[mediaType] && _this7.mediaTypes_[mediaType].activePlaylistLoader;
23678
23679 if (loader) {
23680 loaders.push(loader);
23681 }
23682 });
23683 ['main', 'audio', 'subtitle'].forEach(function (name) {
23684 var loader = _this7[name + "SegmentLoader_"];
23685
23686 if (loader && (filter === name || filter === 'all')) {
23687 loaders.push(loader);
23688 }
23689 });
23690 loaders.forEach(function (loader) {
23691 return fnNames.forEach(function (fnName) {
23692 if (typeof loader[fnName] === 'function') {
23693 loader[fnName]();
23694 }
23695 });
23696 });
23697 }
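// Call sketches mirroring uses elsewhere in this file:
//
//   this.delegateLoaders_('all', ['abort', 'pause']);   // halt every loader
//   this.delegateLoaders_('audio', ['abort', 'pause']); // audio loaders only
//
// Each function name is invoked only if the loader actually defines it.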
23698 /**
23699 * set the current time on all segment loaders
23700 *
23701 * @param {number} currentTime the current time to set
23702 * @return {number} the current time
23703 */
23704 ;
23705
23706 _proto.setCurrentTime = function setCurrentTime(currentTime) {
23707 var buffered = findRange(this.tech_.buffered(), currentTime);
23708
23709 if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
23710 // return immediately if the metadata is not ready yet
23711 return 0;
23712 } // it's clearly an edge case, but don't throw an error if asked to
23713 // seek within an empty playlist
23714
23715
23716 if (!this.masterPlaylistLoader_.media().segments) {
23717 return 0;
23718 } // if the seek location is already buffered, continue buffering as usual
23719
23720
23721 if (buffered && buffered.length) {
23722 return currentTime;
23723 } // cancel outstanding requests so we begin buffering at the new
23724 // location
23725
23726
23727 this.mainSegmentLoader_.resetEverything();
23728 this.mainSegmentLoader_.abort();
23729
23730 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
23731 this.audioSegmentLoader_.resetEverything();
23732 this.audioSegmentLoader_.abort();
23733 }
23734
23735 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
23736 this.subtitleSegmentLoader_.resetEverything();
23737 this.subtitleSegmentLoader_.abort();
23738 } // start segment loader loading in case they are paused
23739
23740
23741 this.load();
23742 }
23743 /**
23744 * get the current duration
23745 *
23746 * @return {number} the duration
23747 */
23748 ;
23749
23750 _proto.duration = function duration() {
23751 if (!this.masterPlaylistLoader_) {
23752 return 0;
23753 }
23754
23755 var media = this.masterPlaylistLoader_.media();
23756
23757 if (!media) {
23758 // no playlists loaded yet, so can't determine a duration
23759 return 0;
23760 } // Don't rely on the media source for duration in the case of a live playlist since
23761 // setting the native MediaSource's duration to infinity ends up with consequences to
23762 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
23763 //
23764 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
23765 // however, few browsers have support for setLiveSeekableRange()
23766 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
23767 //
23768 // Until a time when the duration of the media source can be set to infinity, and a
23769 // seekable range specified across browsers, just return Infinity.
23770
23771
23772 if (!media.endList) {
23773 return Infinity;
23774 } // Since this is a VOD video, it is safe to rely on the media source's duration (if
23775 // available). If it's not available, fall back to a playlist-calculated estimate.
23776
23777
23778 if (this.mediaSource) {
23779 return this.mediaSource.duration;
23780 }
23781
23782 return Vhs$1.Playlist.duration(media);
23783 }
23784 /**
23785 * check the seekable range
23786 *
23787 * @return {TimeRanges} the seekable range
23788 */
23789 ;
23790
23791 _proto.seekable = function seekable() {
23792 return this.seekable_;
23793 };
23794
23795 _proto.onSyncInfoUpdate_ = function onSyncInfoUpdate_() {
23796 var audioSeekable; // TODO check for creation of both source buffers before updating seekable
23797 //
23798 // A fix was made to this function where a check for
23799 // this.sourceUpdater_.hasCreatedSourceBuffers
23800 // was added to ensure that both source buffers were created before seekable was
23801 // updated. However, it originally had a bug where it was checking for a true and
23802 // returning early instead of checking for false. Setting it to check for false to
23803 // return early though created other issues. A call to play() would check for seekable
23804 // end without verifying that a seekable range was present. In addition, even checking
23805 // for that didn't solve some issues, as handleFirstPlay is sometimes worked around
23806 // due to a media update calling load on the segment loaders, skipping a seek to live,
23807 // thereby starting live streams at the beginning of the stream rather than at the end.
23808 //
23809 // This conditional should be fixed to wait for the creation of two source buffers at
23810 // the same time as the other sections of code are fixed to properly seek to live and
23811 // not throw an error due to checking for a seekable end when no seekable range exists.
23812 //
23813 // For now, fall back to the older behavior, with the understanding that the seekable
23814 // range may not be completely correct, leading to a suboptimal initial live point.
23815
23816 if (!this.masterPlaylistLoader_) {
23817 return;
23818 }
23819
23820 var media = this.masterPlaylistLoader_.media();
23821
23822 if (!media) {
23823 return;
23824 }
23825
23826 var expired = this.syncController_.getExpiredTime(media, this.duration());
23827
23828 if (expired === null) {
23829 // not enough information to update seekable
23830 return;
23831 }
23832
23833 var master = this.masterPlaylistLoader_.master;
23834 var mainSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
23835
23836 if (mainSeekable.length === 0) {
23837 return;
23838 }
23839
23840 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
23841 media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
23842 expired = this.syncController_.getExpiredTime(media, this.duration());
23843
23844 if (expired === null) {
23845 return;
23846 }
23847
23848 audioSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
23849
23850 if (audioSeekable.length === 0) {
23851 return;
23852 }
23853 }
23854
23855 var oldEnd;
23856 var oldStart;
23857
23858 if (this.seekable_ && this.seekable_.length) {
23859 oldEnd = this.seekable_.end(0);
23860 oldStart = this.seekable_.start(0);
23861 }
23862
23863 if (!audioSeekable) {
23864 // seekable has been calculated based on buffering video data so it
23865 // can be returned directly
23866 this.seekable_ = mainSeekable;
23867 } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
23868 // seekables are pretty far off, rely on main
23869 this.seekable_ = mainSeekable;
23870 } else {
23871 this.seekable_ = videojs__default["default"].createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
23872 } // seekable is the same as last time
23873
23874
23875 if (this.seekable_ && this.seekable_.length) {
23876 if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
23877 return;
23878 }
23879 }
23880
23881 this.logger_("seekable updated [" + printableRange(this.seekable_) + "]");
23882 this.tech_.trigger('seekablechanged');
23883 }
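// Worked example (ranges assumed): mainSeekable [10, 100] and audioSeekable
// [12, 98] overlap, so seekable_ becomes their intersection [12, 98];
// disjoint ranges fall back to mainSeekable alone.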
23884 /**
23885 * Update the player duration
23886 */
23887 ;
23888
23889 _proto.updateDuration = function updateDuration(isLive) {
23890 if (this.updateDuration_) {
23891 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
23892 this.updateDuration_ = null;
23893 }
23894
23895 if (this.mediaSource.readyState !== 'open') {
23896 this.updateDuration_ = this.updateDuration.bind(this, isLive);
23897 this.mediaSource.addEventListener('sourceopen', this.updateDuration_);
23898 return;
23899 }
23900
23901 if (isLive) {
23902 var seekable = this.seekable();
23903
23904 if (!seekable.length) {
23905 return;
23906 } // Even in the case of a live playlist, the native MediaSource's duration should not
23907 // be set to Infinity (even though this would be expected for a live playlist), since
23908 // setting the native MediaSource's duration to infinity ends up with consequences to
23909 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
23910 //
23911 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
23912 // however, few browsers have support for setLiveSeekableRange()
23913 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
23914 //
23915 // Until a time when the duration of the media source can be set to infinity, and a
23916 // seekable range specified across browsers, the duration should be greater than or
23917 // equal to the last possible seekable value.
23918 // MediaSource duration starts as NaN
23919 // It is possible (and probable) that this case will never be reached for many
23920 // sources, since the MediaSource reports duration as the highest value without
23921 // accounting for timestamp offset. For example, if the timestamp offset is -100 and
23922 // we buffered times 0 to 100 with real times of 100 to 200, even though current
23923 // time will be between 0 and 100, the native media source may report the duration
23924 // as 200. However, since we report duration separate from the media source (as
23925 // Infinity), and as long as the native media source duration value is greater than
23926 // our reported seekable range, seeks will work as expected. The large number as
23927 // duration for live is actually a strategy used by some players to work around the
23928 // issue of live seekable ranges cited above.
23929
23930
23931 if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {
23932 this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));
23933 }
23934
23935 return;
23936 }
23937
23938 var buffered = this.tech_.buffered();
23939 var duration = Vhs$1.Playlist.duration(this.masterPlaylistLoader_.media());
23940
23941 if (buffered.length > 0) {
23942 duration = Math.max(duration, buffered.end(buffered.length - 1));
23943 }
23944
23945 if (this.mediaSource.duration !== duration) {
23946 this.sourceUpdater_.setDuration(duration);
23947 }
23948 }
23949 /**
23950 * dispose of the MasterPlaylistController and everything
23951 * that it controls
23952 */
23953 ;
23954
23955 _proto.dispose = function dispose() {
23956 var _this8 = this;
23957
23958 this.trigger('dispose');
23959 this.decrypter_.terminate();
23960 this.masterPlaylistLoader_.dispose();
23961 this.mainSegmentLoader_.dispose();
23962
23963 if (this.loadOnPlay_) {
23964 this.tech_.off('play', this.loadOnPlay_);
23965 }
23966
23967 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
23968 var groups = _this8.mediaTypes_[type].groups;
23969
23970 for (var id in groups) {
23971 groups[id].forEach(function (group) {
23972 if (group.playlistLoader) {
23973 group.playlistLoader.dispose();
23974 }
23975 });
23976 }
23977 });
23978 this.audioSegmentLoader_.dispose();
23979 this.subtitleSegmentLoader_.dispose();
23980 this.sourceUpdater_.dispose();
23981 this.timelineChangeController_.dispose();
23982 this.stopABRTimer_();
23983
23984 if (this.updateDuration_) {
23985 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
23986 }
23987
23988 this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_);
23989
23990 this.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);
23991 this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);
23992 this.off();
23993 }
23994 /**
23995 * return the master playlist object if we have one
23996 *
23997 * @return {Object} the master playlist object that we parsed
23998 */
23999 ;
24000
24001 _proto.master = function master() {
24002 return this.masterPlaylistLoader_.master;
24003 }
24004 /**
24005 * return the currently selected playlist
24006 *
24007 * @return {Object} the currently selected playlist object that we parsed
24008 */
24009 ;
24010
24011 _proto.media = function media() {
24012 // playlist loader will not return media if it has not been fully loaded
24013 return this.masterPlaylistLoader_.media() || this.initialMedia_;
24014 };
24015
24016 _proto.areMediaTypesKnown_ = function areMediaTypesKnown_() {
24017 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
24018 var hasMainMediaInfo = !!this.mainSegmentLoader_.getCurrentMediaInfo_(); // if we are not using a separate audio loader, the main loader's media info
24019 // covers audio; otherwise check the audio segment loader.
24020
24021 var hasAudioMediaInfo = !usingAudioLoader ? true : !!this.audioSegmentLoader_.getCurrentMediaInfo_(); // one or both loaders have not loaded sufficiently to get codecs
24022
24023 if (!hasMainMediaInfo || !hasAudioMediaInfo) {
24024 return false;
24025 }
24026
24027 return true;
24028 };
24029
24030 _proto.getCodecsOrExclude_ = function getCodecsOrExclude_() {
24031 var _this9 = this;
24032
24033 var media = {
24034 main: this.mainSegmentLoader_.getCurrentMediaInfo_() || {},
24035 audio: this.audioSegmentLoader_.getCurrentMediaInfo_() || {}
24036 }; // set "main" media equal to video
24037
24038 media.video = media.main;
24039 var playlistCodecs = codecsForPlaylist(this.master(), this.media());
24040 var codecs = {};
24041 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
24042
24043 if (media.main.hasVideo) {
24044 codecs.video = playlistCodecs.video || media.main.videoCodec || codecs_js.DEFAULT_VIDEO_CODEC;
24045 }
24046
24047 if (media.main.isMuxed) {
24048 codecs.video += "," + (playlistCodecs.audio || media.main.audioCodec || codecs_js.DEFAULT_AUDIO_CODEC);
24049 }
24050
24051 if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {
24052 codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || codecs_js.DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function below
24053
24054 media.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;
24055 } // no codecs, no playback.
24056
24057
24058 if (!codecs.audio && !codecs.video) {
24059 this.blacklistCurrentPlaylist({
24060 playlist: this.media(),
24061 message: 'Could not determine codecs for playlist.',
24062 blacklistDuration: Infinity
24063 });
24064 return;
24065 } // fmp4 relies on browser support, while ts relies on muxer support
24066
24067
24068 var supportFunction = function supportFunction(isFmp4, codec) {
24069 return isFmp4 ? codecs_js.browserSupportsCodec(codec) : codecs_js.muxerSupportsCodec(codec);
24070 };
24071
24072 var unsupportedCodecs = {};
24073 var unsupportedAudio;
24074 ['video', 'audio'].forEach(function (type) {
24075 if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {
24076 var supporter = media[type].isFmp4 ? 'browser' : 'muxer';
24077 unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];
24078 unsupportedCodecs[supporter].push(codecs[type]);
24079
24080 if (type === 'audio') {
24081 unsupportedAudio = supporter;
24082 }
24083 }
24084 });
24085
24086 if (usingAudioLoader && unsupportedAudio && this.media().attributes.AUDIO) {
24087 var audioGroup = this.media().attributes.AUDIO;
24088 this.master().playlists.forEach(function (variant) {
24089 var variantAudioGroup = variant.attributes && variant.attributes.AUDIO;
24090
24091 if (variantAudioGroup === audioGroup && variant !== _this9.media()) {
24092 variant.excludeUntil = Infinity;
24093 }
24094 });
24095 this.logger_("excluding audio group " + audioGroup + " as " + unsupportedAudio + " does not support codec(s): \"" + codecs.audio + "\"");
24096 } // if we have any unsupported codecs, blacklist this playlist.
24097
24098
24099 if (Object.keys(unsupportedCodecs).length) {
24100 var message = Object.keys(unsupportedCodecs).reduce(function (acc, supporter) {
24101 if (acc) {
24102 acc += ', ';
24103 }
24104
24105 acc += supporter + " does not support codec(s): \"" + unsupportedCodecs[supporter].join(',') + "\"";
24106 return acc;
24107 }, '') + '.';
24108 this.blacklistCurrentPlaylist({
24109 playlist: this.media(),
24110 internal: true,
24111 message: message,
24112 blacklistDuration: Infinity
24113 });
24114 return;
24115 } // check if codec switching is happening
24116
24117
24118 if (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {
24119 var switchMessages = [];
24120 ['video', 'audio'].forEach(function (type) {
24121 var newCodec = (codecs_js.parseCodecs(_this9.sourceUpdater_.codecs[type] || '')[0] || {}).type;
24122 var oldCodec = (codecs_js.parseCodecs(codecs[type] || '')[0] || {}).type;
24123
24124 if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {
24125 switchMessages.push("\"" + _this9.sourceUpdater_.codecs[type] + "\" -> \"" + codecs[type] + "\"");
24126 }
24127 });
24128
24129 if (switchMessages.length) {
24130 this.blacklistCurrentPlaylist({
24131 playlist: this.media(),
24132 message: "Codec switching not supported: " + switchMessages.join(', ') + ".",
24133 blacklistDuration: Infinity,
24134 internal: true
24135 });
24136 return;
24137 }
24138 } // TODO: when using the muxer shouldn't we just return
24139 // the codecs that the muxer outputs?
24140
24141
24142 return codecs;
24143 }
24144 /**
24145 * Create source buffers and exclude any incompatible renditions.
24146 *
24147 * @private
24148 */
24149 ;
24150
24151 _proto.tryToCreateSourceBuffers_ = function tryToCreateSourceBuffers_() {
24152 // media source is not ready yet or sourceBuffers are already
24153 // created.
24154 if (this.mediaSource.readyState !== 'open' || this.sourceUpdater_.hasCreatedSourceBuffers()) {
24155 return;
24156 }
24157
24158 if (!this.areMediaTypesKnown_()) {
24159 return;
24160 }
24161
24162 var codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
24163
24164 if (!codecs) {
24165 return;
24166 }
24167
24168 this.sourceUpdater_.createSourceBuffers(codecs);
24169 var codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
24170 this.excludeIncompatibleVariants_(codecString);
24171 }
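// Illustrative result (codec strings assumed): codecs of
// { video: 'avc1.4d400d', audio: 'mp4a.40.2' } produce the codecString
// 'avc1.4d400d,mp4a.40.2' that excludeIncompatibleVariants_ checks against.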
24172 /**
24173 * Excludes playlists with codecs that are unsupported by the muxer and browser.
24174 */
24175 ;
24176
24177 _proto.excludeUnsupportedVariants_ = function excludeUnsupportedVariants_() {
24178 var _this10 = this;
24179
24180 var playlists = this.master().playlists;
24181 var ids = []; // TODO: why don't we have a property to loop through all
24182 // playlists? Why did we ever mix indexes and keys?
24183
24184 Object.keys(playlists).forEach(function (key) {
24185 var variant = playlists[key]; // check if we already processed this playlist.
24186
24187 if (ids.indexOf(variant.id) !== -1) {
24188 return;
24189 }
24190
24191 ids.push(variant.id);
24192 var codecs = codecsForPlaylist(_this10.master(), variant); // `master` here is the controller's method, not the loader's property
24193 var unsupported = [];
24194
24195 if (codecs.audio && !codecs_js.muxerSupportsCodec(codecs.audio) && !codecs_js.browserSupportsCodec(codecs.audio)) {
24196 unsupported.push("audio codec " + codecs.audio);
24197 }
24198
24199 if (codecs.video && !codecs_js.muxerSupportsCodec(codecs.video) && !codecs_js.browserSupportsCodec(codecs.video)) {
24200 unsupported.push("video codec " + codecs.video);
24201 }
24202
24203 if (codecs.text && codecs.text === 'stpp.ttml.im1t') {
24204 unsupported.push("text codec " + codecs.text);
24205 }
24206
24207 if (unsupported.length) {
24208 variant.excludeUntil = Infinity;
24209
24210 _this10.logger_("excluding " + variant.id + " for unsupported: " + unsupported.join(', '));
24211 }
24212 });
24213 }
24214 /**
24215 * Blacklist playlists that are known to be codec or
24216 * stream-incompatible with the SourceBuffer configuration. For
24217 * instance, Media Source Extensions would cause the video element to
24218 * stall waiting for video data if you switched from a variant with
24219 * video and audio to an audio-only one.
24220 *
24221 * @param {string} codecString the codec string of media compatible with the
24222 * current set of SourceBuffers. Variants in the current master playlist that
24223 * do not appear to have compatible codec or stream configurations
24224 * will be excluded from the default playlist selection algorithm
24225 * indefinitely.
24226 * @private
24227 */
24228 ;
24229
24230 _proto.excludeIncompatibleVariants_ = function excludeIncompatibleVariants_(codecString) {
24231 var _this11 = this;
24232
24233 var ids = [];
24234 var playlists = this.master().playlists;
24235 var codecs = unwrapCodecList(codecs_js.parseCodecs(codecString));
24236 var codecCount_ = codecCount(codecs);
24237 var videoDetails = codecs.video && codecs_js.parseCodecs(codecs.video)[0] || null;
24238 var audioDetails = codecs.audio && codecs_js.parseCodecs(codecs.audio)[0] || null;
24239 Object.keys(playlists).forEach(function (key) {
24240 var variant = playlists[key]; // check if we already processed this playlist.
24241 // or if it is already excluded forever.
24242
24243 if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {
24244 return;
24245 }
24246
24247 ids.push(variant.id);
24248 var blacklistReasons = []; // get codecs from the playlist for this variant
24249
24250 var variantCodecs = codecsForPlaylist(_this11.masterPlaylistLoader_.master, variant);
24251 var variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this
24252 // variant is incompatible. Wait for mux.js to probe
24253
24254 if (!variantCodecs.audio && !variantCodecs.video) {
24255 return;
24256 } // TODO: we can support this by removing the
24257 // old media source and creating a new one, but it will take some work.
24258 // The number of streams cannot change
24259
24260
24261 if (variantCodecCount !== codecCount_) {
24262 blacklistReasons.push("codec count \"" + variantCodecCount + "\" !== \"" + codecCount_ + "\"");
24263 } // only exclude playlists by codec change, if codecs cannot switch
24264 // during playback.
24265
24266
24267 if (!_this11.sourceUpdater_.canChangeType()) {
24268 var variantVideoDetails = variantCodecs.video && codecs_js.parseCodecs(variantCodecs.video)[0] || null;
24269 var variantAudioDetails = variantCodecs.audio && codecs_js.parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot change
24270
24271 if (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {
24272 blacklistReasons.push("video codec \"" + variantVideoDetails.type + "\" !== \"" + videoDetails.type + "\"");
24273 } // the audio codec cannot change
24274
24275
24276 if (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {
24277 blacklistReasons.push("audio codec \"" + variantAudioDetails.type + "\" !== \"" + audioDetails.type + "\"");
24278 }
24279 }
24280
24281 if (blacklistReasons.length) {
24282 variant.excludeUntil = Infinity;
24283
24284 _this11.logger_("blacklisting " + variant.id + ": " + blacklistReasons.join(' && '));
24285 }
24286 });
24287 };
24288
24289 _proto.updateAdCues_ = function updateAdCues_(media) {
24290 var offset = 0;
24291 var seekable = this.seekable();
24292
24293 if (seekable.length) {
24294 offset = seekable.start(0);
24295 }
24296
24297 updateAdCues(media, this.cueTagsTrack_, offset);
24298 }
24299 /**
24300 * Calculates the desired forward buffer length based on current time
24301 *
24302 * @return {number} Desired forward buffer length in seconds
24303 */
24304 ;
24305
24306 _proto.goalBufferLength = function goalBufferLength() {
24307 var currentTime = this.tech_.currentTime();
24308 var initial = Config.GOAL_BUFFER_LENGTH;
24309 var rate = Config.GOAL_BUFFER_LENGTH_RATE;
24310 var max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
24311 return Math.min(initial + currentTime * rate, max);
24312 }
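// Worked example (assuming the library defaults of GOAL_BUFFER_LENGTH 30,
// GOAL_BUFFER_LENGTH_RATE 1 and MAX_GOAL_BUFFER_LENGTH 60): at currentTime
// 20s the goal is min(30 + 20 * 1, 60) = 50 seconds of forward buffer.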
24313 /**
24314 * Calculates the desired buffer low water line based on current time
24315 *
24316 * @return {number} Desired buffer low water line in seconds
24317 */
24318 ;
24319
24320 _proto.bufferLowWaterLine = function bufferLowWaterLine() {
24321 var currentTime = this.tech_.currentTime();
24322 var initial = Config.BUFFER_LOW_WATER_LINE;
24323 var rate = Config.BUFFER_LOW_WATER_LINE_RATE;
24324 var max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
24325 var newMax = Math.max(initial, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE);
24326 return Math.min(initial + currentTime * rate, this.experimentalBufferBasedABR ? newMax : max);
24327 };
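// Worked example (assuming defaults of BUFFER_LOW_WATER_LINE 0, RATE 1,
// MAX_BUFFER_LOW_WATER_LINE 30 and EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE 16):
// at currentTime 20s the line is min(0 + 20, 30) = 20s, or min(0 + 20, 16)
// = 16s when experimentalBufferBasedABR is enabled.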
24328
24329 _proto.bufferHighWaterLine = function bufferHighWaterLine() {
24330 return Config.BUFFER_HIGH_WATER_LINE;
24331 };
24332
24333 return MasterPlaylistController;
24334}(videojs__default["default"].EventTarget);
24335
24336/**
24337 * Returns a function that acts as the Enable/disable playlist function.
24338 *
24339 * @param {PlaylistLoader} loader - The master playlist loader
24340 * @param {string} playlistID - id of the playlist
24341 * @param {Function} changePlaylistFn - A function to be called after a
24342 * playlist's enabled-state has been changed. Will NOT be called if a
24343 * playlist's enabled-state is unchanged
24344 * @param {boolean=} enable - Value to set the playlist enabled-state to
24345 * or if undefined returns the current enabled-state for the playlist
24346 * @return {Function} Function for setting/getting enabled
24347 */
24348
24349var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
24350 return function (enable) {
24351 var playlist = loader.master.playlists[playlistID];
24352 var incompatible = isIncompatible(playlist);
24353 var currentlyEnabled = isEnabled(playlist);
24354
24355 if (typeof enable === 'undefined') {
24356 return currentlyEnabled;
24357 }
24358
24359 if (enable) {
24360 delete playlist.disabled;
24361 } else {
24362 playlist.disabled = true;
24363 }
24364
24365 if (enable !== currentlyEnabled && !incompatible) {
24366 // Ensure the outside world knows about our changes
24367 changePlaylistFn();
24368
24369 if (enable) {
24370 loader.trigger('renditionenabled');
24371 } else {
24372 loader.trigger('renditiondisabled');
24373 }
24374 }
24375
24376 return enable;
24377 };
24378};
24379/**
24380 * The representation object encapsulates the publicly visible information
24381 * in a media playlist along with a setter/getter-type function (enabled)
24382 * for changing the enabled-state of a particular playlist entry
24383 *
24384 * @class Representation
24385 */
24386
24387
24388var Representation = function Representation(vhsHandler, playlist, id) {
24389 var mpc = vhsHandler.masterPlaylistController_,
24390 smoothQualityChange = vhsHandler.options_.smoothQualityChange; // Get a reference to a bound version of the quality change function
24391
24392 var changeType = smoothQualityChange ? 'smooth' : 'fast';
24393 var qualityChangeFunction = mpc[changeType + "QualityChange_"].bind(mpc); // some playlist attributes are optional
24394
24395 if (playlist.attributes) {
24396 var resolution = playlist.attributes.RESOLUTION;
24397 this.width = resolution && resolution.width;
24398 this.height = resolution && resolution.height;
24399 this.bandwidth = playlist.attributes.BANDWIDTH;
24400 this.frameRate = playlist.attributes['FRAME-RATE'];
24401 }
24402
24403 this.codecs = codecsForPlaylist(mpc.master(), playlist);
24404 this.playlist = playlist; // The id is simply the ordinality of the media playlist
24405 // within the master playlist
24406
24407 this.id = id; // Partially-apply the enableFunction to create a playlist-
24408 // specific variant
24409
24410 this.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);
24411};
24412/**
24413 * A mixin function that adds the `representations` api to an instance
24414 * of the VhsHandler class
24415 *
24416 * @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
24417 * representation API into
24418 */
24419
24420
24421var renditionSelectionMixin = function renditionSelectionMixin(vhsHandler) {
24422 // Add a single API-specific function to the VhsHandler instance
24423 vhsHandler.representations = function () {
24424 var master = vhsHandler.masterPlaylistController_.master();
24425 var playlists = isAudioOnly(master) ? vhsHandler.masterPlaylistController_.getAudioTrackPlaylists_() : master.playlists;
24426
24427 if (!playlists) {
24428 return [];
24429 }
24430
24431 return playlists.filter(function (media) {
24432 return !isIncompatible(media);
24433 }).map(function (e, i) {
24434 return new Representation(vhsHandler, e, e.id);
24435 });
24436 };
24437};
24438
24439/**
24440 * @file playback-watcher.js
24441 *
24442 * Playback starts, and now my watch begins. It shall not end until my death. I shall
24443 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
24444 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
24445 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
24446 * my life and honor to the Playback Watch, for this Player and all the Players to come.
24447 */
24448
24449var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
24450/**
24451 * @class PlaybackWatcher
24452 */
24453
24454var PlaybackWatcher = /*#__PURE__*/function () {
24455 /**
24456 * Represents a PlaybackWatcher object.
24457 *
24458 * @class
24459 * @param {Object} options an object that includes the tech and settings
24460 */
24461 function PlaybackWatcher(options) {
24462 var _this = this;
24463
24464 this.masterPlaylistController_ = options.masterPlaylistController;
24465 this.tech_ = options.tech;
24466 this.seekable = options.seekable;
24467 this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
24468 this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
24469 this.media = options.media;
24470 this.consecutiveUpdates = 0;
24471 this.lastRecordedTime = null;
24472 this.timer_ = null;
24473 this.checkCurrentTimeTimeout_ = null;
24474 this.logger_ = logger('PlaybackWatcher');
24475 this.logger_('initialize');
24476
24477 var playHandler = function playHandler() {
24478 return _this.monitorCurrentTime_();
24479 };
24480
24481 var canPlayHandler = function canPlayHandler() {
24482 return _this.monitorCurrentTime_();
24483 };
24484
24485 var waitingHandler = function waitingHandler() {
24486 return _this.techWaiting_();
24487 };
24488
24489 var cancelTimerHandler = function cancelTimerHandler() {
24490 return _this.cancelTimer_();
24491 };
24492
24493 var mpc = this.masterPlaylistController_;
24494 var loaderTypes = ['main', 'subtitle', 'audio'];
24495 var loaderChecks = {};
24496 loaderTypes.forEach(function (type) {
24497 loaderChecks[type] = {
24498 reset: function reset() {
24499 return _this.resetSegmentDownloads_(type);
24500 },
24501 updateend: function updateend() {
24502 return _this.checkSegmentDownloads_(type);
24503 }
24504 };
24505 mpc[type + "SegmentLoader_"].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer
24506    // isn't changing, we want to reset. We cannot assume that the new rendition
24507    // will also be stalled until we see new appends.
24508
24509 mpc[type + "SegmentLoader_"].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.
24510 // This prevents one segment playlists (single vtt or single segment content)
24511 // from being detected as stalling. As the buffer will not change in those cases, since
24512 // the buffer is the entire video duration.
24513
24514 _this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
24515 });
24516 /**
24517 * We check if a seek was into a gap through the following steps:
24518 * 1. We get a seeking event and we do not get a seeked event. This means that
24519 * a seek was attempted but not completed.
24520 * 2. We run `fixesBadSeeks_` on segment loader appends. This means that we already
24521 * removed everything from our buffer and appended a segment, and should be ready
24522 * to check for gaps.
24523 */
24524
24525 var setSeekingHandlers = function setSeekingHandlers(fn) {
24526 ['main', 'audio'].forEach(function (type) {
24527 mpc[type + "SegmentLoader_"][fn]('appended', _this.seekingAppendCheck_);
24528 });
24529 };
24530
24531 this.seekingAppendCheck_ = function () {
24532 if (_this.fixesBadSeeks_()) {
24533 _this.consecutiveUpdates = 0;
24534 _this.lastRecordedTime = _this.tech_.currentTime();
24535 setSeekingHandlers('off');
24536 }
24537 };
24538
24539 this.clearSeekingAppendCheck_ = function () {
24540 return setSeekingHandlers('off');
24541 };
24542
24543 this.watchForBadSeeking_ = function () {
24544 _this.clearSeekingAppendCheck_();
24545
24546 setSeekingHandlers('on');
24547 };
24548
24549 this.tech_.on('seeked', this.clearSeekingAppendCheck_);
24550 this.tech_.on('seeking', this.watchForBadSeeking_);
24551 this.tech_.on('waiting', waitingHandler);
24552 this.tech_.on(timerCancelEvents, cancelTimerHandler);
24553 this.tech_.on('canplay', canPlayHandler);
24554 /*
24555 An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
24556 is surfaced in one of two ways:
24557 1) The `waiting` event is fired before the player has buffered content, making it impossible
24558 to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
24559 we can check if playback is stalled due to a gap, and skip the gap if necessary.
24560  2) A source with a gap at the beginning of the stream is loaded programmatically while the player
24561  is in a playing state. To catch this case, it's important that our one-time play listener is set up
24562 even if the player is in a playing state
24563 */
24564
24565 this.tech_.one('play', playHandler); // Define the dispose function to clean up our events
24566
24567 this.dispose = function () {
24568 _this.clearSeekingAppendCheck_();
24569
24570 _this.logger_('dispose');
24571
24572 _this.tech_.off('waiting', waitingHandler);
24573
24574 _this.tech_.off(timerCancelEvents, cancelTimerHandler);
24575
24576 _this.tech_.off('canplay', canPlayHandler);
24577
24578 _this.tech_.off('play', playHandler);
24579
24580 _this.tech_.off('seeking', _this.watchForBadSeeking_);
24581
24582 _this.tech_.off('seeked', _this.clearSeekingAppendCheck_);
24583
24584 loaderTypes.forEach(function (type) {
24585 mpc[type + "SegmentLoader_"].off('appendsdone', loaderChecks[type].updateend);
24586 mpc[type + "SegmentLoader_"].off('playlistupdate', loaderChecks[type].reset);
24587
24588 _this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
24589 });
24590
24591 if (_this.checkCurrentTimeTimeout_) {
24592 window__default["default"].clearTimeout(_this.checkCurrentTimeTimeout_);
24593 }
24594
24595 _this.cancelTimer_();
24596 };
24597 }
24598 /**
24599 * Periodically check current time to see if playback stopped
24600 *
24601 * @private
24602 */
24603
24604
24605 var _proto = PlaybackWatcher.prototype;
24606
24607 _proto.monitorCurrentTime_ = function monitorCurrentTime_() {
24608 this.checkCurrentTime_();
24609
24610 if (this.checkCurrentTimeTimeout_) {
24611 window__default["default"].clearTimeout(this.checkCurrentTimeTimeout_);
24612    } // candidate poll intervals in ms: 42 (one frame at 24 fps); 250 (what WebKit uses); 15 (what FF uses)
24613
24614
24615 this.checkCurrentTimeTimeout_ = window__default["default"].setTimeout(this.monitorCurrentTime_.bind(this), 250);
24616 }
24617 /**
24618 * Reset stalled download stats for a specific type of loader
24619 *
24620 * @param {string} type
24621 * The segment loader type to check.
24622 *
24623 * @listens SegmentLoader#playlistupdate
24624 * @listens Tech#seeking
24625 * @listens Tech#seeked
24626 */
24627 ;
24628
24629 _proto.resetSegmentDownloads_ = function resetSegmentDownloads_(type) {
24630 var loader = this.masterPlaylistController_[type + "SegmentLoader_"];
24631
24632 if (this[type + "StalledDownloads_"] > 0) {
24633 this.logger_("resetting possible stalled download count for " + type + " loader");
24634 }
24635
24636 this[type + "StalledDownloads_"] = 0;
24637 this[type + "Buffered_"] = loader.buffered_();
24638 }
24639 /**
24640 * Checks on every segment `appendsdone` to see
24641   * if segment appends are making progress. If they are not,
24642   * yet we are still downloading bytes, we blacklist the playlist.
24643 *
24644 * @param {string} type
24645 * The segment loader type to check.
24646 *
24647 * @listens SegmentLoader#appendsdone
24648 */
24649 ;
24650
24651 _proto.checkSegmentDownloads_ = function checkSegmentDownloads_(type) {
24652 var mpc = this.masterPlaylistController_;
24653 var loader = mpc[type + "SegmentLoader_"];
24654 var buffered = loader.buffered_();
24655 var isBufferedDifferent = isRangeDifferent(this[type + "Buffered_"], buffered);
24656 this[type + "Buffered_"] = buffered; // if another watcher is going to fix the issue or
24657 // the buffered value for this loader changed
24658 // appends are working
24659
24660 if (isBufferedDifferent) {
24661 this.resetSegmentDownloads_(type);
24662 return;
24663 }
24664
24665 this[type + "StalledDownloads_"]++;
24666 this.logger_("found #" + this[type + "StalledDownloads_"] + " " + type + " appends that did not increase buffer (possible stalled download)", {
24667 playlistId: loader.playlist_ && loader.playlist_.id,
24668 buffered: timeRangesToArray(buffered)
24669 }); // after 10 possibly stalled appends with no reset, exclude
24670
24671 if (this[type + "StalledDownloads_"] < 10) {
24672 return;
24673 }
24674
24675 this.logger_(type + " loader stalled download exclusion");
24676 this.resetSegmentDownloads_(type);
24677 this.tech_.trigger({
24678 type: 'usage',
24679 name: "vhs-" + type + "-download-exclusion"
24680 });
24681
24682 if (type === 'subtitle') {
24683 return;
24684 } // TODO: should we exclude audio tracks rather than main tracks
24685 // when type is audio?
24686
24687
24688 mpc.blacklistCurrentPlaylist({
24689 message: "Excessive " + type + " segment downloading detected."
24690 }, Infinity);
24691 }
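  // Illustrative sketch, not part of the library: the 'usage' events triggered
  // above can be observed from application code for QoS monitoring. The event
  // name filter mirrors the "vhs-<type>-download-exclusion" pattern.
  //
  //   player.tech({ IWillNotUseThisInPlugins: true }).on('usage', function (e) {
  //     if ((/-download-exclusion$/).test(e.name)) {
  //       videojs.log('stalled download exclusion:', e.name);
  //     }
  //   });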
24692 /**
24693 * The purpose of this function is to emulate the "waiting" event on
24694 * browsers that do not emit it when they are waiting for more
24695 * data to continue playback
24696 *
24697 * @private
24698 */
24699 ;
24700
24701 _proto.checkCurrentTime_ = function checkCurrentTime_() {
24702 if (this.tech_.paused() || this.tech_.seeking()) {
24703 return;
24704 }
24705
24706 var currentTime = this.tech_.currentTime();
24707 var buffered = this.tech_.buffered();
24708
24709 if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
24710 // If current time is at the end of the final buffered region, then any playback
24711 // stall is most likely caused by buffering in a low bandwidth environment. The tech
24712      // should fire a `waiting` event in this scenario, but due to browser and tech
24713      // inconsistencies it does not always do so. Calling `techWaiting_` here allows us to simulate
24714 // responding to a native `waiting` event when the tech fails to emit one.
24715 return this.techWaiting_();
24716 }
24717
24718 if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
24719 this.consecutiveUpdates++;
24720 this.waiting_();
24721 } else if (currentTime === this.lastRecordedTime) {
24722 this.consecutiveUpdates++;
24723 } else {
24724 this.consecutiveUpdates = 0;
24725 this.lastRecordedTime = currentTime;
24726 }
24727 }
24728 /**
24729 * Cancels any pending timers and resets the 'timeupdate' mechanism
24730 * designed to detect that we are stalled
24731 *
24732 * @private
24733 */
24734 ;
24735
24736 _proto.cancelTimer_ = function cancelTimer_() {
24737 this.consecutiveUpdates = 0;
24738
24739 if (this.timer_) {
24740 this.logger_('cancelTimer_');
24741 clearTimeout(this.timer_);
24742 }
24743
24744 this.timer_ = null;
24745 }
24746 /**
24747 * Fixes situations where there's a bad seek
24748 *
24749 * @return {boolean} whether an action was taken to fix the seek
24750 * @private
24751 */
24752 ;
24753
24754 _proto.fixesBadSeeks_ = function fixesBadSeeks_() {
24755 var seeking = this.tech_.seeking();
24756
24757 if (!seeking) {
24758 return false;
24759 } // TODO: It's possible that these seekable checks should be moved out of this function
24760 // and into a function that runs on seekablechange. It's also possible that we only need
24761 // afterSeekableWindow as the buffered check at the bottom is good enough to handle before
24762 // seekable range.
24763
24764
24765 var seekable = this.seekable();
24766 var currentTime = this.tech_.currentTime();
24767 var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
24768 var seekTo;
24769
24770 if (isAfterSeekableRange) {
24771 var seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
24772
24773 seekTo = seekableEnd;
24774 }
24775
24776 if (this.beforeSeekableWindow_(seekable, currentTime)) {
24777 var seekableStart = seekable.start(0); // sync to the beginning of the live window
24778 // provide a buffer of .1 seconds to handle rounding/imprecise numbers
24779
24780 seekTo = seekableStart + ( // if the playlist is too short and the seekable range is an exact time (can
24781 // happen in live with a 3 segment playlist), then don't use a time delta
24782 seekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);
24783 }
24784
24785 if (typeof seekTo !== 'undefined') {
24786 this.logger_("Trying to seek outside of seekable at time " + currentTime + " with " + ("seekable range " + printableRange(seekable) + ". Seeking to ") + (seekTo + "."));
24787 this.tech_.setCurrentTime(seekTo);
24788 return true;
24789 }
24790
24791 var sourceUpdater = this.masterPlaylistController_.sourceUpdater_;
24792 var buffered = this.tech_.buffered();
24793 var audioBuffered = sourceUpdater.audioBuffer ? sourceUpdater.audioBuffered() : null;
24794 var videoBuffered = sourceUpdater.videoBuffer ? sourceUpdater.videoBuffered() : null;
24795 var media = this.media(); // verify that at least two segment durations or one part duration have been
24796 // appended before checking for a gap.
24797
24798    var minAppendedDuration = media.partTargetDuration ? media.partTargetDuration : (media.targetDuration - TIME_FUDGE_FACTOR) * 2; // use one part target duration when LL-HLS parts are in use,
24799    // otherwise two fudge-adjusted segment target durations
24800
24801 var bufferedToCheck = [audioBuffered, videoBuffered];
24802
24803 for (var i = 0; i < bufferedToCheck.length; i++) {
24804 // skip null buffered
24805 if (!bufferedToCheck[i]) {
24806 continue;
24807 }
24808
24809 var timeAhead = timeAheadOf(bufferedToCheck[i], currentTime); // if we are less than two video/audio segment durations or one part
24810 // duration behind we haven't appended enough to call this a bad seek.
24811
24812 if (timeAhead < minAppendedDuration) {
24813 return false;
24814 }
24815 }
24816
24817 var nextRange = findNextRange(buffered, currentTime); // we have appended enough content, but we don't have anything buffered
24818 // to seek over the gap
24819
24820 if (nextRange.length === 0) {
24821 return false;
24822 }
24823
24824 seekTo = nextRange.start(0) + SAFE_TIME_DELTA;
24825 this.logger_("Buffered region starts (" + nextRange.start(0) + ") " + (" just beyond seek point (" + currentTime + "). Seeking to " + seekTo + "."));
24826 this.tech_.setCurrentTime(seekTo);
24827 return true;
24828 }
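  // Worked example for the gap check above (illustrative): a seek lands at
  // 9.95 and, once enough appends complete, buffered is [[10, 20]]. With
  // ~10s of buffer ahead (more than minAppendedDuration), findNextRange
  // returns [10, 20] and the watcher seeks to 10 + SAFE_TIME_DELTA (~0.1s,
  // per the comment above), hopping the sub-second gap instead of stalling.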
24829 /**
24830 * Handler for situations when we determine the player is waiting.
24831 *
24832 * @private
24833 */
24834 ;
24835
24836 _proto.waiting_ = function waiting_() {
24837 if (this.techWaiting_()) {
24838 return;
24839 } // All tech waiting checks failed. Use last resort correction
24840
24841
24842 var currentTime = this.tech_.currentTime();
24843 var buffered = this.tech_.buffered();
24844 var currentRange = findRange(buffered, currentTime); // Sometimes the player can stall for unknown reasons within a contiguous buffered
24845 // region with no indication that anything is amiss (seen in Firefox). Seeking to
24846 // currentTime is usually enough to kickstart the player. This checks that the player
24847 // is currently within a buffered region before attempting a corrective seek.
24848 // Chrome does not appear to continue `timeupdate` events after a `waiting` event
24849 // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
24850 // make sure there is ~3 seconds of forward buffer before taking any corrective action
24851 // to avoid triggering an `unknownwaiting` event when the network is slow.
24852
24853 if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
24854 this.cancelTimer_();
24855 this.tech_.setCurrentTime(currentTime);
24856 this.logger_("Stopped at " + currentTime + " while inside a buffered region " + ("[" + currentRange.start(0) + " -> " + currentRange.end(0) + "]. Attempting to resume ") + 'playback by seeking to the current time.'); // unknown waiting corrections may be useful for monitoring QoS
24857
24858 this.tech_.trigger({
24859 type: 'usage',
24860 name: 'vhs-unknown-waiting'
24861 });
24862 this.tech_.trigger({
24863 type: 'usage',
24864 name: 'hls-unknown-waiting'
24865 });
24866 return;
24867 }
24868 }
24869 /**
24870 * Handler for situations when the tech fires a `waiting` event
24871 *
24872 * @return {boolean}
24873   *         True if an action was taken (or none was needed) to correct the waiting. False if no
24874   *         checks passed.
24875 * @private
24876 */
24877 ;
24878
24879 _proto.techWaiting_ = function techWaiting_() {
24880 var seekable = this.seekable();
24881 var currentTime = this.tech_.currentTime();
24882
24883 if (this.tech_.seeking() || this.timer_ !== null) {
24884 // Tech is seeking or already waiting on another action, no action needed
24885 return true;
24886 }
24887
24888 if (this.beforeSeekableWindow_(seekable, currentTime)) {
24889 var livePoint = seekable.end(seekable.length - 1);
24890 this.logger_("Fell out of live window at time " + currentTime + ". Seeking to " + ("live point (seekable end) " + livePoint));
24891 this.cancelTimer_();
24892 this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoS
24893
24894 this.tech_.trigger({
24895 type: 'usage',
24896 name: 'vhs-live-resync'
24897 });
24898 this.tech_.trigger({
24899 type: 'usage',
24900 name: 'hls-live-resync'
24901 });
24902 return true;
24903 }
24904
24905 var sourceUpdater = this.tech_.vhs.masterPlaylistController_.sourceUpdater_;
24906 var buffered = this.tech_.buffered();
24907 var videoUnderflow = this.videoUnderflow_({
24908 audioBuffered: sourceUpdater.audioBuffered(),
24909 videoBuffered: sourceUpdater.videoBuffered(),
24910 currentTime: currentTime
24911 });
24912
24913 if (videoUnderflow) {
24914 // Even though the video underflowed and was stuck in a gap, the audio overplayed
24915 // the gap, leading currentTime into a buffered range. Seeking to currentTime
24916 // allows the video to catch up to the audio position without losing any audio
24917 // (only suffering ~3 seconds of frozen video and a pause in audio playback).
24918 this.cancelTimer_();
24919 this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoS
24920
24921 this.tech_.trigger({
24922 type: 'usage',
24923 name: 'vhs-video-underflow'
24924 });
24925 this.tech_.trigger({
24926 type: 'usage',
24927 name: 'hls-video-underflow'
24928 });
24929 return true;
24930 }
24931
24932 var nextRange = findNextRange(buffered, currentTime); // check for gap
24933
24934 if (nextRange.length > 0) {
24935 var difference = nextRange.start(0) - currentTime;
24936 this.logger_("Stopped at " + currentTime + ", setting timer for " + difference + ", seeking " + ("to " + nextRange.start(0)));
24937 this.cancelTimer_();
24938 this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
24939 return true;
24940 } // All checks failed. Returning false to indicate failure to correct waiting
24941
24942
24943 return false;
24944 };
24945
24946 _proto.afterSeekableWindow_ = function afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow) {
24947 if (allowSeeksWithinUnsafeLiveWindow === void 0) {
24948 allowSeeksWithinUnsafeLiveWindow = false;
24949 }
24950
24951 if (!seekable.length) {
24952 // we can't make a solid case if there's no seekable, default to false
24953 return false;
24954 }
24955
24956 var allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
24957 var isLive = !playlist.endList;
24958
24959 if (isLive && allowSeeksWithinUnsafeLiveWindow) {
24960 allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
24961 }
24962
24963 if (currentTime > allowedEnd) {
24964 return true;
24965 }
24966
24967 return false;
24968 };
24969
24970 _proto.beforeSeekableWindow_ = function beforeSeekableWindow_(seekable, currentTime) {
24971 if (seekable.length && // can't fall before 0 and 0 seekable start identifies VOD stream
24972 seekable.start(0) > 0 && currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
24973 return true;
24974 }
24975
24976 return false;
24977 };
24978
24979 _proto.videoUnderflow_ = function videoUnderflow_(_ref) {
24980 var videoBuffered = _ref.videoBuffered,
24981 audioBuffered = _ref.audioBuffered,
24982 currentTime = _ref.currentTime;
24983
24984 // audio only content will not have video underflow :)
24985 if (!videoBuffered) {
24986 return;
24987 }
24988
24989 var gap; // find a gap in demuxed content.
24990
24991 if (videoBuffered.length && audioBuffered.length) {
24992 // in Chrome audio will continue to play for ~3s when we run out of video
24993 // so we have to check that the video buffer did have some buffer in the
24994 // past.
24995 var lastVideoRange = findRange(videoBuffered, currentTime - 3);
24996 var videoRange = findRange(videoBuffered, currentTime);
24997 var audioRange = findRange(audioBuffered, currentTime);
24998
24999 if (audioRange.length && !videoRange.length && lastVideoRange.length) {
25000 gap = {
25001 start: lastVideoRange.end(0),
25002 end: audioRange.end(0)
25003 };
25004 } // find a gap in muxed content.
25005
25006 } else {
25007 var nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are
25008 // stuck in a gap due to video underflow.
25009
25010 if (!nextRange.length) {
25011 gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
25012 }
25013 }
25014
25015 if (gap) {
25016 this.logger_("Encountered a gap in video from " + gap.start + " to " + gap.end + ". " + ("Seeking to current time " + currentTime));
25017 return true;
25018 }
25019
25020 return false;
25021 }
25022 /**
25023 * Timer callback. If playback still has not proceeded, then we seek
25024 * to the start of the next buffered region.
25025 *
25026 * @private
25027 */
25028 ;
25029
25030 _proto.skipTheGap_ = function skipTheGap_(scheduledCurrentTime) {
25031 var buffered = this.tech_.buffered();
25032 var currentTime = this.tech_.currentTime();
25033 var nextRange = findNextRange(buffered, currentTime);
25034 this.cancelTimer_();
25035
25036 if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
25037 return;
25038 }
25039
25040 this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0)); // only seek if we still have not played
25041
25042 this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);
25043 this.tech_.trigger({
25044 type: 'usage',
25045 name: 'vhs-gap-skip'
25046 });
25047 this.tech_.trigger({
25048 type: 'usage',
25049 name: 'hls-gap-skip'
25050 });
25051 };
25052
25053 _proto.gapFromVideoUnderflow_ = function gapFromVideoUnderflow_(buffered, currentTime) {
25054 // At least in Chrome, if there is a gap in the video buffer, the audio will continue
25055 // playing for ~3 seconds after the video gap starts. This is done to account for
25056 // video buffer underflow/underrun (note that this is not done when there is audio
25057 // buffer underflow/underrun -- in that case the video will stop as soon as it
25058 // encounters the gap, as audio stalls are more noticeable/jarring to a user than
25059 // video stalls). The player's time will reflect the playthrough of audio, so the
25060 // time will appear as if we are in a buffered region, even if we are stuck in a
25061 // "gap."
25062 //
25063 // Example:
25064 // video buffer: 0 => 10.1, 10.2 => 20
25065 // audio buffer: 0 => 20
25066 // overall buffer: 0 => 10.1, 10.2 => 20
25067 // current time: 13
25068 //
25069 // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
25070 // however, the audio continued playing until it reached ~3 seconds past the gap
25071 // (13 seconds), at which point it stops as well. Since current time is past the
25072 // gap, findNextRange will return no ranges.
25073 //
25074 // To check for this issue, we see if there is a gap that starts somewhere within
25075 // a 3 second range (3 seconds +/- 1 second) back from our current time.
25076 var gaps = findGaps(buffered);
25077
25078 for (var i = 0; i < gaps.length; i++) {
25079 var start = gaps.start(i);
25080      var end = gaps.end(i); // the gap starts between 2 and 4 seconds back from the current time
25081
25082 if (currentTime - start < 4 && currentTime - start > 2) {
25083 return {
25084 start: start,
25085 end: end
25086 };
25087 }
25088 }
25089
25090 return null;
25091 };
25092
25093 return PlaybackWatcher;
25094}();
25095
25096var defaultOptions = {
25097 errorInterval: 30,
25098 getSource: function getSource(next) {
25099 var tech = this.tech({
25100 IWillNotUseThisInPlugins: true
25101 });
25102 var sourceObj = tech.currentSource_ || this.currentSource();
25103 return next(sourceObj);
25104 }
25105};
25106/**
25107 * Main entry point for the plugin
25108 *
25109 * @param {Player} player a reference to a videojs Player instance
25110 * @param {Object} [options] an object with plugin options
25111 * @private
25112 */
25113
25114var initPlugin = function initPlugin(player, options) {
25115 var lastCalled = 0;
25116 var seekTo = 0;
25117 var localOptions = videojs__default["default"].mergeOptions(defaultOptions, options);
25118 player.ready(function () {
25119 player.trigger({
25120 type: 'usage',
25121 name: 'vhs-error-reload-initialized'
25122 });
25123 player.trigger({
25124 type: 'usage',
25125 name: 'hls-error-reload-initialized'
25126 });
25127 });
25128 /**
25129 * Player modifications to perform that must wait until `loadedmetadata`
25130 * has been triggered
25131 *
25132 * @private
25133 */
25134
25135 var loadedMetadataHandler = function loadedMetadataHandler() {
25136 if (seekTo) {
25137 player.currentTime(seekTo);
25138 }
25139 };
25140 /**
25141 * Set the source on the player element, play, and seek if necessary
25142 *
25143 * @param {Object} sourceObj An object specifying the source url and mime-type to play
25144 * @private
25145 */
25146
25147
25148 var setSource = function setSource(sourceObj) {
25149 if (sourceObj === null || sourceObj === undefined) {
25150 return;
25151 }
25152
25153 seekTo = player.duration() !== Infinity && player.currentTime() || 0;
25154 player.one('loadedmetadata', loadedMetadataHandler);
25155 player.src(sourceObj);
25156 player.trigger({
25157 type: 'usage',
25158 name: 'vhs-error-reload'
25159 });
25160 player.trigger({
25161 type: 'usage',
25162 name: 'hls-error-reload'
25163 });
25164 player.play();
25165 };
25166 /**
25167 * Attempt to get a source from either the built-in getSource function
25168 * or a custom function provided via the options
25169 *
25170 * @private
25171 */
25172
25173
25174 var errorHandler = function errorHandler() {
25175 // Do not attempt to reload the source if a source-reload occurred before
25176 // 'errorInterval' time has elapsed since the last source-reload
25177 if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
25178 player.trigger({
25179 type: 'usage',
25180 name: 'vhs-error-reload-canceled'
25181 });
25182 player.trigger({
25183 type: 'usage',
25184 name: 'hls-error-reload-canceled'
25185 });
25186 return;
25187 }
25188
25189 if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
25190 videojs__default["default"].log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
25191 return;
25192 }
25193
25194 lastCalled = Date.now();
25195 return localOptions.getSource.call(player, setSource);
25196 };
25197 /**
25198 * Unbind any event handlers that were bound by the plugin
25199 *
25200 * @private
25201 */
25202
25203
25204 var cleanupEvents = function cleanupEvents() {
25205 player.off('loadedmetadata', loadedMetadataHandler);
25206 player.off('error', errorHandler);
25207 player.off('dispose', cleanupEvents);
25208 };
25209 /**
25210 * Cleanup before re-initializing the plugin
25211 *
25212 * @param {Object} [newOptions] an object with plugin options
25213 * @private
25214 */
25215
25216
25217 var reinitPlugin = function reinitPlugin(newOptions) {
25218 cleanupEvents();
25219 initPlugin(player, newOptions);
25220 };
25221
25222 player.on('error', errorHandler);
25223 player.on('dispose', cleanupEvents); // Overwrite the plugin function so that we can correctly cleanup before
25224 // initializing the plugin
25225
25226 player.reloadSourceOnError = reinitPlugin;
25227};
25228/**
25229 * Reload the source when an error is detected as long as there
25230 * wasn't an error previously within the last 30 seconds
25231 *
25232 * @param {Object} [options] an object with plugin options
25233 */
25234
25235
25236var reloadSourceOnError = function reloadSourceOnError(options) {
25237 initPlugin(this, options);
25238};
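// Illustrative usage sketch, assuming reloadSourceOnError is registered as a
// video.js plugin under the same name. Option names come from defaultOptions
// above; inside getSource, `this` is the player.
var exampleEnableReloadOnError = function exampleEnableReloadOnError(player) {
  player.reloadSourceOnError({
    // suppress reloads that occur within 60 seconds of the previous one
    errorInterval: 60,
    // optionally inspect or rewrite the source before it is re-applied;
    // the callback expects a {src, type} source object
    getSource: function getSource(reload) {
      reload(this.currentSource());
    }
  });
};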
25239
25240var version$4 = "2.16.0";
25241
25242var version$3 = "6.0.1";
25243
25244var version$2 = "0.22.1";
25245
25246var version$1 = "4.8.0";
25247
25248var version = "3.1.3";
25249
25250var Vhs = {
25251 PlaylistLoader: PlaylistLoader,
25252 Playlist: Playlist,
25253 utils: utils,
25254 STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
25255 INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
25256 lastBandwidthSelector: lastBandwidthSelector,
25257 movingAverageBandwidthSelector: movingAverageBandwidthSelector,
25258 comparePlaylistBandwidth: comparePlaylistBandwidth,
25259 comparePlaylistResolution: comparePlaylistResolution,
25260 xhr: xhrFactory()
25261}; // Define getter/setters for config properties
25262
25263Object.keys(Config).forEach(function (prop) {
25264 Object.defineProperty(Vhs, prop, {
25265 get: function get() {
25266      videojs__default["default"].log.warn("using Vhs." + prop + " is UNSAFE; be sure you know what you are doing");
25267 return Config[prop];
25268 },
25269 set: function set(value) {
25270      videojs__default["default"].log.warn("using Vhs." + prop + " is UNSAFE; be sure you know what you are doing");
25271
25272 if (typeof value !== 'number' || value < 0) {
25273 videojs__default["default"].log.warn("value of Vhs." + prop + " must be greater than or equal to 0");
25274 return;
25275 }
25276
25277 Config[prop] = value;
25278 }
25279 });
25280});
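// Illustrative sketch, not part of the library: tuning a Config value through
// the guarded properties defined above. GOAL_BUFFER_LENGTH is an assumed
// example key; the real set of keys comes from the Config object.
var exampleTuneGoalBuffer = function exampleTuneGoalBuffer() {
  // logs the UNSAFE warning, validates the number, then applies it
  Vhs.GOAL_BUFFER_LENGTH = 60;
};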
25281var LOCAL_STORAGE_KEY = 'videojs-vhs';
25282/**
25283 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.
25284 *
25285 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
25286 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
25287 * @function handleVhsMediaChange
25288 */
25289
25290var handleVhsMediaChange = function handleVhsMediaChange(qualityLevels, playlistLoader) {
25291 var newPlaylist = playlistLoader.media();
25292 var selectedIndex = -1;
25293
25294 for (var i = 0; i < qualityLevels.length; i++) {
25295 if (qualityLevels[i].id === newPlaylist.id) {
25296 selectedIndex = i;
25297 break;
25298 }
25299 }
25300
25301 qualityLevels.selectedIndex_ = selectedIndex;
25302 qualityLevels.trigger({
25303 selectedIndex: selectedIndex,
25304 type: 'change'
25305 });
25306};
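// Illustrative sketch, assuming the videojs-contrib-quality-levels plugin
// (which provides player.qualityLevels()): the 'change' events triggered
// above surface rendition switches to application code.
var exampleWatchQualityChanges = function exampleWatchQualityChanges(player) {
  var qualityLevels = player.qualityLevels();
  qualityLevels.on('change', function () {
    var level = qualityLevels[qualityLevels.selectedIndex];
    videojs__default["default"].log('now playing rendition:', level && level.id);
  });
};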
25307/**
25308 * Adds quality levels to list once playlist metadata is available
25309 *
25310 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
25311 * @param {Object} vhs Vhs object to listen to for media events.
25312 * @function handleVhsLoadedMetadata
25313 */
25314
25315
25316var handleVhsLoadedMetadata = function handleVhsLoadedMetadata(qualityLevels, vhs) {
25317 vhs.representations().forEach(function (rep) {
25318 qualityLevels.addQualityLevel(rep);
25319 });
25320 handleVhsMediaChange(qualityLevels, vhs.playlists);
25321}; // HLS is a source handler, not a tech. Make sure attempts to use it
25322// as one do not cause exceptions.
25323
25324
25325Vhs.canPlaySource = function () {
25326 return videojs__default["default"].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
25327};
25328
25329var emeKeySystems = function emeKeySystems(keySystemOptions, mainPlaylist, audioPlaylist) {
25330 if (!keySystemOptions) {
25331 return keySystemOptions;
25332 }
25333
25334 var codecs = {};
25335
25336 if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {
25337 codecs = unwrapCodecList(codecs_js.parseCodecs(mainPlaylist.attributes.CODECS));
25338 }
25339
25340 if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {
25341 codecs.audio = audioPlaylist.attributes.CODECS;
25342 }
25343
25344 var videoContentType = codecs_js.getMimeForCodec(codecs.video);
25345 var audioContentType = codecs_js.getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlist
25346
25347 var keySystemContentTypes = {};
25348
25349 for (var keySystem in keySystemOptions) {
25350 keySystemContentTypes[keySystem] = {};
25351
25352 if (audioContentType) {
25353 keySystemContentTypes[keySystem].audioContentType = audioContentType;
25354 }
25355
25356 if (videoContentType) {
25357 keySystemContentTypes[keySystem].videoContentType = videoContentType;
25358 } // Default to using the video playlist's PSSH even though they may be different, as
25359 // videojs-contrib-eme will only accept one in the options.
25360 //
25361    // This shouldn't be an issue for most cases as early initialization will handle all
25362    // unique PSSH values, and if it doesn't, then encrypted events should have the
25363 // specific information needed for the unique license.
25364
25365
25366 if (mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {
25367 keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;
25368 } // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
25369 // so we need to prevent overwriting the URL entirely
25370
25371
25372 if (typeof keySystemOptions[keySystem] === 'string') {
25373 keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
25374 }
25375 }
25376
25377 return videojs__default["default"].mergeOptions(keySystemOptions, keySystemContentTypes);
25378};
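// Illustrative sketch of the keySystems source option consumed above; the
// shape follows videojs-contrib-eme and the URLs are placeholders.
// emeKeySystems upserts audioContentType/videoContentType (and a pssh, when
// present) onto each key system entry before the source is handed to eme.
var exampleDrmSource = {
  src: 'https://example.com/master.m3u8',
  type: 'application/x-mpegURL',
  keySystems: {
    'com.widevine.alpha': 'https://example.com/license'
  }
};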
25379/**
25380 * @typedef {Object} KeySystems
25381 *
25382 * keySystems configuration for https://github.com/videojs/videojs-contrib-eme
25383 * Note: not all options are listed here.
25384 *
25385 * @property {Uint8Array} [pssh]
25386 * Protection System Specific Header
25387 */
25388
25389/**
25390 * Goes through all the playlists and collects an array of KeySystems options objects
25391 * containing each playlist's keySystems and their pssh values, if available.
25392 *
25393 * @param {Object[]} playlists
25394 * The playlists to look through
25395 * @param {string[]} keySystems
25396 * The keySystems to collect pssh values for
25397 *
25398 * @return {KeySystems[]}
25399 * An array of KeySystems objects containing available key systems and their
25400 * pssh values
25401 */
25402
25403
25404var getAllPsshKeySystemsOptions = function getAllPsshKeySystemsOptions(playlists, keySystems) {
25405 return playlists.reduce(function (keySystemsArr, playlist) {
25406 if (!playlist.contentProtection) {
25407 return keySystemsArr;
25408 }
25409
25410 var keySystemsOptions = keySystems.reduce(function (keySystemsObj, keySystem) {
25411 var keySystemOptions = playlist.contentProtection[keySystem];
25412
25413 if (keySystemOptions && keySystemOptions.pssh) {
25414 keySystemsObj[keySystem] = {
25415 pssh: keySystemOptions.pssh
25416 };
25417 }
25418
25419 return keySystemsObj;
25420 }, {});
25421
25422 if (Object.keys(keySystemsOptions).length) {
25423 keySystemsArr.push(keySystemsOptions);
25424 }
25425
25426 return keySystemsArr;
25427 }, []);
25428};
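// Shape sketch, derived from the reduce above: given playlists that carry
// contentProtection data, the result holds one options object per playlist
// with a pssh for at least one requested key system, e.g.
//
//   getAllPsshKeySystemsOptions(playlists, ['com.widevine.alpha']);
//   // => [{ 'com.widevine.alpha': { pssh: <Uint8Array> } }, ...]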
25429/**
25430 * Returns a promise that waits for the
25431 * [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
25432 *
25433 * Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
25434 * browsers.
25435 *
25436 * As per the above ticket, this is particularly important for Chrome, where, if
25437 * unencrypted content is appended before encrypted content and the key session has not
25438 * been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
25439 * during playback.
25440 *
25441 * @param {Object} player
25442 * The player instance
25443 * @param {Object[]} sourceKeySystems
25444 * The key systems options from the player source
25445 * @param {Object} [audioMedia]
25446 * The active audio media playlist (optional)
25447 * @param {Object[]} mainPlaylists
25448 * The playlists found on the master playlist object
25449 *
25450 * @return {Object}
25451 * Promise that resolves when the key session has been created
25452 */
25453
25454
25455var waitForKeySessionCreation = function waitForKeySessionCreation(_ref) {
25456 var player = _ref.player,
25457 sourceKeySystems = _ref.sourceKeySystems,
25458 audioMedia = _ref.audioMedia,
25459 mainPlaylists = _ref.mainPlaylists;
25460
25461 if (!player.eme.initializeMediaKeys) {
25462 return Promise.resolve();
25463 } // TODO should all audio PSSH values be initialized for DRM?
25464 //
25465 // All unique video rendition pssh values are initialized for DRM, but here only
25466 // the initial audio playlist license is initialized. In theory, an encrypted
25467 // event should be fired if the user switches to an alternative audio playlist
25468 // where a license is required, but this case hasn't yet been tested. In addition, there
25469 // may be many alternate audio playlists unlikely to be used (e.g., multiple different
25470 // languages).
25471
25472
25473 var playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;
25474 var keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
25475 var initializationFinishedPromises = [];
25476 var keySessionCreatedPromises = []; // Since PSSH values are interpreted as initData, EME will dedupe any duplicates. The
25477 // only place where it should not be deduped is for ms-prefixed APIs, but the early
25478 // return for IE11 above, and the existence of modern EME APIs in addition to
25479 // ms-prefixed APIs on Edge should prevent this from being a concern.
25480 // initializeMediaKeys also won't use the webkit-prefixed APIs.
25481
25482 keySystemsOptionsArr.forEach(function (keySystemsOptions) {
25483 keySessionCreatedPromises.push(new Promise(function (resolve, reject) {
25484 player.tech_.one('keysessioncreated', resolve);
25485 }));
25486 initializationFinishedPromises.push(new Promise(function (resolve, reject) {
25487 player.eme.initializeMediaKeys({
25488 keySystems: keySystemsOptions
25489 }, function (err) {
25490 if (err) {
25491 reject(err);
25492 return;
25493 }
25494
25495 resolve();
25496 });
25497 }));
25498 }); // The reasons Promise.race is chosen over Promise.any:
25499 //
25500 // * Promise.any is only available in Safari 14+.
25501 // * None of these promises are expected to reject. If they do reject, it might be
25502 // better here for the race to surface the rejection, rather than mask it by using
25503 // Promise.any.
25504
25505 return Promise.race([// If a session was previously created, these will all finish resolving without
25506 // creating a new session, otherwise it will take until the end of all license
25507 // requests, which is why the key session check is used (to make setup much faster).
25508 Promise.all(initializationFinishedPromises), // Once a single session is created, the browser knows DRM will be used.
25509 Promise.race(keySessionCreatedPromises)]);
25510};
25511/**
25512 * If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
25513 * there are keySystems on the source, sets up source options to prepare the source for
25514 * eme.
25515 *
25516 * @param {Object} player
25517 * The player instance
25518 * @param {Object[]} sourceKeySystems
25519 * The key systems options from the player source
25520 * @param {Object} media
25521 * The active media playlist
25522 * @param {Object} [audioMedia]
25523 * The active audio media playlist (optional)
25524 *
25525 * @return {boolean}
25526 * Whether or not options were configured and EME is available
25527 */
25528
25529var setupEmeOptions = function setupEmeOptions(_ref2) {
25530 var player = _ref2.player,
25531 sourceKeySystems = _ref2.sourceKeySystems,
25532 media = _ref2.media,
25533 audioMedia = _ref2.audioMedia;
25534 var sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);
25535
25536 if (!sourceOptions) {
25537 return false;
25538 }
25539
25540 player.currentSource().keySystems = sourceOptions; // eme handles the rest of the setup, so if it is missing
25541 // do nothing.
25542
25543 if (sourceOptions && !player.eme) {
25544 videojs__default["default"].log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
25545 return false;
25546 }
25547
25548 return true;
25549};
25550
25551var getVhsLocalStorage = function getVhsLocalStorage() {
25552 if (!window__default["default"].localStorage) {
25553 return null;
25554 }
25555
25556 var storedObject = window__default["default"].localStorage.getItem(LOCAL_STORAGE_KEY);
25557
25558 if (!storedObject) {
25559 return null;
25560 }
25561
25562 try {
25563 return JSON.parse(storedObject);
25564 } catch (e) {
25565 // someone may have tampered with the value
25566 return null;
25567 }
25568};
25569
25570var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
25571 if (!window__default["default"].localStorage) {
25572 return false;
25573 }
25574
25575 var objectToStore = getVhsLocalStorage();
25576 objectToStore = objectToStore ? videojs__default["default"].mergeOptions(objectToStore, options) : options;
25577
25578 try {
25579 window__default["default"].localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
25580 } catch (e) {
25581 // Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
25582 // storage is set to 0).
25583 // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
25584 // No need to perform any operation.
25585 return false;
25586 }
25587
25588 return objectToStore;
25589};
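// Illustrative sketch, derived from the helpers above and their use in
// setOptions_ below: the object stored under the 'videojs-vhs' key carries
// the last measured bandwidth and throughput in bits per second.
//
//   updateVhsLocalStorage({ bandwidth: 5000000, throughput: 20000000 });
//   getVhsLocalStorage(); // => { bandwidth: 5000000, throughput: 20000000 }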
25590/**
25591 * Parses VHS-supported media types from data URIs. See
25592 * https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
25593 * for information on data URIs.
25594 *
25595 * @param {string} dataUri
25596 * The data URI
25597 *
25598 * @return {string|Object}
25599 * The parsed object/string, or the original string if no supported media type
25600 * was found
25601 */
25602
25603
25604var expandDataUri = function expandDataUri(dataUri) {
25605 if (dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0) {
25606 return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
25607 } // no known case for this data URI, return the string as-is
25608
25609
25610 return dataUri;
25611};
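// Illustrative usage sketch: the media type checked by expandDataUri above is
// 'application/vnd.videojs.vhs+json'. The manifest-object keys shown here are
// an assumption for illustration only.
//
//   player.src({
//     src: 'data:application/vnd.videojs.vhs+json,' +
//       JSON.stringify({ mediaGroups: {}, playlists: [] }),
//     type: 'application/vnd.videojs.vhs+json'
//   });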
25612/**
25613 * Whether the browser has built-in HLS support.
25614 */
25615
25616
25617Vhs.supportsNativeHls = function () {
25618 if (!document__default["default"] || !document__default["default"].createElement) {
25619 return false;
25620 }
25621
25622 var video = document__default["default"].createElement('video'); // native HLS is definitely not supported if HTML5 video isn't
25623
25624 if (!videojs__default["default"].getTech('Html5').isSupported()) {
25625 return false;
25626 } // HLS manifests can go by many mime-types
25627
25628
25629  var canPlay = [// Apple sanctioned
25630 'application/vnd.apple.mpegurl', // Apple sanctioned for backwards compatibility
25631 'audio/mpegurl', // Very common
25632 'audio/x-mpegurl', // Very common
25633 'application/x-mpegurl', // Included for completeness
25634 'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];
25635 return canPlay.some(function (canItPlay) {
25636 return /maybe|probably/i.test(video.canPlayType(canItPlay));
25637 });
25638}();
25639
25640Vhs.supportsNativeDash = function () {
25641 if (!document__default["default"] || !document__default["default"].createElement || !videojs__default["default"].getTech('Html5').isSupported()) {
25642 return false;
25643 }
25644
25645 return /maybe|probably/i.test(document__default["default"].createElement('video').canPlayType('application/dash+xml'));
25646}();
25647
25648Vhs.supportsTypeNatively = function (type) {
25649 if (type === 'hls') {
25650 return Vhs.supportsNativeHls;
25651 }
25652
25653 if (type === 'dash') {
25654 return Vhs.supportsNativeDash;
25655 }
25656
25657 return false;
25658};
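// Illustrative player-configuration sketch, assuming the standard video.js
// html5/vhs option path: prefer VHS over native playback except on Safari,
// where native HLS is usually the better choice.
//
//   videojs('example-player', {
//     html5: {
//       vhs: {
//         overrideNative: !videojs.browser.IS_ANY_SAFARI
//       },
//       nativeAudioTracks: false,
//       nativeVideoTracks: false
//     }
//   });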
25659/**
25660 * HLS is a source handler, not a tech. Make sure attempts to use it
25661 * as one do not cause exceptions.
25662 */
25663
25664
25665Vhs.isSupported = function () {
25666 return videojs__default["default"].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
25667};
25668
25669var Component = videojs__default["default"].getComponent('Component');
25670/**
25671 * The Vhs Handler object, where we orchestrate all of the parts
25672 * of HLS to interact with video.js
25673 *
25674 * @class VhsHandler
25675 * @extends videojs.Component
25676 * @param {Object} source the source object
25677 * @param {Tech} tech the parent tech object
25678 * @param {Object} options optional and required options
25679 */
25680
25681var VhsHandler = /*#__PURE__*/function (_Component) {
25682 _inheritsLoose__default["default"](VhsHandler, _Component);
25683
25684 function VhsHandler(source, tech, options) {
25685 var _this;
25686
25687 _this = _Component.call(this, tech, videojs__default["default"].mergeOptions(options.hls, options.vhs)) || this;
25688
25689 if (options.hls && Object.keys(options.hls).length) {
25690 videojs__default["default"].log.warn('Using hls options is deprecated. Please rename `hls` to `vhs` in your options object.');
25691 } // if a tech level `initialBandwidth` option was passed
25692 // use that over the VHS level `bandwidth` option
25693
25694
25695 if (typeof options.initialBandwidth === 'number') {
25696 _this.options_.bandwidth = options.initialBandwidth;
25697 }
25698
25699 _this.logger_ = logger('VhsHandler'); // tech.player() is deprecated but setup a reference to HLS for
25700 // backwards-compatibility
25701
25702 if (tech.options_ && tech.options_.playerId) {
25703 var _player = videojs__default["default"](tech.options_.playerId);
25704
25705 if (!_player.hasOwnProperty('hls')) {
25706 Object.defineProperty(_player, 'hls', {
25707 get: function get() {
25708 videojs__default["default"].log.warn('player.hls is deprecated. Use player.tech().vhs instead.');
25709 tech.trigger({
25710 type: 'usage',
25711 name: 'hls-player-access'
25712 });
25713 return _assertThisInitialized__default["default"](_this);
25714 },
25715 configurable: true
25716 });
25717 }
25718
25719 if (!_player.hasOwnProperty('vhs')) {
25720 Object.defineProperty(_player, 'vhs', {
25721 get: function get() {
25722 videojs__default["default"].log.warn('player.vhs is deprecated. Use player.tech().vhs instead.');
25723 tech.trigger({
25724 type: 'usage',
25725 name: 'vhs-player-access'
25726 });
25727 return _assertThisInitialized__default["default"](_this);
25728 },
25729 configurable: true
25730 });
25731 }
25732
25733 if (!_player.hasOwnProperty('dash')) {
25734 Object.defineProperty(_player, 'dash', {
25735 get: function get() {
25736 videojs__default["default"].log.warn('player.dash is deprecated. Use player.tech().vhs instead.');
25737 return _assertThisInitialized__default["default"](_this);
25738 },
25739 configurable: true
25740 });
25741 }
25742
25743 _this.player_ = _player;
25744 }
25745
25746 _this.tech_ = tech;
25747 _this.source_ = source;
25748 _this.stats = {};
25749 _this.ignoreNextSeekingEvent_ = false;
25750
25751 _this.setOptions_();
25752
25753 if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
25754 tech.overrideNativeAudioTracks(true);
25755 tech.overrideNativeVideoTracks(true);
25756 } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
25757 // overriding native HLS only works if audio tracks have been emulated
25758 // error early if we're misconfigured
25759 throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
25760 } // listen for fullscreenchange events for this player so that we
25761 // can adjust our quality selection quickly
25762
25763
25764 _this.on(document__default["default"], ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
25765 var fullscreenElement = document__default["default"].fullscreenElement || document__default["default"].webkitFullscreenElement || document__default["default"].mozFullScreenElement || document__default["default"].msFullscreenElement;
25766
25767 if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
25768 _this.masterPlaylistController_.fastQualityChange_();
25769 } else {
25770 // When leaving fullscreen, since the in page pixel dimensions should be smaller
25771 // than full screen, see if there should be a rendition switch down to preserve
25772 // bandwidth.
25773 _this.masterPlaylistController_.checkABR_();
25774 }
25775 });
25776
25777 _this.on(_this.tech_, 'seeking', function () {
25778 if (this.ignoreNextSeekingEvent_) {
25779 this.ignoreNextSeekingEvent_ = false;
25780 return;
25781 }
25782
25783 this.setCurrentTime(this.tech_.currentTime());
25784 });
25785
25786 _this.on(_this.tech_, 'error', function () {
25787 // verify that the error was real and we are loaded
25788 // enough to have mpc loaded.
25789 if (this.tech_.error() && this.masterPlaylistController_) {
25790 this.masterPlaylistController_.pauseLoading();
25791 }
25792 });
25793
25794 _this.on(_this.tech_, 'play', _this.play);
25795
25796 return _this;
25797 }
25798
25799 var _proto = VhsHandler.prototype;
25800
25801 _proto.setOptions_ = function setOptions_() {
25802 var _this2 = this;
25803
25804 // defaults
25805 this.options_.withCredentials = this.options_.withCredentials || false;
25806 this.options_.handleManifestRedirects = this.options_.handleManifestRedirects === false ? false : true;
25807 this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
25808 this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
25809 this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
25810 this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
25811 this.options_.useNetworkInformationApi = this.options_.useNetworkInformationApi || false;
25812 this.options_.useDtsForTimestampOffset = this.options_.useDtsForTimestampOffset || false;
25813 this.options_.customTagParsers = this.options_.customTagParsers || [];
25814 this.options_.customTagMappers = this.options_.customTagMappers || [];
25815 this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;
25816
25817 if (typeof this.options_.blacklistDuration !== 'number') {
25818 this.options_.blacklistDuration = 5 * 60;
25819 }
25820
25821 if (typeof this.options_.bandwidth !== 'number') {
25822 if (this.options_.useBandwidthFromLocalStorage) {
25823 var storedObject = getVhsLocalStorage();
25824
25825 if (storedObject && storedObject.bandwidth) {
25826 this.options_.bandwidth = storedObject.bandwidth;
25827 this.tech_.trigger({
25828 type: 'usage',
25829 name: 'vhs-bandwidth-from-local-storage'
25830 });
25831 this.tech_.trigger({
25832 type: 'usage',
25833 name: 'hls-bandwidth-from-local-storage'
25834 });
25835 }
25836
25837 if (storedObject && storedObject.throughput) {
25838 this.options_.throughput = storedObject.throughput;
25839 this.tech_.trigger({
25840 type: 'usage',
25841 name: 'vhs-throughput-from-local-storage'
25842 });
25843 this.tech_.trigger({
25844 type: 'usage',
25845 name: 'hls-throughput-from-local-storage'
25846 });
25847 }
25848 }
25849 } // if bandwidth was not set by options or pulled from local storage, start playlist
25850 // selection at a reasonable bandwidth
25851
25852
25853 if (typeof this.options_.bandwidth !== 'number') {
25854 this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
25855    } // enableLowInitialPlaylist is honored only when bandwidth is unchanged from the
25856    // initial (default) setting; an explicitly set bandwidth takes precedence over it
25857
25858
25859 this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src
25860
25861 ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys', 'playlistSelector', 'initialPlaylistSelector', 'experimentalBufferBasedABR', 'liveRangeSafeTimeDelta', 'experimentalLLHLS', 'useNetworkInformationApi', 'useDtsForTimestampOffset', 'experimentalExactManifestTimings', 'experimentalLeastPixelDiffSelector'].forEach(function (option) {
25862 if (typeof _this2.source_[option] !== 'undefined') {
25863 _this2.options_[option] = _this2.source_[option];
25864 }
25865 });
25866 this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
25867 this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
25868 }
25869 /**
25870 * called when player.src gets called, handle a new source
25871 *
25872 * @param {Object} src the source object to handle
25873 */
25874 ;
25875
25876 _proto.src = function src(_src, type) {
25877 var _this3 = this;
25878
25879 // do nothing if the src is falsey
25880 if (!_src) {
25881 return;
25882 }
25883
25884 this.setOptions_(); // add master playlist controller options
25885
25886 this.options_.src = expandDataUri(this.source_.src);
25887 this.options_.tech = this.tech_;
25888 this.options_.externVhs = Vhs;
25889 this.options_.sourceType = mediaTypes_js.simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech
25890
25891 this.options_.seekTo = function (time) {
25892 _this3.tech_.setCurrentTime(time);
25893 };
25894
25895 if (this.options_.smoothQualityChange) {
25896 videojs__default["default"].log.warn('smoothQualityChange is deprecated and will be removed in the next major version');
25897 }
25898
25899 this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
25900 var playbackWatcherOptions = videojs__default["default"].mergeOptions({
25901 liveRangeSafeTimeDelta: SAFE_TIME_DELTA
25902 }, this.options_, {
25903 seekable: function seekable() {
25904 return _this3.seekable();
25905 },
25906 media: function media() {
25907 return _this3.masterPlaylistController_.media();
25908 },
25909 masterPlaylistController: this.masterPlaylistController_
25910 });
25911 this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
25912 this.masterPlaylistController_.on('error', function () {
25913 var player = videojs__default["default"].players[_this3.tech_.options_.playerId];
25914 var error = _this3.masterPlaylistController_.error;
25915
25916 if (typeof error === 'object' && !error.code) {
25917 error.code = 3;
25918 } else if (typeof error === 'string') {
25919 error = {
25920 message: error,
25921 code: 3
25922 };
25923 }
25924
25925 player.error(error);
25926 });
25927 var defaultSelector = this.options_.experimentalBufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
25928 // compatibility with < v2
25929
25930 this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
25931 this.masterPlaylistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2
25932
25933 this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
25934 this.mediaSource = this.masterPlaylistController_.mediaSource; // Proxy assignment of some properties to the master playlist
25935 // controller. Using a custom property for backwards compatibility
25936 // with < v2
25937
25938 Object.defineProperties(this, {
25939 selectPlaylist: {
25940 get: function get() {
25941 return this.masterPlaylistController_.selectPlaylist;
25942 },
25943 set: function set(selectPlaylist) {
25944 this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
25945 }
25946 },
25947 throughput: {
25948 get: function get() {
25949 return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
25950 },
25951 set: function set(throughput) {
25952 this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
25953 // for the cumulative average
25954
25955 this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
25956 }
25957 },
25958 bandwidth: {
25959 get: function get() {
25960 var playerBandwidthEst = this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
25961 var networkInformation = window__default["default"].navigator.connection || window__default["default"].navigator.mozConnection || window__default["default"].navigator.webkitConnection;
25962 var tenMbpsAsBitsPerSecond = 10e6;
25963
25964 if (this.options_.useNetworkInformationApi && networkInformation) {
25965 // downlink returns Mbps
25966 // https://developer.mozilla.org/en-US/docs/Web/API/NetworkInformation/downlink
25967 var networkInfoBandwidthEstBitsPerSec = networkInformation.downlink * 1000 * 1000; // downlink maxes out at 10 Mbps. In the event that both networkInformationApi and the player
25968 // estimate a bandwidth greater than 10 Mbps, use the larger of the two estimates to ensure that
25969 // high quality streams are not filtered out.
25970
25971 if (networkInfoBandwidthEstBitsPerSec >= tenMbpsAsBitsPerSecond && playerBandwidthEst >= tenMbpsAsBitsPerSecond) {
25972 playerBandwidthEst = Math.max(playerBandwidthEst, networkInfoBandwidthEstBitsPerSec);
25973 } else {
25974 playerBandwidthEst = networkInfoBandwidthEstBitsPerSec;
25975 }
25976 }
25977
25978 return playerBandwidthEst;
25979 },
25980 set: function set(bandwidth) {
25981 this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
25982        // `count` is set to zero so that the current value of `rate` isn't included
25983 // in the cumulative average
25984
25985 this.masterPlaylistController_.mainSegmentLoader_.throughput = {
25986 rate: 0,
25987 count: 0
25988 };
25989 }
25990 },
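      // Illustrative note (comment only, not executed): with
      // `useNetworkInformationApi` enabled and `navigator.connection.downlink`
      // pinned at its 10 Mbps cap while the player has measured 12 Mbps, both
      // estimates meet the 10e6 floor, so the larger player value (12e6) wins;
      // had the player measured only 4 Mbps, the Network Information estimate
      // (10e6) would be used instead.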

      /**
       * `systemBandwidth` is a combination of two serial processes' bit rates. The
       * first is the network bitrate provided by `bandwidth` and the second is the
       * bitrate of the entire process after that - decryption, transmuxing, and
       * appending - provided by `throughput`.
       *
       * Since the two processes are serial, the overall system bandwidth is given by:
       * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
       */
      systemBandwidth: {
        get: function get() {
          var invBandwidth = 1 / (this.bandwidth || 1);
          var invThroughput;

          if (this.throughput > 0) {
            invThroughput = 1 / this.throughput;
          } else {
            invThroughput = 0;
          }

          var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
          return systemBitrate;
        },
        set: function set() {
          videojs__default["default"].log.error('The "systemBandwidth" property is read-only');
        }
      }
    });
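    // Worked example (comment only): a 5 Mbps network estimate combined with a
    // 20 Mbps post-download throughput gives
    //   1 / (1 / 5e6 + 1 / 20e6) === 4e6
    // so `systemBandwidth` reports roughly 4 Mbps, always at or below the
    // slower of the two serial stages.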

    if (this.options_.bandwidth) {
      this.bandwidth = this.options_.bandwidth;
    }

    if (this.options_.throughput) {
      this.throughput = this.options_.throughput;
    }
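    // Illustrative player configuration that seeds these initial estimates
    // (an assumption about the embedding page, not part of this file):
    //   videojs('my-player', {
    //     html5: { vhs: { bandwidth: 6e6, throughput: 20e6 } }
    //   });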

    Object.defineProperties(this.stats, {
      bandwidth: {
        get: function get() {
          return _this3.bandwidth || 0;
        },
        enumerable: true
      },
      mediaRequests: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaRequests_() || 0;
        },
        enumerable: true
      },
      mediaRequestsAborted: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
        },
        enumerable: true
      },
      mediaRequestsTimedout: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
        },
        enumerable: true
      },
      mediaRequestsErrored: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
        },
        enumerable: true
      },
      mediaTransferDuration: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
        },
        enumerable: true
      },
      mediaBytesTransferred: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
        },
        enumerable: true
      },
      mediaSecondsLoaded: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
        },
        enumerable: true
      },
      mediaAppends: {
        get: function get() {
          return _this3.masterPlaylistController_.mediaAppends_() || 0;
        },
        enumerable: true
      },
      mainAppendsToLoadedData: {
        get: function get() {
          return _this3.masterPlaylistController_.mainAppendsToLoadedData_() || 0;
        },
        enumerable: true
      },
      audioAppendsToLoadedData: {
        get: function get() {
          return _this3.masterPlaylistController_.audioAppendsToLoadedData_() || 0;
        },
        enumerable: true
      },
      appendsToLoadedData: {
        get: function get() {
          return _this3.masterPlaylistController_.appendsToLoadedData_() || 0;
        },
        enumerable: true
      },
      timeToLoadedData: {
        get: function get() {
          return _this3.masterPlaylistController_.timeToLoadedData_() || 0;
        },
        enumerable: true
      },
      buffered: {
        get: function get() {
          return timeRangesToArray(_this3.tech_.buffered());
        },
        enumerable: true
      },
      currentTime: {
        get: function get() {
          return _this3.tech_.currentTime();
        },
        enumerable: true
      },
      currentSource: {
        get: function get() {
          return _this3.tech_.currentSource_;
        },
        enumerable: true
      },
      currentTech: {
        get: function get() {
          return _this3.tech_.name_;
        },
        enumerable: true
      },
      duration: {
        get: function get() {
          return _this3.tech_.duration();
        },
        enumerable: true
      },
      master: {
        get: function get() {
          return _this3.playlists.master;
        },
        enumerable: true
      },
      playerDimensions: {
        get: function get() {
          return _this3.tech_.currentDimensions();
        },
        enumerable: true
      },
      seekable: {
        get: function get() {
          return timeRangesToArray(_this3.tech_.seekable());
        },
        enumerable: true
      },
      timestamp: {
        get: function get() {
          return Date.now();
        },
        enumerable: true
      },
      videoPlaybackQuality: {
        get: function get() {
          return _this3.tech_.getVideoPlaybackQuality();
        },
        enumerable: true
      }
    });
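    // Illustrative read of these stats from the embedding page (assumes the
    // conventional player.tech().vhs access path):
    //   var stats = player.tech().vhs.stats;
    //   stats.bandwidth;             // current estimate in bits per second
    //   stats.mediaBytesTransferred; // cumulative segment bytes downloaded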
    this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
    this.tech_.on('bandwidthupdate', function () {
      if (_this3.options_.useBandwidthFromLocalStorage) {
        updateVhsLocalStorage({
          bandwidth: _this3.bandwidth,
          throughput: Math.round(_this3.throughput)
        });
      }
    });
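    // Persisting estimates across sessions is opt-in; a sketch of the enabling
    // configuration (an assumption about the embedding page):
    //   videojs('my-player', {
    //     html5: { vhs: { useBandwidthFromLocalStorage: true } }
    //   });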
    this.masterPlaylistController_.on('selectedinitialmedia', function () {
      // Add the manual rendition mix-in to VhsHandler
      renditionSelectionMixin(_this3);
    });
    this.masterPlaylistController_.sourceUpdater_.on('createdsourcebuffers', function () {
      _this3.setupEme_();
    }); // the bandwidth of the primary segment loader is our best
    // estimate of overall bandwidth

    this.on(this.masterPlaylistController_, 'progress', function () {
      this.tech_.trigger('progress');
    }); // In the live case, we need to ignore the very first `seeking` event since
    // that will be the result of the seek-to-live behavior

    this.on(this.masterPlaylistController_, 'firstplay', function () {
      this.ignoreNextSeekingEvent_ = true;
    });
    this.setupQualityLevels_(); // do nothing if the tech has been disposed already
    // this can occur if someone sets the src in player.ready(), for instance

    if (!this.tech_.el()) {
      return;
    }

    this.mediaSourceUrl_ = window__default["default"].URL.createObjectURL(this.masterPlaylistController_.mediaSource);
    this.tech_.src(this.mediaSourceUrl_);
  };

  _proto.createKeySessions_ = function createKeySessions_() {
    var _this4 = this;

    var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
    this.logger_('waiting for EME key session creation');
    waitForKeySessionCreation({
      player: this.player_,
      sourceKeySystems: this.source_.keySystems,
      audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),
      mainPlaylists: this.playlists.master.playlists
    }).then(function () {
      _this4.logger_('created EME key session');

      _this4.masterPlaylistController_.sourceUpdater_.initializedEme();
    }).catch(function (err) {
      _this4.logger_('error while creating EME key session', err);

      _this4.player_.error({
        message: 'Failed to initialize media keys for EME',
        code: 3
      });
    });
  };
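  // Illustrative encrypted source that would exercise this path (assumes
  // videojs-contrib-eme is registered; URLs are placeholders):
  //   player.src({
  //     src: 'https://example.com/manifest.mpd',
  //     type: 'application/dash+xml',
  //     keySystems: {
  //       'com.widevine.alpha': 'https://example.com/license'
  //     }
  //   });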

  _proto.handleWaitingForKey_ = function handleWaitingForKey_() {
    // If waitingforkey is fired, it's possible that the data that's necessary to retrieve
    // the key is in the manifest. While this should've happened on initial source load, it
    // may happen again in live streams where the keys change, and the manifest info
    // reflects the update.
    //
    // Because videojs-contrib-eme compares the PSSH data we send to that of PSSH data it's
    // already requested keys for, we don't have to worry about this generating extraneous
    // requests.
    this.logger_('waitingforkey fired, attempting to create any new key sessions');
    this.createKeySessions_();
  }
  /**
   * If necessary and EME is available, sets up EME options and waits for key session
   * creation.
   *
   * This function also updates the source updater so that it can be used, as for some
   * browsers, EME must be configured before content is appended (if appending unencrypted
   * content before encrypted content).
   */
  ;

  _proto.setupEme_ = function setupEme_() {
    var _this5 = this;

    var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
    var didSetupEmeOptions = setupEmeOptions({
      player: this.player_,
      sourceKeySystems: this.source_.keySystems,
      media: this.playlists.media(),
      audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
    });
    this.player_.tech_.on('keystatuschange', function (e) {
      if (e.status !== 'output-restricted') {
        return;
      }

      var masterPlaylist = _this5.masterPlaylistController_.master();

      if (!masterPlaylist || !masterPlaylist.playlists) {
        return;
      }

      var excludedHDPlaylists = []; // Assume all HD streams are unplayable and exclude them from ABR selection

      masterPlaylist.playlists.forEach(function (playlist) {
        if (playlist && playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height >= 720) {
          if (!playlist.excludeUntil || playlist.excludeUntil < Infinity) {
            playlist.excludeUntil = Infinity;
            excludedHDPlaylists.push(playlist);
          }
        }
      });

      if (excludedHDPlaylists.length) {
        var _videojs$log;

        (_videojs$log = videojs__default["default"].log).warn.apply(_videojs$log, ['DRM keystatus changed to "output-restricted." Removing the following HD playlists ' + 'that will most likely fail to play and clearing the buffer. ' + 'This may be due to HDCP restrictions on the stream and the capabilities of the current device.'].concat(excludedHDPlaylists)); // Clear the buffer before switching playlists, since it may already contain unplayable segments

        _this5.masterPlaylistController_.fastQualityChange_();
      }
    });
    this.handleWaitingForKey_ = this.handleWaitingForKey_.bind(this);
    this.player_.tech_.on('waitingforkey', this.handleWaitingForKey_); // In IE11 this is too early to initialize media keys, and IE11 does not support
    // promises.

    if (videojs__default["default"].browser.IE_VERSION === 11 || !didSetupEmeOptions) {
      // If EME options were not set up, we've done all we could to initialize EME.
      this.masterPlaylistController_.sourceUpdater_.initializedEme();
      return;
    }

    this.createKeySessions_();
  }
  /**
   * Initializes the quality levels and sets listeners to update them.
   *
   * @method setupQualityLevels_
   * @private
   */
  ;

  _proto.setupQualityLevels_ = function setupQualityLevels_() {
    var _this6 = this;

    var player = videojs__default["default"].players[this.tech_.options_.playerId]; // if there isn't a player or there isn't a qualityLevels plugin
    // or qualityLevels_ listeners have already been set up, do nothing.

    if (!player || !player.qualityLevels || this.qualityLevels_) {
      return;
    }

    this.qualityLevels_ = player.qualityLevels();
    this.masterPlaylistController_.on('selectedinitialmedia', function () {
      handleVhsLoadedMetadata(_this6.qualityLevels_, _this6);
    });
    this.playlists.on('mediachange', function () {
      handleVhsMediaChange(_this6.qualityLevels_, _this6.playlists);
    });
  }
  /**
   * return the version
   */
  ;

  VhsHandler.version = function version$5() {
    return {
      '@videojs/http-streaming': version$4,
      'mux.js': version$3,
      'mpd-parser': version$2,
      'm3u8-parser': version$1,
      'aes-decrypter': version
    };
  }
  /**
   * return the version (instance form; delegates to the static method)
   */
  ;

  _proto.version = function version() {
    return this.constructor.version();
  };
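  // Illustrative usage (comment only): both forms return the same dependency
  // map, e.g.
  //   videojs.VhsHandler.version()['@videojs/http-streaming']; // '2.16.0'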

  _proto.canChangeType = function canChangeType() {
    return SourceUpdater.canChangeType();
  }
  /**
   * Begin playing the video.
   */
  ;

  _proto.play = function play() {
    this.masterPlaylistController_.play();
  }
  /**
   * A wrapper around MasterPlaylistController's setCurrentTime
   */
  ;

  _proto.setCurrentTime = function setCurrentTime(currentTime) {
    this.masterPlaylistController_.setCurrentTime(currentTime);
  }
  /**
   * A wrapper around MasterPlaylistController's duration
   */
  ;

  _proto.duration = function duration() {
    return this.masterPlaylistController_.duration();
  }
  /**
   * A wrapper around MasterPlaylistController's seekable
   */
  ;

  _proto.seekable = function seekable() {
    return this.masterPlaylistController_.seekable();
  }
  /**
   * Abort all outstanding work and clean up.
   */
  ;

  _proto.dispose = function dispose() {
    if (this.playbackWatcher_) {
      this.playbackWatcher_.dispose();
    }

    if (this.masterPlaylistController_) {
      this.masterPlaylistController_.dispose();
    }

    if (this.qualityLevels_) {
      this.qualityLevels_.dispose();
    }

    if (this.player_) {
      delete this.player_.vhs;
      delete this.player_.dash;
      delete this.player_.hls;
    }

    if (this.tech_ && this.tech_.vhs) {
      delete this.tech_.vhs;
    } // don't check this.tech_.hls as it will log a deprecated warning

    if (this.tech_) {
      delete this.tech_.hls;
    }

    if (this.mediaSourceUrl_ && window__default["default"].URL.revokeObjectURL) {
      window__default["default"].URL.revokeObjectURL(this.mediaSourceUrl_);
      this.mediaSourceUrl_ = null;
    }

    if (this.tech_) {
      this.tech_.off('waitingforkey', this.handleWaitingForKey_);
    }

    _Component.prototype.dispose.call(this);
  };

  _proto.convertToProgramTime = function convertToProgramTime(time, callback) {
    return getProgramTime({
      playlist: this.masterPlaylistController_.media(),
      time: time,
      callback: callback
    });
  } // the player must be playing before calling this
  ;

  _proto.seekToProgramTime = function seekToProgramTime$1(programTime, callback, pauseAfterSeek, retryCount) {
    if (pauseAfterSeek === void 0) {
      pauseAfterSeek = true;
    }

    if (retryCount === void 0) {
      retryCount = 2;
    }

    return seekToProgramTime({
      programTime: programTime,
      playlist: this.masterPlaylistController_.media(),
      retryCount: retryCount,
      pauseAfterSeek: pauseAfterSeek,
      seekTo: this.options_.seekTo,
      tech: this.options_.tech,
      callback: callback
    });
  };
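  // Illustrative use of the program-time helpers (assumes a stream carrying
  // EXT-X-PROGRAM-DATE-TIME mappings and that playback has started):
  //   player.tech().vhs.seekToProgramTime('2022-01-01T00:00:30.000Z', function (err, newTime) {
  //     if (!err) {
  //       videojs.log('seeked to', newTime);
  //     }
  //   });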

  return VhsHandler;
}(Component);
/**
 * The Source Handler object, which informs video.js what additional
 * MIME types are supported and sets up playback. It is registered
 * automatically to the appropriate tech based on the capabilities of
 * the browser it is running in. It is not necessary to use or modify
 * this object in normal usage.
 */

var VhsSourceHandler = {
  name: 'videojs-http-streaming',
  VERSION: version$4,
  canHandleSource: function canHandleSource(srcObj, options) {
    if (options === void 0) {
      options = {};
    }

    var localOptions = videojs__default["default"].mergeOptions(videojs__default["default"].options, options);
    return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
  },
  handleSource: function handleSource(source, tech, options) {
    if (options === void 0) {
      options = {};
    }

    var localOptions = videojs__default["default"].mergeOptions(videojs__default["default"].options, options);
    tech.vhs = new VhsHandler(source, tech, localOptions);

    if (!videojs__default["default"].hasOwnProperty('hls')) {
      Object.defineProperty(tech, 'hls', {
        get: function get() {
          videojs__default["default"].log.warn('player.tech().hls is deprecated. Use player.tech().vhs instead.');
          return tech.vhs;
        },
        configurable: true
      });
    }

    tech.vhs.xhr = xhrFactory();
    tech.vhs.src(source.src, source.type);
    return tech.vhs;
  },
  canPlayType: function canPlayType(type, options) {
    var simpleType = mediaTypes_js.simpleTypeFromSourceType(type);

    if (!simpleType) {
      return '';
    }

    var overrideNative = VhsSourceHandler.getOverrideNative(options);
    var supportsTypeNatively = Vhs.supportsTypeNatively(simpleType);
    var canUseMsePlayback = !supportsTypeNatively || overrideNative;
    return canUseMsePlayback ? 'maybe' : '';
  },
  getOverrideNative: function getOverrideNative(options) {
    if (options === void 0) {
      options = {};
    }

    var _options = options,
        _options$vhs = _options.vhs,
        vhs = _options$vhs === void 0 ? {} : _options$vhs,
        _options$hls = _options.hls,
        hls = _options$hls === void 0 ? {} : _options$hls;
    var defaultOverrideNative = !(videojs__default["default"].browser.IS_ANY_SAFARI || videojs__default["default"].browser.IS_IOS);
    var _vhs$overrideNative = vhs.overrideNative,
        overrideNative = _vhs$overrideNative === void 0 ? defaultOverrideNative : _vhs$overrideNative;
    var _hls$overrideNative = hls.overrideNative,
        legacyOverrideNative = _hls$overrideNative === void 0 ? false : _hls$overrideNative;
    return legacyOverrideNative || overrideNative;
  }
};
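// Illustrative configuration (an assumption about the embedding page): the
// usual pattern for forcing MSE playback where a browser also plays HLS
// natively:
//   videojs('my-player', {
//     html5: {
//       vhs: { overrideNative: true },
//       nativeAudioTracks: false,
//       nativeVideoTracks: false
//     }
//   });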
/**
 * Check to see if the native MediaSource object exists and supports
 * an MP4 container with both H.264 video and AAC-LC audio.
 *
 * @return {boolean} if native media sources are supported
 */

var supportsNativeMediaSources = function supportsNativeMediaSources() {
  return codecs_js.browserSupportsCodec('avc1.4d400d,mp4a.40.2');
}; // register source handlers with the appropriate techs
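// For reference (comment only, an assumption about vhs-utils internals): the
// codec probe above amounts to a MediaSource capability check roughly
// equivalent to
//   window.MediaSource &&
//     window.MediaSource.isTypeSupported('video/mp4;codecs="avc1.4d400d,mp4a.40.2"');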

if (supportsNativeMediaSources()) {
  videojs__default["default"].getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
}

videojs__default["default"].VhsHandler = VhsHandler;
Object.defineProperty(videojs__default["default"], 'HlsHandler', {
  get: function get() {
    videojs__default["default"].log.warn('videojs.HlsHandler is deprecated. Use videojs.VhsHandler instead.');
    return VhsHandler;
  },
  configurable: true
});
videojs__default["default"].VhsSourceHandler = VhsSourceHandler;
Object.defineProperty(videojs__default["default"], 'HlsSourceHandler', {
  get: function get() {
    videojs__default["default"].log.warn('videojs.HlsSourceHandler is deprecated. ' + 'Use videojs.VhsSourceHandler instead.');
    return VhsSourceHandler;
  },
  configurable: true
});
videojs__default["default"].Vhs = Vhs;
Object.defineProperty(videojs__default["default"], 'Hls', {
  get: function get() {
    videojs__default["default"].log.warn('videojs.Hls is deprecated. Use videojs.Vhs instead.');
    return Vhs;
  },
  configurable: true
});

if (!videojs__default["default"].use) {
  videojs__default["default"].registerComponent('Hls', Vhs);
  videojs__default["default"].registerComponent('Vhs', Vhs);
}

videojs__default["default"].options.vhs = videojs__default["default"].options.vhs || {};
videojs__default["default"].options.hls = videojs__default["default"].options.hls || {};

if (!videojs__default["default"].getPlugin || !videojs__default["default"].getPlugin('reloadSourceOnError')) {
  var registerPlugin = videojs__default["default"].registerPlugin || videojs__default["default"].plugin;
  registerPlugin('reloadSourceOnError', reloadSourceOnError);
}
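// Illustrative use of the plugin registered above (errorInterval is the
// plugin's documented option, in seconds):
//   player.reloadSourceOnError({ errorInterval: 10 });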

Object.defineProperty(exports, 'simpleTypeFromSourceType', {
  enumerable: true,
  get: function () { return mediaTypes_js.simpleTypeFromSourceType; }
});
exports.LOCAL_STORAGE_KEY = LOCAL_STORAGE_KEY;
exports.Vhs = Vhs;
exports.VhsHandler = VhsHandler;
exports.VhsSourceHandler = VhsSourceHandler;
exports.emeKeySystems = emeKeySystems;
exports.expandDataUri = expandDataUri;
exports.getAllPsshKeySystemsOptions = getAllPsshKeySystemsOptions;
exports.setupEmeOptions = setupEmeOptions;
exports.waitForKeySessionCreation = waitForKeySessionCreation;