UNPKG

891 kBJavaScriptView Raw
1/*! @name @videojs/http-streaming @version 2.16.0 @license Apache-2.0 */
2import _assertThisInitialized from '@babel/runtime/helpers/assertThisInitialized';
3import _inheritsLoose from '@babel/runtime/helpers/inheritsLoose';
4import document from 'global/document';
5import window$1 from 'global/window';
6import _resolveUrl from '@videojs/vhs-utils/es/resolve-url.js';
7import videojs from 'video.js';
8import _extends from '@babel/runtime/helpers/extends';
9import { Parser } from 'm3u8-parser';
10import { isAudioCodec, translateLegacyCodec, codecsFromDefault, parseCodecs, getMimeForCodec, DEFAULT_VIDEO_CODEC, DEFAULT_AUDIO_CODEC, browserSupportsCodec, muxerSupportsCodec } from '@videojs/vhs-utils/es/codecs.js';
11import { simpleTypeFromSourceType } from '@videojs/vhs-utils/es/media-types.js';
12export { simpleTypeFromSourceType } from '@videojs/vhs-utils/es/media-types.js';
13import { isArrayBufferView, concatTypedArrays, stringToBytes, toUint8 } from '@videojs/vhs-utils/es/byte-helpers';
14import { generateSidxKey, parseUTCTiming, parse, addSidxSegmentsToPlaylist } from 'mpd-parser';
15import parseSidx from 'mux.js/lib/tools/parse-sidx';
16import { getId3Offset } from '@videojs/vhs-utils/es/id3-helpers';
17import { detectContainerForBytes, isLikelyFmp4MediaSegment } from '@videojs/vhs-utils/es/containers';
18import { ONE_SECOND_IN_TS } from 'mux.js/lib/utils/clock';
19import _wrapNativeSuper from '@babel/runtime/helpers/wrapNativeSuper';
20
/**
 * @file resolve-url.js - Handling how URLs are resolved and manipulated
 */

// Re-export @videojs/vhs-utils' resolve-url under the local name used
// throughout the rest of this bundle.
var resolveUrl = _resolveUrl;
/**
 * Checks whether an xhr request was redirected and returns the correct url
 * depending on the `handleManifestRedirects` option.
 *
 * @api private
 *
 * @param {boolean} handleManifestRedirect - whether redirected urls should be honored
 * @param {string} url - the url that was requested
 * @param {XMLHttpRequest} req - xhr request result
 *
 * @return {string} the url the manifest should be attributed to
 */
var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
  // nothing to resolve without redirect handling or a usable response
  if (!handleManifestRedirect || !req || !req.responseURL) {
    return url;
  }

  // To understand how the responseURL below is set and generated:
  // - https://fetch.spec.whatwg.org/#concept-response-url
  // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
  if (req.responseURL === url) {
    return url;
  }

  return req.responseURL;
};
47
/**
 * Build a debug logging function scoped to the given source label.
 * Falls back to a no-op when videojs debug logging is unavailable.
 *
 * @param {string} source - label appended to the 'VHS:' prefix of every line
 * @return {Function} a bound videojs debug logger, or a no-op
 */
var logger = function logger(source) {
  var debug = videojs.log.debug;

  if (!debug) {
    return function () {};
  }

  return debug.bind(videojs, 'VHS:', source + " >");
};
55
/**
 * ranges
 *
 * Utilities for working with TimeRanges.
 *
 */

// Comparisons between time values such as current time and the end of the
// buffered range can be misleading because of precision differences or when
// the current media has poorly aligned audio and video, which can cause
// values to be slightly off from what you would expect. This value is what
// we consider to be safe to use in such comparisons to account for these
// scenarios.
var TIME_FUDGE_FACTOR = 1 / 30;

// A wider tolerance (three fudge factors) used where extra slack is wanted,
// e.g. when deciding whether a time falls inside a buffered range.
var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
70
/**
 * Build a new TimeRanges object containing only the ranges of `timeRanges`
 * for which `predicate(start, end)` is truthy.
 *
 * @param {TimeRanges} timeRanges - the ranges to filter
 * @param {Function} predicate - called with (start, end) for each range
 * @return {TimeRanges} a new TimeRanges object with the matching ranges
 */
var filterRanges = function filterRanges(timeRanges, predicate) {
  var matching = [];

  if (timeRanges && timeRanges.length) {
    // collect every [start, end] pair the predicate accepts
    for (var rangeIndex = 0; rangeIndex < timeRanges.length; rangeIndex++) {
      var start = timeRanges.start(rangeIndex);
      var end = timeRanges.end(rangeIndex);

      if (predicate(start, end)) {
        matching.push([start, end]);
      }
    }
  }

  return videojs.createTimeRanges(matching);
};
/**
 * Attempts to find the buffered TimeRange that contains the specified
 * time, allowing SAFE_TIME_DELTA of slack on either side of each range.
 *
 * @param {TimeRanges} buffered - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @return {TimeRanges} a new TimeRanges object
 */
var findRange = function findRange(buffered, time) {
  var containsTime = function containsTime(start, end) {
    return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;
  };

  return filterRanges(buffered, containsTime);
};
/**
 * Returns the TimeRanges that begin later than the specified time
 * (with TIME_FUDGE_FACTOR of tolerance).
 *
 * @param {TimeRanges} timeRanges - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @return {TimeRanges} a new TimeRanges object.
 */
var findNextRange = function findNextRange(timeRanges, time) {
  var startsAfter = function startsAfter(start) {
    return start - TIME_FUDGE_FACTOR >= time;
  };

  return filterRanges(timeRanges, startsAfter);
};
/**
 * Returns gaps within a list of TimeRanges.
 *
 * @param {TimeRanges} buffered - the TimeRanges object
 * @return {TimeRanges} a TimeRanges object of gaps
 */
var findGaps = function findGaps(buffered) {
  // fewer than two buffered ranges means there is nothing between them
  if (buffered.length < 2) {
    return videojs.createTimeRanges();
  }

  var gaps = [];

  for (var i = 1; i < buffered.length; i++) {
    // a gap runs from the end of one range to the start of the next
    gaps.push([buffered.end(i - 1), buffered.start(i)]);
  }

  return videojs.createTimeRanges(gaps);
};
/**
 * Calculate the intersection of two TimeRanges.
 *
 * Works as a boundary sweep: every start/end time from both buffers is
 * collected, sorted, then walked in order while tracking how many ranges
 * are currently "open" (`arity`). Whenever two ranges are open at once the
 * buffers overlap; each such overlapping stretch is recorded and the set of
 * stretches is returned as the intersection.
 *
 * @param {TimeRanges} bufferA
 * @param {TimeRanges} bufferB
 * @return {TimeRanges} The interesection of `bufferA` with `bufferB`
 */
var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
  var start = null; // start of the overlap currently being tracked
  var end = null; // end of the overlap currently being tracked
  var arity = 0; // how many ranges are open at the sweep position
  var extents = []; // all start/end boundaries from both buffers
  var ranges = []; // collected overlapping [start, end] pairs

  // an empty/missing buffer intersects with nothing
  if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
    return videojs.createTimeRange();
  } // Handle the case where we have both buffers and create an
  // intersection of the two


  var count = bufferA.length; // A) Gather up all start and end times

  while (count--) {
    extents.push({
      time: bufferA.start(count),
      type: 'start'
    });
    extents.push({
      time: bufferA.end(count),
      type: 'end'
    });
  }

  count = bufferB.length;

  while (count--) {
    extents.push({
      time: bufferB.start(count),
      type: 'start'
    });
    extents.push({
      time: bufferB.end(count),
      type: 'end'
    });
  } // B) Sort them by time


  extents.sort(function (a, b) {
    return a.time - b.time;
  }); // C) Go along one by one incrementing arity for start and decrementing
  // arity for ends

  for (count = 0; count < extents.length; count++) {
    if (extents[count].type === 'start') {
      arity++; // D) If arity is ever incremented to 2 we are entering an
      // overlapping range

      if (arity === 2) {
        start = extents[count].time;
      }
    } else if (extents[count].type === 'end') {
      arity--; // E) If arity is ever decremented to 1 we leaving an
      // overlapping range

      if (arity === 1) {
        end = extents[count].time;
      }
    } // F) Record overlapping ranges


    if (start !== null && end !== null) {
      ranges.push([start, end]);
      start = null;
      end = null;
    }
  }

  return videojs.createTimeRanges(ranges);
};
/**
 * Gets a human readable string for a TimeRange.
 *
 * @param {TimeRange} range
 * @return {string} a human readable string, e.g. "0 => 10, 20 => 30"
 */
var printableRange = function printableRange(range) {
  if (!range || !range.length) {
    return '';
  }

  var pieces = [];

  for (var rangeIndex = 0; rangeIndex < range.length; rangeIndex++) {
    pieces.push(range.start(rangeIndex) + ' => ' + range.end(rangeIndex));
  }

  return pieces.join(', ');
};
/**
 * Calculates the amount of time left in seconds until the player hits the end
 * of the buffer and causes a rebuffer.
 *
 * @param {TimeRange} buffered
 *        The state of the buffer
 * @param {Numnber} currentTime
 *        The current time of the player
 * @param {number} playbackRate
 *        The current playback rate of the player. Defaults to 1.
 * @return {number}
 *         Time until the player has to start rebuffering in seconds.
 * @function timeUntilRebuffer
 */
var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime, playbackRate) {
  if (playbackRate === void 0) {
    playbackRate = 1;
  }

  // with nothing buffered, treat the buffer as ending at time 0
  var lastEnd = 0;

  if (buffered.length) {
    lastEnd = buffered.end(buffered.length - 1);
  }

  // remaining buffered media, scaled by how fast it is being consumed
  return (lastEnd - currentTime) / playbackRate;
};
/**
 * Converts a TimeRanges object into an array of {start, end} objects.
 *
 * @param {TimeRanges} timeRanges
 * @return {Array} one entry per range, in order
 */
var timeRangesToArray = function timeRangesToArray(timeRanges) {
  var list = [];

  for (var rangeIndex = 0; rangeIndex < timeRanges.length; rangeIndex++) {
    list.push({
      start: timeRanges.start(rangeIndex),
      end: timeRanges.end(rangeIndex)
    });
  }

  return list;
};
/**
 * Determines if two time range objects are different.
 *
 * @param {TimeRange} a
 *        the first time range object to check
 *
 * @param {TimeRange} b
 *        the second time range object to check
 *
 * @return {Boolean}
 *         Whether the time range objects differ
 */
var isRangeDifferent = function isRangeDifferent(a, b) {
  // identical object references cannot differ
  if (a === b) {
    return false;
  }

  // exactly one of the two is missing
  if (!a && b || !b && a) {
    return true;
  }

  // differing range counts means they differ
  if (a.length !== b.length) {
    return true;
  }

  // compare every start/end pair
  for (var rangeIndex = 0; rangeIndex < a.length; rangeIndex++) {
    if (a.start(rangeIndex) !== b.start(rangeIndex) || a.end(rangeIndex) !== b.end(rangeIndex)) {
      return true;
    }
  }

  // same length and every pair matched: same time range
  return false;
};
/**
 * Return the end time of the last buffered range, or undefined when there
 * is nothing buffered or the object cannot report an end time.
 *
 * @param {TimeRanges} a - the TimeRanges object to inspect
 * @return {number|undefined} the end of the final range
 */
var lastBufferedEnd = function lastBufferedEnd(a) {
  if (!a || !a.length || !a.end) {
    return;
  }

  var lastIndex = a.length - 1;
  return a.end(lastIndex);
};
/**
 * A utility function to add up the amount of time in a timeRange
 * after a specified startTime.
 * ie:[[0, 10], [20, 40], [50, 60]] with a startTime 0
 *     would return 40 as there are 40s seconds after 0 in the timeRange
 *
 * @param {TimeRange} range
 *        The range to check against
 * @param {number} startTime
 *        The time in the time range that you should start counting from
 *
 * @return {number}
 *          The number of seconds in the buffer passed the specified time.
 */
var timeAheadOf = function timeAheadOf(range, startTime) {
  var total = 0;

  if (!range || !range.length) {
    return total;
  }

  for (var rangeIndex = 0; rangeIndex < range.length; rangeIndex++) {
    var start = range.start(rangeIndex);
    var end = range.end(rangeIndex);

    // the range ends before startTime; contributes nothing
    if (startTime > end) {
      continue;
    }

    // startTime falls inside this range; only count the tail
    if (startTime > start && startTime <= end) {
      total += end - startTime;
      continue;
    }

    // the whole range lies after startTime
    total += end - start;
  }

  return total;
};
367
/**
 * @file playlist.js
 *
 * Playlist related utilities.
 */

// local alias for videojs' TimeRange factory used by the playlist helpers
var createTimeRange = videojs.createTimeRange;
/**
 * Get the duration of a segment, with special cases for
 * llhls segments that do not have a duration yet.
 *
 * @param {Object} playlist
 *        the playlist that the segment belongs to.
 * @param {Object} segment
 *        the segment to get a duration for.
 *
 * @return {number}
 *          the segment duration
 */
var segmentDurationWithParts = function segmentDurationWithParts(playlist, segment) {
  // non-preload segments already carry an accurate duration
  if (!segment.preload) {
    return segment.duration;
  }

  // preload segments: add up the durations of known parts...
  var parts = segment.parts || [];
  var preloadHints = segment.preloadHints || [];
  var total = 0;

  parts.forEach(function (part) {
    total += part.duration;
  });

  // ...and estimate each hinted PART with the playlist's part target
  // duration, since hints have no duration of their own yet.
  preloadHints.forEach(function (hint) {
    if (hint.type === 'PART') {
      total += playlist.partTargetDuration;
    }
  });

  return total;
};
/**
 * A function to get a combined list of parts and segments with durations
 * and indexes.
 *
 * @param {Playlist} playlist the playlist to get the list for.
 *
 * @return {Array} The part/segment list.
 */
var getPartsAndSegments = function getPartsAndSegments(playlist) {
  var segments = playlist.segments || [];
  var list = [];

  segments.forEach(function (segment, segmentIndex) {
    if (!segment.parts) {
      // segment without parts: a single entry with null part fields
      list.push({
        duration: segment.duration,
        segmentIndex: segmentIndex,
        partIndex: null,
        segment: segment,
        part: null
      });
      return;
    }

    // one entry per part, each pointing back at its parent segment
    segment.parts.forEach(function (part, partIndex) {
      list.push({
        duration: part.duration,
        segmentIndex: segmentIndex,
        partIndex: partIndex,
        part: part,
        segment: segment
      });
    });
  });

  return list;
};
/**
 * Return the parts array of the final segment in a media playlist,
 * or an empty array when there are no segments or no parts.
 *
 * @param {Object} media the media playlist object
 * @return {Array} the last segment's parts (possibly empty)
 */
var getLastParts = function getLastParts(media) {
  var segments = media.segments;

  if (!segments || !segments.length) {
    return [];
  }

  var lastSegment = segments[segments.length - 1];
  return lastSegment.parts || [];
};
/**
 * Count the parts that are known for a preload segment: loaded parts plus
 * PART-type preload hints. Returns undefined when there is no preload
 * segment at all.
 *
 * @param {Object} options - object with a `preloadSegment` property
 * @return {number|undefined} the known part count
 */
var getKnownPartCount = function getKnownPartCount(_ref) {
  var preloadSegment = _ref.preloadSegment;

  if (!preloadSegment) {
    return;
  }

  var parts = preloadSegment.parts;
  var preloadHints = preloadSegment.preloadHints || [];
  var count = 0;

  // hinted-but-unloaded parts each count once
  preloadHints.forEach(function (hint) {
    if (hint.type === 'PART') {
      count++;
    }
  });

  // plus any parts that have already been listed
  if (parts && parts.length) {
    count += parts.length;
  }

  return count;
};
/**
 * Get the number of seconds to delay from the end of a
 * live playlist.
 *
 * @param {Playlist} master the master playlist
 * @param {Playlist} media the media playlist
 * @return {number} the hold back in seconds.
 */
var liveEdgeDelay = function liveEdgeDelay(master, media) {
  // VOD playlists have no live edge to delay from
  if (media.endList) {
    return 0;
  }

  // dash suggestedPresentationDelay trumps everything
  if (master && master.suggestedPresentationDelay) {
    return master.suggestedPresentationDelay;
  }

  var hasParts = getLastParts(media).length > 0;

  // ll-hls "part" delays take priority...
  if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
    return media.serverControl.partHoldBack;
  }

  if (hasParts && media.partTargetDuration) {
    return media.partTargetDuration * 3;
  }

  // ...then fall back to full segment delays
  if (media.serverControl && media.serverControl.holdBack) {
    return media.serverControl.holdBack;
  }

  if (media.targetDuration) {
    return media.targetDuration * 3;
  }

  return 0;
};
/**
 * walk backward until we find a duration we can use
 * or return a failure
 *
 * Starting at the segment corresponding to `endSequence`, prefer that
 * segment's own `start` (or `end - duration`) when timing information is
 * present. Otherwise walk toward the front of the playlist accumulating
 * segment durations until a segment with a known `start`/`end` anchors the
 * estimate. The `precise` flag reports whether real timing information was
 * found; when false, `result` is only the sum of manifest durations.
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {Number} endSequence the mediaSequence to stop walking on
 * @return {Object} an object shaped { result: <seconds>, precise: <boolean> }
 */
var backwardDuration = function backwardDuration(playlist, endSequence) {
  var result = 0;
  var i = endSequence - playlist.mediaSequence; // if a start time is available for segment immediately following
  // the interval, use it

  var segment = playlist.segments[i]; // Walk backward until we find the latest segment with timeline
  // information that is earlier than endSequence

  if (segment) {
    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start,
        precise: true
      };
    }

    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - segment.duration,
        precise: true
      };
    }
  }

  while (i--) {
    segment = playlist.segments[i];

    if (typeof segment.end !== 'undefined') {
      return {
        result: result + segment.end,
        precise: true
      };
    }

    result += segmentDurationWithParts(playlist, segment);

    if (typeof segment.start !== 'undefined') {
      return {
        result: result + segment.start,
        precise: true
      };
    }
  }

  // no timing info anywhere before endSequence; return the summed
  // manifest durations as an imprecise estimate
  return {
    result: result,
    precise: false
  };
};
/**
 * walk forward until we find a duration we can use
 * or return a failure
 *
 * Starting at the segment corresponding to `endSequence`, walk toward the
 * end of the playlist accumulating manifest durations until a segment with
 * a known `start` or `end` time is found; that anchor lets the time at
 * `endSequence` be computed precisely by subtracting the accumulated
 * durations. Returns { result: -1, precise: false } when no timing info
 * exists anywhere ahead.
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {number} endSequence the mediaSequence to stop walking on
 * @return {Object} an object shaped { result: <seconds>, precise: <boolean> }
 */
var forwardDuration = function forwardDuration(playlist, endSequence) {
  var result = 0;
  var segment;
  var i = endSequence - playlist.mediaSequence; // Walk forward until we find the earliest segment with timeline
  // information

  for (; i < playlist.segments.length; i++) {
    segment = playlist.segments[i];

    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start - result,
        precise: true
      };
    }

    result += segmentDurationWithParts(playlist, segment);

    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - result,
        precise: true
      };
    }
  } // indicate we didn't find a useful duration estimate


  return {
    result: -1,
    precise: false
  };
};
/**
 * Calculate the media duration from the segments associated with a
 * playlist. The duration of a subinterval of the available segments
 * may be calculated by specifying an end index.
 *
 * Tries a backward walk first, then a forward walk, preferring whichever
 * finds precise timing information; otherwise falls back to the backward
 * walk's manifest-duration estimate plus `expired`.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper boundary
 * for the playlist. Defaults to playlist length.
 * @param {number} expired the amount of time that has dropped
 * off the front of the playlist in a live scenario
 * @return {number} the duration between the first available segment
 * and end index.
 */
var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
  if (typeof endSequence === 'undefined') {
    endSequence = playlist.mediaSequence + playlist.segments.length;
  }

  // an endSequence before the window means nothing remains to measure
  if (endSequence < playlist.mediaSequence) {
    return 0;
  } // do a backward walk to estimate the duration


  var backward = backwardDuration(playlist, endSequence);

  if (backward.precise) {
    // if we were able to base our duration estimate on timing
    // information provided directly from the Media Source, return
    // it
    return backward.result;
  } // walk forward to see if a precise duration estimate can be made
  // that way


  var forward = forwardDuration(playlist, endSequence);

  if (forward.precise) {
    // we found a segment that has been buffered and so it's
    // position is known precisely
    return forward.result;
  } // return the less-precise, playlist-based duration estimate


  return backward.result + expired;
};
/**
 * Calculates the duration of a playlist. If a start and end index
 * are specified, the duration will be for the subset of the media
 * timeline between those two indices. The total duration for live
 * playlists is always Infinity.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper
 * boundary for the playlist. Defaults to the playlist media
 * sequence number plus its length.
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @return {number} the duration between the start index and end
 * index.
 */
var duration = function duration(playlist, endSequence, expired) {
  // no playlist means no duration
  if (!playlist) {
    return 0;
  }

  if (typeof expired !== 'number') {
    expired = 0;
  } // if a slice of the total duration is not requested, use
  // playlist-level duration indicators when they're present


  if (typeof endSequence === 'undefined') {
    // if present, use the duration specified in the playlist
    if (playlist.totalDuration) {
      return playlist.totalDuration;
    } // duration should be Infinity for live playlists


    if (!playlist.endList) {
      return window$1.Infinity;
    }
  } // calculate the total duration based on the segment durations


  return intervalDuration(playlist, endSequence, expired);
};
/**
 * Calculate the time between two indexes in the current playlist.
 * Neither the start- nor the end-index need to be within the current
 * playlist, in which case the defaultDuration (normally the playlist's
 * targetDuration) is used to approximate the durations of the missing
 * entries.
 *
 * @param {Array} options.durationList list to iterate over for durations.
 * @param {number} options.defaultDuration duration to use for elements before or after the durationList
 * @param {number} options.startIndex partsAndSegments index to start
 * @param {number} options.endIndex partsAndSegments index to end.
 * @return {number} the number of seconds between startIndex and endIndex
 */
var sumDurations = function sumDurations(_ref2) {
  var defaultDuration = _ref2.defaultDuration,
      durationList = _ref2.durationList,
      startIndex = _ref2.startIndex,
      endIndex = _ref2.endIndex;
  var durations = 0;

  // normalize so we always walk start -> end
  if (startIndex > endIndex) {
    var _ref3 = [endIndex, startIndex];
    startIndex = _ref3[0];
    endIndex = _ref3[1];
  }

  // indexes before the start of the list are approximated with
  // defaultDuration
  if (startIndex < 0) {
    for (var i = startIndex; i < Math.min(0, endIndex); i++) {
      durations += defaultDuration;
    }

    startIndex = 0;
  }

  for (var _i = startIndex; _i < endIndex; _i++) {
    // indexes past the end of the list are likewise approximated with
    // defaultDuration instead of throwing on a missing entry, matching
    // the documented "before or after the durationList" contract
    if (_i >= durationList.length) {
      durations += defaultDuration;
    } else {
      durations += durationList[_i].duration;
    }
  }

  return durations;
};
/**
 * Calculates the playlist end time
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @param {boolean|false} useSafeLiveEnd a boolean value indicating whether or not the
 * playlist end calculation should consider the safe live end
 * (truncate the playlist end by three segments). This is normally
 * used for calculating the end of the playlist's seekable range.
 * This takes into account the value of liveEdgePadding.
 * Setting liveEdgePadding to 0 is equivalent to setting this to false.
 * @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
 * If this is provided, it is used in the safe live end calculation.
 * Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
 * Corresponds to suggestedPresentationDelay in DASH manifests.
 * @return {number} the end time of playlist
 * @function playlistEnd
 */
var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
  // without a playlist or segments there is no end to compute
  if (!playlist || !playlist.segments) {
    return null;
  }

  // VOD playlists end at their total duration
  if (playlist.endList) {
    return duration(playlist);
  }

  if (expired === null) {
    return null;
  }

  expired = expired || 0;
  // absolute end time of the final segment in the playlist
  var lastSegmentEndTime = intervalDuration(playlist, playlist.mediaSequence + playlist.segments.length, expired);

  if (useSafeLiveEnd) {
    // back off from the live edge by the configured padding, or by the
    // playlist's own hold-back rules when no padding was supplied
    liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
    lastSegmentEndTime -= liveEdgePadding;
  } // don't return a time less than zero


  return Math.max(0, lastSegmentEndTime);
};
/**
 * Calculates the interval of time that is currently seekable in a
 * playlist. The returned time ranges are relative to the earliest
 * moment in the specified playlist that is still available. A full
 * seekable implementation for live streams would need to offset
 * these values by the duration of content that has expired from the
 * stream.
 *
 * @param {Object} playlist a media playlist object
 * dropped off the front of the playlist in a live scenario
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
 *        Corresponds to suggestedPresentationDelay in DASH manifests.
 * @return {TimeRanges} the periods of time that are valid targets
 * for seeking
 */
var seekable = function seekable(playlist, expired, liveEdgePadding) {
  // always keep a safe distance from the live edge when seeking
  var useSafeLiveEnd = true;
  var start = expired || 0;
  var end = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);

  // no computable end means nothing is seekable yet
  if (end === null) {
    return createTimeRange();
  }

  return createTimeRange(start, end);
};
/**
 * Determine the index and estimated starting time of the segment that
 * contains a specified playback position in a media playlist.
 *
 * Anchors itself at the entry for (startingSegmentIndex, startingPartIndex),
 * then walks backward or forward through the flattened part/segment list,
 * adding or subtracting durations until the entry containing `currentTime`
 * is found.
 *
 * @param {Object} options.playlist the media playlist to query
 * @param {number} options.currentTime The number of seconds since the earliest
 * possible position to determine the containing segment for
 * @param {number} options.startTime the time when the segment/part starts
 * @param {number} options.startingSegmentIndex the segment index to start looking at.
 * @param {number?} [options.startingPartIndex] the part index to look at within the segment.
 *
 * @return {Object} an object with partIndex, segmentIndex, and startTime.
 */
var getMediaInfoForTime = function getMediaInfoForTime(_ref4) {
  var playlist = _ref4.playlist,
      currentTime = _ref4.currentTime,
      startingSegmentIndex = _ref4.startingSegmentIndex,
      startingPartIndex = _ref4.startingPartIndex,
      startTime = _ref4.startTime,
      experimentalExactManifestTimings = _ref4.experimentalExactManifestTimings;
  // offset of the requested time relative to the known start of the
  // anchor segment/part
  var time = currentTime - startTime;
  var partsAndSegments = getPartsAndSegments(playlist);
  var startIndex = 0;

  // locate the flattened-list entry for the anchor segment (and part,
  // when one was given)
  for (var i = 0; i < partsAndSegments.length; i++) {
    var partAndSegment = partsAndSegments[i];

    if (startingSegmentIndex !== partAndSegment.segmentIndex) {
      continue;
    } // skip this if part index does not match.


    if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {
      continue;
    }

    startIndex = i;
    break;
  }

  if (time < 0) {
    // Walk backward from startIndex in the playlist, adding durations
    // until we find a segment that contains `time` and return it
    if (startIndex > 0) {
      for (var _i2 = startIndex - 1; _i2 >= 0; _i2--) {
        var _partAndSegment = partsAndSegments[_i2];
        time += _partAndSegment.duration;

        // exact timings require reaching zero exactly; otherwise allow
        // TIME_FUDGE_FACTOR of slack
        if (experimentalExactManifestTimings) {
          if (time < 0) {
            continue;
          }
        } else if (time + TIME_FUDGE_FACTOR <= 0) {
          continue;
        }

        return {
          partIndex: _partAndSegment.partIndex,
          segmentIndex: _partAndSegment.segmentIndex,
          startTime: startTime - sumDurations({
            defaultDuration: playlist.targetDuration,
            durationList: partsAndSegments,
            startIndex: startIndex,
            endIndex: _i2
          })
        };
      }
    } // We were unable to find a good segment within the playlist
    // so select the first segment


    return {
      partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
      segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
      startTime: currentTime
    };
  } // When startIndex is negative, we first walk forward to first segment
  // adding target durations. If we "run out of time" before getting to
  // the first segment, return the first segment


  if (startIndex < 0) {
    for (var _i3 = startIndex; _i3 < 0; _i3++) {
      time -= playlist.targetDuration;

      if (time < 0) {
        return {
          partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
          segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
          startTime: currentTime
        };
      }
    }

    startIndex = 0;
  } // Walk forward from startIndex in the playlist, subtracting durations
  // until we find a segment that contains `time` and return it


  for (var _i4 = startIndex; _i4 < partsAndSegments.length; _i4++) {
    var _partAndSegment2 = partsAndSegments[_i4];
    time -= _partAndSegment2.duration;

    if (experimentalExactManifestTimings) {
      if (time > 0) {
        continue;
      }
    } else if (time - TIME_FUDGE_FACTOR >= 0) {
      continue;
    }

    return {
      partIndex: _partAndSegment2.partIndex,
      segmentIndex: _partAndSegment2.segmentIndex,
      startTime: startTime + sumDurations({
        defaultDuration: playlist.targetDuration,
        durationList: partsAndSegments,
        startIndex: startIndex,
        endIndex: _i4
      })
    };
  } // We are out of possible candidates so load the last one...


  return {
    segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
    partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
    startTime: currentTime
  };
};
/**
 * Check whether the playlist is blacklisted or not.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is blacklisted or not
 * @function isBlacklisted
 */
var isBlacklisted = function isBlacklisted(playlist) {
  var excludeUntil = playlist.excludeUntil;

  // blacklisted while the exclusion deadline is still in the future
  return excludeUntil && excludeUntil > Date.now();
};
/**
 * Check whether the playlist is compatible with current playback configuration or has
 * been blacklisted permanently for being incompatible.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is incompatible or not
 * @function isIncompatible
 */
var isIncompatible = function isIncompatible(playlist) {
  var excludeUntil = playlist.excludeUntil;

  // a permanent (Infinity) exclusion marks an incompatible playlist
  return excludeUntil && excludeUntil === Infinity;
};
/**
 * Check whether the playlist is enabled or not.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is enabled or not
 * @function isEnabled
 */
var isEnabled = function isEnabled(playlist) {
  // manually disabled playlists are never enabled
  if (playlist.disabled) {
    return false;
  }

  // otherwise enabled unless currently blacklisted
  return !isBlacklisted(playlist);
};
/**
 * Check whether the playlist has been manually disabled through the representations api.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is disabled manually or not
 * @function isDisabled
 */
var isDisabled = function isDisabled(playlist) {
  var manuallyDisabled = playlist.disabled;
  return manuallyDisabled;
};
/**
 * Returns whether the current playlist is an AES encrypted HLS stream
 * (i.e. whether any segment carries a key).
 *
 * @param {Object} media the media playlist object
 * @return {boolean} true if it's an AES encrypted HLS stream
 */
var isAes = function isAes(media) {
  // guard against playlists that have not loaded any segments yet;
  // sibling helpers (e.g. getLastParts) guard the same way
  if (!media || !media.segments) {
    return false;
  }

  for (var i = 0; i < media.segments.length; i++) {
    if (media.segments[i].key) {
      return true;
    }
  }

  return false;
};
/**
 * Checks if the playlist has a value for the specified attribute
 *
 * @param {string} attr
 *        Attribute to check for
 * @param {Object} playlist
 *        The media playlist object
 * @return {boolean}
 *         Whether the playlist contains a value for the attribute or not
 * @function hasAttribute
 */
var hasAttribute = function hasAttribute(attr, playlist) {
  var attributes = playlist.attributes;
  return attributes && attributes[attr];
};
/**
 * Estimates the time required to complete a segment download from the specified playlist
 *
 * @param {number} segmentDuration
 *        Duration of requested segment
 * @param {number} bandwidth
 *        Current measured bandwidth of the player
 * @param {Object} playlist
 *        The media playlist object
 * @param {number=} bytesReceived
 *        Number of bytes already received for the request. Defaults to 0
 * @return {number|NaN}
 *         The estimated time to request the segment. NaN if bandwidth information for
 *         the given playlist is unavailable
 * @function estimateSegmentRequestTime
 */
var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist, bytesReceived) {
  if (bytesReceived === void 0) {
    bytesReceived = 0;
  }

  // without a BANDWIDTH attribute there is no basis for an estimate
  if (!hasAttribute('BANDWIDTH', playlist)) {
    return NaN;
  }

  // estimated total size in bits, less what has already arrived (bytes
  // converted to bits), divided by the measured bandwidth
  var totalBits = segmentDuration * playlist.attributes.BANDWIDTH;
  var remainingBits = totalBits - bytesReceived * 8;
  return remainingBits / bandwidth;
};
1032/*
1033 * Returns whether the current playlist is the lowest rendition
1034 *
1035 * @return {Boolean} true if on lowest rendition
1036 */
1037
var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
  // a single playlist is trivially the lowest
  if (master.playlists.length === 1) {
    return true;
  }

  var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
  // lowest iff no enabled playlist advertises a lower bandwidth
  var lowerEnabled = master.playlists.filter(function (playlist) {
    return isEnabled(playlist) && (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
  });
  return lowerEnabled.length === 0;
};
/**
 * Determine whether two playlist objects refer to the same playlist,
 * comparing (in decreasing order of reliability) identity, id,
 * resolvedUri, then uri.
 *
 * @param {Object|null} a a playlist (or null)
 * @param {Object|null} b a playlist (or null)
 * @return {boolean} true when the playlists match
 */
var playlistMatch = function playlistMatch(a, b) {
  // no match when either playlist is missing
  if (!a || !b) {
    return false;
  }

  // same object, trivially a match
  if (a === b) {
    return true;
  }

  // id is the most accurate key, resolvedUri next, then uri (which can
  // miss some relative-uri cases)
  var keys = ['id', 'resolvedUri', 'uri'];

  for (var i = 0; i < keys.length; i++) {
    var key = keys[i];

    if (a[key] && b[key] && a[key] === b[key]) {
      return true;
    }
  }

  return false;
};
1085
/**
 * Call `callback` for each variant in the master's AUDIO media groups
 * until it returns a truthy value.
 *
 * @param {Object} master the master playlist (may be null/undefined)
 * @param {Function} callback predicate applied to each audio variant
 * @return {boolean} true when the callback returned truthy for a variant
 */
var someAudioVariant = function someAudioVariant(master, callback) {
  var AUDIO = master && master.mediaGroups && master.mediaGroups.AUDIO || {};

  for (var groupName in AUDIO) {
    var group = AUDIO[groupName];

    for (var label in group) {
      // stop at the first matching variant
      if (callback(group[label])) {
        return true;
      }
    }
  }

  return false;
};
1106
/**
 * Determine whether a master manifest describes an audio-only
 * presentation: either it has no main playlists but does have AUDIO
 * media-group variants, or every main playlist is audio (by CODECS or
 * by membership in an AUDIO group).
 *
 * @param {Object} master the master playlist object
 * @return {boolean} true when the master is audio only
 */
var isAudioOnly = function isAudioOnly(master) {
  if (!master || !master.playlists || !master.playlists.length) {
    // no main playlists: audio only iff some AUDIO group variant has
    // playlists or a uri of its own
    return someAudioVariant(master, function (variant) {
      return variant.playlists && variant.playlists.length || variant.uri;
    });
  }

  for (var i = 0; i < master.playlists.length; i++) {
    var playlist = master.playlists[i];
    var CODECS = playlist.attributes && playlist.attributes.CODECS;

    // every declared codec is an audio codec: an audio playlist
    if (CODECS && CODECS.split(',').every(function (c) {
      return isAudioCodec(c);
    })) {
      continue;
    }

    // the playlist appears in an AUDIO media group: audio only
    var inAudioGroup = someAudioVariant(master, function (variant) {
      return playlistMatch(playlist, variant);
    });

    if (inAudioGroup) {
      continue;
    }

    // this playlist is not audio, so the master is not audio only
    return false;
  }

  // every playlist was audio
  return true;
}; // exports
1157
// Public namespace collecting the playlist helper functions defined above.
var Playlist = {
  liveEdgeDelay: liveEdgeDelay,
  duration: duration,
  seekable: seekable,
  getMediaInfoForTime: getMediaInfoForTime,
  isEnabled: isEnabled,
  isDisabled: isDisabled,
  isBlacklisted: isBlacklisted,
  isIncompatible: isIncompatible,
  playlistEnd: playlistEnd,
  isAes: isAes,
  hasAttribute: hasAttribute,
  estimateSegmentRequestTime: estimateSegmentRequestTime,
  isLowestEnabledRendition: isLowestEnabledRendition,
  isAudioOnly: isAudioOnly,
  playlistMatch: playlistMatch,
  segmentDurationWithParts: segmentDurationWithParts
};
1176
// alias for videojs's logger, used for manifest warnings/errors below
var log = videojs.log;
/**
 * Build the canonical playlist id: "<index>-<uri>".
 *
 * @param {number} index index of the playlist within the master
 * @param {string} uri the playlist uri
 * @return {string} the playlist id
 */
var createPlaylistID = function createPlaylistID(index, uri) {
  var id = index + "-" + uri;
  return id;
};
1181/**
1182 * Parses a given m3u8 playlist
1183 *
1184 * @param {Function} [onwarn]
1185 * a function to call when the parser triggers a warning event.
1186 * @param {Function} [oninfo]
1187 * a function to call when the parser triggers an info event.
1188 * @param {string} manifestString
1189 * The downloaded manifest string
1190 * @param {Object[]} [customTagParsers]
1191 * An array of custom tag parsers for the m3u8-parser instance
1192 * @param {Object[]} [customTagMappers]
1193 * An array of custom tag mappers for the m3u8-parser instance
1194 * @param {boolean} [experimentalLLHLS=false]
1195 * Whether to keep ll-hls features in the manifest after parsing.
1196 * @return {Object}
1197 * The manifest object
1198 */
1199
var parseManifest = function parseManifest(_ref) {
  var onwarn = _ref.onwarn,
      oninfo = _ref.oninfo,
      manifestString = _ref.manifestString,
      _ref$customTagParsers = _ref.customTagParsers,
      customTagParsers = _ref$customTagParsers === void 0 ? [] : _ref$customTagParsers,
      _ref$customTagMappers = _ref.customTagMappers,
      customTagMappers = _ref$customTagMappers === void 0 ? [] : _ref$customTagMappers,
      experimentalLLHLS = _ref.experimentalLLHLS;
  var parser = new Parser(); // forward parser diagnostics to the caller's handlers, when provided

  if (onwarn) {
    parser.on('warn', onwarn);
  }

  if (oninfo) {
    parser.on('info', oninfo);
  }

  // register user-supplied tag parsers/mappers before feeding the text in

  customTagParsers.forEach(function (customParser) {
    return parser.addParser(customParser);
  });
  customTagMappers.forEach(function (mapper) {
    return parser.addTagMapper(mapper);
  });
  parser.push(manifestString);
  parser.end();
  var manifest = parser.manifest; // remove llhls features from the parsed manifest
  // if we don't want llhls support.

  if (!experimentalLLHLS) {
    ['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration'].forEach(function (k) {
      if (manifest.hasOwnProperty(k)) {
        delete manifest[k];
      }
    });

    if (manifest.segments) {
      manifest.segments.forEach(function (segment) {
        ['parts', 'preloadHints'].forEach(function (k) {
          if (segment.hasOwnProperty(k)) {
            delete segment[k];
          }
        });
      });
    }
  }

  // guarantee a targetDuration: default to the longest segment duration,
  // or 10 when there are no segments to measure

  if (!manifest.targetDuration) {
    var targetDuration = 10;

    if (manifest.segments && manifest.segments.length) {
      targetDuration = manifest.segments.reduce(function (acc, s) {
        return Math.max(acc, s.duration);
      }, 0);
    }

    if (onwarn) {
      onwarn("manifest has no targetDuration defaulting to " + targetDuration);
    }

    manifest.targetDuration = targetDuration;
  }

  // LL-HLS: derive partTargetDuration from the longest last-segment part
  // when the manifest has parts but omitted PART-TARGET

  var parts = getLastParts(manifest);

  if (parts.length && !manifest.partTargetDuration) {
    var partTargetDuration = parts.reduce(function (acc, p) {
      return Math.max(acc, p.duration);
    }, 0);

    if (onwarn) {
      onwarn("manifest has no partTargetDuration defaulting to " + partTargetDuration);
      log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. Playback is not guaranteed.');
    }

    manifest.partTargetDuration = partTargetDuration;
  }

  return manifest;
};
1281/**
1282 * Loops through all supported media groups in master and calls the provided
1283 * callback for each group
1284 *
1285 * @param {Object} master
1286 * The parsed master manifest object
1287 * @param {Function} callback
1288 * Callback to call for each media group
1289 */
1290
var forEachMediaGroup = function forEachMediaGroup(master, callback) {
  if (!master.mediaGroups) {
    return;
  }

  // only AUDIO and SUBTITLES groups are walked; VIDEO and
  // CLOSED-CAPTIONS are intentionally skipped
  ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
    var groups = master.mediaGroups[mediaType];

    if (!groups) {
      return;
    }

    Object.keys(groups).forEach(function (groupKey) {
      Object.keys(groups[groupKey]).forEach(function (labelKey) {
        callback(groups[groupKey][labelKey], mediaType, groupKey, labelKey);
      });
    });
  });
};
1309/**
1310 * Adds properties and attributes to the playlist to keep consistent functionality for
1311 * playlists throughout VHS.
1312 *
1313 * @param {Object} config
1314 * Arguments object
1315 * @param {Object} config.playlist
1316 * The media playlist
1317 * @param {string} [config.uri]
1318 * The uri to the media playlist (if media playlist is not from within a master
1319 * playlist)
1320 * @param {string} id
1321 * ID to use for the playlist
1322 */
1323
var setupMediaPlaylist = function setupMediaPlaylist(_ref2) {
  var playlist = _ref2.playlist;
  var uri = _ref2.uri;
  var id = _ref2.id;

  playlist.id = id;
  playlist.playlistErrors_ = 0;

  // m3u8-parser has no access to a media playlist's own source URI (HLS
  // media playlists do not contain one), so attach it here for
  // consistency across VHS when the caller provides it
  if (uri) {
    playlist.uri = uri;
  }

  // Master playlists may omit attributes that the spec requires, and
  // m3u8-parser attaches none to media playlists at all; ensure an
  // attributes object exists so the rest of the project can dereference
  // it without undefined checks.
  playlist.attributes = playlist.attributes || {};
};
1347/**
1348 * Adds ID, resolvedUri, and attributes properties to each playlist of the master, where
1349 * necessary. In addition, creates playlist IDs for each playlist and adds playlist ID to
1350 * playlist references to the playlists array.
1351 *
1352 * @param {Object} master
1353 * The master playlist
1354 */
1355
var setupMediaPlaylists = function setupMediaPlaylists(master) {
  // iterate from the end, as the original did, so that on duplicate
  // uri/id keys the lowest-index playlist wins the reference slot
  for (var i = master.playlists.length - 1; i >= 0; i--) {
    var playlist = master.playlists[i];
    setupMediaPlaylist({
      playlist: playlist,
      id: createPlaylistID(i, playlist.uri)
    });
    playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);

    // ID reference, plus URI reference for backwards compatibility
    master.playlists[playlist.id] = playlist;
    master.playlists[playlist.uri] = playlist;

    // The spec requires EXT-X-STREAM-INF to carry BANDWIDTH, but playback
    // can proceed without it; warn so the manifest can be fixed.
    if (!playlist.attributes.BANDWIDTH) {
      log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
    }
  }
};
1378/**
1379 * Adds resolvedUri properties to each media group.
1380 *
1381 * @param {Object} master
1382 * The master playlist
1383 */
1384
var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
  forEachMediaGroup(master, function (properties) {
    // only media groups that declare a uri get a resolvedUri
    if (!properties.uri) {
      return;
    }

    properties.resolvedUri = resolveUrl(master.uri, properties.uri);
  });
};
1392/**
1393 * Creates a master playlist wrapper to insert a sole media playlist into.
1394 *
1395 * @param {Object} media
1396 * Media playlist
1397 * @param {string} uri
1398 * The media URI
1399 *
1400 * @return {Object}
1401 * Master playlist
1402 */
1403
var masterForMedia = function masterForMedia(media, uri) {
  var id = createPlaylistID(0, uri);

  // m3u8-parser does not attach an attributes property to media playlists
  // so make sure one exists to avoid undefined reference errors
  var playlist = {
    uri: uri,
    id: id,
    resolvedUri: uri,
    attributes: {}
  };

  var master = {
    mediaGroups: {
      'AUDIO': {},
      'VIDEO': {},
      'CLOSED-CAPTIONS': {},
      'SUBTITLES': {}
    },
    uri: window$1.location.href,
    resolvedUri: window$1.location.href,
    playlists: [playlist]
  };

  // ID reference, plus URI reference for backwards compatibility
  master.playlists[id] = playlist;
  master.playlists[uri] = playlist;
  return master;
};
1430/**
1431 * Does an in-place update of the master manifest to add updated playlist URI references
1432 * as well as other properties needed by VHS that aren't included by the parser.
1433 *
1434 * @param {Object} master
1435 * Master manifest object
1436 * @param {string} uri
1437 * The source URI
1438 */
1439
var addPropertiesToMaster = function addPropertiesToMaster(master, uri) {
  master.uri = uri;

  for (var i = 0; i < master.playlists.length; i++) {
    if (!master.playlists[i].uri) {
      // Set up phony URIs for the playlists since playlists are referenced by their URIs
      // throughout VHS, but some formats (e.g., DASH) don't have external URIs
      // TODO: consider adding dummy URIs in mpd-parser
      var phonyUri = "placeholder-uri-" + i;
      master.playlists[i].uri = phonyUri;
    }
  }

  // computed once up front: used below to decide whether AUDIO media
  // groups are alternate tracks or the main (audio-only) playlists
  var audioOnlyMaster = isAudioOnly(master);
  forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
    var groupId = "placeholder-uri-" + mediaType + "-" + groupKey + "-" + labelKey; // add a playlist array under properties

    if (!properties.playlists || !properties.playlists.length) {
      // If the manifest is audio only and this media group does not have a uri, check
      // if the media group is located in the main list of playlists. If it is, don't add
      // placeholder properties as it shouldn't be considered an alternate audio track.
      if (audioOnlyMaster && mediaType === 'AUDIO' && !properties.uri) {
        for (var _i = 0; _i < master.playlists.length; _i++) {
          var p = master.playlists[_i];

          if (p.attributes && p.attributes.AUDIO && p.attributes.AUDIO === groupKey) {
            return;
          }
        }
      }

      // synthesize a single placeholder playlist from the group properties
      properties.playlists = [_extends({}, properties)];
    }

    properties.playlists.forEach(function (p, i) {
      var id = createPlaylistID(i, groupId);

      if (p.uri) {
        p.resolvedUri = p.resolvedUri || resolveUrl(master.uri, p.uri);
      } else {
        // DEPRECATED, this has been added to prevent a breaking change.
        // previously we only ever had a single media group playlist, so
        // we mark the first playlist uri without prepending the index as we used to
        // ideally we would do all of the playlists the same way.
        p.uri = i === 0 ? groupId : id; // don't resolve a placeholder uri to an absolute url, just use
        // the placeholder again

        p.resolvedUri = p.uri;
      }

      p.id = p.id || id; // add an empty attributes object, all playlists are
      // expected to have this.

      p.attributes = p.attributes || {}; // setup ID and URI references (URI for backwards compatibility)

      master.playlists[p.id] = p;
      master.playlists[p.uri] = p;
    });
  });
  setupMediaPlaylists(master);
  resolveMediaGroupUris(master);
};
1502
// local aliases for the videojs utilities used by the playlist loader below
var mergeOptions$2 = videojs.mergeOptions,
    EventTarget$1 = videojs.EventTarget;
1505
/**
 * Append LL-HLS delivery-directive query parameters (_HLS_msn, _HLS_part,
 * _HLS_skip) to a live playlist refresh uri, based on the capabilities the
 * server advertised via EXT-X-SERVER-CONTROL. Returns the uri unchanged
 * for VOD playlists or when no serverControl info is present.
 *
 * @param {string} uri the playlist uri about to be refreshed
 * @param {Object} media the current media playlist object
 * @return {string} the uri, possibly with delivery directives appended
 */
var addLLHLSQueryDirectives = function addLLHLSQueryDirectives(uri, media) {
  if (media.endList || !media.serverControl) {
    return uri;
  }

  var parameters = {};

  if (media.serverControl.canBlockReload) {
    var preloadSegment = media.preloadSegment; // next msn is a zero based value, length is not.

    var nextMSN = media.mediaSequence + media.segments.length; // If preload segment has parts then it is likely
    // that we are going to request a part of that preload segment.
    // the logic below is used to determine that.

    if (preloadSegment) {
      var parts = preloadSegment.parts || []; // _HLS_part is a zero based index

      var nextPart = getKnownPartCount(media) - 1; // if nextPart is > -1 and not equal to just the
      // length of parts, then we know we had part preload hints
      // and we need to add the _HLS_part= query

      if (nextPart > -1 && nextPart !== parts.length - 1) {
        // add existing parts to our preload hints
        // eslint-disable-next-line
        parameters._HLS_part = nextPart;
      } // this if statement makes sure that we request the msn
      // of the preload segment if:
      // 1. the preload segment had parts (and was not yet a full segment)
      //    but was added to our segments array
      // 2. the preload segment had preload hints for parts that are not in
      //    the manifest yet.
      // in all other cases we want the segment after the preload segment
      // which will be given by using media.segments.length because it is 1 based
      // rather than 0 based.


      if (nextPart > -1 || parts.length) {
        nextMSN--;
      }
    } // add _HLS_msn= in front of any _HLS_part query
    // eslint-disable-next-line


    parameters._HLS_msn = nextMSN;
  }

  if (media.serverControl && media.serverControl.canSkipUntil) {
    // add _HLS_skip= infront of all other queries.
    // eslint-disable-next-line
    parameters._HLS_skip = media.serverControl.canSkipDateranges ? 'v2' : 'YES';
  }

  if (Object.keys(parameters).length) {
    var parsedUri = new window$1.URL(uri);
    ['_HLS_skip', '_HLS_msn', '_HLS_part'].forEach(function (name) {
      if (!parameters.hasOwnProperty(name)) {
        return;
      }

      parsedUri.searchParams.set(name, parameters[name]);
    });
    uri = parsedUri.toString();
  }

  return uri;
};
1572/**
1573 * Returns a new segment object with properties and
1574 * the parts array merged.
1575 *
1576 * @param {Object} a the old segment
1577 * @param {Object} b the new segment
1578 *
1579 * @return {Object} the merged segment
1580 */
1581
1582
var updateSegment = function updateSegment(a, b) {
  // nothing to merge from, take the new segment as-is
  if (!a) {
    return b;
  }

  var result = mergeOptions$2(a, b);

  // preload hints present only on the old segment are stale; drop them
  if (a.preloadHints && !b.preloadHints) {
    delete result.preloadHints;
  }

  if (a.parts && !b.parts) {
    // parts present only on the old segment are no longer valid
    delete result.parts;
  } else if (a.parts && b.parts) {
    // both segments have parts: carry old part properties into the new ones
    for (var partIndex = 0; partIndex < b.parts.length; partIndex++) {
      if (a.parts[partIndex]) {
        result.parts[partIndex] = mergeOptions$2(a.parts[partIndex], b.parts[partIndex]);
      }
    }
  }

  // a segment that gained information from the old segment is no longer
  // considered skipped
  if (!a.skipped && b.skipped) {
    result.skipped = false;
  }

  // a segment that is now complete in the new playlist is no longer a preload
  if (a.preload && !b.preload) {
    result.preload = false;
  }

  return result;
};
1623/**
1624 * Returns a new array of segments that is the result of merging
1625 * properties from an older list of segments onto an updated
1626 * list. No properties on the updated playlist will be ovewritten.
1627 *
1628 * @param {Array} original the outdated list of segments
1629 * @param {Array} update the updated list of segments
1630 * @param {number=} offset the index of the first update
1631 * segment in the original segment list. For non-live playlists,
1632 * this should always be zero and does not need to be
1633 * specified. For live playlists, it should be the difference
1634 * between the media sequence numbers in the original and updated
1635 * playlists.
1636 * @return {Array} a list of merged segment objects
1637 */
1638
var updateSegments = function updateSegments(original, update, offset) {
  var oldSegments = original.slice();
  var newSegments = update.slice();
  offset = offset || 0;
  var result = [];
  var currentMap;

  newSegments.forEach(function (newSegment, newIndex) {
    // offset aligns the new list against the old one (media sequence delta)
    var oldSegment = oldSegments[newIndex + offset];

    if (oldSegment) {
      currentMap = oldSegment.map || currentMap;
      result.push(updateSegment(oldSegment, newSegment));
      return;
    }

    // carry over map to new segment if it is missing
    if (currentMap && !newSegment.map) {
      newSegment.map = currentMap;
    }

    result.push(newSegment);
  });

  return result;
};
/**
 * Resolve every relative uri carried by a segment (its own uri, key, map,
 * map key, parts and preload hints) against the playlist's base uri,
 * filling in resolvedUri wherever it is missing.
 *
 * @param {Object} segment the segment to resolve uris for (mutated)
 * @param {string} baseUri the playlist's resolved uri
 */
var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
  // a preloadSegment has no uri at all (it isn't in the manifest yet,
  // only its parts are), so only resolve when a uri exists
  if (!segment.resolvedUri && segment.uri) {
    segment.resolvedUri = resolveUrl(baseUri, segment.uri);
  }

  var key = segment.key;

  if (key && !key.resolvedUri) {
    key.resolvedUri = resolveUrl(baseUri, key.uri);
  }

  var map = segment.map;

  if (map && !map.resolvedUri) {
    map.resolvedUri = resolveUrl(baseUri, map.uri);
  }

  if (map && map.key && !map.key.resolvedUri) {
    map.key.resolvedUri = resolveUrl(baseUri, map.key.uri);
  }

  // parts and preload hints resolve the same way
  ['parts', 'preloadHints'].forEach(function (prop) {
    var list = segment[prop];

    if (!list || !list.length) {
      return;
    }

    list.forEach(function (p) {
      if (!p.resolvedUri) {
        p.resolvedUri = resolveUrl(baseUri, p.uri);
      }
    });
  });
};
1704
/**
 * Return the playlist's segment list with a usable preload segment
 * appended. A preload segment only qualifies when it already has parts
 * and none of its preload hints is a MAP (a MAP hint means the init
 * segment will change, so its parts cannot be used).
 *
 * @param {Object} media the media playlist
 * @return {Object[]} the (possibly extended) segments array
 */
var getAllSegments = function getAllSegments(media) {
  var segments = media.segments || [];
  var preloadSegment = media.preloadSegment;

  // a preloadSegment with only preloadHints is not yet usable
  if (preloadSegment && preloadSegment.parts && preloadSegment.parts.length) {
    var hints = preloadSegment.preloadHints || [];

    for (var i = 0; i < hints.length; i++) {
      if (hints[i].type === 'MAP') {
        return segments;
      }
    }

    // treat the preload segment as a full segment of targetDuration,
    // flagged so later code knows it is incomplete
    preloadSegment.duration = media.targetDuration;
    preloadSegment.preload = true;
    segments.push(preloadSegment);
  }

  return segments;
}; // consider the playlist unchanged if the playlist object is the same or
1731// the number of segments is equal, the media sequence number is unchanged,
1732// and this playlist hasn't become the end of the playlist
1733
1734
var isPlaylistUnchanged = function isPlaylistUnchanged(a, b) {
  // identical object: trivially unchanged
  if (a === b) {
    return true;
  }

  // note: deliberately returns the raw (possibly undefined) short-circuit
  // value, exactly like the original `&&` chain, so callers see the same
  // truthiness
  var sameSegmentCount = a.segments && b.segments && a.segments.length === b.segments.length;
  return sameSegmentCount && a.endList === b.endList && a.mediaSequence === b.mediaSequence && a.preloadSegment === b.preloadSegment;
};
1738/**
1739 * Returns a new master playlist that is the result of merging an
1740 * updated media playlist into the original version. If the
1741 * updated media playlist does not match any of the playlist
1742 * entries in the original master playlist, null is returned.
1743 *
1744 * @param {Object} master a parsed master M3U8 object
1745 * @param {Object} media a parsed media M3U8 object
1746 * @return {Object} a new object that represents the original
1747 * master playlist with the updated media playlist merged in, or
1748 * null if the merge produced no change.
1749 */
1750
var updateMaster$1 = function updateMaster(master, newMedia, unchangedCheck) {
  if (unchangedCheck === void 0) {
    unchangedCheck = isPlaylistUnchanged;
  }

  // copy via mergeOptions so the caller's master object is not mutated
  var result = mergeOptions$2(master, {});
  var oldMedia = result.playlists[newMedia.id];

  // the updated playlist doesn't match any entry in the master
  if (!oldMedia) {
    return null;
  }

  // nothing changed since the last refresh
  if (unchangedCheck(oldMedia, newMedia)) {
    return null;
  }

  newMedia.segments = getAllSegments(newMedia);
  var mergedPlaylist = mergeOptions$2(oldMedia, newMedia); // always use the new media's preload segment

  if (mergedPlaylist.preloadSegment && !newMedia.preloadSegment) {
    delete mergedPlaylist.preloadSegment;
  } // if the update could overlap existing segment information, merge the two segment lists


  if (oldMedia.segments) {
    if (newMedia.skip) {
      newMedia.segments = newMedia.segments || []; // add back in objects for skipped segments, so that we merge
      // old properties into the new segments

      for (var i = 0; i < newMedia.skip.skippedSegments; i++) {
        newMedia.segments.unshift({
          skipped: true
        });
      }
    }

    mergedPlaylist.segments = updateSegments(oldMedia.segments, newMedia.segments, newMedia.mediaSequence - oldMedia.mediaSequence);
  } // resolve any segment URIs to prevent us from having to do it later


  mergedPlaylist.segments.forEach(function (segment) {
    resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
  }); // TODO Right now in the playlists array there are two references to each playlist, one
  // that is referenced by index, and one by URI. The index reference may no longer be
  // necessary.

  for (var _i = 0; _i < result.playlists.length; _i++) {
    if (result.playlists[_i].id === newMedia.id) {
      result.playlists[_i] = mergedPlaylist;
    }
  }

  result.playlists[newMedia.id] = mergedPlaylist; // URI reference added for backwards compatibility

  result.playlists[newMedia.uri] = mergedPlaylist; // update media group playlist references.

  forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
    if (!properties.playlists) {
      return;
    }

    for (var _i2 = 0; _i2 < properties.playlists.length; _i2++) {
      if (newMedia.id === properties.playlists[_i2].id) {
        properties.playlists[_i2] = mergedPlaylist;
      }
    }
  });
  return result;
};
1820/**
1821 * Calculates the time to wait before refreshing a live playlist
1822 *
1823 * @param {Object} media
1824 * The current media
1825 * @param {boolean} update
1826 * True if there were any updates from the last refresh, false otherwise
1827 * @return {number}
1828 * The time in ms to wait before refreshing the live playlist
1829 */
1830
var refreshDelay = function refreshDelay(media, update) {
  var segments = media.segments || [];
  var lastSegment = segments[segments.length - 1];
  var lastParts = lastSegment && lastSegment.parts;
  var lastPart = lastParts && lastParts[lastParts.length - 1];

  // prefer the last part's duration (LL-HLS), falling back to the last
  // full segment's duration
  var lastDuration = lastPart && lastPart.duration || lastSegment && lastSegment.duration;

  if (update && lastDuration) {
    return lastDuration * 1000;
  }

  // if the playlist is unchanged since the last reload or last segment duration
  // cannot be determined, try again after half the target duration
  return (media.partTargetDuration || media.targetDuration || 10) * 500;
};
1845/**
1846 * Load a playlist from a remote location
1847 *
1848 * @class PlaylistLoader
1849 * @extends Stream
1850 * @param {string|Object} src url or object of manifest
1851 * @param {boolean} withCredentials the withCredentials xhr option
1852 * @class
1853 */
1854
1855var PlaylistLoader = /*#__PURE__*/function (_EventTarget) {
1856 _inheritsLoose(PlaylistLoader, _EventTarget);
1857
  /**
   * Create a PlaylistLoader.
   *
   * @param {string|Object} src
   *        url of, or object representing, the manifest to load
   * @param {Object} vhs
   *        the owning VHS instance; supplies `xhr` and `options_`
   * @param {Object} [options]
   *        loader options
   * @param {boolean} [options.withCredentials=false]
   *        whether xhr requests are made with credentials
   * @param {boolean} [options.handleManifestRedirects=false]
   *        whether redirected manifest urls should be followed/recorded
   * @throws {Error} when src is empty
   */
  function PlaylistLoader(src, vhs, options) {
    var _this;

    if (options === void 0) {
      options = {};
    }

    _this = _EventTarget.call(this) || this;

    if (!src) {
      throw new Error('A non-empty playlist URL or object is required');
    }

    _this.logger_ = logger('PlaylistLoader');
    var _options = options,
        _options$withCredenti = _options.withCredentials,
        withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
        _options$handleManife = _options.handleManifestRedirects,
        handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
    _this.src = src;
    _this.vhs_ = vhs;
    _this.withCredentials = withCredentials;
    _this.handleManifestRedirects = handleManifestRedirects;
    var vhsOptions = vhs.options_;
    _this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];
    _this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || [];
    _this.experimentalLLHLS = vhsOptions && vhsOptions.experimentalLLHLS || false; // LL-HLS is force-disabled for IE 11

    if (videojs.browser.IE_VERSION) {
      _this.experimentalLLHLS = false;
    } // initialize the loader state


    _this.state = 'HAVE_NOTHING'; // live playlist staleness timeout

    _this.handleMediaupdatetimeout_ = _this.handleMediaupdatetimeout_.bind(_assertThisInitialized(_this));

    _this.on('mediaupdatetimeout', _this.handleMediaupdatetimeout_);

    return _this;
  }
1899
1900 var _proto = PlaylistLoader.prototype;
1901
  /**
   * Refresh the current live media playlist when the staleness timeout
   * fires: re-request the playlist (with LL-HLS delivery directives
   * appended when enabled) and hand the response to haveMetadata.
   * No-op unless the loader is idle in the HAVE_METADATA state.
   */
  _proto.handleMediaupdatetimeout_ = function handleMediaupdatetimeout_() {
    var _this2 = this;

    if (this.state !== 'HAVE_METADATA') {
      // only refresh the media playlist if no other activity is going on
      return;
    }

    var media = this.media();
    var uri = resolveUrl(this.master.uri, media.uri);

    if (this.experimentalLLHLS) {
      uri = addLLHLSQueryDirectives(uri, media);
    }

    this.state = 'HAVE_CURRENT_METADATA';
    this.request = this.vhs_.xhr({
      uri: uri,
      withCredentials: this.withCredentials
    }, function (error, req) {
      // disposed
      if (!_this2.request) {
        return;
      }

      if (error) {
        // restore HAVE_METADATA so a later refresh can retry
        return _this2.playlistRequestError(_this2.request, _this2.media(), 'HAVE_METADATA');
      }

      _this2.haveMetadata({
        playlistString: _this2.request.responseText,
        url: _this2.media().uri,
        id: _this2.media().id
      });
    });
  };
1938
  /**
   * Record a failed playlist request on the loader and trigger 'error'.
   *
   * @param {XMLHttpRequest} xhr the failed request
   * @param {Object} playlist the playlist that was being requested
   * @param {string} [startingState] loader state to restore before erroring
   */
  _proto.playlistRequestError = function playlistRequestError(xhr, playlist, startingState) {
    var uri = playlist.uri,
        id = playlist.id; // any in-flight request is now finished

    this.request = null;

    if (startingState) {
      this.state = startingState;
    }

    this.error = {
      playlist: this.master.playlists[id],
      status: xhr.status,
      message: "HLS playlist request error at URL: " + uri + ".",
      responseText: xhr.responseText,
      // 5xx responses map to code 4, everything else to code 2
      // NOTE(review): confirm code semantics against the error handlers
      code: xhr.status >= 500 ? 4 : 2
    };
    this.trigger('error');
  };
1958
  /**
   * Parse a raw m3u8 manifest string using this loader's configuration
   * (custom tag parsers/mappers, LL-HLS flag), routing parser warn/info
   * events into the loader's logger.
   *
   * @param {string} url url the manifest was requested from (for log messages)
   * @param {string} manifestString the downloaded manifest text
   * @return {Object} the parsed manifest object
   */
  _proto.parseManifest_ = function parseManifest_(_ref) {
    var _this3 = this;

    var url = _ref.url,
        manifestString = _ref.manifestString;
    return parseManifest({
      onwarn: function onwarn(_ref2) {
        var message = _ref2.message;
        return _this3.logger_("m3u8-parser warn for " + url + ": " + message);
      },
      oninfo: function oninfo(_ref3) {
        var message = _ref3.message;
        return _this3.logger_("m3u8-parser info for " + url + ": " + message);
      },
      manifestString: manifestString,
      customTagParsers: this.customTagParsers,
      customTagMappers: this.customTagMappers,
      experimentalLLHLS: this.experimentalLLHLS
    });
  }
1979 /**
1980 * Update the playlist loader's state in response to a new or updated playlist.
1981 *
1982 * @param {string} [playlistString]
1983 * Playlist string (if playlistObject is not provided)
1984 * @param {Object} [playlistObject]
1985 * Playlist object (if playlistString is not provided)
1986 * @param {string} url
1987 * URL of playlist
1988 * @param {string} id
1989 * ID to use for playlist
1990 */
1991 ;
1992
1993 _proto.haveMetadata = function haveMetadata(_ref4) {
1994 var playlistString = _ref4.playlistString,
1995 playlistObject = _ref4.playlistObject,
1996 url = _ref4.url,
1997 id = _ref4.id;
1998 // any in-flight request is now finished
1999 this.request = null;
2000 this.state = 'HAVE_METADATA';
2001 var playlist = playlistObject || this.parseManifest_({
2002 url: url,
2003 manifestString: playlistString
2004 });
2005 playlist.lastRequest = Date.now();
2006 setupMediaPlaylist({
2007 playlist: playlist,
2008 uri: url,
2009 id: id
2010 }); // merge this playlist into the master
2011
2012 var update = updateMaster$1(this.master, playlist);
2013 this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;
2014 this.pendingMedia_ = null;
2015
2016 if (update) {
2017 this.master = update;
2018 this.media_ = this.master.playlists[id];
2019 } else {
2020 this.trigger('playlistunchanged');
2021 }
2022
2023 this.updateMediaUpdateTimeout_(refreshDelay(this.media(), !!update));
2024 this.trigger('loadedplaylist');
2025 }
2026 /**
2027 * Abort any outstanding work and clean up.
2028 */
2029 ;
2030
2031 _proto.dispose = function dispose() {
2032 this.trigger('dispose');
2033 this.stopRequest();
2034 window$1.clearTimeout(this.mediaUpdateTimeout);
2035 window$1.clearTimeout(this.finalRenditionTimeout);
2036 this.off();
2037 };
2038
2039 _proto.stopRequest = function stopRequest() {
2040 if (this.request) {
2041 var oldRequest = this.request;
2042 this.request = null;
2043 oldRequest.onreadystatechange = null;
2044 oldRequest.abort();
2045 }
2046 }
  /**
   * When called without any arguments, returns the currently
   * active media playlist. When called with a single argument,
   * triggers the playlist loader to asynchronously switch to the
   * specified media playlist. Calling this method while the
   * loader is in the HAVE_NOTHING state causes an error to be thrown
   * but otherwise has no effect.
   *
   * @param {Object|string=} playlist the parsed media playlist
   * object to switch to, or its URI within master.playlists
   * @param {boolean=} shouldDelay whether we should delay the request by half target duration
   *
   * @return {Playlist} the current loaded media (getter form only)
   */
  ;

  _proto.media = function media(playlist, shouldDelay) {
    var _this4 = this;

    // getter
    if (!playlist) {
      return this.media_;
    } // setter


    if (this.state === 'HAVE_NOTHING') {
      throw new Error('Cannot switch media playlist from ' + this.state);
    } // find the playlist object if the target playlist has been
    // specified by URI


    if (typeof playlist === 'string') {
      if (!this.master.playlists[playlist]) {
        throw new Error('Unknown playlist URI: ' + playlist);
      }

      playlist = this.master.playlists[playlist];
    }

    window$1.clearTimeout(this.finalRenditionTimeout);

    if (shouldDelay) {
      // wait half a target duration (or 5s if unknown) before actually
      // switching, then re-enter this method without the delay flag
      var delay = (playlist.partTargetDuration || playlist.targetDuration) / 2 * 1000 || 5 * 1000;
      this.finalRenditionTimeout = window$1.setTimeout(this.media.bind(this, playlist, false), delay);
      return;
    }

    var startingState = this.state;
    var mediaChange = !this.media_ || playlist.id !== this.media_.id;
    var masterPlaylistRef = this.master.playlists[playlist.id]; // switch to fully loaded playlists immediately

    if (masterPlaylistRef && masterPlaylistRef.endList || // handle the case of a playlist object (e.g., if using vhs-json with a resolved
    // media playlist or, for the case of demuxed audio, a resolved audio media group)
    playlist.endList && playlist.segments.length) {
      // abort outstanding playlist requests
      if (this.request) {
        this.request.onreadystatechange = null;
        this.request.abort();
        this.request = null;
      }

      this.state = 'HAVE_METADATA';
      this.media_ = playlist; // trigger media change if the active media has been updated

      if (mediaChange) {
        this.trigger('mediachanging');

        if (startingState === 'HAVE_MASTER') {
          // The initial playlist was a master manifest, and the first media selected was
          // also provided (in the form of a resolved playlist object) as part of the
          // source object (rather than just a URL). Therefore, since the media playlist
          // doesn't need to be requested, loadedmetadata won't trigger as part of the
          // normal flow, and needs an explicit trigger here.
          this.trigger('loadedmetadata');
        } else {
          this.trigger('mediachange');
        }
      }

      return;
    } // We update/set the timeout here so that live playlists
    // that are not a media change will "start" the loader as expected.
    // We expect that this function will start the media update timeout
    // cycle again. This also prevents a playlist switch failure from
    // causing us to stall during live.


    this.updateMediaUpdateTimeout_(refreshDelay(playlist, true)); // switching to the active playlist is a no-op

    if (!mediaChange) {
      return;
    }

    this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist request

    if (this.request) {
      if (playlist.resolvedUri === this.request.url) {
        // requesting to switch to the same playlist multiple times
        // has no effect after the first
        return;
      }

      this.request.onreadystatechange = null;
      this.request.abort();
      this.request = null;
    } // request the new playlist


    if (this.media_) {
      this.trigger('mediachanging');
    }

    this.pendingMedia_ = playlist;
    this.request = this.vhs_.xhr({
      uri: playlist.resolvedUri,
      withCredentials: this.withCredentials
    }, function (error, req) {
      // disposed
      if (!_this4.request) {
        return;
      }

      playlist.lastRequest = Date.now();
      playlist.resolvedUri = resolveManifestRedirect(_this4.handleManifestRedirects, playlist.resolvedUri, req);

      if (error) {
        return _this4.playlistRequestError(_this4.request, playlist, startingState);
      }

      _this4.haveMetadata({
        playlistString: req.responseText,
        url: playlist.uri,
        id: playlist.id
      }); // fire loadedmetadata the first time a media playlist is loaded


      if (startingState === 'HAVE_MASTER') {
        _this4.trigger('loadedmetadata');
      } else {
        _this4.trigger('mediachange');
      }
    });
  }
2190 /**
2191 * pause loading of the playlist
2192 */
2193 ;
2194
2195 _proto.pause = function pause() {
2196 if (this.mediaUpdateTimeout) {
2197 window$1.clearTimeout(this.mediaUpdateTimeout);
2198 this.mediaUpdateTimeout = null;
2199 }
2200
2201 this.stopRequest();
2202
2203 if (this.state === 'HAVE_NOTHING') {
2204 // If we pause the loader before any data has been retrieved, its as if we never
2205 // started, so reset to an unstarted state.
2206 this.started = false;
2207 } // Need to restore state now that no activity is happening
2208
2209
2210 if (this.state === 'SWITCHING_MEDIA') {
2211 // if the loader was in the process of switching media, it should either return to
2212 // HAVE_MASTER or HAVE_METADATA depending on if the loader has loaded a media
2213 // playlist yet. This is determined by the existence of loader.media_
2214 if (this.media_) {
2215 this.state = 'HAVE_METADATA';
2216 } else {
2217 this.state = 'HAVE_MASTER';
2218 }
2219 } else if (this.state === 'HAVE_CURRENT_METADATA') {
2220 this.state = 'HAVE_METADATA';
2221 }
2222 }
2223 /**
2224 * start loading of the playlist
2225 */
2226 ;
2227
2228 _proto.load = function load(shouldDelay) {
2229 var _this5 = this;
2230
2231 if (this.mediaUpdateTimeout) {
2232 window$1.clearTimeout(this.mediaUpdateTimeout);
2233 this.mediaUpdateTimeout = null;
2234 }
2235
2236 var media = this.media();
2237
2238 if (shouldDelay) {
2239 var delay = media ? (media.partTargetDuration || media.targetDuration) / 2 * 1000 : 5 * 1000;
2240 this.mediaUpdateTimeout = window$1.setTimeout(function () {
2241 _this5.mediaUpdateTimeout = null;
2242
2243 _this5.load();
2244 }, delay);
2245 return;
2246 }
2247
2248 if (!this.started) {
2249 this.start();
2250 return;
2251 }
2252
2253 if (media && !media.endList) {
2254 this.trigger('mediaupdatetimeout');
2255 } else {
2256 this.trigger('loadedplaylist');
2257 }
2258 };
2259
2260 _proto.updateMediaUpdateTimeout_ = function updateMediaUpdateTimeout_(delay) {
2261 var _this6 = this;
2262
2263 if (this.mediaUpdateTimeout) {
2264 window$1.clearTimeout(this.mediaUpdateTimeout);
2265 this.mediaUpdateTimeout = null;
2266 } // we only have use mediaupdatetimeout for live playlists.
2267
2268
2269 if (!this.media() || this.media().endList) {
2270 return;
2271 }
2272
2273 this.mediaUpdateTimeout = window$1.setTimeout(function () {
2274 _this6.mediaUpdateTimeout = null;
2275
2276 _this6.trigger('mediaupdatetimeout');
2277
2278 _this6.updateMediaUpdateTimeout_(delay);
2279 }, delay);
2280 }
  /**
   * Start loading of the playlist: either set up a pre-parsed manifest object
   * (asynchronously) or request and parse the manifest found at `this.src`.
   */
  ;

  _proto.start = function start() {
    var _this7 = this;

    this.started = true;

    if (typeof this.src === 'object') {
      // in the case of an entirely constructed manifest object (meaning there's no actual
      // manifest on a server), default the uri to the page's href
      if (!this.src.uri) {
        this.src.uri = window$1.location.href;
      } // resolvedUri is added on internally after the initial request. Since there's no
      // request for pre-resolved manifests, add on resolvedUri here.


      this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first
      // request can be skipped (since the top level of the manifest, at a minimum, is
      // already available as a parsed manifest object). However, if the manifest object
      // represents a master playlist, some media playlists may need to be resolved before
      // the starting segment list is available. Therefore, go directly to setup of the
      // initial playlist, and let the normal flow continue from there.
      //
      // Note that the call to setup is asynchronous, as other sections of VHS may assume
      // that the first request is asynchronous.

      setTimeout(function () {
        _this7.setupInitialPlaylist(_this7.src);
      }, 0);
      return;
    } // request the specified URL


    this.request = this.vhs_.xhr({
      uri: this.src,
      withCredentials: this.withCredentials
    }, function (error, req) {
      // disposed
      if (!_this7.request) {
        return;
      } // clear the loader's request reference


      _this7.request = null;

      if (error) {
        _this7.error = {
          status: req.status,
          message: "HLS playlist request error at URL: " + _this7.src + ".",
          responseText: req.responseText,
          // MEDIA_ERR_NETWORK
          code: 2
        };

        // a failure before any data arrived is as if we never started
        if (_this7.state === 'HAVE_NOTHING') {
          _this7.started = false;
        }

        return _this7.trigger('error');
      }

      // track the final url if the request was redirected (depending on the
      // handleManifestRedirects option)
      _this7.src = resolveManifestRedirect(_this7.handleManifestRedirects, _this7.src, req);

      var manifest = _this7.parseManifest_({
        manifestString: req.responseText,
        url: _this7.src
      });

      _this7.setupInitialPlaylist(manifest);
    });
  };
2355
2356 _proto.srcUri = function srcUri() {
2357 return typeof this.src === 'string' ? this.src : this.src.uri;
2358 }
2359 /**
2360 * Given a manifest object that's either a master or media playlist, trigger the proper
2361 * events and set the state of the playlist loader.
2362 *
2363 * If the manifest object represents a master playlist, `loadedplaylist` will be
2364 * triggered to allow listeners to select a playlist. If none is selected, the loader
2365 * will default to the first one in the playlists array.
2366 *
2367 * If the manifest object represents a media playlist, `loadedplaylist` will be
2368 * triggered followed by `loadedmetadata`, as the only available playlist is loaded.
2369 *
2370 * In the case of a media playlist, a master playlist object wrapper with one playlist
2371 * will be created so that all logic can handle playlists in the same fashion (as an
2372 * assumed manifest object schema).
2373 *
2374 * @param {Object} manifest
2375 * The parsed manifest object
2376 */
2377 ;
2378
2379 _proto.setupInitialPlaylist = function setupInitialPlaylist(manifest) {
2380 this.state = 'HAVE_MASTER';
2381
2382 if (manifest.playlists) {
2383 this.master = manifest;
2384 addPropertiesToMaster(this.master, this.srcUri()); // If the initial master playlist has playlists wtih segments already resolved,
2385 // then resolve URIs in advance, as they are usually done after a playlist request,
2386 // which may not happen if the playlist is resolved.
2387
2388 manifest.playlists.forEach(function (playlist) {
2389 playlist.segments = getAllSegments(playlist);
2390 playlist.segments.forEach(function (segment) {
2391 resolveSegmentUris(segment, playlist.resolvedUri);
2392 });
2393 });
2394 this.trigger('loadedplaylist');
2395
2396 if (!this.request) {
2397 // no media playlist was specifically selected so start
2398 // from the first listed one
2399 this.media(this.master.playlists[0]);
2400 }
2401
2402 return;
2403 } // In order to support media playlists passed in as vhs-json, the case where the uri
2404 // is not provided as part of the manifest should be considered, and an appropriate
2405 // default used.
2406
2407
2408 var uri = this.srcUri() || window$1.location.href;
2409 this.master = masterForMedia(manifest, uri);
2410 this.haveMetadata({
2411 playlistObject: manifest,
2412 url: uri,
2413 id: this.master.playlists[0].id
2414 });
2415 this.trigger('loadedmetadata');
2416 };
2417
2418 return PlaylistLoader;
2419}(EventTarget$1);
2420
/**
 * @file xhr.js
 */
var videojsXHR = videojs.xhr;
var mergeOptions$1 = videojs.mergeOptions;
2426
/**
 * Wraps the xhr callback: records timing/bandwidth statistics on the request
 * object and normalizes error handling (timeout codes, non-2xx statuses)
 * before invoking the caller's callback.
 *
 * @param {Object} request the xhr request object
 * @param {Error|null} error error reported by the underlying xhr, if any
 * @param {Object} response the xhr response object
 * @param {Function} callback invoked with (error, request)
 */
var callbackWrapper = function callbackWrapper(request, error, response, callback) {
  var isArrayBuffer = request.responseType === 'arraybuffer';
  var reqResponse = isArrayBuffer ? request.response : request.responseText; // on success, record the round trip time and derive a bandwidth estimate

  if (!error && reqResponse) {
    request.responseTime = Date.now();
    request.roundTripTime = request.responseTime - request.requestTime;
    request.bytesReceived = reqResponse.byteLength || reqResponse.length;

    if (!request.bandwidth) {
      request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
    }
  }

  if (response.headers) {
    request.responseHeaders = response.headers;
  } // videojs.xhr now uses a specific code on the error object to signal that a
  // request has timed out instead of setting a boolean on the request object


  if (error && error.code === 'ETIMEDOUT') {
    request.timedout = true;
  } // videojs.xhr no longer considers status codes outside of 200 and 0 (for file
  // uris) to be errors, but the old XHR did, so emulate that behavior. Status
  // 206 may be used in response to byterange requests.


  var okStatus = response.statusCode === 200 || response.statusCode === 206 || response.statusCode === 0;

  if (!error && !request.aborted && !okStatus) {
    error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
  }

  callback(error, request);
};
2460
/**
 * Build the xhr function used by VHS: applies a default timeout, allows a
 * user-supplied `beforeRequest` hook to modify options, delegates to either
 * the stock videojs.xhr or a user override, and wraps abort so aborted
 * requests can be identified by callbackWrapper.
 *
 * @return {Function} the configured xhr function
 */
var xhrFactory = function xhrFactory() {
  var xhr = function XhrFunction(options, callback) {
    // fill in a default request timeout
    options = mergeOptions$1({
      timeout: 45e3
    }, options); // allow an optional user-specified function to modify the option
    // object before the xhr request is constructed

    var beforeRequest = XhrFunction.beforeRequest || videojs.Vhs.xhr.beforeRequest;

    if (beforeRequest && typeof beforeRequest === 'function') {
      var modifiedOptions = beforeRequest(options);

      if (modifiedOptions) {
        options = modifiedOptions;
      }
    } // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overriden
    // TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11


    var xhrMethod = videojs.Vhs.xhr.original === true ? videojsXHR : videojs.Vhs.xhr;
    var request = xhrMethod(options, function (error, response) {
      return callbackWrapper(request, error, response, callback);
    }); // wrap abort so aborted requests are flagged for callbackWrapper

    var originalAbort = request.abort;

    request.abort = function () {
      request.aborted = true;
      return originalAbort.apply(request, arguments);
    };

    request.uri = options.uri;
    request.requestTime = Date.now();
    return request;
  }; // mark this as the unmodified default implementation (checked above)


  xhr.original = true;
  return xhr;
};
/**
 * Turns segment byterange into a string suitable for use in
 * HTTP Range requests.
 *
 * @param {Object} byterange - an object with `offset` and `length` values
 * defining the start and extent of the byte-range
 * @return {string} an HTTP Range header value, e.g. `bytes=0-99`
 */
var byterangeStr = function byterangeStr(byterange) {
  var start = byterange.offset;
  var end; // the end is one less than `offset + length` because the HTTP Range header
  // uses inclusive ranges

  if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {
    end = window$1.BigInt(byterange.offset) + window$1.BigInt(byterange.length) - window$1.BigInt(1);
  } else {
    end = byterange.offset + byterange.length - 1;
  }

  return 'bytes=' + start + '-' + end;
};
/**
 * Defines headers for use in the xhr request for a particular segment.
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 * from SegmentLoader
 * @return {Object} headers for the segment request (Range when byterange set)
 */
var segmentXhrHeaders = function segmentXhrHeaders(segment) {
  if (!segment.byterange) {
    return {};
  }

  return {
    Range: byterangeStr(segment.byterange)
  };
};
2539
2540/**
2541 * @file bin-utils.js
2542 */
2543
/**
 * convert a TimeRange to text
 *
 * @param {TimeRange} range the timerange to use for conversion
 * @param {number} i the iterator on the range to convert
 * @return {string} the range in string format, e.g. `0-10`
 */
var textRange = function textRange(range, i) {
  return [range.start(i), range.end(i)].join('-');
};
/**
 * format a number as a two digit hex string
 *
 * @param {number} e The number
 * @param {number} i the iterator (a space is appended after every second byte)
 * @return {string} the hex formatted number as a string
 */
var formatHexString = function formatHexString(e, i) {
  var hex = e.toString(16); // zero-pad to at least two digits

  var padded = hex.length < 2 ? '0' + hex : hex;
  return padded + (i % 2 ? ' ' : '');
};
2568
/**
 * Render a byte as a printable character for hex-dump output; anything
 * outside 0x20-0x7d becomes '.'.
 *
 * @param {number} e the byte value
 * @return {string} a single printable character
 */
var formatAsciiString = function formatAsciiString(e) {
  var isPrintable = e >= 0x20 && e < 0x7e;
  return isPrintable ? String.fromCharCode(e) : '.';
};
/**
 * Creates an object for sending to a web worker modifying properties that are TypedArrays
 * into a new object with separated properties for the buffer, byteOffset, and byteLength.
 *
 * @param {Object} message
 *        Object of properties and values to send to the web worker
 * @return {Object}
 *        Modified message with TypedArray values expanded
 * @function createTransferableMessage
 */
var createTransferableMessage = function createTransferableMessage(message) {
  var transferable = {};
  var keys = Object.keys(message);

  for (var i = 0; i < keys.length; i++) {
    var key = keys[i];
    var value = message[key]; // expand typed arrays so their underlying ArrayBuffer can be transferred

    transferable[key] = isArrayBufferView(value) ? {
      bytes: value.buffer,
      byteOffset: value.byteOffset,
      byteLength: value.byteLength
    } : value;
  }

  return transferable;
};
/**
 * Returns a unique string identifier for a media initialization
 * segment, derived from its byterange and resolved uri.
 *
 * @param {Object} initSegment
 *        the init segment object.
 *
 * @return {string} the generated init segment id
 */
var initSegmentId = function initSegmentId(initSegment) {
  // segments without an explicit byterange cover the whole resource
  var byterange = initSegment.byterange || {
    length: Infinity,
    offset: 0
  };
  return byterange.length + ',' + byterange.offset + ',' + initSegment.resolvedUri;
};
/**
 * Returns a unique string identifier for a media segment key.
 *
 * @param {Object} key the encryption key
 * @return {string} the unique id for the media segment key.
 */
var segmentKeyId = function segmentKeyId(key) {
  // the resolved uri uniquely identifies the key
  var resolvedUri = key.resolvedUri;
  return resolvedUri;
};
/**
 * utils to help dump binary data to the console
 *
 * @param {Array|TypedArray} data
 *        data to dump to a string
 *
 * @return {string} the data as a hex string (16 bytes per row, followed by
 *         an ascii rendering of the same bytes)
 */
var hexDump = function hexDump(data) {
  var bytes = Array.prototype.slice.call(data);
  var step = 16;
  var result = '';

  for (var row = 0; row < bytes.length / step; row++) {
    var chunk = bytes.slice(row * step, row * step + step);
    var hex = chunk.map(formatHexString).join('');
    var ascii = chunk.map(formatAsciiString).join('');
    result += hex + ' ' + ascii + '\n';
  }

  return result;
};
/**
 * Hex-dump a tag's byte payload (its `bytes` property).
 *
 * @param {Object} tag object with a `bytes` property
 * @return {string} the bytes as a hex dump string
 */
var tagDump = function tagDump(tag) {
  return hexDump(tag.bytes);
};
/**
 * Convert a TimeRanges object into a space-separated string of ranges.
 *
 * @param {TimeRanges} ranges the ranges to convert
 * @return {string} the ranges in string format
 */
var textRanges = function textRanges(ranges) {
  var result = '';

  for (var i = 0; i < ranges.length; i++) {
    result += textRange(ranges, i) + ' ';
  }

  return result;
};
2670
// frozen namespace object collecting the bin-utils helpers above (emulates a
// read-only module namespace)
var utils = /*#__PURE__*/Object.freeze({
  __proto__: null,
  createTransferableMessage: createTransferableMessage,
  initSegmentId: initSegmentId,
  segmentKeyId: segmentKeyId,
  hexDump: hexDump,
  tagDump: tagDump,
  textRanges: textRanges
});
2680
// TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
// Fudge factor applied to a segment's manifest-declared duration when guessing
// where the last segment ends; 25% was arbitrarily chosen, and may need to be
// refined over time.

var SEGMENT_END_FUDGE_PERCENT = 0.25;
/**
 * Converts a player time (any time that can be gotten/set from player.currentTime(),
 * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
 * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
 *
 * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
 * point" (a point where we have a mapping from program time to player time, with player
 * time being the post transmux start of the segment).
 *
 * For more details, see [this doc](../../docs/program-time-from-player-time.md).
 *
 * @param {number} playerTime the player time
 * @param {Object} segment the segment which contains the player time
 * @return {Date|null} program time, or null when the segment has no anchor point
 */
var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
  if (!segment.dateTimeObject) {
    // Can't convert without an "anchor point" for the program time (i.e., a time
    // that can be used to map the start of a segment with a real world time).
    return null;
  }

  var timingInfo = segment.videoTimingInfo; // the start of the content, from before any old content was prepended by
  // the transmuxer

  var startOfSegment = timingInfo.transmuxedPresentationStart + timingInfo.transmuxerPrependedSeconds;
  var secondsIntoSegment = playerTime - startOfSegment;
  return new Date(segment.dateTimeObject.getTime() + secondsIntoSegment * 1000);
};
/**
 * Duration of just the segment's original content: the total transmuxed
 * presentation span minus whatever the transmuxer prepended.
 *
 * @param {Object} videoTimingInfo post-transmux timing info for the segment
 * @return {number} the segment's original video duration in seconds
 */
var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
  var totalSpan = videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart;
  return totalSpan - videoTimingInfo.transmuxerPrependedSeconds;
};
/**
 * Finds a segment that contains the time requested given as an ISO-8601 string. The
 * returned segment might be an estimate or an accurate match.
 *
 * @param {string} programTime The ISO-8601 programTime to find a match for
 * @param {Object} playlist A playlist object to search within
 * @return {Object|null} an object with `segment`, `estimatedStart` and `type`
 * ('accurate'|'estimate'), or null when no match can be made
 */

var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
  // Assumptions:
  // - verifyProgramDateTimeTags has already been run
  // - live streams have been started
  var dateTimeObject;

  try {
    dateTimeObject = new Date(programTime);
  } catch (e) {
    return null;
  } // `new Date` does not throw on malformed input (so the try/catch above never
  // fires for bad strings); it produces an "Invalid Date" whose time value is
  // NaN, so check for that explicitly to reject unparseable program times


  if (isNaN(dateTimeObject.getTime())) {
    return null;
  }

  if (!playlist || !playlist.segments || playlist.segments.length === 0) {
    return null;
  }

  var segment = playlist.segments[0];

  if (dateTimeObject < segment.dateTimeObject) {
    // Requested time is before stream start.
    return null;
  } // walk forward until the next segment starts after the requested time


  for (var i = 0; i < playlist.segments.length - 1; i++) {
    segment = playlist.segments[i];
    var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;

    if (dateTimeObject < nextSegmentStart) {
      break;
    }
  }

  var lastSegment = playlist.segments[playlist.segments.length - 1];
  var lastSegmentStart = lastSegment.dateTimeObject; // without post-transmux timing info, pad the manifest duration with a fudge
  // factor since manifest durations are only estimates

  var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
  var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);

  if (dateTimeObject > lastSegmentEnd) {
    // Beyond the end of the stream, or our best guess of the end of the stream.
    return null;
  }

  if (dateTimeObject > lastSegmentStart) {
    segment = lastSegment;
  }

  return {
    segment: segment,
    estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
    // Although, given that all segments have accurate date time objects, the segment
    // selected should be accurate, unless the video has been transmuxed at some point
    // (determined by the presence of the videoTimingInfo object), the segment's "player
    // time" (the start time in the player) can't be considered accurate.
    type: segment.videoTimingInfo ? 'accurate' : 'estimate'
  };
};
/**
 * Finds a segment that contains the given player time(in seconds).
 *
 * @param {number} time The player time to find a match for
 * @param {Object} playlist A playlist object to search within
 * @return {Object|null} an object with `segment`, `estimatedStart` and `type`
 * ('accurate'|'estimate'), or null when no match can be made
 */

var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
  // Assumptions:
  // - there will always be a segment.duration
  // - we can start from zero
  // - segments are in time order
  if (!playlist || !playlist.segments || playlist.segments.length === 0) {
    return null;
  }

  var segments = playlist.segments;
  var segmentEnd = 0;
  var segment;

  for (var i = 0; i < segments.length; i++) {
    segment = segments[i]; // videoTimingInfo is only set once the segment has been downloaded and
    // transmuxed, and then holds the most accurate player-time values we have.
    // Prefer its transmuxedPresentationEnd; otherwise estimate the end by
    // accumulating the (inaccurate) manifest-derived durations.

    segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;

    if (time <= segmentEnd) {
      break;
    }
  }

  var lastSegment = segments[segments.length - 1];

  if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
    // The time requested is beyond the stream end.
    return null;
  }

  if (time > segmentEnd) {
    // The time is within or beyond the last segment. Since the duration value
    // is only an estimate, accept times up to a reasonable fudge factor past
    // the computed end; beyond that there isn't enough information to guess.
    if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
      return null;
    }

    segment = lastSegment;
  }

  return {
    segment: segment,
    estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
    // videoTimingInfo is only set after transmux, so it is the only source of
    // accurate timing values.
    type: segment.videoTimingInfo ? 'accurate' : 'estimate'
  };
};
/**
 * Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
 * If the offset returned is positive, the programTime occurs after the
 * comparisonTimestamp.
 * If the offset is negative, the programTime occurs before the comparisonTimestamp.
 *
 * @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
 * @param {string} programTime The programTime as an ISO-8601 string
 * @return {number} offset in seconds
 */

var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
  var segmentDateTime;
  var programDateTime;

  try {
    segmentDateTime = new Date(comparisonTimeStamp);
    programDateTime = new Date(programTime);
  } catch (e) {// TODO handle error
  }

  return (programDateTime.getTime() - segmentDateTime.getTime()) / 1000;
};
/**
 * Checks that all segments in this playlist have programDateTime tags.
 *
 * @param {Object} playlist A playlist object
 * @return {boolean} true only when every segment has a dateTimeObject
 */

var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
  if (!playlist.segments || playlist.segments.length === 0) {
    return false;
  }

  return playlist.segments.every(function (segment) {
    return Boolean(segment.dateTimeObject);
  });
};
/**
 * Returns the programTime of the media given a playlist and a playerTime.
 * The playlist must have programDateTime tags for a programDateTime tag to be returned.
 * If the segments containing the time requested have not been buffered yet, an estimate
 * may be returned to the callback.
 *
 * @param {Object} args
 * @param {Object} args.playlist A playlist object to search within
 * @param {number} args.time A playerTime in seconds
 * @param {Function} args.callback callback(err, programTime)
 * @return {string} err.message A detailed error message
 * @return {Object} programTime
 * @return {number} programTime.mediaSeconds The streamTime in seconds
 * @return {string} programTime.programDateTime The programTime as an ISO-8601 String
 */

var getProgramTime = function getProgramTime(_ref) {
  var playlist = _ref.playlist;
  var time = _ref.time;
  var callback = _ref.callback;

  if (!callback) {
    throw new Error('getProgramTime: callback must be provided');
  }

  if (!playlist || time === undefined) {
    return callback({
      message: 'getProgramTime: playlist and time must be provided'
    });
  }

  var matchedSegment = findSegmentForPlayerTime(time, playlist);

  if (!matchedSegment) {
    return callback({
      message: 'valid programTime was not found'
    });
  }

  if (matchedSegment.type === 'estimate') {
    // the segment hasn't been transmuxed yet; the caller should seek to the
    // estimated start and try again once accurate timing info exists
    return callback({
      message: 'Accurate programTime could not be determined. Please seek to e.seekTime and try again',
      seekTime: matchedSegment.estimatedStart
    });
  }

  var programTimeObject = {
    mediaSeconds: time
  };
  var programTime = playerTimeToProgramTime(time, matchedSegment.segment);

  if (programTime) {
    programTimeObject.programDateTime = programTime.toISOString();
  }

  return callback(null, programTimeObject);
};
2949/**
2950 * Seeks in the player to a time that matches the given programTime ISO-8601 string.
2951 *
2952 * @param {Object} args
2953 * @param {string} args.programTime A programTime to seek to as an ISO-8601 String
2954 * @param {Object} args.playlist A playlist to look within
2955 * @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
2956 * @param {Function} args.seekTo A method to perform a seek
2957 * @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
2958 * @param {Object} args.tech The tech to seek on
2959 * @param {Function} args.callback(err, newTime) A callback to return the new time to
2960 * @return {string} err.message A detailed error message
2961 * @return {number} newTime The exact time that was seeked to in seconds
2962 */
2963
var seekToProgramTime = function seekToProgramTime(_ref2) {
  var programTime = _ref2.programTime,
      playlist = _ref2.playlist,
      _ref2$retryCount = _ref2.retryCount,
      retryCount = _ref2$retryCount === void 0 ? 2 : _ref2$retryCount,
      seekTo = _ref2.seekTo,
      _ref2$pauseAfterSeek = _ref2.pauseAfterSeek,
      pauseAfterSeek = _ref2$pauseAfterSeek === void 0 ? true : _ref2$pauseAfterSeek,
      tech = _ref2.tech,
      callback = _ref2.callback;

  // the result is only reported via the callback, so it is mandatory
  if (!callback) {
    throw new Error('seekToProgramTime: callback must be provided');
  }

  if (typeof programTime === 'undefined' || !playlist || !seekTo) {
    return callback({
      message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
    });
  }

  // live streams only have usable segment timing once playback has begun
  if (!playlist.endList && !tech.hasStarted_) {
    return callback({
      message: 'player must be playing a live stream to start buffering'
    });
  }

  // every segment needs an EXT-X-PROGRAM-DATE-TIME tag to map program time
  // to player time
  if (!verifyProgramDateTimeTags(playlist)) {
    return callback({
      message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
    });
  }

  var matchedSegment = findSegmentForProgramTime(programTime, playlist); // no match

  if (!matchedSegment) {
    return callback({
      message: programTime + " was not found in the stream"
    });
  }

  var segment = matchedSegment.segment;
  // seconds between the segment's date-time tag and the requested programTime
  var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);

  if (matchedSegment.type === 'estimate') {
    // we've run out of retries
    if (retryCount === 0) {
      return callback({
        message: programTime + " is not buffered yet. Try again"
      });
    }

    // seek toward the estimate, then recurse after the seek completes so the
    // (now buffered) segment can yield an accurate start time
    seekTo(matchedSegment.estimatedStart + mediaOffset);
    tech.one('seeked', function () {
      seekToProgramTime({
        programTime: programTime,
        playlist: playlist,
        retryCount: retryCount - 1,
        seekTo: seekTo,
        pauseAfterSeek: pauseAfterSeek,
        tech: tech,
        callback: callback
      });
    });
    return;
  } // Since the segment.start value is determined from the buffered end or ending time
  // of the prior segment, the seekToTime doesn't need to account for any transmuxer
  // modifications.


  var seekToTime = segment.start + mediaOffset;

  // report the exact post-seek time back to the caller
  var seekedCallback = function seekedCallback() {
    return callback(null, tech.currentTime());
  }; // listen for seeked event


  tech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this state

  if (pauseAfterSeek) {
    tech.pause();
  }

  seekTo(seekToTime);
};
3049
// Invokes the callback only when the xhr request has completed (readyState 4);
// otherwise it is a no-op.
3051
var callbackOnCompleted = function callbackOnCompleted(request, cb) {
  // XMLHttpRequest.DONE === 4; anything else means the request is still in flight
  if (request.readyState !== 4) {
    return;
  }

  return cb();
};
3059
/**
 * Requests `uri` and incrementally sniffs the downloaded bytes to detect the
 * media container type, aborting the request as soon as a decision can be
 * made. `cb` is called as cb(err, request, containerType, bytes).
 *
 * @param {string} uri the URL to probe
 * @param {Function} xhr the xhr factory to use for the request
 * @param {Function} cb callback receiving (err, request, type, bytes)
 * @return {Object} the in-flight xhr request
 */
var containerRequest = function containerRequest(uri, xhr, cb) {
  var bytes = [];
  var id3Offset;
  var finished = false;

  // abort the request and report the final result exactly once
  var endRequestAndCallback = function endRequestAndCallback(err, req, type, _bytes) {
    req.abort();
    finished = true;
    return cb(err, req, type, _bytes);
  };

  var progressListener = function progressListener(error, request) {
    if (finished) {
      return;
    }

    if (error) {
      return endRequestAndCallback(error, request, '', bytes);
    } // grab the new part of content that was just downloaded


    var newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytes

    bytes = concatTypedArrays(bytes, stringToBytes(newPart, true));
    id3Offset = id3Offset || getId3Offset(bytes); // we need at least 10 bytes to determine a type
    // or we need at least two bytes after an id3Offset

    if (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {
      return callbackOnCompleted(request, function () {
        return endRequestAndCallback(error, request, '', bytes);
      });
    }

    var type = detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data
    // to see the second sync byte, wait until we have enough data
    // before declaring it ts

    if (type === 'ts' && bytes.length < 188) {
      return callbackOnCompleted(request, function () {
        return endRequestAndCallback(error, request, '', bytes);
      });
    } // this may be an unsynced ts segment
    // wait for 376 bytes before detecting no container


    if (!type && bytes.length < 376) {
      return callbackOnCompleted(request, function () {
        return endRequestAndCallback(error, request, '', bytes);
      });
    }

    return endRequestAndCallback(null, request, type, bytes);
  };

  var options = {
    uri: uri,
    beforeSend: function beforeSend(request) {
      // this forces the browser to pass the bytes to us unprocessed
      request.overrideMimeType('text/plain; charset=x-user-defined');
      request.addEventListener('progress', function (_ref) {
        // intentionally-discarded destructure residue left by the transpiler;
        // total/loaded are not used here
        _ref.total;
        _ref.loaded;
        return callbackWrapper(request, null, {
          statusCode: request.status
        }, progressListener);
      });
    }
  };
  var request = xhr(options, function (error, response) {
    return callbackWrapper(request, error, response, progressListener);
  });
  return request;
};
3133
// pull the helpers this loader needs off the videojs namespace
var EventTarget = videojs.EventTarget,
    mergeOptions = videojs.mergeOptions;
3136
/**
 * Returns true when two DASH playlists are equivalent. Extends the generic
 * isPlaylistUnchanged check with DASH-specific comparisons of sidx byteranges
 * and per-segment uri/byterange data, since mediaSequence is not meaningful
 * for DASH.
 *
 * @param {Object} a the previous playlist
 * @param {Object} b the updated playlist
 * @return {boolean} true when nothing relevant changed
 */
var dashPlaylistUnchanged = function dashPlaylistUnchanged(a, b) {
  if (!isPlaylistUnchanged(a, b)) {
    return false;
  } // for dash the above check will often return true in scenarios where
  // the playlist actually has changed because mediaSequence isn't a
  // dash thing, and we often set it to 1. So if the playlists have the same amount
  // of segments we return true.
  // So for dash we need to make sure that the underlying segments are different.
  // if sidx changed then the playlists are different.


  if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
    return false;
  } else if (!a.sidx && b.sidx || a.sidx && !b.sidx) {
    return false;
  } // one or the other does not have segments
  // there was a change.


  if (a.segments && !b.segments || !a.segments && b.segments) {
    return false;
  } // neither has segments nothing changed


  if (!a.segments && !b.segments) {
    return true;
  } // check segments themselves


  for (var i = 0; i < a.segments.length; i++) {
    var aSegment = a.segments[i];
    var bSegment = b.segments[i]; // if uris are different between segments there was a change

    if (aSegment.uri !== bSegment.uri) {
      return false;
    } // neither segment has a byterange, there will be no byterange change.


    if (!aSegment.byterange && !bSegment.byterange) {
      continue;
    }

    var aByterange = aSegment.byterange;
    var bByterange = bSegment.byterange; // if byterange only exists on one of the segments, there was a change.

    if (aByterange && !bByterange || !aByterange && bByterange) {
      return false;
    } // if both segments have byterange with different offsets, there was a change.


    if (aByterange.offset !== bByterange.offset || aByterange.length !== bByterange.length) {
      return false;
    }
  } // if everything was the same with segments, this is the same playlist.


  return true;
};
3195/**
3196 * Parses the master XML string and updates playlist URI references.
3197 *
3198 * @param {Object} config
3199 * Object of arguments
3200 * @param {string} config.masterXml
3201 * The mpd XML
3202 * @param {string} config.srcUrl
3203 * The mpd URL
3204 * @param {Date} config.clientOffset
3205 * A time difference between server and client
3206 * @param {Object} config.sidxMapping
3207 * SIDX mappings for moof/mdat URIs and byte ranges
3208 * @return {Object}
3209 * The parsed mpd manifest object
3210 */
3211
3212
var parseMasterXml = function parseMasterXml(_ref) {
  // parse the MPD into a manifest object, letting mpd-parser reuse state
  // from the previous manifest for segment continuity across refreshes
  var manifest = parse(_ref.masterXml, {
    manifestUri: _ref.srcUrl,
    clientOffset: _ref.clientOffset,
    sidxMapping: _ref.sidxMapping,
    previousManifest: _ref.previousManifest
  });

  // decorate the manifest (resolved URIs, ids, attributes) so the rest of
  // VHS can treat it like an HLS master playlist
  addPropertiesToMaster(manifest, _ref.srcUrl);
  return manifest;
};
3228/**
3229 * Returns a new master manifest that is the result of merging an updated master manifest
3230 * into the original version.
3231 *
3232 * @param {Object} oldMaster
3233 * The old parsed mpd object
3234 * @param {Object} newMaster
3235 * The updated parsed mpd object
3236 * @return {Object}
3237 * A new object representing the original master manifest with the updated media
3238 * playlists merged in
3239 */
3240
var updateMaster = function updateMaster(oldMaster, newMaster, sidxMapping) {
  // track whether anything actually changed; if not, return null so callers
  // can skip downstream updates
  var noChanges = true;
  var update = mergeOptions(oldMaster, {
    // These are top level properties that can be updated
    duration: newMaster.duration,
    minimumUpdatePeriod: newMaster.minimumUpdatePeriod,
    timelineStarts: newMaster.timelineStarts
  }); // First update the playlists in playlist list

  for (var i = 0; i < newMaster.playlists.length; i++) {
    var playlist = newMaster.playlists[i];

    if (playlist.sidx) {
      var sidxKey = generateSidxKey(playlist.sidx); // add sidx segments to the playlist if we have all the sidx info already

      if (sidxMapping && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx) {
        addSidxSegmentsToPlaylist(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
      }
    }

    // updateMaster$1 returns a new master only when this playlist changed
    var playlistUpdate = updateMaster$1(update, playlist, dashPlaylistUnchanged);

    if (playlistUpdate) {
      update = playlistUpdate;
      noChanges = false;
    }
  } // Then update media group playlists


  forEachMediaGroup(newMaster, function (properties, type, group, label) {
    if (properties.playlists && properties.playlists.length) {
      var id = properties.playlists[0].id;

      var _playlistUpdate = updateMaster$1(update, properties.playlists[0], dashPlaylistUnchanged);

      if (_playlistUpdate) {
        update = _playlistUpdate; // update the playlist reference within media groups

        update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
        noChanges = false;
      }
    }
  });

  // a changed minimumUpdatePeriod counts as a change even when no playlist did
  if (newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
    noChanges = false;
  }

  if (noChanges) {
    return null;
  }

  return update;
}; // SIDX should be equivalent if the URI and byteranges of the SIDX match.
// If the SIDXs have maps, the two maps should match,
// both `a` and `b` missing SIDXs is considered matching.
// If `a` or `b` but not both have a map, they aren't matching.
3295// If the SIDXs have maps, the two maps should match,
3296// both `a` and `b` missing SIDXs is considered matching.
3297// If `a` or `b` but not both have a map, they aren't matching.
3298
var equivalentSidx = function equivalentSidx(a, b) {
  // the init-segment maps match when both are absent, or when both exist
  // with identical byteranges
  var mapsMatch;

  if (!a.map && !b.map) {
    mapsMatch = true;
  } else if (a.map && b.map) {
    mapsMatch = a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length;
  } else {
    mapsMatch = false;
  }

  return mapsMatch && a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;
}; // exported for testing
3304
3305
/**
 * Builds a new sidx mapping containing only the entries from oldSidxMapping
 * whose sidx info is still equivalent to the current playlists' sidx info.
 *
 * @param {Object} playlists map of playlist id -> playlist
 * @param {Object} oldSidxMapping previous sidx mapping keyed by sidx key
 * @return {Object} the surviving subset of oldSidxMapping
 */
var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
  var newSidxMapping = {};

  for (var id in playlists) {
    var playlist = playlists[id];
    var currentSidxInfo = playlist.sidx;

    if (currentSidxInfo) {
      var key = generateSidxKey(currentSidxInfo);

      // NOTE: a `break` (not `continue`) — as soon as one playlist's sidx key
      // is missing from the old mapping, comparison stops entirely and the
      // remaining playlists are not carried over
      if (!oldSidxMapping[key]) {
        break;
      }

      var savedSidxInfo = oldSidxMapping[key].sidxInfo;

      // only keep the old entry when the sidx info is unchanged
      if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
        newSidxMapping[key] = oldSidxMapping[key];
      }
    }
  }

  return newSidxMapping;
};
3330/**
3331 * A function that filters out changed items as they need to be requested separately.
3332 *
3333 * The method is exported for testing
3334 *
3335 * @param {Object} master the parsed mpd XML returned via mpd-parser
3336 * @param {Object} oldSidxMapping the SIDX to compare against
3337 */
3338
var filterChangedSidxMappings = function filterChangedSidxMappings(master, oldSidxMapping) {
  // start with the sidx entries that survived on the main (video) playlists...
  var result = compareSidxEntry(master.playlists, oldSidxMapping);

  // ...then fold in the surviving entries from every media-group playlist
  forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
    if (properties.playlists && properties.playlists.length) {
      result = mergeOptions(result, compareSidxEntry(properties.playlists, oldSidxMapping));
    }
  });
  return result;
};
3350
3351var DashPlaylistLoader = /*#__PURE__*/function (_EventTarget) {
3352 _inheritsLoose(DashPlaylistLoader, _EventTarget);
3353
3354 // DashPlaylistLoader must accept either a src url or a playlist because subsequent
3355 // playlist loader setups from media groups will expect to be able to pass a playlist
3356 // (since there aren't external URLs to media playlists with DASH)
  // DashPlaylistLoader must accept either a src url or a playlist because subsequent
  // playlist loader setups from media groups will expect to be able to pass a playlist
  // (since there aren't external URLs to media playlists with DASH)
  function DashPlaylistLoader(srcUrlOrPlaylist, vhs, options, masterPlaylistLoader) {
    var _this;

    if (options === void 0) {
      options = {};
    }

    _this = _EventTarget.call(this) || this;
    // a loader constructed without a masterPlaylistLoader IS the master loader
    // and refers to itself
    _this.masterPlaylistLoader_ = masterPlaylistLoader || _assertThisInitialized(_this);

    if (!masterPlaylistLoader) {
      _this.isMaster_ = true;
    }

    var _options = options,
        _options$withCredenti = _options.withCredentials,
        withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
        _options$handleManife = _options.handleManifestRedirects,
        handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
    _this.vhs_ = vhs;
    _this.withCredentials = withCredentials;
    _this.handleManifestRedirects = handleManifestRedirects;

    if (!srcUrlOrPlaylist) {
      throw new Error('A non-empty playlist URL or object is required');
    } // event naming?


    // re-request the MPD whenever its minimumUpdatePeriod elapses
    _this.on('minimumUpdatePeriod', function () {
      _this.refreshXml_();
    }); // live playlist staleness timeout


    _this.on('mediaupdatetimeout', function () {
      _this.refreshMedia_(_this.media().id);
    });

    _this.state = 'HAVE_NOTHING';
    _this.loadedPlaylists_ = {};
    _this.logger_ = logger('DashPlaylistLoader'); // initialize the loader state
    // The masterPlaylistLoader will be created with a string

    if (_this.isMaster_) {
      _this.masterPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes
      // once multi-period is refactored

      _this.masterPlaylistLoader_.sidxMapping_ = {};
    } else {
      // child loaders are handed a playlist object directly
      _this.childPlaylist_ = srcUrlOrPlaylist;
    }

    return _this;
  }
3410
3411 var _proto = DashPlaylistLoader.prototype;
3412
  // Records an error from a request and triggers 'error'. Returns true when
  // the caller should stop (disposed loader or an error occurred); returns
  // undefined (falsy) when the request succeeded.
  _proto.requestErrored_ = function requestErrored_(err, request, startingState) {
    // disposed
    if (!this.request) {
      return true;
    } // pending request is cleared


    this.request = null;

    if (err) {
      // use the provided error object or create one
      // based on the request/response
      this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
        status: request.status,
        message: 'DASH request error at URL: ' + request.uri,
        response: request.response,
        // MEDIA_ERR_NETWORK
        code: 2
      };

      if (startingState) {
        // roll the state machine back to where it was before the request
        this.state = startingState;
      }

      this.trigger('error');
      return true;
    }
  }
3441 /**
3442 * Verify that the container of the sidx segment can be parsed
3443 * and if it can, get and parse that segment.
3444 */
3445 ;
3446
  // Fetches and parses the playlist's sidx box (if any), caches it in the
  // master loader's sidxMapping_, and adds the resulting segments to the
  // playlist. Calls cb(true) when sidx segments were added, cb(false) when
  // there was nothing to do. The callback is always invoked asynchronously.
  _proto.addSidxSegments_ = function addSidxSegments_(playlist, startingState, cb) {
    var _this2 = this;

    var sidxKey = playlist.sidx && generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.

    if (!playlist.sidx || !sidxKey || this.masterPlaylistLoader_.sidxMapping_[sidxKey]) {
      // keep this function async
      this.mediaRequest_ = window$1.setTimeout(function () {
        return cb(false);
      }, 0);
      return;
    } // resolve the segment URL relative to the playlist


    var uri = resolveManifestRedirect(this.handleManifestRedirects, playlist.sidx.resolvedUri);

    // shared completion handler: parses the sidx bytes, caches them and
    // expands the playlist's segment list
    var fin = function fin(err, request) {
      if (_this2.requestErrored_(err, request, startingState)) {
        return;
      }

      var sidxMapping = _this2.masterPlaylistLoader_.sidxMapping_;
      var sidx;

      try {
        // skip the 8-byte box header before parsing
        sidx = parseSidx(toUint8(request.response).subarray(8));
      } catch (e) {
        // sidx parsing failed.
        _this2.requestErrored_(e, request, startingState);

        return;
      }

      sidxMapping[sidxKey] = {
        sidxInfo: playlist.sidx,
        sidx: sidx
      };
      addSidxSegmentsToPlaylist(playlist, sidx, playlist.sidx.resolvedUri);
      return cb(true);
    };

    // probe the container type while downloading; only mp4 can carry a sidx
    this.request = containerRequest(uri, this.vhs_.xhr, function (err, request, container, bytes) {
      if (err) {
        return fin(err, request);
      }

      if (!container || container !== 'mp4') {
        return fin({
          status: request.status,
          message: "Unsupported " + (container || 'unknown') + " container type for sidx segment at URL: " + uri,
          // response is just bytes in this case
          // but we really don't want to return that.
          response: '',
          playlist: playlist,
          internal: true,
          blacklistDuration: Infinity,
          // MEDIA_ERR_NETWORK
          code: 2
        }, request);
      } // if we already downloaded the sidx bytes in the container request, use them


      var _playlist$sidx$bytera = playlist.sidx.byterange,
          offset = _playlist$sidx$bytera.offset,
          length = _playlist$sidx$bytera.length;

      if (bytes.length >= length + offset) {
        return fin(err, {
          response: bytes.subarray(offset, offset + length),
          status: request.status,
          uri: request.uri
        });
      } // otherwise request sidx bytes


      _this2.request = _this2.vhs_.xhr({
        uri: uri,
        responseType: 'arraybuffer',
        headers: segmentXhrHeaders({
          byterange: playlist.sidx.byterange
        })
      }, fin);
    });
  };
3531
  // Tears the loader down: aborts any in-flight request, clears all pending
  // timeouts and detaches every event listener.
  _proto.dispose = function dispose() {
    this.trigger('dispose');
    this.stopRequest();
    this.loadedPlaylists_ = {};
    window$1.clearTimeout(this.minimumUpdatePeriodTimeout_);
    window$1.clearTimeout(this.mediaRequest_);
    window$1.clearTimeout(this.mediaUpdateTimeout);
    this.mediaUpdateTimeout = null;
    this.mediaRequest_ = null;
    this.minimumUpdatePeriodTimeout_ = null;

    // cancel a deferred minimumUpdatePeriod setup, if one is waiting on media
    if (this.masterPlaylistLoader_.createMupOnMedia_) {
      this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
      this.masterPlaylistLoader_.createMupOnMedia_ = null;
    }

    // remove all remaining listeners
    this.off();
  };
3550
3551 _proto.hasPendingRequest = function hasPendingRequest() {
3552 return this.request || this.mediaRequest_;
3553 };
3554
3555 _proto.stopRequest = function stopRequest() {
3556 if (this.request) {
3557 var oldRequest = this.request;
3558 this.request = null;
3559 oldRequest.onreadystatechange = null;
3560 oldRequest.abort();
3561 }
3562 };
3563
  // Getter/setter for the active media playlist. With no argument, returns
  // the current media playlist. With a playlist object or URI string, switches
  // to that playlist, loading sidx segments first when necessary.
  _proto.media = function media(playlist) {
    var _this3 = this;

    // getter
    if (!playlist) {
      return this.media_;
    } // setter


    if (this.state === 'HAVE_NOTHING') {
      throw new Error('Cannot switch media playlist from ' + this.state);
    }

    var startingState = this.state; // find the playlist object if the target playlist has been specified by URI

    if (typeof playlist === 'string') {
      if (!this.masterPlaylistLoader_.master.playlists[playlist]) {
        throw new Error('Unknown playlist URI: ' + playlist);
      }

      playlist = this.masterPlaylistLoader_.master.playlists[playlist];
    }

    var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediately

    if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
      this.state = 'HAVE_METADATA';
      this.media_ = playlist; // trigger media change if the active media has been updated

      // NOTE(review): mediaChange is always true on this path (it is part of
      // the enclosing condition), so these events always fire here
      if (mediaChange) {
        this.trigger('mediachanging');
        this.trigger('mediachange');
      }

      return;
    } // switching to the active playlist is a no-op


    if (!mediaChange) {
      return;
    } // switching from an already loaded playlist


    if (this.media_) {
      this.trigger('mediachanging');
    }

    // load sidx segments (async) before declaring the new media ready
    this.addSidxSegments_(playlist, startingState, function (sidxChanged) {
      // everything is ready just continue to haveMetadata
      _this3.haveMetadata({
        startingState: startingState,
        playlist: playlist
      });
    });
  };
3619
  // Transitions the loader to HAVE_METADATA for the given playlist and fires
  // the appropriate lifecycle event based on the state the switch began in.
  _proto.haveMetadata = function haveMetadata(_ref2) {
    var startingState = _ref2.startingState,
        playlist = _ref2.playlist;
    this.state = 'HAVE_METADATA';
    this.loadedPlaylists_[playlist.id] = playlist;
    this.mediaRequest_ = null; // This will trigger loadedplaylist

    this.refreshMedia_(playlist.id); // fire loadedmetadata the first time a media playlist is loaded
    // to resolve setup of media groups

    if (startingState === 'HAVE_MASTER') {
      this.trigger('loadedmetadata');
    } else {
      // trigger media change if the active media has been updated
      this.trigger('mediachange');
    }
  };
3637
  // Pauses the loader: cancels pending requests and refresh timers. The
  // master loader additionally clears the minimumUpdatePeriod timer.
  _proto.pause = function pause() {
    // cancel a deferred minimumUpdatePeriod setup, if one is waiting on media
    if (this.masterPlaylistLoader_.createMupOnMedia_) {
      this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
      this.masterPlaylistLoader_.createMupOnMedia_ = null;
    }

    this.stopRequest();
    window$1.clearTimeout(this.mediaUpdateTimeout);
    this.mediaUpdateTimeout = null;

    if (this.isMaster_) {
      window$1.clearTimeout(this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_);
      this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;
    }

    if (this.state === 'HAVE_NOTHING') {
      // If we pause the loader before any data has been retrieved, its as if we never
      // started, so reset to an unstarted state.
      this.started = false;
    }
  };
3659
  // (Re)starts loading. When isFinalRendition is set, retries after a delay
  // instead of loading immediately (used when the last rendition errored).
  _proto.load = function load(isFinalRendition) {
    var _this4 = this;

    window$1.clearTimeout(this.mediaUpdateTimeout);
    this.mediaUpdateTimeout = null;
    var media = this.media();

    if (isFinalRendition) {
      // back off: half a target duration, or 5s when no media is selected yet
      var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
      this.mediaUpdateTimeout = window$1.setTimeout(function () {
        return _this4.load();
      }, delay);
      return;
    } // because the playlists are internal to the manifest, load should either load the
    // main manifest, or do nothing but trigger an event


    if (!this.started) {
      this.start();
      return;
    }

    if (media && !media.endList) {
      // Check to see if this is the master loader and the MUP was cleared (this happens
      // when the loader was paused). `media` should be set at this point since one is always
      // set during `start()`.
      if (this.isMaster_ && !this.minimumUpdatePeriodTimeout_) {
        // Trigger minimumUpdatePeriod to refresh the master manifest
        this.trigger('minimumUpdatePeriod'); // Since there was no prior minimumUpdatePeriodTimeout it should be recreated

        this.updateMinimumUpdatePeriodTimeout_();
      }

      this.trigger('mediaupdatetimeout');
    } else {
      this.trigger('loadedplaylist');
    }
  };
3698
  // Begins loading: the master loader requests the MPD, while child loaders
  // (whose playlists are already parsed) move straight to HAVE_MASTER.
  _proto.start = function start() {
    var _this5 = this;

    this.started = true; // We don't need to request the master manifest again
    // Call this asynchronously to match the xhr request behavior below

    if (!this.isMaster_) {
      this.mediaRequest_ = window$1.setTimeout(function () {
        return _this5.haveMaster_();
      }, 0);
      return;
    }

    this.requestMaster_(function (req, masterChanged) {
      _this5.haveMaster_();

      // default to the first playlist if nothing selected a media yet
      if (!_this5.hasPendingRequest() && !_this5.media_) {
        _this5.media(_this5.masterPlaylistLoader_.master.playlists[0]);
      }
    });
  };
3720
  // Requests the MPD, records whether its contents changed since the last
  // fetch, and calls cb(req, masterChanged). When the manifest changed, the
  // new XML is parsed and the client/server clock is re-synced first.
  _proto.requestMaster_ = function requestMaster_(cb) {
    var _this6 = this;

    this.request = this.vhs_.xhr({
      uri: this.masterPlaylistLoader_.srcUrl,
      withCredentials: this.withCredentials
    }, function (error, req) {
      if (_this6.requestErrored_(error, req)) {
        if (_this6.state === 'HAVE_NOTHING') {
          // the very first request failed, so the loader never started
          _this6.started = false;
        }

        return;
      }

      // compare the raw XML text to detect manifest updates
      var masterChanged = req.responseText !== _this6.masterPlaylistLoader_.masterXml_;
      _this6.masterPlaylistLoader_.masterXml_ = req.responseText;

      // remember when the manifest was produced, preferring the Date header
      if (req.responseHeaders && req.responseHeaders.date) {
        _this6.masterLoaded_ = Date.parse(req.responseHeaders.date);
      } else {
        _this6.masterLoaded_ = Date.now();
      }

      _this6.masterPlaylistLoader_.srcUrl = resolveManifestRedirect(_this6.handleManifestRedirects, _this6.masterPlaylistLoader_.srcUrl, req);

      if (masterChanged) {
        _this6.handleMaster_();

        _this6.syncClientServerClock_(function () {
          return cb(req, masterChanged);
        });

        return;
      }

      return cb(req, masterChanged);
    });
  }
3760 /**
3761 * Parses the master xml for UTCTiming node to sync the client clock to the server
3762 * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
3763 *
3764 * @param {Function} done
3765 * Function to call when clock sync has completed
3766 */
3767 ;
3768
  _proto.syncClientServerClock_ = function syncClientServerClock_(done) {
    var _this7 = this;

    var utcTiming = parseUTCTiming(this.masterPlaylistLoader_.masterXml_); // No UTCTiming element found in the mpd. Use Date header from mpd request as the
    // server clock

    if (utcTiming === null) {
      this.masterPlaylistLoader_.clientOffset_ = this.masterLoaded_ - Date.now();
      return done();
    }

    // DIRECT timing embeds the server time in the manifest itself
    if (utcTiming.method === 'DIRECT') {
      this.masterPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
      return done();
    }

    // HEAD/GET timing requires an extra request to the timing server
    this.request = this.vhs_.xhr({
      uri: resolveUrl(this.masterPlaylistLoader_.srcUrl, utcTiming.value),
      method: utcTiming.method,
      withCredentials: this.withCredentials
    }, function (error, req) {
      // disposed
      if (!_this7.request) {
        return;
      }

      if (error) {
        // sync request failed, fall back to using date header from mpd
        // TODO: log warning
        _this7.masterPlaylistLoader_.clientOffset_ = _this7.masterLoaded_ - Date.now();
        return done();
      }

      var serverTime;

      if (utcTiming.method === 'HEAD') {
        if (!req.responseHeaders || !req.responseHeaders.date) {
          // expected date header not present, fall back to using date header from mpd
          // TODO: log warning
          serverTime = _this7.masterLoaded_;
        } else {
          serverTime = Date.parse(req.responseHeaders.date);
        }
      } else {
        // GET responses carry the server time in the body
        serverTime = Date.parse(req.responseText);
      }

      _this7.masterPlaylistLoader_.clientOffset_ = serverTime - Date.now();
      done();
    });
  };
3820
3821 _proto.haveMaster_ = function haveMaster_() {
3822 this.state = 'HAVE_MASTER';
3823
3824 if (this.isMaster_) {
3825 // We have the master playlist at this point, so
3826 // trigger this to allow MasterPlaylistController
3827 // to make an initial playlist selection
3828 this.trigger('loadedplaylist');
3829 } else if (!this.media_) {
3830 // no media playlist was specifically selected so select
3831 // the one the child playlist loader was created with
3832 this.media(this.childPlaylist_);
3833 }
3834 };
3835
  // Parses the freshly-downloaded MPD XML, merges it into the existing master
  // (when one exists), follows any MPD Location element, and refreshes the
  // minimumUpdatePeriod timer when that value changed. Returns true when the
  // master actually changed.
  _proto.handleMaster_ = function handleMaster_() {
    // clear media request
    this.mediaRequest_ = null;
    var oldMaster = this.masterPlaylistLoader_.master;
    var newMaster = parseMasterXml({
      masterXml: this.masterPlaylistLoader_.masterXml_,
      srcUrl: this.masterPlaylistLoader_.srcUrl,
      clientOffset: this.masterPlaylistLoader_.clientOffset_,
      sidxMapping: this.masterPlaylistLoader_.sidxMapping_,
      previousManifest: oldMaster
    }); // if we have an old master to compare the new master against

    if (oldMaster) {
      // updateMaster returns null when nothing changed
      newMaster = updateMaster(oldMaster, newMaster, this.masterPlaylistLoader_.sidxMapping_);
    } // only update master if we have a new master


    this.masterPlaylistLoader_.master = newMaster ? newMaster : oldMaster;
    // honor the MPD Location element: future refreshes go to the new URL
    var location = this.masterPlaylistLoader_.master.locations && this.masterPlaylistLoader_.master.locations[0];

    if (location && location !== this.masterPlaylistLoader_.srcUrl) {
      this.masterPlaylistLoader_.srcUrl = location;
    }

    if (!oldMaster || newMaster && newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
      this.updateMinimumUpdatePeriodTimeout_();
    }

    return Boolean(newMaster);
  };
3866
  // Re-arms (or cancels) the timer that re-requests the MPD after the
  // manifest's minimumUpdatePeriod. All state is kept on the master loader.
  _proto.updateMinimumUpdatePeriodTimeout_ = function updateMinimumUpdatePeriodTimeout_() {
    var mpl = this.masterPlaylistLoader_; // cancel any pending creation of mup on media
    // a new one will be added if needed.

    if (mpl.createMupOnMedia_) {
      mpl.off('loadedmetadata', mpl.createMupOnMedia_);
      mpl.createMupOnMedia_ = null;
    } // clear any pending timeouts


    if (mpl.minimumUpdatePeriodTimeout_) {
      window$1.clearTimeout(mpl.minimumUpdatePeriodTimeout_);
      mpl.minimumUpdatePeriodTimeout_ = null;
    }

    var mup = mpl.master && mpl.master.minimumUpdatePeriod; // If the minimumUpdatePeriod has a value of 0, that indicates that the current
    // MPD has no future validity, so a new one will need to be acquired when new
    // media segments are to be made available. Thus, we use the target duration
    // in this case

    if (mup === 0) {
      if (mpl.media()) {
        mup = mpl.media().targetDuration * 1000;
      } else {
        // no media selected yet: defer the timer setup until loadedmetadata
        mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;
        mpl.one('loadedmetadata', mpl.createMupOnMedia_);
      }
    } // if minimumUpdatePeriod is invalid or <= zero, which
    // can happen when a live video becomes VOD. skip timeout
    // creation.


    if (typeof mup !== 'number' || mup <= 0) {
      if (mup < 0) {
        this.logger_("found invalid minimumUpdatePeriod of " + mup + ", not setting a timeout");
      }

      return;
    }

    this.createMUPTimeout_(mup);
  };
3909
3910 _proto.createMUPTimeout_ = function createMUPTimeout_(mup) {
3911 var mpl = this.masterPlaylistLoader_;
3912 mpl.minimumUpdatePeriodTimeout_ = window$1.setTimeout(function () {
3913 mpl.minimumUpdatePeriodTimeout_ = null;
3914 mpl.trigger('minimumUpdatePeriod');
3915 mpl.createMUPTimeout_(mup);
3916 }, mup);
3917 }
3918 /**
3919 * Sends request to refresh the master xml and updates the parsed master manifest
3920 */
3921 ;
3922
  // Re-requests the master XML (via requestMaster_) and, only when it
  // changed, re-resolves the cached media playlist reference, prunes stale
  // sidx mappings, and regenerates sidx segments before refreshing the
  // active media playlist.
  _proto.refreshXml_ = function refreshXml_() {
    var _this8 = this;

    this.requestMaster_(function (req, masterChanged) {
      if (!masterChanged) {
        return;
      }

      if (_this8.media_) {
        // playlist objects were rebuilt by the master update, so re-resolve
        // the cached reference by id
        _this8.media_ = _this8.masterPlaylistLoader_.master.playlists[_this8.media_.id];
      } // This will filter out updated sidx info from the mapping


      _this8.masterPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(_this8.masterPlaylistLoader_.master, _this8.masterPlaylistLoader_.sidxMapping_);

      _this8.addSidxSegments_(_this8.media(), _this8.state, function (sidxChanged) {
        // TODO: do we need to reload the current playlist?
        _this8.refreshMedia_(_this8.media().id);
      });
    });
  }
  /**
   * Refreshes the media playlist by re-parsing the master xml and updating playlist
   * references. If this is an alternate loader, the updated parsed manifest is retrieved
   * from the master loader.
   */
  ;
3950
  // Refreshes the playlist identified by mediaID: reparses the master when
  // needed, swaps in the updated playlist object (or fires
  // 'playlistunchanged'), and schedules the next live refresh via
  // 'mediaupdatetimeout'. Always finishes by firing 'loadedplaylist'.
  //
  // @param {string} mediaID - id of the playlist to refresh (required)
  _proto.refreshMedia_ = function refreshMedia_(mediaID) {
    var _this9 = this;

    if (!mediaID) {
      throw new Error('refreshMedia_ must take a media id');
    } // for master we have to reparse the master xml
    // to re-create segments based on current timing values
    // which may change media. We only skip updating master
    // if this is the first time this.media_ is being set.
    // as master was just parsed in that case.


    if (this.media_ && this.isMaster_) {
      this.handleMaster_();
    }

    var playlists = this.masterPlaylistLoader_.master.playlists;
    var mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];

    if (mediaChanged) {
      this.media_ = playlists[mediaID];
    } else {
      this.trigger('playlistunchanged');
    }

    if (!this.mediaUpdateTimeout) {
      // live playlists (no endList) are polled on a refreshDelay cadence;
      // the timeout re-arms itself after every tick
      var createMediaUpdateTimeout = function createMediaUpdateTimeout() {
        if (_this9.media().endList) {
          return;
        }

        _this9.mediaUpdateTimeout = window$1.setTimeout(function () {
          _this9.trigger('mediaupdatetimeout');

          createMediaUpdateTimeout();
        }, refreshDelay(_this9.media(), Boolean(mediaChanged)));
      };

      createMediaUpdateTimeout();
    }

    this.trigger('loadedplaylist');
  };
3994
3995 return DashPlaylistLoader;
3996}(EventTarget);
3997
// Tunable constants for buffering and adaptive bitrate selection.
// NOTE(review): the length/water-line values appear to be seconds of
// buffered media and the *_RATE values growth factors — confirm against the
// code that consumes Config before relying on these descriptions.
var Config = {
  // forward-buffer goal
  GOAL_BUFFER_LENGTH: 30,
  // hard cap on the forward-buffer goal
  MAX_GOAL_BUFFER_LENGTH: 60,
  // how much already-played media to retain behind the playhead
  BACK_BUFFER_LENGTH: 30,
  GOAL_BUFFER_LENGTH_RATE: 1,
  // 0.5 MB/s (value is expressed in bits per second)
  INITIAL_BANDWIDTH: 4194304,
  // A fudge factor to apply to advertised playlist bitrates to account for
  // temporary fluctuations in client bandwidth
  BANDWIDTH_VARIANCE: 1.2,
  // How much of the buffer must be filled before we consider upswitching
  BUFFER_LOW_WATER_LINE: 0,
  MAX_BUFFER_LOW_WATER_LINE: 30,
  // TODO: Remove this when experimentalBufferBasedABR is removed
  EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
  BUFFER_LOW_WATER_LINE_RATE: 1,
  // If the buffer is greater than the high water line, we won't switch down
  BUFFER_HIGH_WATER_LINE: 30
};
4017
// Converts a binary string into an ArrayBuffer, one byte per character
// (char codes above 0xFF are truncated by the Uint8Array store).
var stringToArrayBuffer = function stringToArrayBuffer(string) {
  var bytes = new Uint8Array(string.length);
  var index = string.length;

  while (index--) {
    bytes[index] = string.charCodeAt(index);
  }

  return bytes.buffer;
};
4027
4028/* global Blob, BlobBuilder, Worker */
4029// unify worker interface
// Gives a DOM-style worker object node-style `on`/`off` aliases so the same
// event wiring works in both environments. Mutates and returns the argument.
var browserWorkerPolyFill = function browserWorkerPolyFill(workerObj) {
  // node workers only expose on/off, so alias the DOM listener API
  var addListener = workerObj.addEventListener;
  var removeListener = workerObj.removeEventListener;
  workerObj.on = addListener;
  workerObj.off = removeListener;
  return workerObj;
};
4036
// Creates a blob: object URL for a string of JavaScript source so it can be
// loaded as a Worker script.
var createObjectURL = function createObjectURL(str) {
  try {
    // modern browsers: wrap the source text in a Blob directly
    return URL.createObjectURL(new Blob([str], {
      type: 'application/javascript'
    }));
  } catch (e) {
    // fall back to the legacy BlobBuilder API when the Blob constructor
    // is unavailable
    var blob = new BlobBuilder();
    blob.append(str);
    return URL.createObjectURL(blob.getBlob());
  }
};
4048
// Returns a zero-argument constructor that spins up a Worker running `code`
// (delivered via a blob object URL). The worker's terminate() is wrapped so
// the object URL is revoked when the worker is shut down, avoiding a leak.
var factory = function factory(code) {
  return function () {
    var objectUrl = createObjectURL(code);
    var worker = browserWorkerPolyFill(new Worker(objectUrl));
    worker.objURL = objectUrl; // keep a handle so terminate() below can revoke the blob URL

    var terminate = worker.terminate; // redundant with browserWorkerPolyFill above, but harmless

    worker.on = worker.addEventListener;
    worker.off = worker.removeEventListener;

    worker.terminate = function () {
      URL.revokeObjectURL(objectUrl);
      return terminate.call(this);
    };

    return worker;
  };
};
// Prepends the worker-interface polyfill (applied to the worker's own
// global `self`) to a worker source string.
var transform = function transform(code) {
  var polyfillSource = browserWorkerPolyFill.toString();
  var preamble = "var browserWorkerPolyFill = " + polyfillSource + ";\n" + 'browserWorkerPolyFill(self);\n';
  return preamble + code;
};
4069
// Extracts the body of a function as source text by stripping the
// `function ... {` header and the trailing `}` from its toString() output.
var getWorkerString = function getWorkerString(fn) {
  var source = fn.toString();
  var withoutHeader = source.replace(/^function.+?{/, '');
  return withoutHeader.slice(0, -1);
};
4073
4074/* rollup-plugin-worker-factory start for worker!/Users/ddashkevich/projects/vhs-release/src/transmuxer-worker.js */
4075var workerCode$1 = transform(getWorkerString(function () {
4076 /**
4077 * mux.js
4078 *
4079 * Copyright (c) Brightcove
4080 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4081 *
4082 * A lightweight readable stream implemention that handles event dispatching.
4083 * Objects that inherit from streams should call init in their constructors.
4084 */
4085
4086 var Stream = function Stream() {
4087 this.init = function () {
4088 var listeners = {};
4089 /**
4090 * Add a listener for a specified event type.
4091 * @param type {string} the event name
4092 * @param listener {function} the callback to be invoked when an event of
4093 * the specified type occurs
4094 */
4095
4096 this.on = function (type, listener) {
4097 if (!listeners[type]) {
4098 listeners[type] = [];
4099 }
4100
4101 listeners[type] = listeners[type].concat(listener);
4102 };
4103 /**
4104 * Remove a listener for a specified event type.
4105 * @param type {string} the event name
4106 * @param listener {function} a function previously registered for this
4107 * type of event through `on`
4108 */
4109
4110
4111 this.off = function (type, listener) {
4112 var index;
4113
4114 if (!listeners[type]) {
4115 return false;
4116 }
4117
4118 index = listeners[type].indexOf(listener);
4119 listeners[type] = listeners[type].slice();
4120 listeners[type].splice(index, 1);
4121 return index > -1;
4122 };
4123 /**
4124 * Trigger an event of the specified type on this stream. Any additional
4125 * arguments to this function are passed as parameters to event listeners.
4126 * @param type {string} the event name
4127 */
4128
4129
4130 this.trigger = function (type) {
4131 var callbacks, i, length, args;
4132 callbacks = listeners[type];
4133
4134 if (!callbacks) {
4135 return;
4136 } // Slicing the arguments on every invocation of this method
4137 // can add a significant amount of overhead. Avoid the
4138 // intermediate object creation for the common case of a
4139 // single callback argument
4140
4141
4142 if (arguments.length === 2) {
4143 length = callbacks.length;
4144
4145 for (i = 0; i < length; ++i) {
4146 callbacks[i].call(this, arguments[1]);
4147 }
4148 } else {
4149 args = [];
4150 i = arguments.length;
4151
4152 for (i = 1; i < arguments.length; ++i) {
4153 args.push(arguments[i]);
4154 }
4155
4156 length = callbacks.length;
4157
4158 for (i = 0; i < length; ++i) {
4159 callbacks[i].apply(this, args);
4160 }
4161 }
4162 };
4163 /**
4164 * Destroys the stream and cleans up.
4165 */
4166
4167
4168 this.dispose = function () {
4169 listeners = {};
4170 };
4171 };
4172 };
4173 /**
4174 * Forwards all `data` events on this stream to the destination stream. The
4175 * destination stream should provide a method `push` to receive the data
4176 * events as they arrive.
4177 * @param destination {stream} the stream that will receive all `data` events
4178 * @param autoFlush {boolean} if false, we will not call `flush` on the destination
4179 * when the current stream emits a 'done' event
4180 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
4181 */
4182
4183
4184 Stream.prototype.pipe = function (destination) {
4185 this.on('data', function (data) {
4186 destination.push(data);
4187 });
4188 this.on('done', function (flushSource) {
4189 destination.flush(flushSource);
4190 });
4191 this.on('partialdone', function (flushSource) {
4192 destination.partialFlush(flushSource);
4193 });
4194 this.on('endedtimeline', function (flushSource) {
4195 destination.endTimeline(flushSource);
4196 });
4197 this.on('reset', function (flushSource) {
4198 destination.reset(flushSource);
4199 });
4200 return destination;
4201 }; // Default stream functions that are expected to be overridden to perform
  // actual work. These are provided by the prototype as a sort of no-op
  // implementation so that we don't have to check for their existence in the
  // `pipe` function above.


  // re-emit pushed data so piped destinations receive it
  Stream.prototype.push = function (data) {
    this.trigger('data', data);
  };

  // signal end-of-input to piped destinations
  Stream.prototype.flush = function (flushSource) {
    this.trigger('done', flushSource);
  };

  // signal a partial flush (low-latency path) to piped destinations
  Stream.prototype.partialFlush = function (flushSource) {
    this.trigger('partialdone', flushSource);
  };

  // signal that the current timeline has ended
  Stream.prototype.endTimeline = function (flushSource) {
    this.trigger('endedtimeline', flushSource);
  };

  // signal that piped destinations should discard buffered state
  Stream.prototype.reset = function (flushSource) {
    this.trigger('reset', flushSource);
  };

  // module-style alias used by the bundled worker code below
  var stream = Stream;
4228 var MAX_UINT32$1 = Math.pow(2, 32);
4229
4230 var getUint64$2 = function getUint64(uint8) {
4231 var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);
4232 var value;
4233
4234 if (dv.getBigUint64) {
4235 value = dv.getBigUint64(0);
4236
4237 if (value < Number.MAX_SAFE_INTEGER) {
4238 return Number(value);
4239 }
4240
4241 return value;
4242 }
4243
4244 return dv.getUint32(0) * MAX_UINT32$1 + dv.getUint32(4);
4245 };
4246
4247 var numbers = {
4248 getUint64: getUint64$2,
4249 MAX_UINT32: MAX_UINT32$1
4250 };
  var MAX_UINT32 = numbers.MAX_UINT32;
  var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants

  // Populates `types` with the 4-byte ascii code of every box name and
  // builds the constant box bodies reused by the generator functions below.
  (function () {
    var i;
    types = {
      avc1: [],
      // codingname
      avcC: [],
      btrt: [],
      dinf: [],
      dref: [],
      esds: [],
      ftyp: [],
      hdlr: [],
      mdat: [],
      mdhd: [],
      mdia: [],
      mfhd: [],
      minf: [],
      moof: [],
      moov: [],
      mp4a: [],
      // codingname
      mvex: [],
      mvhd: [],
      pasp: [],
      sdtp: [],
      smhd: [],
      stbl: [],
      stco: [],
      stsc: [],
      stsd: [],
      stsz: [],
      stts: [],
      styp: [],
      tfdt: [],
      tfhd: [],
      traf: [],
      trak: [],
      trun: [],
      trex: [],
      tkhd: [],
      vmhd: []
    }; // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we
    // don't throw an error

    if (typeof Uint8Array === 'undefined') {
      return;
    }

    // replace each placeholder with the box name's 4-byte ascii encoding
    for (i in types) {
      if (types.hasOwnProperty(i)) {
        types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
      }
    }

    MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
    AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
    MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
    VIDEO_HDLR = new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x00, // pre_defined
    0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
    ]);
    AUDIO_HDLR = new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x00, // pre_defined
    0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
    ]);
    HDLR_TYPES = {
      video: VIDEO_HDLR,
      audio: AUDIO_HDLR
    };
    DREF = new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x01, // entry_count
    0x00, 0x00, 0x00, 0x0c, // entry_size
    0x75, 0x72, 0x6c, 0x20, // 'url' type
    0x00, // version 0
    0x00, 0x00, 0x01 // entry_flags
    ]);
    SMHD = new Uint8Array([0x00, // version
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, // balance, 0 means centered
    0x00, 0x00 // reserved
    ]);
    STCO = new Uint8Array([0x00, // version
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x00 // entry_count
    ]);
    // stsc and stts share the zero-entry table with stco (same Uint8Array
    // instance; safe because these constants are never mutated)
    STSC = STCO;
    STSZ = new Uint8Array([0x00, // version
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x00, // sample_size
    0x00, 0x00, 0x00, 0x00 // sample_count
    ]);
    STTS = STCO;
    VMHD = new Uint8Array([0x00, // version
    0x00, 0x00, 0x01, // flags
    0x00, 0x00, // graphicsmode
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
    ]);
  })();
4363
4364 box = function box(type) {
4365 var payload = [],
4366 size = 0,
4367 i,
4368 result,
4369 view;
4370
4371 for (i = 1; i < arguments.length; i++) {
4372 payload.push(arguments[i]);
4373 }
4374
4375 i = payload.length; // calculate the total size we need to allocate
4376
4377 while (i--) {
4378 size += payload[i].byteLength;
4379 }
4380
4381 result = new Uint8Array(size + 8);
4382 view = new DataView(result.buffer, result.byteOffset, result.byteLength);
4383 view.setUint32(0, result.byteLength);
4384 result.set(type, 4); // copy the payload into the result
4385
4386 for (i = 0, size = 8; i < payload.length; i++) {
4387 result.set(payload[i], size);
4388 size += payload[i].byteLength;
4389 }
4390
4391 return result;
4392 };
4393
  // dinf: data information box wrapping the default data reference (dref)
  dinf = function dinf() {
    return box(types.dinf, box(types.dref, DREF));
  };

  // esds: elementary stream descriptor carrying the AAC AudioSpecificConfig
  // derived from the track's object type, sampling index and channel count
  esds = function esds(track) {
    return box(types.esds, new Uint8Array([0x00, // version
    0x00, 0x00, 0x00, // flags
    // ES_Descriptor
    0x03, // tag, ES_DescrTag
    0x19, // length
    0x00, 0x00, // ES_ID
    0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
    // DecoderConfigDescriptor
    0x04, // tag, DecoderConfigDescrTag
    0x11, // length
    0x40, // object type
    0x15, // streamType
    0x00, 0x06, 0x00, // bufferSizeDB
    0x00, 0x00, 0xda, 0xc0, // maxBitrate
    0x00, 0x00, 0xda, 0xc0, // avgBitrate
    // DecoderSpecificInfo
    0x05, // tag, DecoderSpecificInfoTag
    0x02, // length
    // ISO/IEC 14496-3, AudioSpecificConfig
    // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
    track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
    ]));
  };

  // ftyp: file type box advertising the isom major brand with avc1 compatible
  ftyp = function ftyp() {
    return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
  };

  // hdlr: handler box, selecting the precomputed 'vide' or 'soun' body
  hdlr = function hdlr(type) {
    return box(types.hdlr, HDLR_TYPES[type]);
  };

  // mdat: media data box wrapping the raw sample payload
  mdat = function mdat(data) {
    return box(types.mdat, data);
  };
4434
  // mdhd: media header box carrying timescale, duration and language
  mdhd = function mdhd(track) {
    var result = new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x02, // creation_time
    0x00, 0x00, 0x00, 0x03, // modification_time
    0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
    track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
    0x55, 0xc4, // 'und' language (undetermined)
    0x00, 0x00]); // Use the sample rate from the track metadata, when it is
    // defined. The sample rate can be parsed out of an ADTS header, for
    // instance.

    if (track.samplerate) {
      // bytes 12-15 are the timescale field built above; audio tracks use
      // their sample rate as the timescale instead of 90kHz
      result[12] = track.samplerate >>> 24 & 0xFF;
      result[13] = track.samplerate >>> 16 & 0xFF;
      result[14] = track.samplerate >>> 8 & 0xFF;
      result[15] = track.samplerate & 0xFF;
    }

    return box(types.mdhd, result);
  };

  // mdia: media box combining the media header, handler and media information
  mdia = function mdia(track) {
    return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
  };

  // mfhd: movie fragment header carrying the fragment sequence number
  mfhd = function mfhd(sequenceNumber) {
    return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
    (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
    ]));
  };

  // minf: media information box; vmhd for video tracks, smhd for audio
  minf = function minf(track) {
    return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
  };
4470
  // moof: movie fragment box — fragment header plus one traf per track
  moof = function moof(sequenceNumber, tracks) {
    var trackFragments = [],
        i = tracks.length; // build traf boxes for each track fragment

    while (i--) {
      trackFragments[i] = traf(tracks[i]);
    }

    return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
  };
  /**
   * Returns a movie box.
   * @param tracks {array} the tracks associated with this movie
   * @see ISO/IEC 14496-12:2012(E), section 8.2.1
   */


  moov = function moov(tracks) {
    var i = tracks.length,
        boxes = [];

    while (i--) {
      boxes[i] = trak(tracks[i]);
    }

    // 0xffffffff duration means "unknown" (fragmented streams carry real
    // durations in the fragments)
    return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
  };

  // mvex: movie extends box — one trex per track, marking the file as
  // fragmented
  mvex = function mvex(tracks) {
    var i = tracks.length,
        boxes = [];

    while (i--) {
      boxes[i] = trex(tracks[i]);
    }

    return box.apply(null, [types.mvex].concat(boxes));
  };

  // mvhd: movie header box with a 90kHz timescale and the given duration
  mvhd = function mvhd(duration) {
    var bytes = new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x01, // creation_time
    0x00, 0x00, 0x00, 0x02, // modification_time
    0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
    (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
    0x00, 0x01, 0x00, 0x00, // 1.0 rate
    0x01, 0x00, // 1.0 volume
    0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
    0xff, 0xff, 0xff, 0xff // next_track_ID
    ]);
    return box(types.mvhd, bytes);
  };
4528
  // sdtp: independent-and-disposable-samples box; one byte per sample
  // encoding its dependsOn/isDependedOn/hasRedundancy flags
  sdtp = function sdtp(track) {
    var samples = track.samples || [],
        bytes = new Uint8Array(4 + samples.length),
        flags,
        i; // leave the full box header (4 bytes) all zero
    // write the sample table

    for (i = 0; i < samples.length; i++) {
      flags = samples[i].flags;
      bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
    }

    return box(types.sdtp, bytes);
  };

  // stbl: sample table box; all sub-tables except stsd are the constant
  // zero-entry bodies because samples live in movie fragments
  stbl = function stbl(track) {
    return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
  };
4547
  // stsd (sample description) and its avc1/mp4a sample-entry builders.
  (function () {
    var videoSample, audioSample;

    // stsd: single-entry sample description, avc1 for video or mp4a for audio
    stsd = function stsd(track) {
      return box(types.stsd, new Uint8Array([0x00, // version 0
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
    };

    // avc1 sample entry: visual fields plus avcC (decoder config with the
    // track's SPS/PPS), btrt, and optionally pasp when a sample aspect
    // ratio is present
    videoSample = function videoSample(track) {
      var sps = track.sps || [],
          pps = track.pps || [],
          sequenceParameterSets = [],
          pictureParameterSets = [],
          i,
          avc1Box; // assemble the SPSs

      for (i = 0; i < sps.length; i++) {
        sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
        sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength

        sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
      } // assemble the PPSs


      for (i = 0; i < pps.length; i++) {
        pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
        pictureParameterSets.push(pps[i].byteLength & 0xFF);
        pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
      }

      avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x01, // data_reference_index
      0x00, 0x00, // pre_defined
      0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
      (track.width & 0xff00) >> 8, track.width & 0xff, // width
      (track.height & 0xff00) >> 8, track.height & 0xff, // height
      0x00, 0x48, 0x00, 0x00, // horizresolution
      0x00, 0x48, 0x00, 0x00, // vertresolution
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x01, // frame_count
      0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
      0x00, 0x18, // depth = 24
      // NOTE(review): the spec value for pre_defined here is -1 (0xffff);
      // 0x1111 matches what this generator has always emitted
      0x11, 0x11 // pre_defined = -1
      ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
      track.profileIdc, // AVCProfileIndication
      track.profileCompatibility, // profile_compatibility
      track.levelIdc, // AVCLevelIndication
      0xff // lengthSizeMinusOne, hard-coded to 4 bytes
      ].concat([sps.length], // numOfSequenceParameterSets
      sequenceParameterSets, // "SPS"
      [pps.length], // numOfPictureParameterSets
      pictureParameterSets // "PPS"
      ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
      0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
      0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
      ]))];

      if (track.sarRatio) {
        var hSpacing = track.sarRatio[0],
            vSpacing = track.sarRatio[1];
        avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
      }

      return box.apply(null, avc1Box);
    };

    // mp4a sample entry: audio fields plus the esds decoder configuration
    audioSample = function audioSample(track) {
      return box(types.mp4a, new Uint8Array([// SampleEntry, ISO/IEC 14496-12
      0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x01, // data_reference_index
      // AudioSampleEntry, ISO/IEC 14496-12
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00, // reserved
      (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount
      (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
      0x00, 0x00, // pre_defined
      0x00, 0x00, // reserved
      (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16
      // MP4AudioSampleEntry, ISO/IEC 14496-14
      ]), esds(track));
    };
  })();
4632
  // tkhd: track header box carrying track id, duration and presentation size
  tkhd = function tkhd(track) {
    var result = new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x07, // flags
    0x00, 0x00, 0x00, 0x00, // creation_time
    0x00, 0x00, 0x00, 0x00, // modification_time
    (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
    0x00, 0x00, 0x00, 0x00, // reserved
    (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, // layer
    0x00, 0x00, // alternate_group
    0x01, 0x00, // non-audio track volume
    0x00, 0x00, // reserved
    0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
    (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
    (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
    ]);
    return box(types.tkhd, result);
  };
  /**
   * Generate a track fragment (traf) box. A traf box collects metadata
   * about tracks in a movie fragment (moof) box.
   */


  traf = function traf(track) {
    var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
    trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x3a, // flags
    (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
    0x00, 0x00, 0x00, 0x01, // sample_description_index
    0x00, 0x00, 0x00, 0x00, // default_sample_duration
    0x00, 0x00, 0x00, 0x00, // default_sample_size
    0x00, 0x00, 0x00, 0x00 // default_sample_flags
    ]));
    // baseMediaDecodeTime may exceed 32 bits, so split it into two words
    upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / MAX_UINT32);
    lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % MAX_UINT32);
    trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
    0x00, 0x00, 0x00, // flags
    // baseMediaDecodeTime
    upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of
    // the containing moof to the first payload byte of the associated
    // mdat

    dataOffset = 32 + // tfhd
    20 + // tfdt
    8 + // traf header
    16 + // mfhd
    8 + // moof header
    8; // mdat header
    // audio tracks require less metadata

    if (track.type === 'audio') {
      trackFragmentRun = trun$1(track, dataOffset);
      return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
    } // video tracks should contain an independent and disposable samples
    // box (sdtp)
    // generate one and adjust offsets to match


    sampleDependencyTable = sdtp(track);
    trackFragmentRun = trun$1(track, sampleDependencyTable.length + dataOffset);
    return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
  };
  /**
   * Generate a track box.
   * @param track {object} a track definition
   * @return {Uint8Array} the track box
   */


  trak = function trak(track) {
    // NOTE: mutates the caller's track object by installing a default
    // "unknown" duration when none is set
    track.duration = track.duration || 0xffffffff;
    return box(types.trak, tkhd(track), mdia(track));
  };

  // trex: track extends box with default sample values for fragmented files
  trex = function trex(track) {
    var result = new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x00, // flags
    (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
    0x00, 0x00, 0x00, 0x01, // default_sample_description_index
    0x00, 0x00, 0x00, 0x00, // default_sample_duration
    0x00, 0x00, 0x00, 0x00, // default_sample_size
    0x00, 0x01, 0x00, 0x01 // default_sample_flags
    ]); // the last two bytes of default_sample_flags is the sample
    // degradation priority, a hint about the importance of this sample
    // relative to others. Lower the degradation priority for all sample
    // types other than video.

    if (track.type !== 'video') {
      result[result.length - 1] = 0x00;
    }

    return box(types.trex, result);
  };
4728
4729 (function () {
4730 var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a
4731 // duration is present for the first sample, it will be present for
4732 // all subsequent samples.
4733 // see ISO/IEC 14496-12:2012, Section 8.8.8.1
4734
4735 trunHeader = function trunHeader(samples, offset) {
4736 var durationPresent = 0,
4737 sizePresent = 0,
4738 flagsPresent = 0,
4739 compositionTimeOffset = 0; // trun flag constants
4740
4741 if (samples.length) {
4742 if (samples[0].duration !== undefined) {
4743 durationPresent = 0x1;
4744 }
4745
4746 if (samples[0].size !== undefined) {
4747 sizePresent = 0x2;
4748 }
4749
4750 if (samples[0].flags !== undefined) {
4751 flagsPresent = 0x4;
4752 }
4753
4754 if (samples[0].compositionTimeOffset !== undefined) {
4755 compositionTimeOffset = 0x8;
4756 }
4757 }
4758
4759 return [0x00, // version 0
4760 0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags
4761 (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
4762 (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
4763 ];
4764 };
4765
videoTrun = function videoTrun(track, offset) {
  /**
   * Serialize a trun (track fragment run) box for a video track.
   *
   * @param {Object} track track metadata; `track.samples` holds per-sample
   *        duration/size/flags/compositionTimeOffset entries
   * @param {Number} offset byte offset from the start of the enclosing moof
   *        to the first byte of sample data (before this box is counted)
   * @return {Uint8Array} the serialized trun box
   */
  var bytesOffset, bytes, header, samples, sample, i;
  samples = track.samples || []; // each video sample row is 16 bytes: duration, size, flags, cts offset

  offset += 8 + 12 + 16 * samples.length;
  header = trunHeader(samples, offset);
  bytes = new Uint8Array(header.length + samples.length * 16);
  bytes.set(header);
  bytesOffset = header.length;

  for (i = 0; i < samples.length; i++) {
    sample = samples[i];
    bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
    bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
    bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
    bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration

    bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
    bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
    bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
    bytes[bytesOffset++] = sample.size & 0xFF; // sample_size

    // sample_flags, per ISO/IEC 14496-12 section 8.8.3.1
    bytes[bytesOffset++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
    bytes[bytesOffset++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
    // FIX: the previous code wrote `degradationPriority & 0xF0 << 8` here,
    // which (because `<<` binds tighter than `&`) masked with 0xF000 and,
    // after Uint8Array truncation, always stored 0 — and then dropped
    // bits 4-7 of the low byte with `& 0x0F`. sample_degradation_priority
    // is a 16-bit big-endian field.
    bytes[bytesOffset++] = (sample.flags.degradationPriority & 0xFF00) >>> 8;
    bytes[bytesOffset++] = sample.flags.degradationPriority & 0xFF; // sample_degradation_priority

    bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
    bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
    bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
    bytes[bytesOffset++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
  }

  return box(types.trun, bytes);
};
4800
audioTrun = function audioTrun(track, offset) {
  // Serialize a trun box for an audio track; each audio sample row carries
  // only a duration and a size (8 bytes per sample).
  var samples = track.samples || [];
  var header;
  var payload;
  var pos;
  var idx;
  var entry;

  offset += 8 + 12 + 8 * samples.length;
  header = trunHeader(samples, offset);
  payload = new Uint8Array(header.length + samples.length * 8);
  payload.set(header);
  pos = header.length;

  for (idx = 0; idx < samples.length; idx++) {
    entry = samples[idx];
    // sample_duration, big-endian 32-bit
    payload[pos++] = (entry.duration & 0xFF000000) >>> 24;
    payload[pos++] = (entry.duration & 0xFF0000) >>> 16;
    payload[pos++] = (entry.duration & 0xFF00) >>> 8;
    payload[pos++] = entry.duration & 0xFF;
    // sample_size, big-endian 32-bit
    payload[pos++] = (entry.size & 0xFF000000) >>> 24;
    payload[pos++] = (entry.size & 0xFF0000) >>> 16;
    payload[pos++] = (entry.size & 0xFF00) >>> 8;
    payload[pos++] = entry.size & 0xFF;
  }

  return box(types.trun, payload);
};
4825
trun$1 = function trun(track, offset) {
  // Audio and video tracks serialize different per-sample field sets
  return track.type === 'audio' ? audioTrun(track, offset) : videoTrun(track, offset);
};
4833 })();
4834
// Public MP4 box builders plus the init-segment helper
var mp4Generator = {
  ftyp: ftyp,
  mdat: mdat,
  moof: moof,
  moov: moov,
  // Build a complete initialization segment: an ftyp box followed by a moov
  initSegment: function initSegment(tracks) {
    var fileTypeBox = ftyp();
    var movieBox = moov(tracks);
    var segment = new Uint8Array(fileTypeBox.byteLength + movieBox.byteLength);
    segment.set(fileTypeBox);
    segment.set(movieBox, fileTypeBox.byteLength);
    return segment;
  }
};
4850 /**
4851 * mux.js
4852 *
4853 * Copyright (c) Brightcove
4854 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4855 */
4856 // Convert an array of nal units into an array of frames with each frame being
4857 // composed of the nal units that make up that frame
// Also keep track of cumulative data about the frame from the nal units such
4859 // as the frame duration, starting pts, etc.
4860
var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
  // Split a flat list of NAL units into frames, using 'aud' (access unit
  // delimiter) NALs as the frame boundaries. The returned array also carries
  // running totals (byteLength / nalCount / duration) across all frames.
  var frames = [];
  var currentFrame = []; // TODO added for LHLS, make sure this is OK

  frames.byteLength = 0;
  frames.nalCount = 0;
  frames.duration = 0;
  currentFrame.byteLength = 0;

  for (var idx = 0; idx < nalUnits.length; idx++) {
    var nalUnit = nalUnits[idx];

    if (nalUnit.nalUnitType === 'access_unit_delimiter_rbsp') {
      // An AUD starts a new frame. The very first NAL is expected to be an
      // AUD, so only flush currentFrame when it actually holds data.
      if (currentFrame.length) {
        currentFrame.duration = nalUnit.dts - currentFrame.dts;
        frames.byteLength += currentFrame.byteLength;
        frames.nalCount += currentFrame.length;
        frames.duration += currentFrame.duration;
        frames.push(currentFrame);
      }

      currentFrame = [nalUnit];
      currentFrame.byteLength = nalUnit.data.byteLength;
      currentFrame.pts = nalUnit.pts;
      currentFrame.dts = nalUnit.dts;
    } else {
      // Flag IDR slices so key frames are easy to find later
      if (nalUnit.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
        currentFrame.keyFrame = true;
      }

      currentFrame.duration = nalUnit.dts - currentFrame.dts;
      currentFrame.byteLength += nalUnit.data.byteLength;
      currentFrame.push(nalUnit);
    }
  }

  // For the last frame, fall back to the previous frame's duration when we
  // have nothing better to go on
  if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
    currentFrame.duration = frames[frames.length - 1].duration;
  }

  // Push the final frame and fold it into the running totals
  frames.byteLength += currentFrame.byteLength;
  frames.nalCount += currentFrame.length;
  frames.duration += currentFrame.duration;
  frames.push(currentFrame);
  return frames;
}; // Convert an array of frames into an array of Gop with each Gop being composed
4917 // of the frames that make up that Gop
// Also keep track of cumulative data about the Gop from the frames such as the
4919 // Gop duration, starting pts, etc.
4920
4921
var groupFramesIntoGops = function groupFramesIntoGops(frames) {
  // Group frames into GOPs, starting a new GOP at every keyframe. Both the
  // current GOP and the GOP list keep running totals that are maintained
  // incrementally as frames are appended.
  var gops = [];
  var currentGop = [];

  currentGop.byteLength = 0;
  currentGop.nalCount = 0;
  currentGop.duration = 0;
  currentGop.pts = frames[0].pts;
  currentGop.dts = frames[0].dts;
  gops.byteLength = 0;
  gops.nalCount = 0;
  gops.duration = 0;
  gops.pts = frames[0].pts;
  gops.dts = frames[0].dts;

  for (var idx = 0; idx < frames.length; idx++) {
    var frame = frames[idx];

    if (frame.keyFrame) {
      // The very first frame is expected to be a keyframe, so only flush
      // currentGop when it is non-empty
      if (currentGop.length) {
        gops.push(currentGop);
        gops.byteLength += currentGop.byteLength;
        gops.nalCount += currentGop.nalCount;
        gops.duration += currentGop.duration;
      }

      currentGop = [frame];
      currentGop.nalCount = frame.length;
      currentGop.byteLength = frame.byteLength;
      currentGop.pts = frame.pts;
      currentGop.dts = frame.dts;
      currentGop.duration = frame.duration;
    } else {
      currentGop.duration += frame.duration;
      currentGop.nalCount += frame.length;
      currentGop.byteLength += frame.byteLength;
      currentGop.push(frame);
    }
  }

  // Borrow the previous GOP's duration when the final one has none
  if (gops.length && currentGop.duration <= 0) {
    currentGop.duration = gops[gops.length - 1].duration;
  }

  gops.byteLength += currentGop.byteLength;
  gops.nalCount += currentGop.nalCount;
  gops.duration += currentGop.duration; // push the final Gop

  gops.push(currentGop);
  return gops;
};
4979 /*
4980 * Search for the first keyframe in the GOPs and throw away all frames
4981 * until that keyframe. Then extend the duration of the pulled keyframe
4982 * and pull the PTS and DTS of the keyframe so that it covers the time
4983 * range of the frames that were disposed.
4984 *
4985 * @param {Array} gops video GOPs
4986 * @returns {Array} modified video GOPs
4987 */
4988
4989
var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
  // When the leading GOP does not begin on a keyframe, drop it and stretch
  // the first frame of the next GOP (dts/pts/duration) to cover the time
  // range of the frames that were thrown away.
  if (!gops[0][0].keyFrame && gops.length > 1) {
    var removedGop = gops.shift();
    gops.byteLength -= removedGop.byteLength;
    gops.nalCount -= removedGop.nalCount;
    gops[0][0].dts = removedGop.dts;
    gops[0][0].pts = removedGop.pts;
    gops[0][0].duration += removedGop.duration;
  }

  return gops;
};
5008 /**
5009 * Default sample object
5010 * see ISO/IEC 14496-12:2012, section 8.6.4.3
5011 */
5012
5013
var createDefaultSample = function createDefaultSample() {
  // Default sample object, see ISO/IEC 14496-12:2012, section 8.6.4.3
  return {
    size: 0,
    flags: {
      isLeading: 0,
      dependsOn: 1,
      isDependedOn: 0,
      hasRedundancy: 0,
      degradationPriority: 0,
      isNonSyncSample: 1
    }
  };
};
/*
 * Collates information from a video frame into an object for eventual
 * entry into an MP4 sample table.
 *
 * @param {Object} frame the video frame
 * @param {Number} dataOffset the byte offset to position the sample
 * @return {Object} object containing sample table info for a frame
 */


var sampleForFrame = function sampleForFrame(frame, dataOffset) {
  var sample = createDefaultSample();
  sample.dataOffset = dataOffset;
  sample.compositionTimeOffset = frame.pts - frame.dts; // cts offset is pts - dts
  sample.duration = frame.duration;
  // four bytes of length prefix per NAL unit plus the NAL payload bytes
  sample.size = 4 * frame.length + frame.byteLength;

  if (frame.keyFrame) {
    // sync samples may be depended on but depend on nothing themselves
    sample.flags.dependsOn = 2;
    sample.flags.isNonSyncSample = 0;
  }

  return sample;
}; // generate the track's sample table from an array of gops
5053
5054
var generateSampleTable$1 = function generateSampleTable(gops, baseDataOffset) {
  // Walk every frame of every GOP, building a flat sample table and
  // advancing the running data offset by each sample's size.
  var samples = [];
  var dataOffset = baseDataOffset || 0;

  for (var g = 0; g < gops.length; g++) {
    var gop = gops[g];

    for (var f = 0; f < gop.length; f++) {
      var sample = sampleForFrame(gop[f], dataOffset);
      dataOffset += sample.size;
      samples.push(sample);
    }
  }

  return samples;
}; // generate the track's raw mdat data from an array of gops
5077
5078
var concatenateNalData = function concatenateNalData(gops) {
  // Flatten every NAL unit of every frame of every GOP into one buffer,
  // prefixing each NAL payload with its 32-bit big-endian byte length.
  var writeOffset = 0;
  var totalByteLength = gops.byteLength + 4 * gops.nalCount;
  var data = new Uint8Array(totalByteLength);
  var view = new DataView(data.buffer);

  for (var g = 0; g < gops.length; g++) {
    var gop = gops[g]; // For each Frame..

    for (var f = 0; f < gop.length; f++) {
      var frame = gop[f]; // For each NAL..

      for (var n = 0; n < frame.length; n++) {
        var nalUnit = frame[n];
        view.setUint32(writeOffset, nalUnit.data.byteLength);
        writeOffset += 4;
        data.set(nalUnit.data, writeOffset);
        writeOffset += nalUnit.data.byteLength;
      }
    }
  }

  return data;
}; // generate the track's sample table from a frame
5111
5112
var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
  // Single-frame variant of generateSampleTable: one entry, offset defaults to 0
  return [sampleForFrame(frame, baseDataOffset || 0)];
}; // generate the track's raw mdat data from a frame
5121
5122
var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
  // Single-frame variant of concatenateNalData: length-prefix and pack each
  // NAL unit of one frame into a single buffer.
  var writeOffset = 0;
  var data = new Uint8Array(frame.byteLength + 4 * frame.length);
  var view = new DataView(data.buffer);

  for (var n = 0; n < frame.length; n++) {
    var nalUnit = frame[n];
    view.setUint32(writeOffset, nalUnit.data.byteLength);
    writeOffset += 4;
    data.set(nalUnit.data, writeOffset);
    writeOffset += nalUnit.data.byteLength;
  }

  return data;
};
5143
// Helpers for converting parsed H.264 NAL units into MP4 sample data
var frameUtils = {
  groupNalsIntoFrames,
  groupFramesIntoGops,
  extendFirstKeyFrame,
  generateSampleTable: generateSampleTable$1,
  concatenateNalData,
  generateSampleTableForFrame,
  concatenateNalDataForFrame
};
5153 /**
5154 * mux.js
5155 *
5156 * Copyright (c) Brightcove
5157 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5158 */
5159
// Shared byte prefixes used to assemble the pre-generated AAC silent frames
// in the coneOfSilence table below (see silence_1)
var highPrefix = [33, 16, 5, 32, 164, 27];
var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
5162
/**
 * Build an array containing `count` zeros.
 *
 * @param {Number} count number of zero entries (assumed non-negative)
 * @return {Array} an array of `count` zeros
 */
var zeroFill = function zeroFill(count) {
  // Idiomatic replacement for the old manual `while (count--) a.push(0)`
  // loop; also avoids that loop's hang on a negative count.
  return new Array(count).fill(0);
};
5172
var makeTable = function makeTable(metaTable) {
  // Expand each entry's list of byte-array parts into one flat Uint8Array,
  // keyed by the original property name.
  var table = {};
  Object.keys(metaTable).forEach(function (key) {
    var flattened = [];
    metaTable[key].forEach(function (part) {
      flattened = flattened.concat(part);
    });
    table[key] = new Uint8Array(flattened);
  });
  return table;
};
5181
var silence;

var silence_1 = function silence_1() {
  // Return the memoized table of pre-generated AAC frames of silence,
  // keyed by sample rate, building it on first use.
  if (silence) {
    return silence;
  }

  // Frames-of-silence to use for filling in missing AAC frames
  silence = makeTable({
    96000: [highPrefix, [227, 64], zeroFill(154), [56]],
    88200: [highPrefix, [231], zeroFill(170), [56]],
    64000: [highPrefix, [248, 192], zeroFill(240), [56]],
    48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
    44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
    32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
    24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
    16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
    12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
    11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
    8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
  });
  return silence;
};
5205 /**
5206 * mux.js
5207 *
5208 * Copyright (c) Brightcove
5209 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5210 */
5211
5212
// 90kHz MPEG-2 timestamp clock
var ONE_SECOND_IN_TS$4 = 90000;

var secondsToVideoTs = function secondsToVideoTs(seconds) {
  return seconds * ONE_SECOND_IN_TS$4;
};

var secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
  return seconds * sampleRate;
};

var videoTsToSeconds = function videoTsToSeconds(timestamp) {
  return timestamp / ONE_SECOND_IN_TS$4;
};

var audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
  return timestamp / sampleRate;
};

var audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
  return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
};

var videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
  return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
};
/**
 * Adjust ID3 tag or caption timing information by the timeline pts values
 * (if keepOriginalTimestamps is false) and convert to seconds
 */


var metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
  return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
};

// Conversions between the 90kHz video clock, sample-rate audio clocks,
// and wall-clock seconds
var clock = {
  ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$4,
  secondsToVideoTs,
  secondsToAudioTs,
  videoTsToSeconds,
  audioTsToSeconds,
  audioTsToVideoTs,
  videoTsToAudioTs,
  metadataTsToSeconds
};
5266 /**
5267 * mux.js
5268 *
5269 * Copyright (c) Brightcove
5270 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5271 */
5272
5273 /**
5274 * Sum the `byteLength` properties of the data in each AAC frame
5275 */
5276
var sumFrameByteLengths = function sumFrameByteLengths(array) {
  // Sum the byteLength of every frame's data payload
  return array.reduce(function (total, frame) {
    return total + frame.data.byteLength;
  }, 0);
}; // Possibly pad (prefix) the audio track with silence if appending this track
5289 // would lead to the introduction of a gap in the audio buffer
5290
5291
/**
 * Possibly prepend silent AAC frames to `frames` (mutated in place) so that
 * appending this segment does not introduce an audible gap, and rewind
 * track.baseMediaDecodeTime accordingly.
 *
 * @param {Object} track audio track metadata (reads samplerate, mutates baseMediaDecodeTime)
 * @param {Array} frames parsed AAC frames; silent frames are spliced onto the front
 * @param {Number} audioAppendStartTs 90kHz timestamp where audio append starts
 * @param {Number} videoBaseMediaDecodeTime 90kHz base decode time of the video track
 * @return {Number|undefined} the 90kHz duration of silence inserted, or
 *         undefined when nothing was inserted
 */
var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
  var baseMediaDecodeTimeTs,
      frameDuration = 0,
      audioGapDuration = 0,
      audioFillFrameCount = 0,
      audioFillDuration = 0,
      silentFrame,
      i,
      firstFrame;

  // nothing to pad
  if (!frames.length) {
    return;
  }

  baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfills

  // each AAC frame is 1024 samples long, converted here into 90kHz ticks
  frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));

  if (audioAppendStartTs && videoBaseMediaDecodeTime) {
    // insert the shortest possible amount (audio gap or audio to video gap)
    audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gap

    audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
    audioFillDuration = audioFillFrameCount * frameDuration;
  } // don't attempt to fill gaps smaller than a single frame or larger
  // than a half second


  if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
    return;
  }

  // pre-generated silent frame for this sample rate, if one exists
  silentFrame = silence_1()[track.samplerate];

  if (!silentFrame) {
    // we don't have a silent frame pregenerated for the sample rate, so use a frame
    // from the content instead
    silentFrame = frames[0].data;
  }

  // splice the fill frames onto the front, each one frameDuration earlier
  // than the current first frame
  for (i = 0; i < audioFillFrameCount; i++) {
    firstFrame = frames[0];
    frames.splice(0, 0, {
      data: silentFrame,
      dts: firstFrame.dts - frameDuration,
      pts: firstFrame.pts - frameDuration
    });
  }

  // rewind the track's base decode time (audio-clock units) to account for
  // the silence that now precedes the original first frame
  track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
  return audioFillDuration;
}; // If the audio segment extends before the earliest allowed dts
5344 // value, remove AAC frames until starts at or after the earliest
5345 // allowed DTS so that we don't end up with a negative baseMedia-
5346 // DecodeTime for the audio track
5347
5348
var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
  // Drop AAC frames that start before earliestAllowedDts so the track never
  // ends up with a negative baseMediaDecodeTime.
  if (track.minSegmentDts >= earliestAllowedDts) {
    // nothing precedes the cutoff; keep the frames untouched
    return adtsFrames;
  }

  // We will need to recalculate the earliest segment Dts
  track.minSegmentDts = Infinity;
  return adtsFrames.filter(function (frame) {
    if (frame.dts < earliestAllowedDts) {
      // discard frames before the cutoff
      return false;
    }

    // keep the frame and record its dts
    track.minSegmentDts = Math.min(track.minSegmentDts, frame.dts);
    track.minSegmentPts = track.minSegmentDts;
    return true;
  });
}; // generate the audio track's sample table from an array of ADTS frames
5368
5369
var generateSampleTable = function generateSampleTable(frames) {
  // Every AAC frame decodes to exactly 1024 audio samples, so each table
  // entry only needs the frame's byte size.
  return frames.map(function (frame) {
    return {
      size: frame.data.byteLength,
      duration: 1024 // For AAC audio, all samples contain 1024 samples
    };
  });
}; // concatenate the data of an array of AAC frames into one Uint8Array
5386
5387
var concatenateFrameData = function concatenateFrameData(frames) {
  // Pack each frame's payload back-to-back into a single buffer
  var data = new Uint8Array(sumFrameByteLengths(frames));
  var writeOffset = 0;

  frames.forEach(function (frame) {
    data.set(frame.data, writeOffset);
    writeOffset += frame.data.byteLength;
  });

  return data;
};
5402
// Helpers for shaping parsed AAC frames into MP4 sample data
var audioFrameUtils = {
  prefixWithSilence,
  trimAdtsFramesByEarliestDts,
  generateSampleTable,
  concatenateFrameData
};
5409 /**
5410 * mux.js
5411 *
5412 * Copyright (c) Brightcove
5413 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5414 */
5415
var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS; // 90kHz clock ticks per second
5417 /**
5418 * Store information about the start and end of the track and the
5419 * duration for each frame/sample we process in order to calculate
5420 * the baseMediaDecodeTime
5421 */
5422
var collectDtsInfo = function collectDtsInfo(track, data) {
  // Record first-seen / minimum / maximum PTS values on the track
  if (typeof data.pts === 'number') {
    if (track.timelineStartInfo.pts === undefined) {
      track.timelineStartInfo.pts = data.pts;
    }

    track.minSegmentPts = track.minSegmentPts === undefined ? data.pts : Math.min(track.minSegmentPts, data.pts);
    track.maxSegmentPts = track.maxSegmentPts === undefined ? data.pts : Math.max(track.maxSegmentPts, data.pts);
  }

  // Record first-seen / minimum / maximum DTS values on the track
  if (typeof data.dts === 'number') {
    if (track.timelineStartInfo.dts === undefined) {
      track.timelineStartInfo.dts = data.dts;
    }

    track.minSegmentDts = track.minSegmentDts === undefined ? data.dts : Math.min(track.minSegmentDts, data.dts);
    track.maxSegmentDts = track.maxSegmentDts === undefined ? data.dts : Math.max(track.maxSegmentDts, data.dts);
  }
};
5460 /**
5461 * Clear values used to calculate the baseMediaDecodeTime between
5462 * tracks
5463 */
5464
5465
var clearDtsInfo = function clearDtsInfo(track) {
  // Drop the per-segment timing state so the next segment starts fresh
  ['minSegmentDts', 'maxSegmentDts', 'minSegmentPts', 'maxSegmentPts'].forEach(function (prop) {
    delete track[prop];
  });
};
5472 /**
5473 * Calculate the track's baseMediaDecodeTime based on the earliest
5474 * DTS the transmuxer has ever seen and the minimum DTS for the
5475 * current track
5476 * @param track {object} track metadata configuration
5477 * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
5478 * in the source; false to adjust the first segment to start at 0.
5479 */
5480
5481
var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
  var minSegmentDts = track.minSegmentDts;

  // Optionally adjust the time so the first segment starts at zero.
  if (!keepOriginalTimestamps) {
    minSegmentDts -= track.timelineStartInfo.dts;
  }

  // Start from where the first segment should be placed
  // (timelineStartInfo.baseMediaDecodeTime), add this segment's distance
  // from the very first one, and clamp at zero — baseMediaDecodeTime must
  // not become negative.
  var baseMediaDecodeTime = Math.max(0, track.timelineStartInfo.baseMediaDecodeTime + minSegmentDts);

  if (track.type === 'audio') {
    // Audio has a different clock equal to the sampling_rate so we need to
    // scale the PTS values into the clock rate of the track
    baseMediaDecodeTime = Math.floor(baseMediaDecodeTime * (track.samplerate / ONE_SECOND_IN_TS$3));
  }

  return baseMediaDecodeTime;
};
5509
// Helpers for tracking and computing per-track decode-time state
var trackDecodeInfo = {
  clearDtsInfo,
  calculateTrackBaseMediaDecodeTime,
  collectDtsInfo
};
5515 /**
5516 * mux.js
5517 *
5518 * Copyright (c) Brightcove
5519 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5520 *
5521 * Reads in-band caption information from a video elementary
5522 * stream. Captions must follow the CEA-708 standard for injection
5523 * into an MPEG-2 transport streams.
5524 * @see https://en.wikipedia.org/wiki/CEA-708
5525 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
5526 */
// Supplemental enhancement information (SEI) NAL units have a
// payload type field to indicate how they are to be
// interpreted. CEA-708 caption content is always transmitted with
// payload type 0x04.
5530
// SEI payload_type value for user_data_registered_itu_t_t35 (the payload
// that carries CEA-708 caption data), and the rbsp_trailing_bits byte
// (0x80) that terminates a sei_rbsp
var USER_DATA_REGISTERED_ITU_T_T35 = 4;
var RBSP_TRAILING_BITS = 128;
/**
 * Parse a supplemental enhancement information (SEI) NAL unit.
 * Stops parsing once a message of type ITU T T35 has been found.
 *
 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
 * @return {object} the parsed SEI payload
 * @see Rec. ITU-T H.264, 7.3.2.3.1
 */

var parseSei = function parseSei(bytes) {
  var result = {
    payloadType: -1,
    payloadSize: 0
  };
  var readIndex = 0;
  var payloadType = 0;
  var payloadSize = 0;

  // walk the sei_rbsp, parsing one sei_message at a time
  while (readIndex < bytes.byteLength) {
    // rbsp_trailing_bits marks the end of the sei_rbsp
    if (bytes[readIndex] === RBSP_TRAILING_BITS) {
      break;
    }

    // payload type is a run of 0xFF bytes plus a final byte
    while (bytes[readIndex] === 0xFF) {
      payloadType += 255;
      readIndex++;
    }

    payloadType += bytes[readIndex++];

    // payload size uses the same variable-length encoding
    while (bytes[readIndex] === 0xFF) {
      payloadSize += 255;
      readIndex++;
    }

    payloadSize += bytes[readIndex++];

    // a 608/708 caption message: save it and stop — there can only ever be
    // one caption message in a frame's SEI
    if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
      var userIdentifier = String.fromCharCode(bytes[readIndex + 3], bytes[readIndex + 4], bytes[readIndex + 5], bytes[readIndex + 6]);

      if (userIdentifier === 'GA94') {
        result.payloadType = payloadType;
        result.payloadSize = payloadSize;
        result.payload = bytes.subarray(readIndex, readIndex + payloadSize);
        break;
      } else {
        result.payload = void 0;
      }
    }

    // skip the payload and reset for the next message
    readIndex += payloadSize;
    payloadType = 0;
    payloadSize = 0;
  }

  return result;
}; // see ANSI/SCTE 128-1 (2013), section 8.1
5594
5595
var parseUserData = function parseUserData(sei) {
  var payload = sei.payload;

  // itu_t_t35_country_code must be 181 (United States) for captions
  if (payload[0] !== 181) {
    return null;
  }

  // itu_t_t35_provider_code should be 49 (ATSC) for captions
  if ((payload[1] << 8 | payload[2]) !== 49) {
    return null;
  }

  // the user_identifier should be "GA94" to indicate ATSC1 data
  if (String.fromCharCode(payload[3], payload[4], payload[5], payload[6]) !== 'GA94') {
    return null;
  }

  // finally, user_data_type_code should be 0x03 for caption data
  if (payload[7] !== 0x03) {
    return null;
  }

  // return the user_data_type_structure, stripping the trailing marker bits
  return payload.subarray(8, payload.length - 1);
}; // see CEA-708-D, section 4.4
5622
5623
var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
  var results = [];

  // process_cc_data_flag must be set; otherwise this is just filler
  if (!(userData[0] & 0x40)) {
    return results;
  }

  // cc_count is the low five bits of the first byte
  var count = userData[0] & 0x1f;

  for (var i = 0; i < count; i++) {
    var offset = i * 3;
    var packet = {
      type: userData[offset + 2] & 0x03,
      pts: pts
    };

    // capture cc data when cc_valid is 1
    if (userData[offset + 2] & 0x04) {
      packet.ccData = userData[offset + 3] << 8 | userData[offset + 4];
      results.push(packet);
    }
  }

  return results;
};
5653
var discardEmulationPreventionBytes$1 = function discardEmulationPreventionBytes(data) {
  var length = data.byteLength;
  var epbPositions = [];
  var scan = 1;

  // Find all `Emulation Prevention Bytes` (the 0x03 in a 0x00 0x00 0x03 run)
  while (scan < length - 2) {
    if (data[scan] === 0 && data[scan + 1] === 0 && data[scan + 2] === 0x03) {
      epbPositions.push(scan + 2);
      scan += 2;
    } else {
      scan++;
    }
  }

  // If no Emulation Prevention Bytes were found just return the original array
  if (epbPositions.length === 0) {
    return data;
  }

  // Copy the data into a new array, skipping each flagged position
  var newLength = length - epbPositions.length;
  var newData = new Uint8Array(newLength);
  var sourceIndex = 0;

  for (var i = 0; i < newLength; sourceIndex++, i++) {
    if (sourceIndex === epbPositions[0]) {
      // Skip this byte and drop its position from the queue
      sourceIndex++;
      epbPositions.shift();
    }

    newData[i] = data[sourceIndex];
  }

  return newData;
}; // exports
5694
5695
// Exported caption SEI parsing helpers
var captionPacketParser = {
  parseSei,
  parseUserData,
  parseCaptionPackets,
  discardEmulationPreventionBytes: discardEmulationPreventionBytes$1,
  USER_DATA_REGISTERED_ITU_T_T35
}; // Link To Transport
5703 // -----------------
5704
/**
 * Stream that extracts CEA-608/708 caption data from SEI NAL units and
 * forwards the decoded caption events from its child streams.
 *
 * @param {Object} [options]
 * @param {boolean} [options.parse708captions=true] whether to also run a
 *        Cea708Stream for 708 captions
 * @param {Object} [options.captionServices] passed through to Cea708Stream
 */
var CaptionStream$1 = function CaptionStream(options) {
  options = options || {};
  CaptionStream.prototype.init.call(this); // parse708captions flag, default to true

  this.parse708captions_ = typeof options.parse708captions === 'boolean' ? options.parse708captions : true;
  this.captionPackets_ = [];
  // four CEA-608 streams, one per (field, channel) pair — presumably
  // CC1-CC4; confirm against the Cea608Stream constructor
  this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
  new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
  new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
  new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
  ];

  if (this.parse708captions_) {
    this.cc708Stream_ = new Cea708Stream({
      captionServices: options.captionServices
    }); // eslint-disable-line no-use-before-define
  }

  this.reset(); // forward data and done events from CCs to this CaptionStream

  this.ccStreams_.forEach(function (cc) {
    cc.on('data', this.trigger.bind(this, 'data'));
    cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
    cc.on('done', this.trigger.bind(this, 'done'));
  }, this);

  if (this.parse708captions_) {
    this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));
    this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));
    this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));
  }
};

// inherit the base Stream's on/off/trigger/pipe machinery
CaptionStream$1.prototype = new stream();
5739
/**
 * Accept a parsed NAL unit event, extract any CEA-708 caption packets from
 * its SEI payload, and buffer them in this.captionPackets_. Non-SEI NALs
 * and duplicate data (re-downloaded segments) are ignored.
 *
 * @param {Object} event parsed NAL unit with nalUnitType, escapedRBSP,
 *        pts and dts fields
 */
CaptionStream$1.prototype.push = function (event) {
  var sei, userData, newCaptionPackets; // only examine SEI NALs

  if (event.nalUnitType !== 'sei_rbsp') {
    return;
  } // parse the sei


  sei = captionPacketParser.parseSei(event.escapedRBSP); // no payload data, skip

  if (!sei.payload) {
    return;
  } // ignore everything but user_data_registered_itu_t_t35


  if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
    return;
  } // parse out the user data payload


  userData = captionPacketParser.parseUserData(sei); // ignore unrecognized userData

  if (!userData) {
    return;
  } // Sometimes, the same segment # will be downloaded twice. To stop the
  // caption data from being processed twice, we track the latest dts we've
  // received and ignore everything with a dts before that. However, since
  // data for a specific dts can be split across packets on either side of
  // a segment boundary, we need to make sure we *don't* ignore the packets
  // from the *next* segment that have dts === this.latestDts_. By constantly
  // tracking the number of packets received with dts === this.latestDts_, we
  // know how many should be ignored once we start receiving duplicates.


  if (event.dts < this.latestDts_) {
    // We've started getting older data, so set the flag.
    this.ignoreNextEqualDts_ = true;
    return;
  } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
    // count down the duplicates previously seen at this dts
    this.numSameDts_--;

    if (!this.numSameDts_) {
      // We've received the last duplicate packet, time to start processing again
      this.ignoreNextEqualDts_ = false;
    }

    return;
  } // parse out CC data packets and save them for later


  newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
  this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);

  // track how many consecutive pushes share the current dts
  if (this.latestDts_ !== event.dts) {
    this.numSameDts_ = 0;
  }

  this.numSameDts_++;
  this.latestDts_ = event.dts;
};
5800
CaptionStream$1.prototype.flushCCStreams = function (flushType) {
  // Fully or partially flush every CEA-608 stream depending on the
  // requested flush type.
  var isFullFlush = flushType === 'flush';

  this.ccStreams_.forEach(function (cc) {
    if (isFullFlush) {
      cc.flush();
    } else {
      cc.partialFlush();
    }
  });
};
5806
CaptionStream$1.prototype.flushStream = function (flushType) {
  // Nothing buffered: just propagate the flush to the CC streams.
  if (!this.captionPackets_.length) {
    this.flushCCStreams(flushType);
    return;
  }

  // Tag each packet with its arrival position so the PTS sort below is
  // stable even on engines whose Array#sort is not (e.g. older Chrome).
  this.captionPackets_.forEach(function (packet, index) {
    packet.presortIndex = index;
  });

  // Order caption byte-pairs by PTS, breaking ties by arrival order.
  this.captionPackets_.sort(function (left, right) {
    if (left.pts === right.pts) {
      return left.presortIndex - right.presortIndex;
    }

    return left.pts - right.pts;
  });

  this.captionPackets_.forEach(function (packet) {
    if (packet.type < 2) {
      // types 0 and 1 are the two CEA-608 fields
      this.dispatchCea608Packet(packet);
    } else {
      // everything else is CEA-708 data
      this.dispatchCea708Packet(packet);
    }
  }, this);

  this.captionPackets_.length = 0;
  this.flushCCStreams(flushType);
};
5839
// Dispatch all buffered packets and fully flush the CC streams.
CaptionStream$1.prototype.flush = function () {
  return this.flushStream('flush');
}; // Only called if handling partial data


// Dispatch buffered packets, routing to each CC stream's partialFlush().
CaptionStream$1.prototype.partialFlush = function () {
  return this.flushStream('partialFlush');
};
5848
CaptionStream$1.prototype.reset = function () {
  // Clear the duplicate-segment tracking state and the active 608 data
  // channels, then reset each underlying CEA-608 stream.
  this.latestDts_ = null;
  this.ignoreNextEqualDts_ = false;
  this.numSameDts_ = 0;
  this.activeCea608Channel_ = [null, null];

  this.ccStreams_.forEach(function (cc) {
    cc.reset();
  });
}; // From the CEA-608 spec:
5858
5859 /*
5860 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
5861 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
5862 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
5863 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
5864 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
5865 * to switch to captioning or Text.
5866 */
5867 // With that in mind, we ignore any data between an XDS control code and a
5868 // subsequent closed-captioning control code.
5869
5870
CaptionStream$1.prototype.dispatchCea608Packet = function (packet) {
  // Route a CEA-608 byte pair to the stream for its field/data-channel,
  // first updating which data channel is active on this field.
  // NOTE: packet.type is the CEA608 field
  if (this.setsTextOrXDSActive(packet)) {
    this.activeCea608Channel_[packet.type] = null;
  } else if (this.setsChannel1Active(packet)) {
    this.activeCea608Channel_[packet.type] = 0;
  } else if (this.setsChannel2Active(packet)) {
    this.activeCea608Channel_[packet.type] = 1;
  }

  if (this.activeCea608Channel_[packet.type] === null) {
    // If we haven't received anything to set the active channel, or the
    // packets are Text/XDS data, discard the data; we don't want jumbled
    // captions
    return;
  }

  // ccStreams_ is laid out [field0ch0, field0ch1, field1ch0, field1ch1]
  this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
};
5890
// Control-code pair whose masked first byte selects data channel 1.
CaptionStream$1.prototype.setsChannel1Active = function (packet) {
  return (packet.ccData & 0x7800) === 0x1000;
};

// Control-code pair whose masked first byte selects data channel 2.
CaptionStream$1.prototype.setsChannel2Active = function (packet) {
  return (packet.ccData & 0x7800) === 0x1800;
};

// Matches codes that switch to Text or XDS mode (see the CEA-608 spec
// excerpt below); data following them is discarded by the dispatcher.
CaptionStream$1.prototype.setsTextOrXDSActive = function (packet) {
  return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
};
5902
CaptionStream$1.prototype.dispatchCea708Packet = function (packet) {
  // 708 parsing may be disabled via the parse708captions option.
  if (!this.parse708captions_) {
    return;
  }

  this.cc708Stream_.push(packet);
}; // ----------------------
5908 // Session to Application
5909 // ----------------------
5910 // This hash maps special and extended character codes to their
5911 // proper Unicode equivalent. The first one-byte key is just a
5912 // non-standard character code. The two-byte keys that follow are
5913 // the extended CEA708 character codes, along with the preceding
5914 // 0x10 extended character byte to distinguish these codes from
5915 // non-extended character codes. Every CEA708 character code that
5916 // is not in this object maps directly to a standard unicode
5917 // character code.
5918 // The transparent space and non-breaking transparent space are
5919 // technically not fully supported since there is no code to
5920 // make them transparent, so they have normal non-transparent
5921 // stand-ins.
5922 // The special closed caption (CC) character isn't a standard
5923 // unicode character, so a fairly similar unicode character was
// chosen in its place.
5925
5926
var CHARACTER_TRANSLATION_708 = {
  0x7f: 0x266a,
  // ♪
  0x1020: 0x20,
  // Transparent Space
  0x1021: 0xa0,
  // Non-breaking Transparent Space
  0x1025: 0x2026,
  // …
  0x102a: 0x0160,
  // Š
  0x102c: 0x0152,
  // Œ
  0x1030: 0x2588,
  // █
  0x1031: 0x2018,
  // ‘
  0x1032: 0x2019,
  // ’
  0x1033: 0x201c,
  // “
  0x1034: 0x201d,
  // ”
  0x1035: 0x2022,
  // •
  0x1039: 0x2122,
  // ™
  0x103a: 0x0161,
  // š
  0x103c: 0x0153,
  // œ
  0x103d: 0x2120,
  // ℠
  0x103f: 0x0178,
  // Ÿ
  0x1076: 0x215b,
  // ⅛
  0x1077: 0x215c,
  // ⅜
  0x1078: 0x215d,
  // ⅝
  0x1079: 0x215e,
  // ⅞
  0x107a: 0x23d0,
  // ⏐
  0x107b: 0x23a4,
  // ⎤
  0x107c: 0x23a3,
  // ⎣
  0x107d: 0x23af,
  // ⎯
  0x107e: 0x23a6,
  // ⎦
  0x107f: 0x23a1,
  // ⎡
  0x10a0: 0x3138 // ㄸ (CC char)

};
5985
var get708CharFromCode = function get708CharFromCode(code) {
  // Translate a CEA-708 character code to its unicode character, using
  // the special-case table for non-standard and extended codes.
  var translated = CHARACTER_TRANSLATION_708[code] || code;

  if (code & 0x1000 && code === translated) {
    // An extended (0x10xx) code with no table entry is invalid.
    return '';
  }

  return String.fromCharCode(translated);
};
5996
var within708TextBlock = function within708TextBlock(b) {
  // Text bytes fall in the printable G0 range (0x20-0x7f) or the G1
  // range (0xa0-0xff); everything else is a command or parameter byte.
  return (b >= 0x20 && b <= 0x7f) || (b >= 0xa0 && b <= 0xff);
};
6000
// One of the eight caption windows owned by a CEA-708 service. A window
// accumulates rows of text; windowNum is its index (or -1 for the
// placeholder window a service starts with).
var Cea708Window = function Cea708Window(windowNum) {
  this.windowNum = windowNum;
  this.reset();
};
6005
Cea708Window.prototype.reset = function () {
  // Return the window to a blank default state.
  this.clearText();
  this.pendingNewLine = false;
  this.winAttr = {};
  this.penAttr = {};
  this.penLoc = {};
  this.penColor = {}; // These default values are arbitrary,
  // defineWindow will usually override them

  this.visible = 0;
  this.rowLock = 0;
  this.columnLock = 0;
  this.priority = 0;
  this.relativePositioning = 0;
  this.anchorVertical = 0;
  this.anchorHorizontal = 0;
  this.anchorPoint = 0;
  this.rowCount = 1;
  // One extra "virtual" row beyond the visible row count (see defineWindow)
  this.virtualRowCount = this.rowCount + 1;
  this.columnCount = 41;
  this.windowStyle = 0;
  this.penStyle = 0;
};
6029
// Join the window's rows into a single newline-separated string.
Cea708Window.prototype.getText = function () {
  return this.rows.join('\n');
};

// Empty the window: a single blank row with the row index on it.
Cea708Window.prototype.clearText = function () {
  this.rows = [''];
  this.rowIdx = 0;
};
6038
Cea708Window.prototype.newLine = function (pts) {
  // About to scroll a row off: let the owner flush the currently
  // displayed text first (hook installed by Cea708Service.init).
  if (this.rows.length >= this.virtualRowCount && typeof this.beforeRowOverflow === 'function') {
    this.beforeRowOverflow(pts);
  }

  if (this.rows.length > 0) {
    this.rows.push('');
    this.rowIdx++;
  } // Show all virtual rows since there's no visible scrolling


  while (this.rows.length > this.virtualRowCount) {
    this.rows.shift();
    this.rowIdx--;
  }
};
6055
Cea708Window.prototype.isEmpty = function () {
  // A window is empty when it has no rows, or only a single blank row.
  var rowCount = this.rows.length;

  if (rowCount === 0) {
    return true;
  }

  return rowCount === 1 && this.rows[0] === '';
};
6065
// Append text to the row the pen is currently on.
Cea708Window.prototype.addText = function (text) {
  this.rows[this.rowIdx] += text;
};

// Remove the last character from the current row, if there is one.
Cea708Window.prototype.backspace = function () {
  if (!this.isEmpty()) {
    var row = this.rows[this.rowIdx];
    // slice(0, -1) instead of the deprecated String#substr; identical
    // result, including on an already-empty row.
    this.rows[this.rowIdx] = row.slice(0, -1);
  }
};
6076
// One CEA-708 caption service: owns up to eight windows and accumulates
// caption text until it is flushed out through `stream`.
var Cea708Service = function Cea708Service(serviceNum, encoding, stream) {
  this.serviceNum = serviceNum;
  this.text = '';
  // Placeholder window until setCurrentWindow selects a real one.
  this.currentWindow = new Cea708Window(-1);
  this.windows = [];
  this.stream = stream; // Try to setup a TextDecoder if an `encoding` value was provided

  if (typeof encoding === 'string') {
    this.createTextDecoder(encoding);
  }
};
6088 /**
6089 * Initialize service windows
6090 * Must be run before service use
6091 *
6092 * @param {Integer} pts PTS value
6093 * @param {Function} beforeRowOverflow Function to execute before row overflow of a window
6094 */
6095
6096
Cea708Service.prototype.init = function (pts, beforeRowOverflow) {
  // Record the starting PTS and create the eight windows this service
  // owns, attaching the row-overflow hook when one was supplied.
  this.startPts = pts;

  var hasOverflowHandler = typeof beforeRowOverflow === 'function';

  for (var windowNum = 0; windowNum < 8; windowNum++) {
    var win = new Cea708Window(windowNum);

    if (hasOverflowHandler) {
      win.beforeRowOverflow = beforeRowOverflow;
    }

    this.windows[windowNum] = win;
  }
};
6108 /**
6109 * Set current window of service to be affected by commands
6110 *
6111 * @param {Integer} windowNum Window number
6112 */
6113
6114
// Make the given window (0-7) the target of subsequent pen/text commands.
Cea708Service.prototype.setCurrentWindow = function (windowNum) {
  this.currentWindow = this.windows[windowNum];
};
6118 /**
6119 * Try to create a TextDecoder if it is natively supported
6120 */
6121
6122
Cea708Service.prototype.createTextDecoder = function (encoding) {
  // Build a TextDecoder for this service's configured encoding, logging
  // a warning when decoding support is missing or the label is invalid.
  if (typeof TextDecoder === 'undefined') {
    this.stream.trigger('log', {
      level: 'warn',
      message: 'The `encoding` option is unsupported without TextDecoder support'
    });
    return;
  }

  try {
    this.textDecoder_ = new TextDecoder(encoding);
  } catch (error) {
    this.stream.trigger('log', {
      level: 'warn',
      message: 'TextDecoder could not be created with ' + encoding + ' encoding. ' + error
    });
  }
};
6140
var Cea708Stream = function Cea708Stream(options) {
  // Reassembles CEA-708 byte pairs into DTVCC packets and parses them
  // into per-service caption text.
  options = options || {};
  Cea708Stream.prototype.init.call(this);
  var self = this;
  var captionServices = options.captionServices || {};
  var captionServiceEncodings = {};

  // Pull per-service text encodings out of the captionServices option block
  Object.keys(captionServices).forEach(function (serviceName) {
    if (/^SERVICE/.test(serviceName)) {
      captionServiceEncodings[serviceName] = captionServices[serviceName].encoding;
    }
  });

  this.serviceEncodings = captionServiceEncodings;
  this.current708Packet = null;
  this.services = {};

  this.push = function (packet) {
    if (packet.type === 3) {
      // type 3 marks the start of a new 708 packet
      self.new708Packet();
    } else if (self.current708Packet === null) {
      // This should only happen at the start of a file if there's no packet start.
      self.new708Packet();
    }

    self.add708Bytes(packet);
  };
};

Cea708Stream.prototype = new stream();
6177 /**
6178 * Push current 708 packet, create new 708 packet.
6179 */
6180
Cea708Stream.prototype.new708Packet = function () {
  // Parse and dispatch any in-progress packet before starting a new one.
  if (this.current708Packet !== null) {
    this.push708Packet();
  }

  // data holds raw bytes; ptsVals holds one pts per byte pair.
  this.current708Packet = {
    data: [],
    ptsVals: []
  };
};
6191 /**
6192 * Add pts and both bytes from packet into current 708 packet.
6193 */
6194
6195
Cea708Stream.prototype.add708Bytes = function (packet) {
  // Split the 16-bit ccData into its two bytes and record the packet's
  // pts alongside them. Individual bytes (not byte pairs) are stored
  // because the spec does not make clear that service blocks will always
  // line up with byte pairs.
  var ccData = packet.ccData;
  var highByte = ccData >>> 8;
  var lowByte = ccData & 0xff;

  this.current708Packet.ptsVals.push(packet.pts);
  this.current708Packet.data.push(highByte, lowByte);
};
6206 /**
6207 * Parse completed 708 packet into service blocks and push each service block.
6208 */
6209
6210
Cea708Stream.prototype.push708Packet = function () {
  // Split the completed 708 packet into service blocks and push each one.
  var packet708 = this.current708Packet;
  var packetData = packet708.data;
  var serviceNum = null;
  var blockSize = null;
  var i = 0;
  // First byte: 2-bit sequence number and 6-bit size code.
  var b = packetData[i++];
  packet708.seq = b >> 6;
  packet708.sizeCode = b & 0x3f; // 0b00111111;

  for (; i < packetData.length; i++) {
    // Service-block header: 3-bit service number, 5-bit block size.
    // Note i advances both here and in the for-loop increment, stepping
    // past the header byte and then past the block body below.
    b = packetData[i++];
    serviceNum = b >> 5;
    blockSize = b & 0x1f; // 0b00011111

    if (serviceNum === 7 && blockSize > 0) {
      // Extended service num
      b = packetData[i++];
      serviceNum = b;
    }

    this.pushServiceBlock(serviceNum, i, blockSize);

    if (blockSize > 0) {
      i += blockSize - 1;
    }
  }
};
6239 /**
6240 * Parse service block, execute commands, read text.
6241 *
6242 * Note: While many of these commands serve important purposes,
6243 * many others just parse out the parameters or attributes, but
6244 * nothing is done with them because this is not a full and complete
6245 * implementation of the entire 708 spec.
6246 *
6247 * @param {Integer} serviceNum Service number
6248 * @param {Integer} start Start index of the 708 packet data
6249 * @param {Integer} size Block size
6250 */
6251
6252
Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {
  // Walk one service block byte-by-byte, dispatching each byte to its
  // command handler (handlers may consume extra parameter bytes by
  // returning an advanced i) or, for text-range bytes, appending a
  // character to the service's current window.
  var b;
  var i = start;
  var packetData = this.current708Packet.data;
  var service = this.services[serviceNum];

  if (!service) {
    // First sight of this service number: create and initialize it.
    service = this.initService(serviceNum, i);
  }

  for (; i < start + size && i < packetData.length; i++) {
    b = packetData[i];

    if (within708TextBlock(b)) {
      i = this.handleText(i, service);
    } else if (b === 0x18) {
      i = this.multiByteCharacter(i, service);
    } else if (b === 0x10) {
      i = this.extendedCommands(i, service);
    } else if (0x80 <= b && b <= 0x87) {
      i = this.setCurrentWindow(i, service);
    } else if (0x98 <= b && b <= 0x9f) {
      i = this.defineWindow(i, service);
    } else if (b === 0x88) {
      i = this.clearWindows(i, service);
    } else if (b === 0x8c) {
      i = this.deleteWindows(i, service);
    } else if (b === 0x89) {
      i = this.displayWindows(i, service);
    } else if (b === 0x8a) {
      i = this.hideWindows(i, service);
    } else if (b === 0x8b) {
      i = this.toggleWindows(i, service);
    } else if (b === 0x97) {
      i = this.setWindowAttributes(i, service);
    } else if (b === 0x90) {
      i = this.setPenAttributes(i, service);
    } else if (b === 0x91) {
      i = this.setPenColor(i, service);
    } else if (b === 0x92) {
      i = this.setPenLocation(i, service);
    } else if (b === 0x8f) {
      // RST: reset returns a freshly initialized service
      service = this.reset(i, service);
    } else if (b === 0x08) {
      // BS: Backspace
      service.currentWindow.backspace();
    } else if (b === 0x0c) {
      // FF: Form feed
      service.currentWindow.clearText();
    } else if (b === 0x0d) {
      // CR: Carriage return
      service.currentWindow.pendingNewLine = true;
    } else if (b === 0x0e) {
      // HCR: Horizontal carriage return
      service.currentWindow.clearText();
    } else if (b === 0x8d) {
      // DLY: Delay, nothing to do
      i++;
    } else ; // any other code is ignored
  }
};
6314 /**
6315 * Execute an extended command
6316 *
6317 * @param {Integer} i Current index in the 708 packet
6318 * @param {Service} service The service object to be affected
6319 * @return {Integer} New index after parsing
6320 */
6321
6322
Cea708Stream.prototype.extendedCommands = function (i, service) {
  // 0x10 escapes into the extended code space; only extended text
  // characters are handled here, other extended codes are skipped.
  var nextByte = this.current708Packet.data[++i];

  if (within708TextBlock(nextByte)) {
    i = this.handleText(i, service, {
      isExtended: true
    });
  }

  return i;
};
6335 /**
6336 * Get PTS value of a given byte index
6337 *
6338 * @param {Integer} byteIndex Index of the byte
6339 * @return {Integer} PTS
6340 */
6341
6342
Cea708Stream.prototype.getPts = function (byteIndex) {
  // ptsVals stores one pts per byte pair, so halve the byte index.
  return this.current708Packet.ptsVals[byteIndex >> 1];
};
6347 /**
6348 * Initializes a service
6349 *
6350 * @param {Integer} serviceNum Service number
6351 * @return {Service} Initialized service object
6352 */
6353
6354
Cea708Stream.prototype.initService = function (serviceNum, i) {
  // Create (and remember) the Cea708Service for serviceNum, resolving any
  // user-configured text encoding for it, and wire its row-overflow hook
  // to flushDisplayed so scrolling emits a caption. `i` is the byte index
  // used to look up the service's starting pts.
  var serviceName = 'SERVICE' + serviceNum;
  var self = this;
  var encoding;

  // Fix: removed a redundant duplicate `var serviceName;` declaration
  // that shadowed the initialized value above (a no-op re-declaration,
  // but a lint error and a trap for readers).
  if (serviceName in this.serviceEncodings) {
    encoding = this.serviceEncodings[serviceName];
  }

  this.services[serviceNum] = new Cea708Service(serviceNum, encoding, self);
  this.services[serviceNum].init(this.getPts(i), function (pts) {
    self.flushDisplayed(pts, self.services[serviceNum]);
  });
  return this.services[serviceNum];
};
6371 /**
6372 * Execute text writing to current window
6373 *
6374 * @param {Integer} i Current index in the 708 packet
6375 * @param {Service} service The service object to be affected
6376 * @return {Integer} New index after parsing
6377 */
6378
6379
Cea708Stream.prototype.handleText = function (i, service, options) {
  // Decode one character at index i (two bytes when isMultiByte) and
  // append it to the service's current window, honoring any pending
  // carriage return first.
  var isExtended = options && options.isExtended;
  var isMultiByte = options && options.isMultiByte;
  var packetData = this.current708Packet.data;
  // Extended codes get the 0x10 prefix folded into the table lookup key.
  var extended = isExtended ? 0x1000 : 0x0000;
  var currentByte = packetData[i];
  var nextByte = packetData[i + 1];
  var win = service.currentWindow;
  var char;
  var charCodeArray; // Use the TextDecoder if one was created for this service

  if (service.textDecoder_ && !isExtended) {
    if (isMultiByte) {
      charCodeArray = [currentByte, nextByte];
      i++; // the second byte is consumed here
    } else {
      charCodeArray = [currentByte];
    }

    char = service.textDecoder_.decode(new Uint8Array(charCodeArray));
  } else {
    char = get708CharFromCode(extended | currentByte);
  }

  // A CR was seen earlier: start a new row before writing, unless the
  // window is still empty.
  if (win.pendingNewLine && !win.isEmpty()) {
    win.newLine(this.getPts(i));
  }

  win.pendingNewLine = false;
  win.addText(char);
  return i;
};
6412 /**
6413 * Handle decoding of multibyte character
6414 *
6415 * @param {Integer} i Current index in the 708 packet
6416 * @param {Service} service The service object to be affected
6417 * @return {Integer} New index after parsing
6418 */
6419
6420
Cea708Stream.prototype.multiByteCharacter = function (i, service) {
  // 0x18 introduces a two-byte character. It is only consumed when both
  // following bytes are in the text range; otherwise i is returned
  // unchanged and the caller's loop steps past the 0x18 byte.
  var packetData = this.current708Packet.data;
  var firstByte = packetData[i + 1];
  var secondByte = packetData[i + 2];

  if (within708TextBlock(firstByte) && within708TextBlock(secondByte)) {
    i = this.handleText(++i, service, {
      isMultiByte: true
    });
  }

  return i;
};
6434 /**
6435 * Parse and execute the CW# command.
6436 *
6437 * Set the current window.
6438 *
6439 * @param {Integer} i Current index in the 708 packet
6440 * @param {Service} service The service object to be affected
6441 * @return {Integer} New index after parsing
6442 */
6443
6444
Cea708Stream.prototype.setCurrentWindow = function (i, service) {
  // CW#: the low three bits of the command byte select the window.
  var commandByte = this.current708Packet.data[i];

  service.setCurrentWindow(commandByte & 0x07);
  return i;
};
6452 /**
6453 * Parse and execute the DF# command.
6454 *
6455 * Define a window and set it as the current window.
6456 *
6457 * @param {Integer} i Current index in the 708 packet
6458 * @param {Service} service The service object to be affected
6459 * @return {Integer} New index after parsing
6460 */
6461
6462
Cea708Stream.prototype.defineWindow = function (i, service) {
  // DF#: select window # as current, then fill in its parameters from
  // the six parameter bytes that follow the command byte (i advances
  // through them with ++i).
  var packetData = this.current708Packet.data;
  var b = packetData[i];
  var windowNum = b & 0x07;
  service.setCurrentWindow(windowNum);
  var win = service.currentWindow;
  b = packetData[++i];
  win.visible = (b & 0x20) >> 5; // v

  win.rowLock = (b & 0x10) >> 4; // rl

  win.columnLock = (b & 0x08) >> 3; // cl

  win.priority = b & 0x07; // p

  b = packetData[++i];
  win.relativePositioning = (b & 0x80) >> 7; // rp

  win.anchorVertical = b & 0x7f; // av

  b = packetData[++i];
  win.anchorHorizontal = b; // ah

  b = packetData[++i];
  win.anchorPoint = (b & 0xf0) >> 4; // ap

  win.rowCount = b & 0x0f; // rc

  b = packetData[++i];
  win.columnCount = b & 0x3f; // cc

  b = packetData[++i];
  win.windowStyle = (b & 0x38) >> 3; // ws

  win.penStyle = b & 0x07; // ps
  // The spec says there are (rowCount+1) "virtual rows"

  win.virtualRowCount = win.rowCount + 1;
  return i;
};
6503 /**
6504 * Parse and execute the SWA command.
6505 *
6506 * Set attributes of the current window.
6507 *
6508 * @param {Integer} i Current index in the 708 packet
6509 * @param {Service} service The service object to be affected
6510 * @return {Integer} New index after parsing
6511 */
6512
6513
Cea708Stream.prototype.setWindowAttributes = function (i, service) {
  // SWA: read the four attribute bytes that follow the command byte into
  // the current window's winAttr (fill, border, wrap/justify, effects).
  var packetData = this.current708Packet.data;
  var b = packetData[i];
  var winAttr = service.currentWindow.winAttr;
  b = packetData[++i];
  winAttr.fillOpacity = (b & 0xc0) >> 6; // fo

  winAttr.fillRed = (b & 0x30) >> 4; // fr

  winAttr.fillGreen = (b & 0x0c) >> 2; // fg

  winAttr.fillBlue = b & 0x03; // fb

  b = packetData[++i];
  winAttr.borderType = (b & 0xc0) >> 6; // bt

  winAttr.borderRed = (b & 0x30) >> 4; // br

  winAttr.borderGreen = (b & 0x0c) >> 2; // bg

  winAttr.borderBlue = b & 0x03; // bb

  b = packetData[++i];
  // high bit of the 3-bit border type lives in the next byte
  winAttr.borderType += (b & 0x80) >> 5; // bt

  winAttr.wordWrap = (b & 0x40) >> 6; // ww

  winAttr.printDirection = (b & 0x30) >> 4; // pd

  winAttr.scrollDirection = (b & 0x0c) >> 2; // sd

  winAttr.justify = b & 0x03; // j

  b = packetData[++i];
  winAttr.effectSpeed = (b & 0xf0) >> 4; // es

  winAttr.effectDirection = (b & 0x0c) >> 2; // ed

  winAttr.displayEffect = b & 0x03; // de

  return i;
};
6556 /**
6557 * Gather text from all displayed windows and push a caption to output.
6558 *
6559 * @param {Integer} i Current index in the 708 packet
6560 * @param {Service} service The service object to be affected
6561 */
6562
6563
Cea708Stream.prototype.flushDisplayed = function (pts, service) {
  // Gather text from every visible, non-empty window and emit it as a
  // single caption ending at pts.
  // TODO: Positioning not supported, displaying multiple windows will not necessarily
  // display text in the correct order, but sample files so far have not shown any issue.
  var displayedText = [];

  for (var winId = 0; winId < 8; winId++) {
    var win = service.windows[winId];

    if (win.visible && !win.isEmpty()) {
      displayedText.push(win.getText());
    }
  }

  service.endPts = pts;
  service.text = displayedText.join('\n\n');
  this.pushCaption(service);
  service.startPts = pts;
};
6579 /**
6580 * Push a caption to output if the caption contains text.
6581 *
6582 * @param {Service} service The service object to be affected
6583 */
6584
6585
Cea708Stream.prototype.pushCaption = function (service) {
  // Don't emit empty captions.
  if (service.text === '') {
    return;
  }

  this.trigger('data', {
    startPts: service.startPts,
    endPts: service.endPts,
    text: service.text,
    stream: 'cc708_' + service.serviceNum
  });

  service.text = '';
  service.startPts = service.endPts;
};
6598 /**
6599 * Parse and execute the DSW command.
6600 *
6601 * Set visible property of windows based on the parsed bitmask.
6602 *
6603 * @param {Integer} i Current index in the 708 packet
6604 * @param {Service} service The service object to be affected
6605 * @return {Integer} New index after parsing
6606 */
6607
6608
Cea708Stream.prototype.displayWindows = function (i, service) {
  // DSW: flush what is currently on screen, then mark each window whose
  // bit is set in the mask byte as visible.
  var windowMask = this.current708Packet.data[++i];
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (windowMask & (1 << winId)) {
      service.windows[winId].visible = 1;
    }
  }

  return i;
};
6623 /**
6624 * Parse and execute the HDW command.
6625 *
6626 * Set visible property of windows based on the parsed bitmask.
6627 *
6628 * @param {Integer} i Current index in the 708 packet
6629 * @param {Service} service The service object to be affected
6630 * @return {Integer} New index after parsing
6631 */
6632
6633
Cea708Stream.prototype.hideWindows = function (i, service) {
  // HDW: flush what is currently on screen, then mark each window whose
  // bit is set in the mask byte as hidden.
  var windowMask = this.current708Packet.data[++i];
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (windowMask & (1 << winId)) {
      service.windows[winId].visible = 0;
    }
  }

  return i;
};
6648 /**
6649 * Parse and execute the TGW command.
6650 *
6651 * Set visible property of windows based on the parsed bitmask.
6652 *
6653 * @param {Integer} i Current index in the 708 packet
6654 * @param {Service} service The service object to be affected
6655 * @return {Integer} New index after parsing
6656 */
6657
6658
Cea708Stream.prototype.toggleWindows = function (i, service) {
  // TGW: flush what is currently on screen, then flip the visibility of
  // each window whose bit is set in the mask byte.
  var windowMask = this.current708Packet.data[++i];
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (windowMask & (1 << winId)) {
      service.windows[winId].visible ^= 1;
    }
  }

  return i;
};
6673 /**
6674 * Parse and execute the CLW command.
6675 *
6676 * Clear text of windows based on the parsed bitmask.
6677 *
6678 * @param {Integer} i Current index in the 708 packet
6679 * @param {Service} service The service object to be affected
6680 * @return {Integer} New index after parsing
6681 */
6682
6683
Cea708Stream.prototype.clearWindows = function (i, service) {
  // CLW: flush what is currently on screen, then clear the text of each
  // window whose bit is set in the mask byte.
  var windowMask = this.current708Packet.data[++i];
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (windowMask & (1 << winId)) {
      service.windows[winId].clearText();
    }
  }

  return i;
};
6698 /**
6699 * Parse and execute the DLW command.
6700 *
6701 * Re-initialize windows based on the parsed bitmask.
6702 *
6703 * @param {Integer} i Current index in the 708 packet
6704 * @param {Service} service The service object to be affected
6705 * @return {Integer} New index after parsing
6706 */
6707
6708
Cea708Stream.prototype.deleteWindows = function (i, service) {
  // DLW: flush what is currently on screen, then reset each window whose
  // bit is set in the mask byte back to its defaults.
  var windowMask = this.current708Packet.data[++i];
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (windowMask & (1 << winId)) {
      service.windows[winId].reset();
    }
  }

  return i;
};
6723 /**
6724 * Parse and execute the SPA command.
6725 *
6726 * Set pen attributes of the current window.
6727 *
6728 * @param {Integer} i Current index in the 708 packet
6729 * @param {Service} service The service object to be affected
6730 * @return {Integer} New index after parsing
6731 */
6732
6733
Cea708Stream.prototype.setPenAttributes = function (i, service) {
  // SPA: read the two pen-attribute bytes that follow the command byte
  // into the current window's penAttr.
  var packetData = this.current708Packet.data;
  var b = packetData[i];
  var penAttr = service.currentWindow.penAttr;
  b = packetData[++i];
  penAttr.textTag = (b & 0xf0) >> 4; // tt

  penAttr.offset = (b & 0x0c) >> 2; // o

  penAttr.penSize = b & 0x03; // s

  b = packetData[++i];
  penAttr.italics = (b & 0x80) >> 7; // i

  penAttr.underline = (b & 0x40) >> 6; // u

  penAttr.edgeType = (b & 0x38) >> 3; // et

  penAttr.fontStyle = b & 0x07; // fs

  return i;
};
6756 /**
6757 * Parse and execute the SPC command.
6758 *
6759 * Set pen color of the current window.
6760 *
6761 * @param {Integer} i Current index in the 708 packet
6762 * @param {Service} service The service object to be affected
6763 * @return {Integer} New index after parsing
6764 */
6765
6766
Cea708Stream.prototype.setPenColor = function (i, service) {
  // SPC: read the three color bytes (foreground, background, edge) that
  // follow the command byte into the current window's penColor.
  var packetData = this.current708Packet.data;
  var b = packetData[i];
  var penColor = service.currentWindow.penColor;
  b = packetData[++i];
  penColor.fgOpacity = (b & 0xc0) >> 6; // fo

  penColor.fgRed = (b & 0x30) >> 4; // fr

  penColor.fgGreen = (b & 0x0c) >> 2; // fg

  penColor.fgBlue = b & 0x03; // fb

  b = packetData[++i];
  penColor.bgOpacity = (b & 0xc0) >> 6; // bo

  penColor.bgRed = (b & 0x30) >> 4; // br

  penColor.bgGreen = (b & 0x0c) >> 2; // bg

  penColor.bgBlue = b & 0x03; // bb

  b = packetData[++i];
  penColor.edgeRed = (b & 0x30) >> 4; // er

  penColor.edgeGreen = (b & 0x0c) >> 2; // eg

  penColor.edgeBlue = b & 0x03; // eb

  return i;
};
6798 /**
6799 * Parse and execute the SPL command.
6800 *
6801 * Set pen location of the current window.
6802 *
6803 * @param {Integer} i Current index in the 708 packet
6804 * @param {Service} service The service object to be affected
6805 * @return {Integer} New index after parsing
6806 */
6807
6808
Cea708Stream.prototype.setPenLocation = function (i, service) {
  // SPL: read the pen row/column bytes that follow the command byte.
  var packetData = this.current708Packet.data;
  var b = packetData[i];
  var penLoc = service.currentWindow.penLoc; // Positioning isn't really supported at the moment, so this essentially just inserts a linebreak

  service.currentWindow.pendingNewLine = true;
  b = packetData[++i];
  penLoc.row = b & 0x0f; // r

  b = packetData[++i];
  penLoc.column = b & 0x3f; // c

  return i;
};
/**
 * Execute the RST command.
 *
 * Reset service to a clean slate. Re-initialize.
 *
 * @param {Integer} i Current index in the 708 packet
 * @param {Service} service The service object to be affected
 * @return {Service} Re-initialized service
 */


Cea708Stream.prototype.reset = function (i, service) {
  // Flush anything currently on screen before wiping the service state.
  this.flushDisplayed(this.getPts(i), service);
  return this.initService(service.serviceNum, i);
}; // This hash maps non-ASCII, special, and extended character codes to their
6839 // proper Unicode equivalent. The first keys that are only a single byte
6840 // are the non-standard ASCII characters, which simply map the CEA608 byte
6841 // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
6842 // character codes, but have their MSB bitmasked with 0x03 so that a lookup
6843 // can be performed regardless of the field and data channel on which the
6844 // character code was received.
6845
6846
var CHARACTER_TRANSLATION = {
  // single-byte non-standard ASCII positions
  0x2a: 0xe1, // á
  0x5c: 0xe9, // é
  0x5e: 0xed, // í
  0x5f: 0xf3, // ó
  0x60: 0xfa, // ú
  0x7b: 0xe7, // ç
  0x7c: 0xf7, // ÷
  0x7d: 0xd1, // Ñ
  0x7e: 0xf1, // ñ
  0x7f: 0x2588, // █
  // special character set (char0 masked to 0x01)
  0x0130: 0xae, // ®
  0x0131: 0xb0, // °
  0x0132: 0xbd, // ½
  0x0133: 0xbf, // ¿
  0x0134: 0x2122, // ™
  0x0135: 0xa2, // ¢
  0x0136: 0xa3, // £
  0x0137: 0x266a, // ♪
  0x0138: 0xe0, // à
  0x0139: 0xa0, // non-breaking space
  0x013a: 0xe8, // è
  0x013b: 0xe2, // â
  0x013c: 0xea, // ê
  0x013d: 0xee, // î
  0x013e: 0xf4, // ô
  0x013f: 0xfb, // û
  // extended character set (char0 masked to 0x02)
  0x0220: 0xc1, // Á
  0x0221: 0xc9, // É
  0x0222: 0xd3, // Ó
  0x0223: 0xda, // Ú
  0x0224: 0xdc, // Ü
  0x0225: 0xfc, // ü
  0x0226: 0x2018, // ‘
  0x0227: 0xa1, // ¡
  0x0228: 0x2a, // *
  0x0229: 0x27, // '
  0x022a: 0x2014, // —
  0x022b: 0xa9, // ©
  0x022c: 0x2120, // ℠
  0x022d: 0x2022, // •
  0x022e: 0x201c, // “
  0x022f: 0x201d, // ”
  0x0230: 0xc0, // À
  0x0231: 0xc2, // Â
  0x0232: 0xc7, // Ç
  0x0233: 0xc8, // È
  0x0234: 0xca, // Ê
  0x0235: 0xcb, // Ë
  0x0236: 0xeb, // ë
  0x0237: 0xce, // Î
  0x0238: 0xcf, // Ï
  0x0239: 0xef, // ï
  0x023a: 0xd4, // Ô
  0x023b: 0xd9, // Ù
  0x023c: 0xf9, // ù
  0x023d: 0xdb, // Û
  0x023e: 0xab, // «
  0x023f: 0xbb, // »
  // extended character set (char0 masked to 0x03)
  0x0320: 0xc3, // Ã
  0x0321: 0xe3, // ã
  0x0322: 0xcd, // Í
  0x0323: 0xcc, // Ì
  0x0324: 0xec, // ì
  0x0325: 0xd2, // Ò
  0x0326: 0xf2, // ò
  0x0327: 0xd5, // Õ
  0x0328: 0xf5, // õ
  0x0329: 0x7b, // {
  0x032a: 0x7d, // }
  0x032b: 0x5c, // \
  0x032c: 0x5e, // ^
  0x032d: 0x5f, // _
  0x032e: 0x7c, // |
  0x032f: 0x7e, // ~
  0x0330: 0xc4, // Ä
  0x0331: 0xe4, // ä
  0x0332: 0xd6, // Ö
  0x0333: 0xf6, // ö
  0x0334: 0xdf, // ß
  0x0335: 0xa5, // ¥
  0x0336: 0xa4, // ¤
  0x0337: 0x2502, // │
  0x0338: 0xc5, // Å
  0x0339: 0xe5, // å
  0x033a: 0xd8, // Ø
  0x033b: 0xf8, // ø
  0x033c: 0x250c, // ┌
  0x033d: 0x2510, // ┐
  0x033e: 0x2514, // └
  0x033f: 0x2518 // ┘
};
7029
var getCharFromCode = function getCharFromCode(code) {
  // null explicitly means "no character" (the normal-char path in
  // Cea608Stream.push nulls out an absent second byte)
  if (code === null) {
    return '';
  }

  // translate non-standard CEA-608 codes; fall through to the raw code
  // point when no translation exists
  return String.fromCharCode(CHARACTER_TRANSLATION[code] || code);
}; // the index of the last row in a CEA-608 display buffer
7038
7039
var BOTTOM_ROW = 14; // This array is used for mapping PACs -> row #, since there's no way of
// getting it through bit logic.

var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420]; // CEA-608 captions are rendered onto a 34x15 matrix of character
// cells. The "bottom" row is the last element in the outer array.

// Allocate a fresh 15-row display buffer, one empty string per row.
var createDisplayBuffer = function createDisplayBuffer() {
  var rows = [];

  for (var r = 0; r <= BOTTOM_ROW; r++) {
    rows.push('');
  }

  return rows;
};
7056
/**
 * Cea608Stream - decodes one CEA-608 caption channel (CC1-CC4).
 *
 * Each pushed packet carries two data bytes (`packet.ccData`) plus a
 * `pts`. `push` strips parity, de-duplicates doubled control codes, and
 * dispatches on the code: mode switches (pop-on / roll-up / paint-on),
 * erasures, special/extended characters, mid-row codes, offset codes,
 * PACs, and plain text. Completed captions are emitted as 'data' events
 * via flushDisplayed (defined on the prototype below).
 *
 * @param {Integer} field 608 field (0 or 1)
 * @param {Integer} dataChannel data channel within the field (0 or 1)
 */
var Cea608Stream = function Cea608Stream(field, dataChannel) {
  Cea608Stream.prototype.init.call(this);
  this.field_ = field || 0;
  this.dataChannel_ = dataChannel || 0;
  // CC1..CC4 derived from field/data channel: (field*2 + channel) + 1
  this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);
  this.setConstants();
  this.reset();

  this.push = function (packet) {
    var data, swap, char0, char1, text; // remove the parity bits

    data = packet.ccData & 0x7f7f; // ignore duplicate control codes; the spec demands they're sent twice

    if (data === this.lastControlCode_) {
      this.lastControlCode_ = null;
      return;
    } // Store control codes


    if ((data & 0xf000) === 0x1000) {
      this.lastControlCode_ = data;
    } else if (data !== this.PADDING_) {
      this.lastControlCode_ = null;
    }

    char0 = data >>> 8;
    char1 = data & 0xff;

    if (data === this.PADDING_) {
      return;
    } else if (data === this.RESUME_CAPTION_LOADING_) {
      this.mode_ = 'popOn';
    } else if (data === this.END_OF_CAPTION_) {
      // If an EOC is received while in paint-on mode, the displayed caption
      // text should be swapped to non-displayed memory as if it was a pop-on
      // caption. Because of that, we should explicitly switch back to pop-on
      // mode
      this.mode_ = 'popOn';
      this.clearFormatting(packet.pts); // if a caption was being displayed, it's gone now

      this.flushDisplayed(packet.pts); // flip memory

      swap = this.displayed_;
      this.displayed_ = this.nonDisplayed_;
      this.nonDisplayed_ = swap; // start measuring the time to display the caption

      this.startPts_ = packet.pts;
    } else if (data === this.ROLL_UP_2_ROWS_) {
      this.rollUpRows_ = 2;
      this.setRollUp(packet.pts);
    } else if (data === this.ROLL_UP_3_ROWS_) {
      this.rollUpRows_ = 3;
      this.setRollUp(packet.pts);
    } else if (data === this.ROLL_UP_4_ROWS_) {
      this.rollUpRows_ = 4;
      this.setRollUp(packet.pts);
    } else if (data === this.CARRIAGE_RETURN_) {
      this.clearFormatting(packet.pts);
      this.flushDisplayed(packet.pts);
      this.shiftRowsUp_();
      this.startPts_ = packet.pts;
    } else if (data === this.BACKSPACE_) {
      if (this.mode_ === 'popOn') {
        this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
      } else {
        this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
      }
    } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
      this.flushDisplayed(packet.pts);
      this.displayed_ = createDisplayBuffer();
    } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
      this.nonDisplayed_ = createDisplayBuffer();
    } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
      if (this.mode_ !== 'paintOn') {
        // NOTE: This should be removed when proper caption positioning is
        // implemented
        this.flushDisplayed(packet.pts);
        this.displayed_ = createDisplayBuffer();
      }

      this.mode_ = 'paintOn';
      this.startPts_ = packet.pts; // Append special characters to caption text
    } else if (this.isSpecialCharacter(char0, char1)) {
      // Bitmask char0 so that we can apply character transformations
      // regardless of field and data channel.
      // Then byte-shift to the left and OR with char1 so we can pass the
      // entire character code to `getCharFromCode`.
      char0 = (char0 & 0x03) << 8;
      text = getCharFromCode(char0 | char1);
      this[this.mode_](packet.pts, text);
      this.column_++; // Append extended characters to caption text
    } else if (this.isExtCharacter(char0, char1)) {
      // Extended characters always follow their "non-extended" equivalents.
      // IE if a "è" is desired, you'll always receive "eè"; non-compliant
      // decoders are supposed to drop the "è", while compliant decoders
      // backspace the "e" and insert "è".
      // Delete the previous character
      if (this.mode_ === 'popOn') {
        this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
      } else {
        this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
      } // Bitmask char0 so that we can apply character transformations
      // regardless of field and data channel.
      // Then byte-shift to the left and OR with char1 so we can pass the
      // entire character code to `getCharFromCode`.


      char0 = (char0 & 0x03) << 8;
      text = getCharFromCode(char0 | char1);
      this[this.mode_](packet.pts, text);
      this.column_++; // Process mid-row codes
    } else if (this.isMidRowCode(char0, char1)) {
      // Attributes are not additive, so clear all formatting
      this.clearFormatting(packet.pts); // According to the standard, mid-row codes
      // should be replaced with spaces, so add one now

      this[this.mode_](packet.pts, ' ');
      this.column_++;

      if ((char1 & 0xe) === 0xe) {
        this.addFormatting(packet.pts, ['i']);
      }

      if ((char1 & 0x1) === 0x1) {
        this.addFormatting(packet.pts, ['u']);
      } // Detect offset control codes and adjust cursor

    } else if (this.isOffsetControlCode(char0, char1)) {
      // Cursor position is set by indent PAC (see below) in 4-column
      // increments, with an additional offset code of 1-3 to reach any
      // of the 32 columns specified by CEA-608. So all we need to do
      // here is increment the column cursor by the given offset.
      this.column_ += char1 & 0x03; // Detect PACs (Preamble Address Codes)
    } else if (this.isPAC(char0, char1)) {
      // There's no logic for PAC -> row mapping, so we have to just
      // find the row code in an array and use its index :(
      var row = ROWS.indexOf(data & 0x1f20); // Configure the caption window if we're in roll-up mode

      if (this.mode_ === 'rollUp') {
        // This implies that the base row is incorrectly set.
        // As per the recommendation in CEA-608(Base Row Implementation), defer to the number
        // of roll-up rows set.
        if (row - this.rollUpRows_ + 1 < 0) {
          row = this.rollUpRows_ - 1;
        }

        this.setRollUp(packet.pts, row);
      }

      if (row !== this.row_) {
        // formatting is only persistent for current row
        this.clearFormatting(packet.pts);
        this.row_ = row;
      } // All PACs can apply underline, so detect and apply
      // (All odd-numbered second bytes set underline)


      if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
        this.addFormatting(packet.pts, ['u']);
      }

      if ((data & 0x10) === 0x10) {
        // We've got an indent level code. Each successive even number
        // increments the column cursor by 4, so we can get the desired
        // column position by bit-shifting to the right (to get n/2)
        // and multiplying by 4.
        this.column_ = ((data & 0xe) >> 1) * 4;
      }

      if (this.isColorPAC(char1)) {
        // it's a color code, though we only support white, which
        // can be either normal or italicized. white italics can be
        // either 0x4e or 0x6e depending on the row, so we just
        // bitwise-and with 0xe to see if italics should be turned on
        if ((char1 & 0xe) === 0xe) {
          this.addFormatting(packet.pts, ['i']);
        }
      } // We have a normal character in char0, and possibly one in char1

    } else if (this.isNormalChar(char0)) {
      if (char1 === 0x00) {
        char1 = null;
      }

      text = getCharFromCode(char0);
      text += getCharFromCode(char1);
      this[this.mode_](packet.pts, text);
      this.column_ += text.length;
    } // finish data processing

  };
};
7249
Cea608Stream.prototype = new stream(); // Trigger a cue point that captures the current state of the
// display buffer

/**
 * Emit a 'data' event for everything currently in the display buffer.
 *
 * Each row is trimmed, rows are joined with newlines, and blank rows are
 * stripped from the start and end (but kept in the middle). Nothing is
 * emitted when the resulting text is empty.
 *
 * @param {Integer} pts presentation timestamp that ends the cue
 */
Cea608Stream.prototype.flushDisplayed = function (pts) {
  var self = this;
  // remove spaces from the start and end of each row's string
  var trimmedRows = this.displayed_.map(function (row, rowIndex) {
    try {
      return row.trim();
    } catch (e) {
      // Ordinarily, this shouldn't happen. However, caption
      // parsing errors should not throw exceptions and
      // break playback.
      self.trigger('log', {
        level: 'warn',
        message: 'Skipping a malformed 608 caption at index ' + rowIndex + '.'
      });
      return '';
    }
  });
  // combine all text rows into one cue, then drop blank rows from the
  // start and end, but not the middle
  var content = trimmedRows.join('\n').replace(/^\n+|\n+$/g, '');

  if (content.length) {
    this.trigger('data', {
      startPts: this.startPts_,
      endPts: pts,
      text: content,
      stream: this.name_
    });
  }
};
/**
 * Zero out the data, used for startup and on seek
 */


Cea608Stream.prototype.reset = function () {
  this.mode_ = 'popOn';
  // When in roll-up mode, the index of the last row that will
  // actually display captions. If a caption is shifted to a row
  // with a lower index than this, it is cleared from the display
  // buffer
  this.topRow_ = 0;
  this.startPts_ = 0;
  this.displayed_ = createDisplayBuffer();
  this.nonDisplayed_ = createDisplayBuffer();
  this.lastControlCode_ = null;
  // Cursor state, tracked for proper line-breaking and spacing
  this.row_ = BOTTOM_ROW;
  this.column_ = 0;
  this.rollUpRows_ = 2;
  // Formatting codes (e.g. 'i', 'u') currently in effect
  this.formatting_ = [];
};
/**
 * Sets up control code and related constants for this instance
 */


Cea608Stream.prototype.setConstants = function () {
  // The following attributes have these uses:
  // ext_ : char0 for mid-row codes, and the base for extended
  // chars (ext_+0, ext_+1, and ext_+2 are char0s for
  // extended codes)
  // control_: char0 for control codes, except byte-shifted to the
  // left so that we can do this.control_ | CONTROL_CODE
  // offset_: char0 for tab offset codes
  //
  // It's also worth noting that control codes, and _only_ control codes,
  // differ between field 1 and field2. Field 2 control codes are always
  // their field 1 value plus 1. That's why there's the "| field" on the
  // control value.
  if (this.dataChannel_ === 0) {
    this.BASE_ = 0x10;
    this.EXT_ = 0x11;
    this.CONTROL_ = (0x14 | this.field_) << 8;
    this.OFFSET_ = 0x17;
  } else if (this.dataChannel_ === 1) {
    this.BASE_ = 0x18;
    this.EXT_ = 0x19;
    this.CONTROL_ = (0x1c | this.field_) << 8;
    this.OFFSET_ = 0x1f;
  } // (a dataChannel_ other than 0 or 1 leaves these four constants unset)
  // Constants for the LSByte command codes recognized by Cea608Stream. This
  // list is not exhaustive. For a more comprehensive listing and semantics see
  // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
  // Padding


  this.PADDING_ = 0x0000; // Pop-on Mode

  this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
  this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f; // Roll-up Mode

  this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
  this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
  this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
  this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d; // paint-on mode

  this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29; // Erasure

  this.BACKSPACE_ = this.CONTROL_ | 0x21;
  this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
  this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
};
/**
 * Detects if the 2-byte packet data is a special character
 *
 * Special characters have a second byte in the range 0x30 to 0x3f,
 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
 * data channel 2).
 *
 * @param {Integer} char0 The first byte
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the 2 bytes are an special character
 */


Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
  if (char0 !== this.EXT_) {
    return false;
  }

  return char1 >= 0x30 && char1 <= 0x3f;
};
/**
 * Detects if the 2-byte packet data is an extended character
 *
 * Extended characters have a second byte in the range 0x20 to 0x3f,
 * with the first byte being 0x12 or 0x13 (for data channel 1) or
 * 0x1a or 0x1b (for data channel 2).
 *
 * @param {Integer} char0 The first byte
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the 2 bytes are an extended character
 */


Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
  var char0InRange = char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2;
  var char1InRange = char1 >= 0x20 && char1 <= 0x3f;
  return char0InRange && char1InRange;
};
/**
 * Detects if the 2-byte packet is a mid-row code
 *
 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
 * channel 2).
 *
 * @param {Integer} char0 The first byte
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the 2 bytes are a mid-row code
 */


Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
  if (char0 !== this.EXT_) {
    return false;
  }

  return char1 >= 0x20 && char1 <= 0x2f;
};
/**
 * Detects if the 2-byte packet is an offset control code
 *
 * Offset control codes have a second byte in the range 0x21 to 0x23,
 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
 * data channel 2).
 *
 * @param {Integer} char0 The first byte
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the 2 bytes are an offset control code
 */


Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
  var char1InRange = char1 >= 0x21 && char1 <= 0x23;
  return char0 === this.OFFSET_ && char1InRange;
};
/**
 * Detects if the 2-byte packet is a Preamble Address Code
 *
 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
 * range 0x40 to 0x7f.
 *
 * @param {Integer} char0 The first byte
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the 2 bytes are a PAC
 */


Cea608Stream.prototype.isPAC = function (char0, char1) {
  var char0InRange = char0 >= this.BASE_ && char0 < this.BASE_ + 8;
  var char1InRange = char1 >= 0x40 && char1 <= 0x7f;
  return char0InRange && char1InRange;
};
/**
 * Detects if a packet's second byte is in the range of a PAC color code
 *
 * As implemented here, the second byte is treated as a color PAC when it
 * falls in the range 0x40 to 0x4f, or 0x60 to 0x7f.
 *
 * @param {Integer} char1 The second byte
 * @return {Boolean} Whether the byte is a color PAC
 */


Cea608Stream.prototype.isColorPAC = function (char1) {
  var lowRange = char1 >= 0x40 && char1 <= 0x4f;
  var highRange = char1 >= 0x60 && char1 <= 0x7f;
  return lowRange || highRange;
};
/**
 * Detects if a single byte is in the range of a normal character
 *
 * Normal text bytes are in the range 0x20 to 0x7f.
 *
 * @param {Integer} char The byte
 * @return {Boolean} Whether the byte is a normal character
 */


Cea608Stream.prototype.isNormalChar = function (char) {
  if (char < 0x20) {
    return false;
  }

  return char <= 0x7f;
};
/**
 * Configures roll-up
 *
 * @param {Integer} pts Current PTS
 * @param {Integer} newBaseRow Used by PACs to slide the current window to
 * a new position
 */


Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
  if (this.mode_ !== 'rollUp') {
    // Entering roll-up: the base row returns to the bottom and, per the
    // spec, both caption memories are wiped (flushing anything that was
    // on screen first).
    this.row_ = BOTTOM_ROW;
    this.mode_ = 'rollUp';
    this.flushDisplayed(pts);
    this.nonDisplayed_ = createDisplayBuffer();
    this.displayed_ = createDisplayBuffer();
  }

  if (newBaseRow !== undefined && newBaseRow !== this.row_) {
    // slide the currently displayed caption rows (up or down) so they
    // hang from the new base row
    for (var offset = 0; offset < this.rollUpRows_; offset++) {
      this.displayed_[newBaseRow - offset] = this.displayed_[this.row_ - offset];
      this.displayed_[this.row_ - offset] = '';
    }
  }

  if (newBaseRow === undefined) {
    newBaseRow = this.row_;
  }

  this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
}; // Adds the opening HTML tag for the passed character to the caption text,
7495 // and keeps track of it for later closing
7496
7497
Cea608Stream.prototype.addFormatting = function (pts, format) {
  // remember the open formats so clearFormatting can close them later
  this.formatting_ = this.formatting_.concat(format);
  var openingTags = '';

  for (var f = 0; f < format.length; f++) {
    openingTags += '<' + format[f] + '>';
  }

  this[this.mode_](pts, openingTags);
}; // Adds HTML closing tags for current formatting to caption text and
7505 // clears remembered formatting
7506
7507
Cea608Stream.prototype.clearFormatting = function (pts) {
  if (!this.formatting_.length) {
    return;
  }

  // close tags in the opposite order they were opened
  var closingTags = '';

  for (var f = this.formatting_.length - 1; f >= 0; f--) {
    closingTags += '</' + this.formatting_[f] + '>';
  }

  this.formatting_ = [];
  this[this.mode_](pts, closingTags);
}; // Mode Implementations
7519
7520
Cea608Stream.prototype.popOn = function (pts, text) {
  // pop-on text accumulates off screen until an EOC flips the buffers
  this.nonDisplayed_[this.row_] += text;
};
7527
Cea608Stream.prototype.rollUp = function (pts, text) {
  // roll-up text is appended straight to the on-screen base row
  this.displayed_[this.row_] += text;
};
7533
Cea608Stream.prototype.shiftRowsUp_ = function () {
  var row;

  // clear out rows above the active caption window...
  for (row = 0; row < this.topRow_; row++) {
    this.displayed_[row] = '';
  }

  // ...and below it
  for (row = this.row_ + 1; row <= BOTTOM_ROW; row++) {
    this.displayed_[row] = '';
  }

  // shift the displayed rows up by one
  for (row = this.topRow_; row < this.row_; row++) {
    this.displayed_[row] = this.displayed_[row + 1];
  }

  // and blank the base row
  this.displayed_[this.row_] = '';
};
7553
Cea608Stream.prototype.paintOn = function (pts, text) {
  // paint-on text is written directly to the display buffer
  this.displayed_[this.row_] += text;
}; // exports
7559
7560
// Public exports for the caption-parsing portion of mux.js: the top-level
// CaptionStream plus the per-standard 608 and 708 decoders.
var captionStream = {
  CaptionStream: CaptionStream$1,
  Cea608Stream: Cea608Stream,
  Cea708Stream: Cea708Stream
};
7566 /**
7567 * mux.js
7568 *
7569 * Copyright (c) Brightcove
7570 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
7571 */
7572
// MP2T stream_type values handled by this pipeline.
var streamTypes = {
  H264_STREAM_TYPE: 0x1B,
  ADTS_STREAM_TYPE: 0x0F,
  METADATA_STREAM_TYPE: 0x15
};
var MAX_TS = 8589934592; // 2^33, the rollover point of 33-bit MPEG-TS timestamps
var RO_THRESH = 4294967296; // 2^32, the jump size treated as a rollover
var TYPE_SHARED = 'shared';

/**
 * Adjust a timestamp so that it is continuous with a reference value,
 * compensating for 33-bit timestamp rollovers in either direction.
 *
 * @param {number} value timestamp to adjust
 * @param {number} reference timestamp the result should be continuous with
 * @return {number} the adjusted timestamp
 */
var handleRollover$1 = function handleRollover(value, reference) {
  // When the incoming value is ahead of the reference, the rollover must
  // have happened in the opposite direction. Example scenario: enter a
  // long stream just after a rollover occurred, so the reference is a
  // small number, then seek backwards over the rollover point; the new
  // timestamps are near 2^33 - 1 and must be adjusted to `value - 2^33`.
  var step = value > reference ? -MAX_TS : MAX_TS;

  // Note: a seek forwards or back that is greater than RO_THRESH
  // (2^32, ~13 hours) will cause an incorrect adjustment.
  while (Math.abs(reference - value) > RO_THRESH) {
    value += step;
  }

  return value;
};
7604
/**
 * TimestampRolloverStream - rewrites the pts/dts of passing packets so
 * they stay continuous across 33-bit MPEG-TS timestamp rollovers, using
 * the first dts seen (since the last discontinuity/reset) as the
 * reference point.
 *
 * @param {string} [type] the packet type to operate on; when omitted the
 * stream is "shared" and adjusts every packet it receives
 */
var TimestampRolloverStream$1 = function TimestampRolloverStream(type) {
  var lastDTS, referenceDTS;
  TimestampRolloverStream.prototype.init.call(this); // The "shared" type is used in cases where a stream will contain muxed
  // video and audio. We could use `undefined` here, but having a string
  // makes debugging a little clearer.

  this.type_ = type || TYPE_SHARED;

  this.push = function (data) {
    // Any "shared" rollover streams will accept _all_ data. Otherwise,
    // streams will only accept data that matches their type.
    if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
      return;
    }

    // the first dts seen anchors all subsequent rollover adjustments
    if (referenceDTS === undefined) {
      referenceDTS = data.dts;
    }

    data.dts = handleRollover$1(data.dts, referenceDTS);
    data.pts = handleRollover$1(data.pts, referenceDTS);
    lastDTS = data.dts;
    this.trigger('data', data);
  };

  // carry the last dts forward as the next reference so the following
  // segment remains continuous with this one
  this.flush = function () {
    referenceDTS = lastDTS;
    this.trigger('done');
  };

  this.endTimeline = function () {
    this.flush();
    this.trigger('endedtimeline');
  };

  // forget both timestamps; the next pushed packet establishes a new
  // reference
  this.discontinuity = function () {
    referenceDTS = void 0;
    lastDTS = void 0;
  };

  this.reset = function () {
    this.discontinuity();
    this.trigger('reset');
  };
};
7650
TimestampRolloverStream$1.prototype = new stream();
// Module exports: the stream class plus the bare rollover helper so other
// code can adjust timestamps without instantiating a stream.
var timestampRolloverStream = {
  TimestampRolloverStream: TimestampRolloverStream$1,
  handleRollover: handleRollover$1
};
7656
// return the specified byte range percent-encoded, e.g. [0x41] -> '%41'
var percentEncode$1 = function percentEncode(bytes, start, end) {
  var encoded = '';

  for (var i = start; i < end; i++) {
    encoded += '%' + ('00' + bytes[i].toString(16)).slice(-2);
  }

  return encoded;
};

// return the string representation of the specified byte range,
// interpreted as UTf-8.
var parseUtf8 = function parseUtf8(bytes, start, end) {
  return decodeURIComponent(percentEncode$1(bytes, start, end));
};

// return the string representation of the specified byte range,
// interpreted as ISO-8859-1.
var parseIso88591$1 = function parseIso88591(bytes, start, end) {
  return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
};

// decode a 28-bit ID3 "syncsafe" integer (the high bit of each byte is
// unused)
var parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
  return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
};

// per-frame-id parsers that decorate an ID3 frame object with decoded
// fields
var tagParsers = {
  TXXX: function TXXX(tag) {
    if (tag.data[0] !== 3) {
      // ignore frames with unrecognized character encodings
      return;
    }

    for (var i = 1; i < tag.data.length; i++) {
      if (tag.data[i] === 0) {
        // parse the text fields
        tag.description = parseUtf8(tag.data, 1, i);
        // do not include the null terminator in the tag value
        tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
        break;
      }
    }

    tag.data = tag.value;
  },
  WXXX: function WXXX(tag) {
    if (tag.data[0] !== 3) {
      // ignore frames with unrecognized character encodings
      return;
    }

    for (var i = 1; i < tag.data.length; i++) {
      if (tag.data[i] === 0) {
        // parse the description and URL fields
        tag.description = parseUtf8(tag.data, 1, i);
        tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
        break;
      }
    }
  },
  PRIV: function PRIV(tag) {
    var i;

    // the owner identifier is everything up to the first null byte
    for (i = 0; i < tag.data.length; i++) {
      if (tag.data[i] === 0) {
        tag.owner = parseIso88591$1(tag.data, 0, i);
        break;
      }
    }

    tag.privateData = tag.data.subarray(i + 1);
    tag.data = tag.privateData;
  }
};

var _MetadataStream;
7734
/**
 * MetadataStream - reassembles timed ID3 metadata from PES packets and
 * emits one 'data' event per complete ID3 tag, with each frame parsed
 * (via tagParsers) into `tag.frames`. Also computes the in-band metadata
 * track dispatch type from the stream type and optional descriptor bytes.
 *
 * @param {Object} [options]
 * @param {Uint8Array|Array} [options.descriptor] bytes of the
 * program-level descriptor field in MP2T
 */
_MetadataStream = function MetadataStream(options) {
  var settings = {
    // the bytes of the program-level descriptor field in MP2T
    // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
    // program element descriptors"
    descriptor: options && options.descriptor
  },
      // the total size in bytes of the ID3 tag being parsed
  tagSize = 0,
      // tag data that is not complete enough to be parsed
  buffer = [],
      // the total number of bytes currently in the buffer
  bufferSize = 0,
      i;

  _MetadataStream.prototype.init.call(this); // calculate the text track in-band metadata track dispatch type
  // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track


  this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);

  if (settings.descriptor) {
    for (i = 0; i < settings.descriptor.length; i++) {
      this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
    }
  }

  this.push = function (chunk) {
    var tag, frameStart, frameSize, frame, i, frameHeader;

    if (chunk.type !== 'timed-metadata') {
      return;
    } // if data_alignment_indicator is set in the PES header,
    // we must have the start of a new ID3 tag. Assume anything
    // remaining in the buffer was malformed and throw it out


    if (chunk.dataAlignmentIndicator) {
      bufferSize = 0;
      buffer.length = 0;
    } // ignore events that don't look like ID3 data (no "ID3" magic)


    if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
      this.trigger('log', {
        level: 'warn',
        message: 'Skipping unrecognized metadata packet'
      });
      return;
    } // add this chunk to the data we've collected so far


    buffer.push(chunk);
    bufferSize += chunk.data.byteLength; // grab the size of the entire frame from the ID3 header

    if (buffer.length === 1) {
      // the frame size is transmitted as a 28-bit integer in the
      // last four bytes of the ID3 header.
      // The most significant bit of each byte is dropped and the
      // results concatenated to recover the actual value.
      tagSize = parseSyncSafeInteger$1(chunk.data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more
      // convenient for our comparisons to include it

      tagSize += 10;
    } // if the entire frame has not arrived, wait for more data


    if (bufferSize < tagSize) {
      return;
    } // collect the entire frame so it can be parsed


    tag = {
      data: new Uint8Array(tagSize),
      frames: [],
      pts: buffer[0].pts,
      dts: buffer[0].dts
    };

    // copy buffered chunks into one contiguous array, consuming the queue
    for (i = 0; i < tagSize;) {
      tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
      i += buffer[0].data.byteLength;
      bufferSize -= buffer[0].data.byteLength;
      buffer.shift();
    } // find the start of the first frame and the end of the tag


    frameStart = 10;

    if (tag.data[5] & 0x40) {
      // advance the frame start past the extended header
      frameStart += 4; // header size field

      frameStart += parseSyncSafeInteger$1(tag.data.subarray(10, 14)); // clip any padding off the end

      tagSize -= parseSyncSafeInteger$1(tag.data.subarray(16, 20));
    } // parse one or more ID3 frames
    // http://id3.org/id3v2.3.0#ID3v2_frame_overview


    do {
      // determine the number of bytes in this frame
      frameSize = parseSyncSafeInteger$1(tag.data.subarray(frameStart + 4, frameStart + 8));

      if (frameSize < 1) {
        this.trigger('log', {
          level: 'warn',
          message: 'Malformed ID3 frame encountered. Skipping metadata parsing.'
        });
        return;
      }

      // the frame id is the four ASCII bytes at the start of the frame
      frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);
      frame = {
        id: frameHeader,
        data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
      };
      frame.key = frame.id;

      if (tagParsers[frame.id]) {
        tagParsers[frame.id](frame); // handle the special PRIV frame used to indicate the start
        // time for raw AAC data

        if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
          // decode the 33-bit MPEG-TS timestamp packed into 8 bytes
          var d = frame.data,
              size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
          size *= 4;
          size += d[7] & 0x03;
          frame.timeStamp = size; // in raw AAC, all subsequent data will be timestamped based
          // on the value of this frame
          // we couldn't have known the appropriate pts and dts before
          // parsing this ID3 tag so set those values now

          if (tag.pts === undefined && tag.dts === undefined) {
            tag.pts = frame.timeStamp;
            tag.dts = frame.timeStamp;
          }

          this.trigger('timestamp', frame);
        }
      }

      tag.frames.push(frame);
      frameStart += 10; // advance past the frame header

      frameStart += frameSize; // advance past the frame body
    } while (frameStart < tagSize);

    this.trigger('data', tag);
  };
};
7886
// Wire MetadataStream into the Stream base class and export it under the
// name the rest of the bundle uses.
_MetadataStream.prototype = new stream();
var metadataStream = _MetadataStream;
var TimestampRolloverStream = timestampRolloverStream.TimestampRolloverStream; // object types

// Forward declarations for the three M2TS pipeline stages defined below.
var _TransportPacketStream, _TransportParseStream, _ElementaryStream; // constants

// An MPEG-2 Transport Stream packet is always exactly 188 bytes and always
// begins with the sync byte 0x47.
var MP2T_PACKET_LENGTH$1 = 188,
    // bytes
    SYNC_BYTE$1 = 0x47;
/**
 * Splits an incoming stream of binary data into MPEG-2 Transport
 * Stream packets.
 */
7901
_TransportPacketStream = function TransportPacketStream() {
  // Bytes left over from the previous push that did not form a whole packet.
  var leftover = new Uint8Array(MP2T_PACKET_LENGTH$1);
  var leftoverBytes = 0;

  _TransportPacketStream.prototype.init.call(this);

  /**
   * Split a stream of data into 188-byte M2TS packets and emit each one as a
   * 'data' event. Partial packets at the end of the input are held until the
   * next push (or flush).
   *
   * @param {Uint8Array} bytes - raw segment bytes
   **/
  this.push = function (bytes) {
    var combined;
    var packetStart = 0;
    var packetEnd = MP2T_PACKET_LENGTH$1;

    // Prepend any bytes carried over from the previous segment so packets
    // that straddle a push boundary are reassembled.
    if (leftoverBytes) {
      combined = new Uint8Array(bytes.byteLength + leftoverBytes);
      combined.set(leftover.subarray(0, leftoverBytes));
      combined.set(bytes, leftoverBytes);
      leftoverBytes = 0;
    } else {
      combined = bytes;
    }

    // Scan while at least one whole packet could still fit.
    while (packetEnd < combined.byteLength) {
      if (combined[packetStart] !== SYNC_BYTE$1 || combined[packetEnd] !== SYNC_BYTE$1) {
        // De-synchronized: slide forward one byte at a time until a pair of
        // sync bytes delimits a packet again.
        packetStart++;
        packetEnd++;
        continue;
      }

      // A sync byte at both edges marks a complete packet; emit it and jump
      // one whole packet forward.
      this.trigger('data', combined.subarray(packetStart, packetEnd));
      packetStart += MP2T_PACKET_LENGTH$1;
      packetEnd += MP2T_PACKET_LENGTH$1;
    }

    // Keep any trailing partial packet — it may continue in the next segment.
    if (packetStart < combined.byteLength) {
      leftover.set(combined.subarray(packetStart), 0);
      leftoverBytes = combined.byteLength - packetStart;
    }
  };

  /**
   * Emit a final buffered packet, if one is complete, then signal 'done'.
   * Incomplete data is retained for the next segment.
   **/
  this.flush = function () {
    if (leftoverBytes === MP2T_PACKET_LENGTH$1 && leftover[0] === SYNC_BYTE$1) {
      this.trigger('data', leftover);
      leftoverBytes = 0;
    }

    this.trigger('done');
  };

  // Flush, then signal that the current timeline has ended.
  this.endTimeline = function () {
    this.flush();
    this.trigger('endedtimeline');
  };

  // Discard any buffered partial packet.
  this.reset = function () {
    leftoverBytes = 0;
    this.trigger('reset');
  };
};

_TransportPacketStream.prototype = new stream();
/**
 * Accepts an MP2T TransportPacketStream and emits data events with parsed
 * forms of the individual transport stream packets.
 */

_TransportParseStream = function TransportParseStream() {
  var parsePsi, parsePat, parsePmt, self;

  _TransportParseStream.prototype.init.call(this);

  self = this;
  // PES packets seen before a PMT arrives are queued here as
  // [packet, offset, result] tuples and replayed once the PMT is parsed.
  this.packetsWaitingForPmt = [];
  // undefined until the first PMT is parsed; afterwards maps stream kinds
  // to elementary PIDs.
  this.programMapTable = undefined;

  /**
   * Skip the PSI pointer field (when present) and dispatch the section
   * payload to the PAT or PMT parser based on psi.type.
   */
  parsePsi = function parsePsi(payload, psi) {
    var offset = 0; // PSI packets may be split into multiple sections and those
    // sections may be split into multiple packets. If a PSI
    // section starts in this packet, the payload_unit_start_indicator
    // will be true and the first byte of the payload will indicate
    // the offset from the current position to the start of the
    // section.

    if (psi.payloadUnitStartIndicator) {
      offset += payload[offset] + 1;
    }

    if (psi.type === 'pat') {
      parsePat(payload.subarray(offset), psi);
    } else {
      parsePmt(payload.subarray(offset), psi);
    }
  };

  /**
   * Parse a Program Association Table: record section numbering and the PID
   * of the first (and, for HLS, only) PMT on both this stream and the packet.
   */
  parsePat = function parsePat(payload, pat) {
    pat.section_number = payload[7]; // eslint-disable-line camelcase

    pat.last_section_number = payload[8]; // eslint-disable-line camelcase
    // skip the PSI header and parse the first PMT entry

    self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
    pat.pmtPid = self.pmtPid;
  };
  /**
   * Parse out the relevant fields of a Program Map Table (PMT).
   * @param payload {Uint8Array} the PMT-specific portion of an MP2T
   * packet. The first byte in this array should be the table_id
   * field.
   * @param pmt {object} the object that should be decorated with
   * fields parsed from the PMT.
   */


  parsePmt = function parsePmt(payload, pmt) {
    var sectionLength, tableEnd, programInfoLength, offset; // PMTs can be sent ahead of the time when they should actually
    // take effect. We don't believe this should ever be the case
    // for HLS but we'll ignore "forward" PMT declarations if we see
    // them. Future PMT declarations have the current_next_indicator
    // set to zero.

    if (!(payload[5] & 0x01)) {
      return;
    } // overwrite any existing program map table


    self.programMapTable = {
      video: null,
      audio: null,
      'timed-metadata': {}
    }; // the mapping table ends at the end of the current section

    sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
    tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
    // long the program info descriptors are

    programInfoLength = (payload[10] & 0x0f) << 8 | payload[11]; // advance the offset to the first entry in the mapping table

    offset = 12 + programInfoLength;

    while (offset < tableEnd) {
      var streamType = payload[offset];
      var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]; // only map a single elementary_pid for audio and video stream types
      // TODO: should this be done for metadata too? for now maintain behavior of
      // multiple metadata streams

      if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
        self.programMapTable.video = pid;
      } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
        self.programMapTable.audio = pid;
      } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
        // map pid to stream type for metadata streams
        self.programMapTable['timed-metadata'][pid] = streamType;
      } // move to the next table entry
      // skip past the elementary stream descriptors, if present


      offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
    } // record the map on the packet as well


    pmt.programMapTable = self.programMapTable;
  };
  /**
   * Deliver a new MP2T packet to the next stream in the pipeline.
   */


  this.push = function (packet) {
    var result = {},
        offset = 4;
    result.payloadUnitStartIndicator = !!(packet[1] & 0x40); // pid is a 13-bit field starting at the last bit of packet[1]

    result.pid = packet[1] & 0x1f;
    result.pid <<= 8;
    result.pid |= packet[2]; // if an adaption field is present, its length is specified by the
    // fifth byte of the TS packet header. The adaptation field is
    // used to add stuffing to PES packets that don't fill a complete
    // TS packet, and to specify some forms of timing and control data
    // that we do not currently use.

    if ((packet[3] & 0x30) >>> 4 > 0x01) {
      offset += packet[offset] + 1;
    } // parse the rest of the packet based on the type


    if (result.pid === 0) {
      result.type = 'pat';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result);
    } else if (result.pid === this.pmtPid) {
      result.type = 'pmt';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result); // if there are any packets waiting for a PMT to be found, process them now

      while (this.packetsWaitingForPmt.length) {
        this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
      }
    } else if (this.programMapTable === undefined) {
      // When we have not seen a PMT yet, defer further processing of
      // PES packets until one has been parsed
      this.packetsWaitingForPmt.push([packet, offset, result]);
    } else {
      this.processPes_(packet, offset, result);
    }
  };

  /**
   * Tag a PES packet with the stream type recorded for its PID in the PMT
   * and emit it. streamType is undefined for PIDs not in the PMT.
   */
  this.processPes_ = function (packet, offset, result) {
    // set the appropriate stream type
    if (result.pid === this.programMapTable.video) {
      result.streamType = streamTypes.H264_STREAM_TYPE;
    } else if (result.pid === this.programMapTable.audio) {
      result.streamType = streamTypes.ADTS_STREAM_TYPE;
    } else {
      // if not video or audio, it is timed-metadata or unknown
      // if unknown, streamType will be undefined
      result.streamType = this.programMapTable['timed-metadata'][result.pid];
    }

    result.type = 'pes';
    result.data = packet.subarray(offset);
    this.trigger('data', result);
  };
};

_TransportParseStream.prototype = new stream();
// Stream-type constants exposed for consumers (H.264 video, ADTS audio).
_TransportParseStream.STREAM_TYPES = {
  h264: 0x1b,
  adts: 0x0f
};
/**
 * Reconstitutes program elementary stream (PES) packets from parsed
 * transport stream packets. That is, if you pipe an
 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
 * events will be events which capture the bytes for individual PES
 * packets plus relevant metadata that has been extracted from the
 * container.
 */

_ElementaryStream = function ElementaryStream() {
  var self = this,
      // whether a PMT was emitted during the current segment; used by flush()
      // to decide whether to re-emit track metadata
      segmentHadPmt = false,
      // PES packet fragments, accumulated per stream until a complete packet
      // can be reassembled
      video = {
        data: [],
        size: 0
      },
      audio = {
        data: [],
        size: 0
      },
      timedMetadata = {
        data: [],
        size: 0
      },
      // most recently seen PMT mapping, kept for flush()
      programMapTable,
      /**
       * Parse the PES header of a reassembled packet, decorating `pes` with
       * packetLength, dataAlignmentIndicator, pts/dts (when present) and the
       * payload bytes.
       */
      parsePes = function parsePes(payload, pes) {
        var ptsDtsFlags;
        var startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2]; // default to an empty array

        pes.data = new Uint8Array(); // In certain live streams, the start of a TS fragment has ts packets
        // that are frame data that is continuing from the previous fragment. This
        // is to check that the pes data is the start of a new pes payload

        if (startPrefix !== 1) {
          return;
        } // get the packet length, this will be 0 for video


        pes.packetLength = 6 + (payload[4] << 8 | payload[5]); // find out if this packets starts a new keyframe

        pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0; // PES packets may be annotated with a PTS value, or a PTS value
        // and a DTS value. Determine what combination of values is
        // available to work with.

        ptsDtsFlags = payload[7]; // PTS and DTS are normally stored as a 33-bit number. Javascript
        // performs all bitwise operations on 32-bit integers but javascript
        // supports a much greater range (52-bits) of integer using standard
        // mathematical operations.
        // We construct a 31-bit value using bitwise operators over the 31
        // most significant bits and then multiply by 4 (equal to a left-shift
        // of 2) before we add the final 2 least significant bits of the
        // timestamp (equal to an OR.)

        if (ptsDtsFlags & 0xC0) {
          // the PTS and DTS are not written out directly. For information
          // on how they are encoded, see
          // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
          pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
          pes.pts *= 4; // Left shift by 2

          pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs

          pes.dts = pes.pts;

          if (ptsDtsFlags & 0x40) {
            pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
            pes.dts *= 4; // Left shift by 2

            pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
          }
        } // the data section starts immediately after the PES header.
        // pes_header_data_length specifies the number of header bytes
        // that follow the last byte of the field.


        pes.data = payload.subarray(9 + payload[8]);
      },

      /**
       * Pass completely parsed PES packets to the next stream in the pipeline.
       * When forceFlush is true the buffered fragments are cleared even if
       * the packet is incomplete.
       **/
      flushStream = function flushStream(stream, type, forceFlush) {
        var packetData = new Uint8Array(stream.size),
            event = {
              type: type
            },
            i = 0,
            offset = 0,
            packetFlushable = false,
            fragment; // do nothing if there is not enough buffered data for a complete
        // PES header

        if (!stream.data.length || stream.size < 9) {
          return;
        }

        event.trackId = stream.data[0].pid; // reassemble the packet

        for (i = 0; i < stream.data.length; i++) {
          fragment = stream.data[i];
          packetData.set(fragment.data, offset);
          offset += fragment.data.byteLength;
        } // parse assembled packet's PES header


        parsePes(packetData, event); // non-video PES packets MUST have a non-zero PES_packet_length
        // check that there is enough stream data to fill the packet

        packetFlushable = type === 'video' || event.packetLength <= stream.size; // flush pending packets if the conditions are right

        if (forceFlush || packetFlushable) {
          stream.size = 0;
          stream.data.length = 0;
        } // only emit packets that are complete. this is to avoid assembling
        // incomplete PES packets due to poor segmentation


        if (packetFlushable) {
          self.trigger('data', event);
        }
      };

  _ElementaryStream.prototype.init.call(this);
  /**
   * Identifies M2TS packet types and parses PES packets using metadata
   * parsed from the PMT
   **/


  this.push = function (data) {
    // dispatch on the packet type produced by TransportParseStream
    ({
      pat: function pat() {// we have to wait for the PMT to arrive as well before we
        // have any meaningful metadata
      },
      pes: function pes() {
        var stream, streamType;

        switch (data.streamType) {
          case streamTypes.H264_STREAM_TYPE:
            stream = video;
            streamType = 'video';
            break;

          case streamTypes.ADTS_STREAM_TYPE:
            stream = audio;
            streamType = 'audio';
            break;

          case streamTypes.METADATA_STREAM_TYPE:
            stream = timedMetadata;
            streamType = 'timed-metadata';
            break;

          default:
            // ignore unknown stream types
            return;
        } // if a new packet is starting, we can flush the completed
        // packet


        if (data.payloadUnitStartIndicator) {
          flushStream(stream, streamType, true);
        } // buffer this fragment until we are sure we've received the
        // complete payload


        stream.data.push(data);
        stream.size += data.data.byteLength;
      },
      pmt: function pmt() {
        var event = {
          type: 'metadata',
          tracks: []
        };
        programMapTable = data.programMapTable; // translate audio and video streams to tracks

        if (programMapTable.video !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.video,
            codec: 'avc',
            type: 'video'
          });
        }

        if (programMapTable.audio !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.audio,
            codec: 'adts',
            type: 'audio'
          });
        }

        segmentHadPmt = true;
        self.trigger('data', event);
      }
    })[data.type]();
  };

  // Drop buffered video/audio fragments. NOTE(review): timedMetadata is not
  // cleared here — this matches upstream mux.js behavior.
  this.reset = function () {
    video.size = 0;
    video.data.length = 0;
    audio.size = 0;
    audio.data.length = 0;
    this.trigger('reset');
  };
  /**
   * Flush any remaining input. Video PES packets may be of variable
   * length. Normally, the start of a new video packet can trigger the
   * finalization of the previous packet. That is not possible if no
   * more video is forthcoming, however. In that case, some other
   * mechanism (like the end of the file) has to be employed. When it is
   * clear that no additional data is forthcoming, calling this method
   * will flush the buffered packets.
   */


  this.flushStreams_ = function () {
    // !!THIS ORDER IS IMPORTANT!!
    // video first then audio
    flushStream(video, 'video');
    flushStream(audio, 'audio');
    flushStream(timedMetadata, 'timed-metadata');
  };

  this.flush = function () {
    // if on flush we haven't had a pmt emitted
    // and we have a pmt to emit. emit the pmt
    // so that we trigger a trackinfo downstream.
    if (!segmentHadPmt && programMapTable) {
      var pmt = {
        type: 'metadata',
        tracks: []
      }; // translate audio and video streams to tracks

      if (programMapTable.video !== null) {
        pmt.tracks.push({
          timelineStartInfo: {
            baseMediaDecodeTime: 0
          },
          id: +programMapTable.video,
          codec: 'avc',
          type: 'video'
        });
      }

      if (programMapTable.audio !== null) {
        pmt.tracks.push({
          timelineStartInfo: {
            baseMediaDecodeTime: 0
          },
          id: +programMapTable.audio,
          codec: 'adts',
          type: 'audio'
        });
      }

      self.trigger('data', pmt);
    }

    segmentHadPmt = false;
    this.flushStreams_();
    this.trigger('done');
  };
};

_ElementaryStream.prototype = new stream();
// Public m2ts namespace: the pipeline stages defined above plus the caption
// and metadata streams pulled in from their own modules.
var m2ts = {
  PAT_PID: 0x0000,
  MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH$1,
  TransportPacketStream: _TransportPacketStream,
  TransportParseStream: _TransportParseStream,
  ElementaryStream: _ElementaryStream,
  TimestampRolloverStream: TimestampRolloverStream,
  CaptionStream: captionStream.CaptionStream,
  Cea608Stream: captionStream.Cea608Stream,
  Cea708Stream: captionStream.Cea708Stream,
  MetadataStream: metadataStream
};

// Re-export every stream-type constant on the namespace as well.
for (var type in streamTypes) {
  if (streamTypes.hasOwnProperty(type)) {
    m2ts[type] = streamTypes[type];
  }
}

var m2ts_1 = m2ts;
var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;

var _AdtsStream;

// Sample rates addressed by the 4-bit sampling_frequency_index of an ADTS
// header, in index order.
var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
/*
 * Accepts an ElementaryStream and emits data events with parsed
 * AAC Audio Frames of the individual packets. Input audio in ADTS
 * format is unpacked and re-emitted as AAC frames.
 *
 * @see http://wiki.multimedia.cx/index.php?title=ADTS
 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
 */
8458
_AdtsStream = function AdtsStream(handlePartialSegments) {
  // bytes not yet consumed, carried across pushes so ADTS frames that span
  // PES packet boundaries can be parsed
  var buffer,
      // index of the frame within the current segment; used to offset pts/dts
      frameNum = 0;

  _AdtsStream.prototype.init.call(this);

  /**
   * Emit a warn-level log for a byte range that was skipped because it sat
   * outside any recognizable ADTS syncword.
   */
  this.skipWarn_ = function (start, end) {
    this.trigger('log', {
      level: 'warn',
      message: "adts skiping bytes " + start + " to " + end + " in frame " + frameNum + " outside syncword"
    });
  };

  /**
   * Unpack complete ADTS frames from an audio PES packet and emit each as a
   * 'data' event with timing and codec parameters parsed from the header.
   *
   * @param {Object} packet - a PES packet from the ElementaryStream
   */
  this.push = function (packet) {
    var i = 0,
        frameLength,
        protectionSkipBytes,
        oldBuffer,
        sampleCount,
        adtsFrameDuration;

    if (!handlePartialSegments) {
      frameNum = 0;
    }

    if (packet.type !== 'audio') {
      // ignore non-audio data
      return;
    } // Prepend any data in the buffer to the input data so that we can parse
    // aac frames the cross a PES packet boundary


    if (buffer && buffer.length) {
      oldBuffer = buffer;
      buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
      buffer.set(oldBuffer);
      buffer.set(packet.data, oldBuffer.byteLength);
    } else {
      buffer = packet.data;
    } // unpack any ADTS frames which have been fully received
    // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS


    // start index of a run of unrecognized bytes, or not-a-number when we are
    // currently inside valid data
    var skip; // We use i + 7 here because we want to be able to parse the entire header.
    // If we don't have enough bytes to do that, then we definitely won't have a full frame.

    while (i + 7 < buffer.length) {
      // Look for the start of an ADTS header (12-bit syncword 0xFFF)..
      if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
        if (typeof skip !== 'number') {
          skip = i;
        } // If a valid header was not found, jump one forward and attempt to
        // find a valid ADTS header starting at the next byte


        i++;
        continue;
      }

      if (typeof skip === 'number') {
        this.skipWarn_(skip, i);
        skip = null;
      } // The protection skip bit tells us if we have 2 bytes of CRC data at the
      // end of the ADTS header


      protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2; // Frame length is a 13 bit integer starting 16 bits from the
      // end of the sync sequence
      // NOTE: frame length includes the size of the header

      frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
      sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
      adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2]; // If we don't have enough data to actually finish this ADTS frame,
      // then we have to wait for more data

      if (buffer.byteLength - i < frameLength) {
        break;
      } // Otherwise, deliver the complete AAC frame


      this.trigger('data', {
        pts: packet.pts + frameNum * adtsFrameDuration,
        dts: packet.dts + frameNum * adtsFrameDuration,
        sampleCount: sampleCount,
        audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
        channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
        samplerate: ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2],
        samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
        // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
        samplesize: 16,
        // data is the frame without it's header
        data: buffer.subarray(i + 7 + protectionSkipBytes, i + frameLength)
      });
      frameNum++;
      i += frameLength;
    }

    // warn about any trailing unrecognized bytes
    if (typeof skip === 'number') {
      this.skipWarn_(skip, i);
      skip = null;
    } // remove processed bytes from the buffer.


    buffer = buffer.subarray(i);
  };

  this.flush = function () {
    frameNum = 0;
    this.trigger('done');
  };

  this.reset = function () {
    buffer = void 0;
    this.trigger('reset');
  };

  this.endTimeline = function () {
    buffer = void 0;
    this.trigger('endedtimeline');
  };
};

_AdtsStream.prototype = new stream();
var adts = _AdtsStream;
8583 /**
8584 * mux.js
8585 *
8586 * Copyright (c) Brightcove
8587 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
8588 */
8589
8590 var ExpGolomb;
8591 /**
8592 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
8593 * scheme used by h264.
8594 */
8595
8596 ExpGolomb = function ExpGolomb(workingData) {
8597 var // the number of bytes left to examine in workingData
8598 workingBytesAvailable = workingData.byteLength,
8599 // the current word being examined
8600 workingWord = 0,
8601 // :uint
8602 // the number of bits left to examine in the current word
8603 workingBitsAvailable = 0; // :uint;
8604 // ():uint
8605
8606 this.length = function () {
8607 return 8 * workingBytesAvailable;
8608 }; // ():uint
8609
8610
8611 this.bitsAvailable = function () {
8612 return 8 * workingBytesAvailable + workingBitsAvailable;
8613 }; // ():void
8614
8615
8616 this.loadWord = function () {
8617 var position = workingData.byteLength - workingBytesAvailable,
8618 workingBytes = new Uint8Array(4),
8619 availableBytes = Math.min(4, workingBytesAvailable);
8620
8621 if (availableBytes === 0) {
8622 throw new Error('no bytes available');
8623 }
8624
8625 workingBytes.set(workingData.subarray(position, position + availableBytes));
8626 workingWord = new DataView(workingBytes.buffer).getUint32(0); // track the amount of workingData that has been processed
8627
8628 workingBitsAvailable = availableBytes * 8;
8629 workingBytesAvailable -= availableBytes;
8630 }; // (count:int):void
8631
8632
8633 this.skipBits = function (count) {
8634 var skipBytes; // :int
8635
8636 if (workingBitsAvailable > count) {
8637 workingWord <<= count;
8638 workingBitsAvailable -= count;
8639 } else {
8640 count -= workingBitsAvailable;
8641 skipBytes = Math.floor(count / 8);
8642 count -= skipBytes * 8;
8643 workingBytesAvailable -= skipBytes;
8644 this.loadWord();
8645 workingWord <<= count;
8646 workingBitsAvailable -= count;
8647 }
8648 }; // (size:int):uint
8649
8650
8651 this.readBits = function (size) {
8652 var bits = Math.min(workingBitsAvailable, size),
8653 // :uint
8654 valu = workingWord >>> 32 - bits; // :uint
8655 // if size > 31, handle error
8656
8657 workingBitsAvailable -= bits;
8658
8659 if (workingBitsAvailable > 0) {
8660 workingWord <<= bits;
8661 } else if (workingBytesAvailable > 0) {
8662 this.loadWord();
8663 }
8664
8665 bits = size - bits;
8666
8667 if (bits > 0) {
8668 return valu << bits | this.readBits(bits);
8669 }
8670
8671 return valu;
8672 }; // ():uint
8673
8674
8675 this.skipLeadingZeros = function () {
8676 var leadingZeroCount; // :uint
8677
8678 for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
8679 if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {
8680 // the first bit of working word is 1
8681 workingWord <<= leadingZeroCount;
8682 workingBitsAvailable -= leadingZeroCount;
8683 return leadingZeroCount;
8684 }
8685 } // we exhausted workingWord and still have not found a 1
8686
8687
8688 this.loadWord();
8689 return leadingZeroCount + this.skipLeadingZeros();
8690 }; // ():void
8691
8692
8693 this.skipUnsignedExpGolomb = function () {
8694 this.skipBits(1 + this.skipLeadingZeros());
8695 }; // ():void
8696
8697
8698 this.skipExpGolomb = function () {
8699 this.skipBits(1 + this.skipLeadingZeros());
8700 }; // ():uint
8701
8702
8703 this.readUnsignedExpGolomb = function () {
8704 var clz = this.skipLeadingZeros(); // :uint
8705
8706 return this.readBits(clz + 1) - 1;
8707 }; // ():int
8708
8709
8710 this.readExpGolomb = function () {
8711 var valu = this.readUnsignedExpGolomb(); // :int
8712
8713 if (0x01 & valu) {
8714 // the number is odd if the low order bit is set
8715 return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
8716 }
8717
8718 return -1 * (valu >>> 1); // divide by two then make it negative
8719 }; // Some convenience functions
8720 // :Boolean
8721
8722
8723 this.readBoolean = function () {
8724 return this.readBits(1) === 1;
8725 }; // ():int
8726
8727
8728 this.readUnsignedByte = function () {
8729 return this.readBits(8);
8730 };
8731
8732 this.loadWord();
8733 };
8734
8735 var expGolomb = ExpGolomb;
8736
8737 var _H264Stream, _NalByteStream;
8738
8739 var PROFILES_WITH_OPTIONAL_SPS_DATA;
8740 /**
8741 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
8742 */
8743
8744 _NalByteStream = function NalByteStream() {
8745 var syncPoint = 0,
8746 i,
8747 buffer;
8748
8749 _NalByteStream.prototype.init.call(this);
8750 /*
8751 * Scans a byte stream and triggers a data event with the NAL units found.
8752 * @param {Object} data Event received from H264Stream
8753 * @param {Uint8Array} data.data The h264 byte stream to be scanned
8754 *
8755 * @see H264Stream.push
8756 */
8757
8758
8759 this.push = function (data) {
8760 var swapBuffer;
8761
8762 if (!buffer) {
8763 buffer = data.data;
8764 } else {
8765 swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
8766 swapBuffer.set(buffer);
8767 swapBuffer.set(data.data, buffer.byteLength);
8768 buffer = swapBuffer;
8769 }
8770
8771 var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B
8772 // scan for NAL unit boundaries
8773 // a match looks like this:
8774 // 0 0 1 .. NAL .. 0 0 1
8775 // ^ sync point ^ i
8776 // or this:
8777 // 0 0 1 .. NAL .. 0 0 0
8778 // ^ sync point ^ i
8779 // advance the sync point to a NAL start, if necessary
8780
8781 for (; syncPoint < len - 3; syncPoint++) {
8782 if (buffer[syncPoint + 2] === 1) {
8783 // the sync point is properly aligned
8784 i = syncPoint + 5;
8785 break;
8786 }
8787 }
8788
8789 while (i < len) {
8790 // look at the current byte to determine if we've hit the end of
8791 // a NAL unit boundary
8792 switch (buffer[i]) {
8793 case 0:
8794 // skip past non-sync sequences
8795 if (buffer[i - 1] !== 0) {
8796 i += 2;
8797 break;
8798 } else if (buffer[i - 2] !== 0) {
8799 i++;
8800 break;
8801 } // deliver the NAL unit if it isn't empty
8802
8803
8804 if (syncPoint + 3 !== i - 2) {
8805 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
8806 } // drop trailing zeroes
8807
8808
8809 do {
8810 i++;
8811 } while (buffer[i] !== 1 && i < len);
8812
8813 syncPoint = i - 2;
8814 i += 3;
8815 break;
8816
8817 case 1:
8818 // skip past non-sync sequences
8819 if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
8820 i += 3;
8821 break;
8822 } // deliver the NAL unit
8823
8824
8825 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
8826 syncPoint = i - 2;
8827 i += 3;
8828 break;
8829
8830 default:
8831 // the current byte isn't a one or zero, so it cannot be part
8832 // of a sync sequence
8833 i += 3;
8834 break;
8835 }
8836 } // filter out the NAL units that were delivered
8837
8838
8839 buffer = buffer.subarray(syncPoint);
8840 i -= syncPoint;
8841 syncPoint = 0;
8842 };
8843
  this.reset = function () {
    // Drop any partially-buffered NAL data and restart scanning from the
    // beginning of the next pushed buffer.
    buffer = null;
    syncPoint = 0;
    this.trigger('reset');
  };
8849
  this.flush = function () {
    // deliver the last buffered NAL unit; `syncPoint + 3` skips the
    // three-byte 0x000001 start code at the head of the buffered data
    if (buffer && buffer.byteLength > 3) {
      this.trigger('data', buffer.subarray(syncPoint + 3));
    } // reset the stream state


    buffer = null;
    syncPoint = 0;
    this.trigger('done');
  };
8861
  this.endTimeline = function () {
    // emit any outstanding NAL unit, then signal that the timeline ended
    this.flush();
    this.trigger('endedtimeline');
  };
8866 };
8867
_NalByteStream.prototype = new stream(); // values of profile_idc that indicate additional fields are included in the SPS
// see Recommendation ITU-T H.264 (4/2013),
// 7.3.2.1.1 Sequence parameter set data syntax

// Lookup keyed by profile_idc; readSequenceParameterSet only parses the
// chroma/bit-depth/scaling-list fields for these profiles.
PROFILES_WITH_OPTIONAL_SPS_DATA = {
  100: true,
  110: true,
  122: true,
  244: true,
  44: true,
  83: true,
  86: true,
  118: true,
  128: true,
  // TODO: the three profiles below don't
  // appear to have sps data in the specification anymore?
  138: true,
  139: true,
  134: true
};
8888 /**
8889 * Accepts input from a ElementaryStream and produces H.264 NAL unit data
8890 * events.
8891 */
8892
_H264Stream = function H264Stream() {
  var nalByteStream = new _NalByteStream(),
      self,
      trackId,
      currentPts,
      currentDts,
      discardEmulationPreventionBytes,
      readSequenceParameterSet,
      skipScalingList;

  _H264Stream.prototype.init.call(this);

  self = this;
  /*
   * Pushes a packet from a stream onto the NalByteStream
   *
   * @param {Object} packet - A packet received from a stream
   * @param {Uint8Array} packet.data - The raw bytes of the packet
   * @param {Number} packet.dts - Decode timestamp of the packet
   * @param {Number} packet.pts - Presentation timestamp of the packet
   * @param {Number} packet.trackId - The id of the h264 track this packet came from
   * @param {('video'|'audio')} packet.type - The type of packet
   *
   */

  this.push = function (packet) {
    if (packet.type !== 'video') {
      return;
    }

    // remember the most recent timing info so NAL units emitted by
    // nalByteStream (below) can be stamped with it
    trackId = packet.trackId;
    currentPts = packet.pts;
    currentDts = packet.dts;
    nalByteStream.push(packet);
  };
  /*
   * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
   * for the NALUs to the next stream component.
   * Also, preprocess caption and sequence parameter NALUs.
   *
   * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
   * @see NalByteStream.push
   */


  nalByteStream.on('data', function (data) {
    var event = {
      trackId: trackId,
      pts: currentPts,
      dts: currentDts,
      data: data,
      // the NAL unit type is carried in the low 5 bits of the first byte
      nalUnitTypeCode: data[0] & 0x1f
    };

    switch (event.nalUnitTypeCode) {
      case 0x05:
        event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
        break;

      case 0x06:
        event.nalUnitType = 'sei_rbsp';
        event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
        break;

      case 0x07:
        event.nalUnitType = 'seq_parameter_set_rbsp';
        event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
        event.config = readSequenceParameterSet(event.escapedRBSP);
        break;

      case 0x08:
        event.nalUnitType = 'pic_parameter_set_rbsp';
        break;

      case 0x09:
        event.nalUnitType = 'access_unit_delimiter_rbsp';
        break;
    } // This triggers data on the H264Stream


    self.trigger('data', event);
  });

  // re-emit lifecycle events from the inner NalByteStream
  nalByteStream.on('done', function () {
    self.trigger('done');
  });
  nalByteStream.on('partialdone', function () {
    self.trigger('partialdone');
  });
  nalByteStream.on('reset', function () {
    self.trigger('reset');
  });
  nalByteStream.on('endedtimeline', function () {
    self.trigger('endedtimeline');
  });

  this.flush = function () {
    nalByteStream.flush();
  };

  this.partialFlush = function () {
    nalByteStream.partialFlush();
  };

  this.reset = function () {
    nalByteStream.reset();
  };

  this.endTimeline = function () {
    nalByteStream.endTimeline();
  };
  /**
   * Advance the ExpGolomb decoder past a scaling list. The scaling
   * list is optionally transmitted as part of a sequence parameter
   * set and is not relevant to transmuxing.
   * @param count {number} the number of entries in this scaling list
   * @param expGolombDecoder {object} an ExpGolomb pointed to the
   * start of a scaling list
   * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
   */


  skipScalingList = function skipScalingList(count, expGolombDecoder) {
    var lastScale = 8,
        nextScale = 8,
        j,
        deltaScale;

    for (j = 0; j < count; j++) {
      if (nextScale !== 0) {
        deltaScale = expGolombDecoder.readExpGolomb();
        nextScale = (lastScale + deltaScale + 256) % 256;
      }

      lastScale = nextScale === 0 ? lastScale : nextScale;
    }
  };
  /**
   * Expunge any "Emulation Prevention" bytes from a "Raw Byte
   * Sequence Payload"
   * @param data {Uint8Array} the bytes of a RBSP from a NAL
   * unit
   * @return {Uint8Array} the RBSP without any Emulation
   * Prevention Bytes
   */


  discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
    var length = data.byteLength,
        emulationPreventionBytesPositions = [],
        i = 1,
        newLength,
        newData; // Find all `Emulation Prevention Bytes` (the 0x03 in a 00 00 03 run)

    while (i < length - 2) {
      if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
        emulationPreventionBytesPositions.push(i + 2);
        i += 2;
      } else {
        i++;
      }
    } // If no Emulation Prevention Bytes were found just return the original
    // array


    if (emulationPreventionBytesPositions.length === 0) {
      return data;
    } // Create a new array to hold the NAL unit data


    newLength = length - emulationPreventionBytesPositions.length;
    newData = new Uint8Array(newLength);
    var sourceIndex = 0;

    for (i = 0; i < newLength; sourceIndex++, i++) {
      if (sourceIndex === emulationPreventionBytesPositions[0]) {
        // Skip this byte
        sourceIndex++; // Remove this position index

        emulationPreventionBytesPositions.shift();
      }

      newData[i] = data[sourceIndex];
    }

    return newData;
  };
  /**
   * Read a sequence parameter set and return some interesting video
   * properties. A sequence parameter set is the H264 metadata that
   * describes the properties of upcoming video frames.
   * @param data {Uint8Array} the bytes of a sequence parameter set
   * @return {object} an object with configuration parsed from the
   * sequence parameter set, including the dimensions of the
   * associated video frames.
   */


  readSequenceParameterSet = function readSequenceParameterSet(data) {
    var frameCropLeftOffset = 0,
        frameCropRightOffset = 0,
        frameCropTopOffset = 0,
        frameCropBottomOffset = 0,
        expGolombDecoder,
        profileIdc,
        levelIdc,
        profileCompatibility,
        chromaFormatIdc,
        picOrderCntType,
        numRefFramesInPicOrderCntCycle,
        picWidthInMbsMinus1,
        picHeightInMapUnitsMinus1,
        frameMbsOnlyFlag,
        scalingListCount,
        sarRatio = [1, 1],
        aspectRatioIdc,
        i;
    expGolombDecoder = new expGolomb(data);
    profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc

    profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag

    levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)

    expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
    // some profiles have more optional data we don't need

    if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
      chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();

      if (chromaFormatIdc === 3) {
        expGolombDecoder.skipBits(1); // separate_colour_plane_flag
      }

      expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8

      expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8

      expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag

      if (expGolombDecoder.readBoolean()) {
        // seq_scaling_matrix_present_flag
        scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;

        for (i = 0; i < scalingListCount; i++) {
          if (expGolombDecoder.readBoolean()) {
            // seq_scaling_list_present_flag[ i ]
            if (i < 6) {
              skipScalingList(16, expGolombDecoder);
            } else {
              skipScalingList(64, expGolombDecoder);
            }
          }
        }
      }
    }

    expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4

    picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();

    if (picOrderCntType === 0) {
      expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
    } else if (picOrderCntType === 1) {
      expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag

      expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic

      expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field

      numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();

      for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
        expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
      }
    }

    expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames

    expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag

    picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
    picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
    frameMbsOnlyFlag = expGolombDecoder.readBits(1);

    if (frameMbsOnlyFlag === 0) {
      expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
    }

    expGolombDecoder.skipBits(1); // direct_8x8_inference_flag

    if (expGolombDecoder.readBoolean()) {
      // frame_cropping_flag
      frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
      frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
      frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
      frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
    }

    if (expGolombDecoder.readBoolean()) {
      // vui_parameters_present_flag
      if (expGolombDecoder.readBoolean()) {
        // aspect_ratio_info_present_flag
        aspectRatioIdc = expGolombDecoder.readUnsignedByte();

        // map aspect_ratio_idc to a sample aspect ratio pair
        switch (aspectRatioIdc) {
          case 1: sarRatio = [1, 1]; break;

          case 2: sarRatio = [12, 11]; break;

          case 3: sarRatio = [10, 11]; break;

          case 4: sarRatio = [16, 11]; break;

          case 5: sarRatio = [40, 33]; break;

          case 6: sarRatio = [24, 11]; break;

          case 7: sarRatio = [20, 11]; break;

          case 8: sarRatio = [32, 11]; break;

          case 9: sarRatio = [80, 33]; break;

          case 10: sarRatio = [18, 11]; break;

          case 11: sarRatio = [15, 11]; break;

          case 12: sarRatio = [64, 33]; break;

          case 13: sarRatio = [160, 99]; break;

          case 14: sarRatio = [4, 3]; break;

          case 15: sarRatio = [3, 2]; break;

          case 16: sarRatio = [2, 1]; break;

          case 255:
            {
              // Extended_SAR: width and height are each a 16-bit value
              sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
              break;
            }
        }

        if (sarRatio) {
          // NOTE(review): no-op expression left over from the build step —
          // the quotient is computed and immediately discarded.
          sarRatio[0] / sarRatio[1];
        }
      }
    }

    return {
      profileIdc: profileIdc,
      levelIdc: levelIdc,
      profileCompatibility: profileCompatibility,
      width: (picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2,
      height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
      // sar is sample aspect ratio
      sarRatio: sarRatio
    };
  };
};
9286
_H264Stream.prototype = new stream();
// public surface of the H.264 parsing layer
var h264 = {
  H264Stream: _H264Stream,
  NalByteStream: _NalByteStream
};
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 *
 * Utilities to detect basic properties and metadata about Aac data.
 */

// Sample rates in Hz, indexed by the 4-bit sampling_frequency_index field
// of an ADTS header.
var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
9302
/**
 * Compute the total byte length of an ID3v2 tag, including the 10-byte
 * header and, when flagged, the 10-byte footer.
 *
 * @param {Uint8Array|Array} header - buffer containing the ID3 tag
 * @param {number} byteIndex - offset of the 'ID3' identifier
 * @return {number} the full tag size in bytes
 */
var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
  // The payload size is a 28-bit sync-safe integer in bytes 6-9.
  var size = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9];

  // if we get a negative size clamp it to 0
  if (size < 0) {
    size = 0;
  }

  // Bit 4 of the flags byte indicates a footer follows the payload.
  var footerPresent = (header[byteIndex + 5] & 16) >> 4;
  return footerPresent ? size + 20 : size + 10;
};
9316
/**
 * Skip over any ID3 tags at `offset` and return the offset of the first
 * byte that is not part of an ID3 tag (where audio data may begin).
 *
 * @param {Uint8Array|Array} data - buffer to scan
 * @param {number} offset - starting offset
 * @return {number} the offset just past any consecutive ID3 tags
 */
var getId3Offset = function getId3Offset(data, offset) {
  // iterate instead of recursing: each complete 'ID3' header advances the
  // offset by the full tag size
  while (data.length - offset >= 10 && data[offset] === 'I'.charCodeAt(0) && data[offset + 1] === 'D'.charCodeAt(0) && data[offset + 2] === '3'.charCodeAt(0)) {
    offset += parseId3TagSize(data, offset);
  }

  return offset;
}; // TODO: use vhs-utils
9325
9326
/**
 * Heuristically decide whether a buffer contains raw AAC (ADTS) data.
 *
 * @param {Uint8Array} data - buffer to inspect
 * @return {boolean} true when an ADTS sync word follows any leading ID3 tags
 */
var isLikelyAacData$1 = function isLikelyAacData(data) {
  var offset = getId3Offset(data, 0);

  if (data.length < offset + 2) {
    return false;
  }

  // Look for the ADTS sync word after any ID3 tags. The final mask also
  // verifies that the 2 layer bits are 0, aka this is not mp3 data but
  // aac data.
  return (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0 && (data[offset + 1] & 0x16) === 0x10;
};
9333
/**
 * Decode a 28-bit ID3 "sync-safe" integer: four bytes, each contributing
 * its low 7 bits, most-significant byte first.
 *
 * @param {Uint8Array|Array} data - the four bytes to decode
 * @return {number} the decoded integer
 */
var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
  var value = 0;

  for (var idx = 0; idx < 4; idx++) {
    value = value << 7 | data[idx];
  }

  return value;
}; // return a percent-encoded representation of the specified byte range
// @see http://en.wikipedia.org/wiki/Percent-encoding
9338
9339
/**
 * Percent-encode the bytes in [start, end): each byte becomes '%' followed
 * by its two-digit lowercase hex value.
 *
 * @param {Uint8Array|Array} bytes - source bytes
 * @param {number} start - inclusive start index
 * @param {number} end - exclusive end index
 * @return {string} the percent-encoded representation
 */
var percentEncode = function percentEncode(bytes, start, end) {
  var encoded = '';

  for (var idx = start; idx < end; idx++) {
    encoded += '%' + ('00' + bytes[idx].toString(16)).slice(-2);
  }

  return encoded;
}; // return the string representation of the specified byte range,
// interpreted as ISO-8859-1.
9351
9352
var parseIso88591 = function parseIso88591(bytes, start, end) {
  // unescape() decodes the percent-encoded bytes as Latin-1 code points,
  // yielding the ISO-8859-1 string interpretation of the byte range
  return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
};
9356
/**
 * Determine the size, in bytes, of an ADTS frame (header + payload).
 *
 * The 13-bit frame_length field is split across three header bytes: the
 * low 2 bits of byte 3, all 8 bits of byte 4, and the high 3 bits of
 * byte 5.
 *
 * @param {Uint8Array|Array} header - buffer containing the ADTS header
 * @param {number} byteIndex - offset of the ADTS sync word in `header`
 * @return {number} the frame length in bytes
 */
var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
  var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
      middle = header[byteIndex + 4] << 3,
      // Bug fix: `<<` binds tighter than `&`, so the original expression
      // `header[byteIndex + 3] & 0x3 << 11` masked the byte with 0x1800 and
      // always yielded 0, truncating any frame size >= 2048 bytes.
      // Parenthesize so the two high bits are extracted first, then shifted.
      highTwo = (header[byteIndex + 3] & 0x3) << 11;
  return highTwo | middle | lowThree;
};
9363
/**
 * Classify the packet starting at `byteIndex` as ID3 metadata or ADTS audio.
 *
 * @param {Uint8Array|Array} header - buffer to inspect
 * @param {number} byteIndex - offset to inspect
 * @return {('timed-metadata'|'audio'|null)} the detected packet type, or
 *         null when neither an ID3 header nor an ADTS sync word is present
 */
var parseType$2 = function parseType(header, byteIndex) {
  if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
    return 'timed-metadata';
  } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
    // Bug fix: `===` binds tighter than `&`, so the original expression
    // `header[byteIndex] & 0xff === 0xff` evaluated as `header[byteIndex] & 1`
    // and accepted any odd byte as the first ADTS sync byte. The sync word
    // requires the entire first byte to be 0xFF.
    return 'audio';
  }

  return null;
};
9373
/**
 * Find the first ADTS header in the packet and return its sample rate.
 *
 * @param {Uint8Array} packet - buffer of ADTS data
 * @return {number|null} the sample rate in Hz, or null when no valid
 *         ADTS header is found
 */
var parseSampleRate = function parseSampleRate(packet) {
  // scan byte-by-byte for a valid ADTS sync word, stopping once fewer than
  // six bytes remain
  for (var i = 0; i + 5 < packet.length; i++) {
    if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
      // not a sync word; try the next byte
      continue;
    }

    // sampling_frequency_index occupies bits 2-5 of the third header byte
    return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
  }

  return null;
};
9390
/**
 * Extract the transport stream timestamp carried in an ID3 PRIV frame
 * owned by 'com.apple.streaming.transportStreamTimestamp'.
 *
 * @param {Uint8Array} packet - an ID3 tag (header at offset 0)
 * @return {number|null} the 33-bit timestamp, or null when no such
 *         PRIV frame is present or a frame size is invalid
 */
var parseAacTimestamp = function parseAacTimestamp(packet) {
  var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag

  frameStart = 10;

  if (packet[5] & 0x40) {
    // advance the frame start past the extended header
    frameStart += 4; // header size field

    frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
  } // parse one or more ID3 frames
  // http://id3.org/id3v2.3.0#ID3v2_frame_overview


  do {
    // determine the number of bytes in this frame
    frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));

    if (frameSize < 1) {
      // a zero-sized frame means the tag is malformed; give up
      return null;
    }

    // the frame id is the four ASCII bytes at the start of the frame header
    frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);

    if (frameHeader === 'PRIV') {
      frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);

      // the PRIV payload is a zero-terminated owner string followed by data
      for (var i = 0; i < frame.byteLength; i++) {
        if (frame[i] === 0) {
          var owner = parseIso88591(frame, 0, i);

          if (owner === 'com.apple.streaming.transportStreamTimestamp') {
            // reassemble the 33-bit timestamp: 31 high bits first, then
            // shift in the low 2 bits via multiply/add to avoid 32-bit
            // overflow in the bitwise operators
            var d = frame.subarray(i + 1);
            var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
            size *= 4;
            size += d[7] & 0x03;
            return size;
          }

          break;
        }
      }
    }

    frameStart += 10; // advance past the frame header

    frameStart += frameSize; // advance past the frame body
  } while (frameStart < packet.byteLength);

  return null;
};
9442
// AAC/ID3 parsing helpers shared by the AAC transmuxing pipeline
var utils = {
  isLikelyAacData: isLikelyAacData$1,
  parseId3TagSize: parseId3TagSize,
  parseAdtsSize: parseAdtsSize,
  parseType: parseType$2,
  parseSampleRate: parseSampleRate,
  parseAacTimestamp: parseAacTimestamp
};

// forward declaration; assigned below
var _AacStream;
9453 /**
9454 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
9455 */
9456
9457
_AacStream = function AacStream() {
  var everything = new Uint8Array(),
      timeStamp = 0;

  _AacStream.prototype.init.call(this);

  /**
   * Set the timestamp stamped onto the audio packets emitted by
   * subsequent push() calls.
   *
   * @param {number} timestamp - the pts/dts to apply to emitted packets
   */
  this.setTimestamp = function (timestamp) {
    timeStamp = timestamp;
  };

  /**
   * Scan the buffered bytes for complete ID3 tags and ADTS frames,
   * emitting each as a 'data' event. Any incomplete trailing bytes are
   * retained and prepended to the next push.
   *
   * @param {Uint8Array} bytes - raw AAC/ID3 bytes
   */
  this.push = function (bytes) {
    var frameSize = 0,
        byteIndex = 0,
        bytesLeft,
        chunk,
        packet,
        tempLength,
        leftover; // If there are bytes remaining from the last segment, prepend them to the
    // bytes that were pushed in

    if (everything.length) {
      // Bug fix: keep a reference to the old buffer before reassigning
      // `everything`. Previously `everything.set(everything.subarray(0,
      // tempLength))` copied the freshly-allocated, zero-filled array onto
      // itself, silently discarding the leftover bytes from the last push.
      tempLength = everything.length;
      leftover = everything;
      everything = new Uint8Array(bytes.byteLength + tempLength);
      everything.set(leftover.subarray(0, tempLength));
      everything.set(bytes, tempLength);
    } else {
      everything = bytes;
    }

    while (everything.length - byteIndex >= 3) {
      if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
        // Exit early because we don't have enough to parse
        // the ID3 tag header
        if (everything.length - byteIndex < 10) {
          break;
        } // check framesize


        frameSize = utils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
        // to emit a full packet
        // Add to byteIndex to support multiple ID3 tags in sequence

        if (byteIndex + frameSize > everything.length) {
          break;
        }

        chunk = {
          type: 'timed-metadata',
          data: everything.subarray(byteIndex, byteIndex + frameSize)
        };
        this.trigger('data', chunk);
        byteIndex += frameSize;
        continue;
      } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
        // Exit early because we don't have enough to parse
        // the ADTS frame header
        if (everything.length - byteIndex < 7) {
          break;
        }

        frameSize = utils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
        // to emit a full packet

        if (byteIndex + frameSize > everything.length) {
          break;
        }

        packet = {
          type: 'audio',
          data: everything.subarray(byteIndex, byteIndex + frameSize),
          pts: timeStamp,
          dts: timeStamp
        };
        this.trigger('data', packet);
        byteIndex += frameSize;
        continue;
      }

      byteIndex++;
    }

    // keep any unconsumed tail for the next push
    bytesLeft = everything.length - byteIndex;

    if (bytesLeft > 0) {
      everything = everything.subarray(byteIndex);
    } else {
      everything = new Uint8Array();
    }
  };

  /**
   * Discard all buffered bytes and notify listeners.
   */
  this.reset = function () {
    everything = new Uint8Array();
    this.trigger('reset');
  };

  /**
   * Discard buffered bytes and signal that the current timeline has ended.
   */
  this.endTimeline = function () {
    everything = new Uint8Array();
    this.trigger('endedtimeline');
  };
};
9557
_AacStream.prototype = new stream(); // inherit the Stream event emitter API

var aac = _AacStream; // constants

// per-frame properties copied onto the audio/video track objects by the
// segment streams below
var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];
var audioProperties = AUDIO_PROPERTIES;
var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];
var videoProperties = VIDEO_PROPERTIES;
var H264Stream = h264.H264Stream;
var isLikelyAacData = utils.isLikelyAacData;
var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS; // object types

// forward declarations for the segment stream classes assigned below
var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
9570
/**
 * Re-emit a pipeline stream's 'log' event from the transmuxer, tagged with
 * the name of the stream it came from. `this` is bound to the transmuxer
 * by the caller.
 *
 * @param {string} key - the pipeline stream name
 * @param {Object} event - the log event to annotate and re-trigger
 */
var retriggerForStream = function retriggerForStream(key, event) {
  event.stream = key; // annotate with the originating stream name
  this.trigger('log', event);
};
9575
/**
 * Wire every stream in a transmuxer pipeline so that its 'log' events are
 * re-emitted from the transmuxer, tagged with the stream's name.
 *
 * @param {Object} transmuxer - the transmuxer to re-emit from
 * @param {Object} pipeline - map of stream name to stream instance
 */
var addPipelineLogRetriggers = function addPipelineLogRetriggers(transmuxer, pipeline) {
  Object.keys(pipeline).forEach(function (key) {
    // skip non-stream keys and headOfPipeline
    // which is just a duplicate
    if (key === 'headOfPipeline' || !pipeline[key].on) {
      return;
    }

    pipeline[key].on('log', retriggerForStream.bind(transmuxer, key));
  });
};
9590 /**
9591 * Compare two arrays (even typed) for same-ness
9592 */
9593
9594
var arrayEquals = function arrayEquals(a, b) {
  // arrays of different lengths can never match
  if (a.length !== b.length) {
    return false;
  }

  // compare the value of each element in the array
  for (var idx = 0; idx < a.length; idx++) {
    if (a[idx] !== b[idx]) {
      return false;
    }
  }

  return true;
};
9611
/**
 * Build the timing info object for a 'segmentTimingInfo' event.
 *
 * @param {number} baseMediaDecodeTime - baseMediaDecodeTime of the segment
 * @param {number} startDts - decode time of the first frame
 * @param {number} startPts - presentation time of the first frame
 * @param {number} endDts - decode time at the end of the segment
 * @param {number} endPts - presentation time at the end of the segment
 * @param {number} prependedContentDuration - duration of content prepended
 *        to the segment (e.g. by gop fusion)
 * @return {Object} start/end dts+pts pairs based at baseMediaDecodeTime
 */
var generateSegmentTimingInfo = function generateSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
  // The PTS and DTS values are based on the actual stream times from the segment,
  // however, the player time values will reflect a start from the baseMediaDecodeTime.
  // In order to provide relevant values for the player times, base timing info on the
  // baseMediaDecodeTime and the DTS and PTS durations of the segment.
  var startTimes = {
    dts: baseMediaDecodeTime,
    pts: baseMediaDecodeTime + (startPts - startDts)
  };
  var endTimes = {
    dts: baseMediaDecodeTime + (endDts - startDts),
    pts: baseMediaDecodeTime + (endPts - startPts)
  };

  return {
    start: startTimes,
    end: endTimes,
    prependedContentDuration: prependedContentDuration,
    baseMediaDecodeTime: baseMediaDecodeTime
  };
};
9633 /**
9634 * Constructs a single-track, ISO BMFF media segment from AAC data
9635 * events. The output of this stream can be fed to a SourceBuffer
9636 * configured with a suitable initialization segment.
9637 * @param track {object} track metadata configuration
9638 * @param options {object} transmuxer options object
9639 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
9640 * in the source; false to adjust the first segment to start at 0.
9641 */
9642
9643
_AudioSegmentStream = function AudioSegmentStream(track, options) {
  var adtsFrames = [],
      sequenceNumber,
      earliestAllowedDts = 0,
      audioAppendStartTs = 0,
      videoBaseMediaDecodeTime = Infinity;
  options = options || {};
  sequenceNumber = options.firstSequenceNumber || 0;

  _AudioSegmentStream.prototype.init.call(this);

  /**
   * Buffer a parsed ADTS frame and copy its audio properties onto the
   * track object.
   * @param {Object} data - a parsed ADTS frame
   */
  this.push = function (data) {
    trackDecodeInfo.collectDtsInfo(track, data);

    if (track) {
      audioProperties.forEach(function (prop) {
        track[prop] = data[prop];
      });
    } // buffer audio data until end() is called


    adtsFrames.push(data);
  };

  // frames with a dts earlier than this are trimmed during flush()
  this.setEarliestDts = function (earliestDts) {
    earliestAllowedDts = earliestDts;
  };

  // used to align the prefixed silence with the video timeline
  this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
    videoBaseMediaDecodeTime = baseMediaDecodeTime;
  };

  // timestamp at which appended audio should begin
  this.setAudioAppendStart = function (timestamp) {
    audioAppendStartTs = timestamp;
  };

  /**
   * Assemble the buffered ADTS frames into a single ISO BMFF moof+mdat
   * fragment and emit it as a 'data' event, followed by
   * 'segmentTimingInfo', 'timingInfo' and 'done' events.
   */
  this.flush = function () {
    var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed; // return early if no audio data has been observed

    if (adtsFrames.length === 0) {
      this.trigger('done', 'AudioSegmentStream');
      return;
    }

    frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
    track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps); // amount of audio filled but the value is in video clock rather than audio clock

    videoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
    // samples (that is, adts frames) in the audio data

    track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to constuct the mdat

    mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
    adtsFrames = [];
    moof = mp4Generator.moof(sequenceNumber, [track]);
    boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // bump the sequence number for next time

    sequenceNumber++;
    boxes.set(moof);
    boxes.set(mdat, moof.byteLength);
    trackDecodeInfo.clearDtsInfo(track);
    // duration of one AAC frame: 1024 samples at the track's sample rate,
    // expressed in clock ticks
    frameDuration = Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / track.samplerate); // TODO this check was added to maintain backwards compatibility (particularly with
    // tests) on adding the timingInfo event. However, it seems unlikely that there's a
    // valid use-case where an init segment/data should be triggered without associated
    // frames. Leaving for now, but should be looked into.

    if (frames.length) {
      segmentDuration = frames.length * frameDuration;
      this.trigger('segmentTimingInfo', generateSegmentTimingInfo( // The audio track's baseMediaDecodeTime is in audio clock cycles, but the
      // frame info is in video clock cycles. Convert to match expectation of
      // listeners (that all timestamps will be based on video clock cycles).
      clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate), // frame times are already in video clock, as is segment duration
      frames[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));
      this.trigger('timingInfo', {
        start: frames[0].pts,
        end: frames[0].pts + segmentDuration
      });
    }

    this.trigger('data', {
      track: track,
      boxes: boxes
    });
    this.trigger('done', 'AudioSegmentStream');
  };

  /**
   * Discard buffered frames and per-track dts bookkeeping.
   */
  this.reset = function () {
    trackDecodeInfo.clearDtsInfo(track);
    adtsFrames = [];
    this.trigger('reset');
  };
};
9736
_AudioSegmentStream.prototype = new stream(); // inherit the Stream event emitter API
9738 /**
9739 * Constructs a single-track, ISO BMFF media segment from H264 data
9740 * events. The output of this stream can be fed to a SourceBuffer
9741 * configured with a suitable initialization segment.
9742 * @param track {object} track metadata configuration
9743 * @param options {object} transmuxer options object
9744 * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
9745 * gopsToAlignWith list when attempting to align gop pts
9746 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
9747 * in the source; false to adjust the first segment to start at 0.
9748 */
9749
9750 _VideoSegmentStream = function VideoSegmentStream(track, options) {
9751 var sequenceNumber,
9752 nalUnits = [],
9753 gopsToAlignWith = [],
9754 config,
9755 pps;
9756 options = options || {};
9757 sequenceNumber = options.firstSequenceNumber || 0;
9758
9759 _VideoSegmentStream.prototype.init.call(this);
9760
9761 delete track.minPTS;
9762 this.gopCache_ = [];
9763 /**
9764 * Constructs a ISO BMFF segment given H264 nalUnits
9765 * @param {Object} nalUnit A data event representing a nalUnit
9766 * @param {String} nalUnit.nalUnitType
9767 * @param {Object} nalUnit.config Properties for a mp4 track
9768 * @param {Uint8Array} nalUnit.data The nalUnit bytes
9769 * @see lib/codecs/h264.js
9770 **/
9771
9772 this.push = function (nalUnit) {
9773 trackDecodeInfo.collectDtsInfo(track, nalUnit); // record the track config
9774
9775 if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
9776 config = nalUnit.config;
9777 track.sps = [nalUnit.data];
9778 videoProperties.forEach(function (prop) {
9779 track[prop] = config[prop];
9780 }, this);
9781 }
9782
9783 if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
9784 pps = nalUnit.data;
9785 track.pps = [nalUnit.data];
9786 } // buffer video until flush() is called
9787
9788
9789 nalUnits.push(nalUnit);
9790 };
9791 /**
9792 * Pass constructed ISO BMFF track and boxes on to the
9793 * next stream in the pipeline
9794 **/
9795
9796
9797 this.flush = function () {
9798 var frames,
9799 gopForFusion,
9800 gops,
9801 moof,
9802 mdat,
9803 boxes,
9804 prependedContentDuration = 0,
9805 firstGop,
9806 lastGop; // Throw away nalUnits at the start of the byte stream until
9807 // we find the first AUD
9808
9809 while (nalUnits.length) {
9810 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
9811 break;
9812 }
9813
9814 nalUnits.shift();
9815 } // Return early if no video data has been observed
9816
9817
9818 if (nalUnits.length === 0) {
9819 this.resetStream_();
9820 this.trigger('done', 'VideoSegmentStream');
9821 return;
9822 } // Organize the raw nal-units into arrays that represent
9823 // higher-level constructs such as frames and gops
9824 // (group-of-pictures)
9825
9826
9827 frames = frameUtils.groupNalsIntoFrames(nalUnits);
9828 gops = frameUtils.groupFramesIntoGops(frames); // If the first frame of this fragment is not a keyframe we have
9829 // a problem since MSE (on Chrome) requires a leading keyframe.
9830 //
9831 // We have two approaches to repairing this situation:
9832 // 1) GOP-FUSION:
9833 // This is where we keep track of the GOPS (group-of-pictures)
9834 // from previous fragments and attempt to find one that we can
9835 // prepend to the current fragment in order to create a valid
9836 // fragment.
9837 // 2) KEYFRAME-PULLING:
9838 // Here we search for the first keyframe in the fragment and
9839 // throw away all the frames between the start of the fragment
9840 // and that keyframe. We then extend the duration and pull the
9841 // PTS of the keyframe forward so that it covers the time range
9842 // of the frames that were disposed of.
9843 //
9844 // #1 is far prefereable over #2 which can cause "stuttering" but
9845 // requires more things to be just right.
9846
9847 if (!gops[0][0].keyFrame) {
9848 // Search for a gop for fusion from our gopCache
9849 gopForFusion = this.getGopForFusion_(nalUnits[0], track);
9850
9851 if (gopForFusion) {
9852 // in order to provide more accurate timing information about the segment, save
9853 // the number of seconds prepended to the original segment due to GOP fusion
9854 prependedContentDuration = gopForFusion.duration;
9855 gops.unshift(gopForFusion); // Adjust Gops' metadata to account for the inclusion of the
9856 // new gop at the beginning
9857
9858 gops.byteLength += gopForFusion.byteLength;
9859 gops.nalCount += gopForFusion.nalCount;
9860 gops.pts = gopForFusion.pts;
9861 gops.dts = gopForFusion.dts;
9862 gops.duration += gopForFusion.duration;
9863 } else {
9864 // If we didn't find a candidate gop fall back to keyframe-pulling
9865 gops = frameUtils.extendFirstKeyFrame(gops);
9866 }
9867 } // Trim gops to align with gopsToAlignWith
9868
9869
9870 if (gopsToAlignWith.length) {
9871 var alignedGops;
9872
9873 if (options.alignGopsAtEnd) {
9874 alignedGops = this.alignGopsAtEnd_(gops);
9875 } else {
9876 alignedGops = this.alignGopsAtStart_(gops);
9877 }
9878
9879 if (!alignedGops) {
9880 // save all the nals in the last GOP into the gop cache
9881 this.gopCache_.unshift({
9882 gop: gops.pop(),
9883 pps: track.pps,
9884 sps: track.sps
9885 }); // Keep a maximum of 6 GOPs in the cache
9886
9887 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
9888
9889 nalUnits = []; // return early no gops can be aligned with desired gopsToAlignWith
9890
9891 this.resetStream_();
9892 this.trigger('done', 'VideoSegmentStream');
9893 return;
9894 } // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
9895 // when recalculated before sending off to CoalesceStream
9896
9897
9898 trackDecodeInfo.clearDtsInfo(track);
9899 gops = alignedGops;
9900 }
9901
9902 trackDecodeInfo.collectDtsInfo(track, gops); // First, we have to build the index from byte locations to
9903 // samples (that is, frames) in the video data
9904
9905 track.samples = frameUtils.generateSampleTable(gops); // Concatenate the video data and construct the mdat
9906
9907 mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));
9908 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
9909 this.trigger('processedGopsInfo', gops.map(function (gop) {
9910 return {
9911 pts: gop.pts,
9912 dts: gop.dts,
9913 byteLength: gop.byteLength
9914 };
9915 }));
9916 firstGop = gops[0];
9917 lastGop = gops[gops.length - 1];
9918 this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));
9919 this.trigger('timingInfo', {
9920 start: gops[0].pts,
9921 end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
9922 }); // save all the nals in the last GOP into the gop cache
9923
9924 this.gopCache_.unshift({
9925 gop: gops.pop(),
9926 pps: track.pps,
9927 sps: track.sps
9928 }); // Keep a maximum of 6 GOPs in the cache
9929
9930 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
9931
9932 nalUnits = [];
9933 this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
9934 this.trigger('timelineStartInfo', track.timelineStartInfo);
9935 moof = mp4Generator.moof(sequenceNumber, [track]); // it would be great to allocate this array up front instead of
9936 // throwing away hundreds of media segment fragments
9937
9938 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // Bump the sequence number for next time
9939
9940 sequenceNumber++;
9941 boxes.set(moof);
9942 boxes.set(mdat, moof.byteLength);
9943 this.trigger('data', {
9944 track: track,
9945 boxes: boxes
9946 });
9947 this.resetStream_(); // Continue with the flush process now
9948
9949 this.trigger('done', 'VideoSegmentStream');
9950 };
9951
  this.reset = function () {
    // Full reset of the video segment stream: clear per-track decode state
    // (via resetStream_), drop any buffered nal units, empty the GOP cache
    // used for gop-fusion, clear the alignment candidates, and notify
    // listeners that a reset happened.
    this.resetStream_();
    nalUnits = [];
    this.gopCache_.length = 0;
    gopsToAlignWith.length = 0;
    this.trigger('reset');
  };
9959
  this.resetStream_ = function () {
    // Drop the min/max DTS bookkeeping for the track so the next segment's
    // timing is recomputed from scratch.
    trackDecodeInfo.clearDtsInfo(track); // reset config and pps because they may differ across segments
    // for instance, when we are rendition switching

    config = undefined;
    pps = undefined;
  }; // Search for a candidate Gop for gop-fusion from the gop cache and
  // return it or return null if no good candidate was found
9967 // return it or return null if no good candidate was found
9968
9969
9970 this.getGopForFusion_ = function (nalUnit) {
9971 var halfSecond = 45000,
9972 // Half-a-second in a 90khz clock
9973 allowableOverlap = 10000,
9974 // About 3 frames @ 30fps
9975 nearestDistance = Infinity,
9976 dtsDistance,
9977 nearestGopObj,
9978 currentGop,
9979 currentGopObj,
9980 i; // Search for the GOP nearest to the beginning of this nal unit
9981
9982 for (i = 0; i < this.gopCache_.length; i++) {
9983 currentGopObj = this.gopCache_[i];
9984 currentGop = currentGopObj.gop; // Reject Gops with different SPS or PPS
9985
9986 if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
9987 continue;
9988 } // Reject Gops that would require a negative baseMediaDecodeTime
9989
9990
9991 if (currentGop.dts < track.timelineStartInfo.dts) {
9992 continue;
9993 } // The distance between the end of the gop and the start of the nalUnit
9994
9995
9996 dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration; // Only consider GOPS that start before the nal unit and end within
9997 // a half-second of the nal unit
9998
9999 if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {
10000 // Always use the closest GOP we found if there is more than
10001 // one candidate
10002 if (!nearestGopObj || nearestDistance > dtsDistance) {
10003 nearestGopObj = currentGopObj;
10004 nearestDistance = dtsDistance;
10005 }
10006 }
10007 }
10008
10009 if (nearestGopObj) {
10010 return nearestGopObj.gop;
10011 }
10012
10013 return null;
10014 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
10015 // of gopsToAlignWith starting from the START of the list
10016
10017
10018 this.alignGopsAtStart_ = function (gops) {
10019 var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
10020 byteLength = gops.byteLength;
10021 nalCount = gops.nalCount;
10022 duration = gops.duration;
10023 alignIndex = gopIndex = 0;
10024
10025 while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
10026 align = gopsToAlignWith[alignIndex];
10027 gop = gops[gopIndex];
10028
10029 if (align.pts === gop.pts) {
10030 break;
10031 }
10032
10033 if (gop.pts > align.pts) {
10034 // this current gop starts after the current gop we want to align on, so increment
10035 // align index
10036 alignIndex++;
10037 continue;
10038 } // current gop starts before the current gop we want to align on. so increment gop
10039 // index
10040
10041
10042 gopIndex++;
10043 byteLength -= gop.byteLength;
10044 nalCount -= gop.nalCount;
10045 duration -= gop.duration;
10046 }
10047
10048 if (gopIndex === 0) {
10049 // no gops to trim
10050 return gops;
10051 }
10052
10053 if (gopIndex === gops.length) {
10054 // all gops trimmed, skip appending all gops
10055 return null;
10056 }
10057
10058 alignedGops = gops.slice(gopIndex);
10059 alignedGops.byteLength = byteLength;
10060 alignedGops.duration = duration;
10061 alignedGops.nalCount = nalCount;
10062 alignedGops.pts = alignedGops[0].pts;
10063 alignedGops.dts = alignedGops[0].dts;
10064 return alignedGops;
10065 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
10066 // of gopsToAlignWith starting from the END of the list
10067
10068
10069 this.alignGopsAtEnd_ = function (gops) {
10070 var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
10071 alignIndex = gopsToAlignWith.length - 1;
10072 gopIndex = gops.length - 1;
10073 alignEndIndex = null;
10074 matchFound = false;
10075
10076 while (alignIndex >= 0 && gopIndex >= 0) {
10077 align = gopsToAlignWith[alignIndex];
10078 gop = gops[gopIndex];
10079
10080 if (align.pts === gop.pts) {
10081 matchFound = true;
10082 break;
10083 }
10084
10085 if (align.pts > gop.pts) {
10086 alignIndex--;
10087 continue;
10088 }
10089
10090 if (alignIndex === gopsToAlignWith.length - 1) {
10091 // gop.pts is greater than the last alignment candidate. If no match is found
10092 // by the end of this loop, we still want to append gops that come after this
10093 // point
10094 alignEndIndex = gopIndex;
10095 }
10096
10097 gopIndex--;
10098 }
10099
10100 if (!matchFound && alignEndIndex === null) {
10101 return null;
10102 }
10103
10104 var trimIndex;
10105
10106 if (matchFound) {
10107 trimIndex = gopIndex;
10108 } else {
10109 trimIndex = alignEndIndex;
10110 }
10111
10112 if (trimIndex === 0) {
10113 return gops;
10114 }
10115
10116 var alignedGops = gops.slice(trimIndex);
10117 var metadata = alignedGops.reduce(function (total, gop) {
10118 total.byteLength += gop.byteLength;
10119 total.duration += gop.duration;
10120 total.nalCount += gop.nalCount;
10121 return total;
10122 }, {
10123 byteLength: 0,
10124 duration: 0,
10125 nalCount: 0
10126 });
10127 alignedGops.byteLength = metadata.byteLength;
10128 alignedGops.duration = metadata.duration;
10129 alignedGops.nalCount = metadata.nalCount;
10130 alignedGops.pts = alignedGops[0].pts;
10131 alignedGops.dts = alignedGops[0].dts;
10132 return alignedGops;
10133 };
10134
  this.alignGopsWith = function (newGopsToAlignWith) {
    // Replace the list of GOP timing candidates consumed by
    // alignGopsAtStart_/alignGopsAtEnd_ when trimming the next segment.
    gopsToAlignWith = newGopsToAlignWith;
  };
10138 };
10139
// Inherit the base stream's event-emitter/pipe API (presumably mux.js Stream
// — on/trigger/pipe/flush; confirm against the stream definition above)
_VideoSegmentStream.prototype = new stream();
10141 /**
10142 * A Stream that can combine multiple streams (ie. audio & video)
10143 * into a single output segment for MSE. Also supports audio-only
10144 * and video-only streams.
10145 * @param options {object} transmuxer options object
10146 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
10147 * in the source; false to adjust the first segment to start at media timeline start.
10148 */
10149
10150 _CoalesceStream = function CoalesceStream(options, metadataStream) {
10151 // Number of Tracks per output segment
10152 // If greater than 1, we combine multiple
10153 // tracks into a single segment
10154 this.numberOfTracks = 0;
10155 this.metadataStream = metadataStream;
10156 options = options || {};
10157
10158 if (typeof options.remux !== 'undefined') {
10159 this.remuxTracks = !!options.remux;
10160 } else {
10161 this.remuxTracks = true;
10162 }
10163
10164 if (typeof options.keepOriginalTimestamps === 'boolean') {
10165 this.keepOriginalTimestamps = options.keepOriginalTimestamps;
10166 } else {
10167 this.keepOriginalTimestamps = false;
10168 }
10169
10170 this.pendingTracks = [];
10171 this.videoTrack = null;
10172 this.pendingBoxes = [];
10173 this.pendingCaptions = [];
10174 this.pendingMetadata = [];
10175 this.pendingBytes = 0;
10176 this.emittedTracks = 0;
10177
10178 _CoalesceStream.prototype.init.call(this); // Take output from multiple
10179
10180
10181 this.push = function (output) {
10182 // buffer incoming captions until the associated video segment
10183 // finishes
10184 if (output.text) {
10185 return this.pendingCaptions.push(output);
10186 } // buffer incoming id3 tags until the final flush
10187
10188
10189 if (output.frames) {
10190 return this.pendingMetadata.push(output);
10191 } // Add this track to the list of pending tracks and store
10192 // important information required for the construction of
10193 // the final segment
10194
10195
10196 this.pendingTracks.push(output.track);
10197 this.pendingBytes += output.boxes.byteLength; // TODO: is there an issue for this against chrome?
10198 // We unshift audio and push video because
10199 // as of Chrome 75 when switching from
10200 // one init segment to another if the video
10201 // mdat does not appear after the audio mdat
10202 // only audio will play for the duration of our transmux.
10203
10204 if (output.track.type === 'video') {
10205 this.videoTrack = output.track;
10206 this.pendingBoxes.push(output.boxes);
10207 }
10208
10209 if (output.track.type === 'audio') {
10210 this.audioTrack = output.track;
10211 this.pendingBoxes.unshift(output.boxes);
10212 }
10213 };
10214 };
10215
// Inherit the base stream's event-emitter/pipe API
_CoalesceStream.prototype = new stream();
10217
/**
 * Assemble everything buffered by push() into one output segment and emit it.
 *
 * @param flushSource {string} name of the upstream stream that triggered the
 *        flush (e.g. 'VideoSegmentStream' / 'AudioSegmentStream'); used to
 *        decide whether enough data-producing streams have flushed yet
 *
 * Emits 'data' with the combined segment, then 'caption' and 'id3Frame'
 * events per entry, and finally 'done' once all expected tracks have been
 * emitted.
 */
_CoalesceStream.prototype.flush = function (flushSource) {
  var offset = 0,
      event = {
    captions: [],
    captionStreams: {},
    metadata: [],
    info: {}
  },
      caption,
      id3,
      initSegment,
      timelineStartPts = 0,
      i;

  if (this.pendingTracks.length < this.numberOfTracks) {
    if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
      // Return because we haven't received a flush from a data-generating
      // portion of the segment (meaning that we have only received meta-data
      // or captions.)
      return;
    } else if (this.remuxTracks) {
      // Return until we have enough tracks from the pipeline to remux (if we
      // are remuxing audio and video into a single MP4)
      return;
    } else if (this.pendingTracks.length === 0) {
      // In the case where we receive a flush without any data having been
      // received we consider it an emitted track for the purposes of coalescing
      // `done` events.
      // We do this for the case where there is an audio and video track in the
      // segment but no audio data. (seen in several playlists with alternate
      // audio tracks and no audio present in the main TS segments.)
      this.emittedTracks++;

      if (this.emittedTracks >= this.numberOfTracks) {
        this.trigger('done');
        this.emittedTracks = 0;
      }

      return;
    }
  }

  // Copy track metadata into event.info; video wins over audio when both exist
  if (this.videoTrack) {
    timelineStartPts = this.videoTrack.timelineStartInfo.pts;
    videoProperties.forEach(function (prop) {
      event.info[prop] = this.videoTrack[prop];
    }, this);
  } else if (this.audioTrack) {
    timelineStartPts = this.audioTrack.timelineStartInfo.pts;
    audioProperties.forEach(function (prop) {
      event.info[prop] = this.audioTrack[prop];
    }, this);
  }

  if (this.videoTrack || this.audioTrack) {
    if (this.pendingTracks.length === 1) {
      event.type = this.pendingTracks[0].type;
    } else {
      event.type = 'combined';
    }

    this.emittedTracks += this.pendingTracks.length;
    initSegment = mp4Generator.initSegment(this.pendingTracks); // Create a new typed array to hold the init segment

    event.initSegment = new Uint8Array(initSegment.byteLength); // Create an init segment containing a moov
    // and track definitions

    event.initSegment.set(initSegment); // Create a new typed array to hold the moof+mdats

    event.data = new Uint8Array(this.pendingBytes); // Append each moof+mdat (one per track) together

    for (i = 0; i < this.pendingBoxes.length; i++) {
      event.data.set(this.pendingBoxes[i], offset);
      offset += this.pendingBoxes[i].byteLength;
    } // Translate caption PTS times into second offsets to match the
    // video timeline for the segment, and add track info


    for (i = 0; i < this.pendingCaptions.length; i++) {
      caption = this.pendingCaptions[i];
      caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
      caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);
      event.captionStreams[caption.stream] = true;
      event.captions.push(caption);
    } // Translate ID3 frame PTS times into second offsets to match the
    // video timeline for the segment


    for (i = 0; i < this.pendingMetadata.length; i++) {
      id3 = this.pendingMetadata[i];
      id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);
      event.metadata.push(id3);
    } // We add this to every single emitted segment even though we only need
    // it for the first


    event.metadata.dispatchType = this.metadataStream.dispatchType; // Reset stream state

    this.pendingTracks.length = 0;
    this.videoTrack = null;
    this.pendingBoxes.length = 0;
    this.pendingCaptions.length = 0;
    this.pendingBytes = 0;
    this.pendingMetadata.length = 0; // Emit the built segment
    // We include captions and ID3 tags for backwards compatibility,
    // ideally we should send only video and audio in the data event

    this.trigger('data', event); // Emit each caption to the outside world
    // Ideally, this would happen immediately on parsing captions,
    // but we need to ensure that video data is sent back first
    // so that caption timing can be adjusted to match video timing

    for (i = 0; i < event.captions.length; i++) {
      caption = event.captions[i];
      this.trigger('caption', caption);
    } // Emit each id3 tag to the outside world
    // Ideally, this would happen immediately on parsing the tag,
    // but we need to ensure that video data is sent back first
    // so that ID3 frame timing can be adjusted to match video timing


    for (i = 0; i < event.metadata.length; i++) {
      id3 = event.metadata[i];
      this.trigger('id3Frame', id3);
    }
  } // Only emit `done` if all tracks have been flushed and emitted


  if (this.emittedTracks >= this.numberOfTracks) {
    this.trigger('done');
    this.emittedTracks = 0;
  }
};
10351
/**
 * Toggle whether audio and video are remuxed into a single output segment.
 * Note: unlike the constructor, the value is stored as-is (not coerced).
 */
_CoalesceStream.prototype.setRemux = function (val) {
  this.remuxTracks = val;
};
10355 /**
10356 * A Stream that expects MP2T binary data as input and produces
10357 * corresponding media segments, suitable for use with Media Source
10358 * Extension (MSE) implementations that support the ISO BMFF byte
10359 * stream format, like Chrome.
10360 */
10361
10362
10363 _Transmuxer = function Transmuxer(options) {
10364 var self = this,
10365 hasFlushed = true,
10366 videoTrack,
10367 audioTrack;
10368
10369 _Transmuxer.prototype.init.call(this);
10370
10371 options = options || {};
10372 this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
10373 this.transmuxPipeline_ = {};
10374
10375 this.setupAacPipeline = function () {
10376 var pipeline = {};
10377 this.transmuxPipeline_ = pipeline;
10378 pipeline.type = 'aac';
10379 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
10380
10381 pipeline.aacStream = new aac();
10382 pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
10383 pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
10384 pipeline.adtsStream = new adts();
10385 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
10386 pipeline.headOfPipeline = pipeline.aacStream;
10387 pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
10388 pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
10389 pipeline.metadataStream.on('timestamp', function (frame) {
10390 pipeline.aacStream.setTimestamp(frame.timeStamp);
10391 });
10392 pipeline.aacStream.on('data', function (data) {
10393 if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
10394 return;
10395 }
10396
10397 audioTrack = audioTrack || {
10398 timelineStartInfo: {
10399 baseMediaDecodeTime: self.baseMediaDecodeTime
10400 },
10401 codec: 'adts',
10402 type: 'audio'
10403 }; // hook up the audio segment stream to the first track with aac data
10404
10405 pipeline.coalesceStream.numberOfTracks++;
10406 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
10407 pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
10408 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo')); // Set up the final part of the audio pipeline
10409
10410 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream); // emit pmt info
10411
10412 self.trigger('trackinfo', {
10413 hasAudio: !!audioTrack,
10414 hasVideo: !!videoTrack
10415 });
10416 }); // Re-emit any data coming from the coalesce stream to the outside world
10417
10418 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data')); // Let the consumer know we have finished flushing the entire pipeline
10419
10420 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
10421 addPipelineLogRetriggers(this, pipeline);
10422 };
10423
10424 this.setupTsPipeline = function () {
10425 var pipeline = {};
10426 this.transmuxPipeline_ = pipeline;
10427 pipeline.type = 'ts';
10428 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
10429
10430 pipeline.packetStream = new m2ts_1.TransportPacketStream();
10431 pipeline.parseStream = new m2ts_1.TransportParseStream();
10432 pipeline.elementaryStream = new m2ts_1.ElementaryStream();
10433 pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
10434 pipeline.adtsStream = new adts();
10435 pipeline.h264Stream = new H264Stream();
10436 pipeline.captionStream = new m2ts_1.CaptionStream(options);
10437 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
10438 pipeline.headOfPipeline = pipeline.packetStream; // disassemble MPEG2-TS packets into elementary streams
10439
10440 pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream); // !!THIS ORDER IS IMPORTANT!!
10441 // demux the streams
10442
10443 pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
10444 pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
10445 pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream); // Hook up CEA-608/708 caption stream
10446
10447 pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
10448 pipeline.elementaryStream.on('data', function (data) {
10449 var i;
10450
10451 if (data.type === 'metadata') {
10452 i = data.tracks.length; // scan the tracks listed in the metadata
10453
10454 while (i--) {
10455 if (!videoTrack && data.tracks[i].type === 'video') {
10456 videoTrack = data.tracks[i];
10457 videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
10458 } else if (!audioTrack && data.tracks[i].type === 'audio') {
10459 audioTrack = data.tracks[i];
10460 audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
10461 }
10462 } // hook up the video segment stream to the first track with h264 data
10463
10464
10465 if (videoTrack && !pipeline.videoSegmentStream) {
10466 pipeline.coalesceStream.numberOfTracks++;
10467 pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);
10468 pipeline.videoSegmentStream.on('log', self.getLogTrigger_('videoSegmentStream'));
10469 pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
10470 // When video emits timelineStartInfo data after a flush, we forward that
10471 // info to the AudioSegmentStream, if it exists, because video timeline
10472 // data takes precedence. Do not do this if keepOriginalTimestamps is set,
10473 // because this is a particularly subtle form of timestamp alteration.
10474 if (audioTrack && !options.keepOriginalTimestamps) {
10475 audioTrack.timelineStartInfo = timelineStartInfo; // On the first segment we trim AAC frames that exist before the
10476 // very earliest DTS we have seen in video because Chrome will
10477 // interpret any video track with a baseMediaDecodeTime that is
10478 // non-zero as a gap.
10479
10480 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
10481 }
10482 });
10483 pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
10484 pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
10485 pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
10486 if (audioTrack) {
10487 pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
10488 }
10489 });
10490 pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo')); // Set up the final part of the video pipeline
10491
10492 pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
10493 }
10494
10495 if (audioTrack && !pipeline.audioSegmentStream) {
10496 // hook up the audio segment stream to the first track with aac data
10497 pipeline.coalesceStream.numberOfTracks++;
10498 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
10499 pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
10500 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
10501 pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo')); // Set up the final part of the audio pipeline
10502
10503 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
10504 } // emit pmt info
10505
10506
10507 self.trigger('trackinfo', {
10508 hasAudio: !!audioTrack,
10509 hasVideo: !!videoTrack
10510 });
10511 }
10512 }); // Re-emit any data coming from the coalesce stream to the outside world
10513
10514 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
10515 pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
10516 id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
10517 self.trigger('id3Frame', id3Frame);
10518 });
10519 pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption')); // Let the consumer know we have finished flushing the entire pipeline
10520
10521 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
10522 addPipelineLogRetriggers(this, pipeline);
10523 }; // hook up the segment streams once track metadata is delivered
10524
10525
10526 this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
10527 var pipeline = this.transmuxPipeline_;
10528
10529 if (!options.keepOriginalTimestamps) {
10530 this.baseMediaDecodeTime = baseMediaDecodeTime;
10531 }
10532
10533 if (audioTrack) {
10534 audioTrack.timelineStartInfo.dts = undefined;
10535 audioTrack.timelineStartInfo.pts = undefined;
10536 trackDecodeInfo.clearDtsInfo(audioTrack);
10537
10538 if (pipeline.audioTimestampRolloverStream) {
10539 pipeline.audioTimestampRolloverStream.discontinuity();
10540 }
10541 }
10542
10543 if (videoTrack) {
10544 if (pipeline.videoSegmentStream) {
10545 pipeline.videoSegmentStream.gopCache_ = [];
10546 }
10547
10548 videoTrack.timelineStartInfo.dts = undefined;
10549 videoTrack.timelineStartInfo.pts = undefined;
10550 trackDecodeInfo.clearDtsInfo(videoTrack);
10551 pipeline.captionStream.reset();
10552 }
10553
10554 if (pipeline.timestampRolloverStream) {
10555 pipeline.timestampRolloverStream.discontinuity();
10556 }
10557 };
10558
10559 this.setAudioAppendStart = function (timestamp) {
10560 if (audioTrack) {
10561 this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
10562 }
10563 };
10564
10565 this.setRemux = function (val) {
10566 var pipeline = this.transmuxPipeline_;
10567 options.remux = val;
10568
10569 if (pipeline && pipeline.coalesceStream) {
10570 pipeline.coalesceStream.setRemux(val);
10571 }
10572 };
10573
10574 this.alignGopsWith = function (gopsToAlignWith) {
10575 if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
10576 this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
10577 }
10578 };
10579
10580 this.getLogTrigger_ = function (key) {
10581 var self = this;
10582 return function (event) {
10583 event.stream = key;
10584 self.trigger('log', event);
10585 };
10586 }; // feed incoming data to the front of the parsing pipeline
10587
10588
10589 this.push = function (data) {
10590 if (hasFlushed) {
10591 var isAac = isLikelyAacData(data);
10592
10593 if (isAac && this.transmuxPipeline_.type !== 'aac') {
10594 this.setupAacPipeline();
10595 } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
10596 this.setupTsPipeline();
10597 }
10598
10599 hasFlushed = false;
10600 }
10601
10602 this.transmuxPipeline_.headOfPipeline.push(data);
10603 }; // flush any buffered data
10604
10605
10606 this.flush = function () {
10607 hasFlushed = true; // Start at the top of the pipeline and flush all pending work
10608
10609 this.transmuxPipeline_.headOfPipeline.flush();
10610 };
10611
10612 this.endTimeline = function () {
10613 this.transmuxPipeline_.headOfPipeline.endTimeline();
10614 };
10615
10616 this.reset = function () {
10617 if (this.transmuxPipeline_.headOfPipeline) {
10618 this.transmuxPipeline_.headOfPipeline.reset();
10619 }
10620 }; // Caption data has to be reset when seeking outside buffered range
10621
10622
10623 this.resetCaptions = function () {
10624 if (this.transmuxPipeline_.captionStream) {
10625 this.transmuxPipeline_.captionStream.reset();
10626 }
10627 };
10628 };
10629
// Inherit the base stream's event-emitter/pipe API
_Transmuxer.prototype = new stream();
// Public surface of this transmuxer module: the main Transmuxer, the
// individual segment streams, and the shared track property lists.
var transmuxer = {
  Transmuxer: _Transmuxer,
  VideoSegmentStream: _VideoSegmentStream,
  AudioSegmentStream: _AudioSegmentStream,
  AUDIO_PROPERTIES: audioProperties,
  VIDEO_PROPERTIES: videoProperties,
  // exported for testing
  generateSegmentTimingInfo: generateSegmentTimingInfo
};
10640 /**
10641 * mux.js
10642 *
10643 * Copyright (c) Brightcove
10644 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10645 */
10646
var toUnsigned$3 = function toUnsigned(value) {
  // The unsigned right shift by zero coerces any number to an unsigned
  // 32-bit integer (e.g. -1 becomes 4294967295).
  var unsigned = value >>> 0;
  return unsigned;
};
10650
var toHexString$1 = function toHexString(value) {
  // Render the low byte as exactly two lowercase hex digits: pad with
  // leading zeros, then keep only the last two characters.
  var padded = '00' + value.toString(16);
  return padded.slice(-2);
};
10654
// Grouped byte/number helpers shared by the MP4 probe code below
var bin = {
  toUnsigned: toUnsigned$3,
  toHexString: toHexString$1
};
10659
var parseType$1 = function parseType(buffer) {
  // Decode the four ASCII bytes of an MP4 box type (e.g. 'moof', 'mdat')
  // into a string.
  return String.fromCharCode(buffer[0], buffer[1], buffer[2], buffer[3]);
};
10668
// Module-style aliases used by findBox below
var parseType_1 = parseType$1;
var toUnsigned$2 = bin.toUnsigned;
10671
var findBox = function findBox(data, path) {
  // Depth-first search of an ISO BMFF byte stream for the boxes named by
  // `path` (e.g. ['moof', 'traf']). Returns an array of Uint8Array payloads
  // (box contents, headers excluded), or null for an empty path.
  if (!path.length) {
    // short-circuit the search for empty paths
    return null;
  }

  var results = [];
  var i = 0;

  while (i < data.byteLength) {
    // 32-bit big-endian box size followed by the 4-character box type
    var size = toUnsigned$2(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);
    var type = parseType_1(data.subarray(i + 4, i + 8));
    // a size <= 1 means the box extends to the end of the data
    var end = size > 1 ? i + size : data.byteLength;

    if (type === path[0]) {
      if (path.length === 1) {
        // this is the end of the path and we've found the box we were
        // looking for
        results.push(data.subarray(i + 8, end));
      } else {
        // recursively search for the next box along the path
        var subresults = findBox(data.subarray(i + 8, end), path.slice(1));

        if (subresults.length) {
          results = results.concat(subresults);
        }
      }
    }

    i = end;
  } // we've finished searching all of data


  return results;
};
10711
// Module-style aliases used by the box parsers below
var findBox_1 = findBox;
var toUnsigned$1 = bin.toUnsigned;
var getUint64$1 = numbers.getUint64;
10715
var tfdt = function tfdt(data) {
  // Parse a Track Fragment Decode Time (tfdt) box payload: one version
  // byte, three flag bytes, then baseMediaDecodeTime (64-bit for version 1,
  // 32-bit otherwise).
  var version = data[0];
  var result = {
    version: version,
    flags: new Uint8Array(data.subarray(1, 4))
  };

  if (version === 1) {
    result.baseMediaDecodeTime = getUint64$1(data.subarray(4));
  } else {
    // 32-bit big-endian value, coerced to unsigned
    result.baseMediaDecodeTime = toUnsigned$1(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]);
  }

  return result;
};
10730
10731 var parseTfdt = tfdt;
10732
/**
 * Unpacks the four-byte sample flags field used by trun and tfhd boxes.
 *
 * @param {Uint8Array} flags - four bytes of sample flags
 * @return {Object} the individual flag fields
 * @see ISO-BMFF-12/2015, Section 8.8.3.1
 */
var parseSampleFlags = function parseSampleFlags(flags) {
  var byte0 = flags[0];
  var byte1 = flags[1];

  return {
    isLeading: (byte0 & 0x0c) >>> 2,
    dependsOn: byte0 & 0x03,
    isDependedOn: (byte1 & 0xc0) >>> 6,
    hasRedundancy: (byte1 & 0x30) >>> 4,
    paddingValue: (byte1 & 0x0e) >>> 1,
    isNonSyncSample: byte1 & 0x01,
    // 16-bit big-endian value from the last two bytes
    degradationPriority: flags[2] << 8 | flags[3]
  };
};

var parseSampleFlags_1 = parseSampleFlags;
10746
/**
 * Parses a Track Fragment Run (trun) box payload into a list of
 * per-sample records.
 *
 * The 24-bit tf_flags value (stored little-endian here as flags[0..2],
 * with flags[2] the least significant byte) determines which optional
 * fields are present per sample. Fields omitted from the trun are
 * expected to be filled in from the tfhd defaults by the caller (see
 * `parseSamples`).
 *
 * @param {Uint8Array} data - the trun payload (bytes after the box header)
 * @return {Object} version, flags, optional dataOffset, and a `samples`
 * array with duration/size/flags/compositionTimeOffset per sample
 * @see ISO-BMFF-12/2015, Section 8.8.8
 */
var trun = function trun(data) {
  var result = {
    version: data[0],
    flags: new Uint8Array(data.subarray(1, 4)),
    samples: []
  },
      view = new DataView(data.buffer, data.byteOffset, data.byteLength),
      // Flag interpretation. Each mask below compares against the byte
      // of tf_flags that actually holds the bit:
      dataOffsetPresent = result.flags[2] & 0x01,
      // compare with 2nd byte of 0x1
      firstSampleFlagsPresent = result.flags[2] & 0x04,
      // compare with 2nd byte of 0x4
      sampleDurationPresent = result.flags[1] & 0x01,
      // compare with 2nd byte of 0x100
      sampleSizePresent = result.flags[1] & 0x02,
      // compare with 2nd byte of 0x200
      sampleFlagsPresent = result.flags[1] & 0x04,
      // compare with 2nd byte of 0x400
      sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
      // compare with 2nd byte of 0x800
      sampleCount = view.getUint32(4),
      offset = 8,
      sample;

  if (dataOffsetPresent) {
    // 32 bit signed integer
    result.dataOffset = view.getInt32(offset);
    offset += 4;
  }
  // Overrides the flags for the first sample only. The order of
  // optional values will be: duration, size, compositionTimeOffset

  if (firstSampleFlagsPresent && sampleCount) {
    sample = {
      flags: parseSampleFlags_1(data.subarray(offset, offset + 4))
    };
    offset += 4;

    if (sampleDurationPresent) {
      sample.duration = view.getUint32(offset);
      offset += 4;
    }

    if (sampleSizePresent) {
      sample.size = view.getUint32(offset);
      offset += 4;
    }

    if (sampleCompositionTimeOffsetPresent) {
      // version 1 trun boxes store the offset as a signed integer
      if (result.version === 1) {
        sample.compositionTimeOffset = view.getInt32(offset);
      } else {
        sample.compositionTimeOffset = view.getUint32(offset);
      }

      offset += 4;
    }

    result.samples.push(sample);
    sampleCount--;
  }

  // parse the remaining samples; per-sample flags (if present) are read
  // here rather than taken from first-sample-flags
  while (sampleCount--) {
    sample = {};

    if (sampleDurationPresent) {
      sample.duration = view.getUint32(offset);
      offset += 4;
    }

    if (sampleSizePresent) {
      sample.size = view.getUint32(offset);
      offset += 4;
    }

    if (sampleFlagsPresent) {
      sample.flags = parseSampleFlags_1(data.subarray(offset, offset + 4));
      offset += 4;
    }

    if (sampleCompositionTimeOffsetPresent) {
      // version 1 trun boxes store the offset as a signed integer
      if (result.version === 1) {
        sample.compositionTimeOffset = view.getInt32(offset);
      } else {
        sample.compositionTimeOffset = view.getUint32(offset);
      }

      offset += 4;
    }

    result.samples.push(sample);
  }

  return result;
};

var parseTrun = trun;
10844
/**
 * Parses a Track Fragment Header (tfhd) box payload.
 *
 * @param {Uint8Array} data - the tfhd payload (bytes after the box header)
 * @return {Object} version, flags, trackId, and any optional fields
 * whose presence bits are set in tf_flags
 * @see ISO-BMFF-12/2015, Section 8.8.7
 */
var tfhd = function tfhd(data) {
  var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
      result = {
    version: data[0],
    flags: new Uint8Array(data.subarray(1, 4)),
    trackId: view.getUint32(4)
  },
      // tf_flags is 24 bits stored big-endian in flags[0..2]:
      // flags[2] holds the low byte, flags[0] the high byte
      baseDataOffsetPresent = result.flags[2] & 0x01,
      sampleDescriptionIndexPresent = result.flags[2] & 0x02,
      defaultSampleDurationPresent = result.flags[2] & 0x08,
      defaultSampleSizePresent = result.flags[2] & 0x10,
      defaultSampleFlagsPresent = result.flags[2] & 0x20,
      // duration-is-empty (0x010000) and default-base-is-moof (0x020000)
      // live in the high byte, so mask flags[0] with 0x01/0x02. The
      // previous code masked flags[0] (a single byte) with 0x010000,
      // which is always 0, so these flags were never reported.
      durationIsEmpty = result.flags[0] & 0x01,
      defaultBaseIsMoof = result.flags[0] & 0x02,
      i;
  i = 8;

  if (baseDataOffsetPresent) {
    i += 4; // truncate top 4 bytes
    // FIXME: should we read the full 64 bits?

    result.baseDataOffset = view.getUint32(12);
    i += 4;
  }

  if (sampleDescriptionIndexPresent) {
    result.sampleDescriptionIndex = view.getUint32(i);
    i += 4;
  }

  if (defaultSampleDurationPresent) {
    result.defaultSampleDuration = view.getUint32(i);
    i += 4;
  }

  if (defaultSampleSizePresent) {
    result.defaultSampleSize = view.getUint32(i);
    i += 4;
  }

  if (defaultSampleFlagsPresent) {
    result.defaultSampleFlags = view.getUint32(i);
  }

  if (durationIsEmpty) {
    result.durationIsEmpty = true;
  }

  // per spec, default-base-is-moof only applies when no explicit
  // base-data-offset was provided
  if (!baseDataOffsetPresent && defaultBaseIsMoof) {
    result.baseDataOffsetIsMoof = true;
  }

  return result;
};

var parseTfhd = tfhd;
// Detect the global object without assuming a particular environment.
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};

// Prefer `window` (browsers), then the detected global, then `self`
// (web workers); fall back to an empty object when none exist.
var win = typeof window !== "undefined" ? window : typeof commonjsGlobal !== "undefined" ? commonjsGlobal : typeof self !== "undefined" ? self : {};

var window_1 = win;
10915 var discardEmulationPreventionBytes = captionPacketParser.discardEmulationPreventionBytes;
10916 var CaptionStream = captionStream.CaptionStream;
/**
 * Maps an offset in the mdat to a sample based on the sizes of the
 * samples. Assumes that `parseSamples` has been called first.
 *
 * @param {Number} offset - The offset into the mdat
 * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
 * @return {?Object} The matching sample, or null if no match was found.
 *
 * @see ISO-BMFF-12/2015, Section 8.8.8
 **/

var mapToSample = function mapToSample(offset, samples) {
  var remaining = offset;

  for (var index = 0; index < samples.length; index++) {
    var candidate = samples[index];

    // the offset falls inside this sample's byte range
    if (remaining < candidate.size) {
      return candidate;
    }

    remaining -= candidate.size;
  }

  // the offset is past the end of the last sample
  return null;
};
/**
 * Finds SEI nal units contained in a Media Data Box.
 * Assumes that `parseSamples` has been called first.
 *
 * @param {Uint8Array} avcStream - The bytes of the mdat
 * @param {Object[]} samples - The samples parsed out by `parseSamples`
 * @param {Number} trackId - The trackId of this video track
 * @return {Object[]} seiNals - the parsed SEI NALUs found.
 *   The contents of the seiNal should match what is expected by
 *   CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
 *
 * @see ISO-BMFF-12/2015, Section 8.1.1
 * @see Rec. ITU-T H.264, 7.3.2.3.1
 **/


var findSeiNals = function findSeiNals(avcStream, samples, trackId) {
  var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
      result = {
    logs: [],
    seiNals: []
  },
      seiNal,
      i,
      length,
      lastMatchedSample;

  // each NAL unit is preceded by a 4-byte big-endian length; `i` is
  // advanced past the length prefix in the body and past the NAL
  // payload (`length`) in the update clause
  for (i = 0; i + 4 < avcStream.length; i += length) {
    length = avcView.getUint32(i);
    i += 4; // Bail if this doesn't appear to be an H264 stream

    if (length <= 0) {
      continue;
    }

    // the low 5 bits of the first NAL byte are the nal_unit_type;
    // 0x06 is SEI (supplemental enhancement information)
    switch (avcStream[i] & 0x1F) {
      case 0x06:
        var data = avcStream.subarray(i + 1, i + 1 + length);
        var matchingSample = mapToSample(i, samples);
        seiNal = {
          nalUnitType: 'sei_rbsp',
          size: length,
          data: data,
          escapedRBSP: discardEmulationPreventionBytes(data),
          trackId: trackId
        };

        if (matchingSample) {
          seiNal.pts = matchingSample.pts;
          seiNal.dts = matchingSample.dts;
          lastMatchedSample = matchingSample;
        } else if (lastMatchedSample) {
          // If a matching sample cannot be found, use the last
          // sample's values as they should be as close as possible
          seiNal.pts = lastMatchedSample.pts;
          seiNal.dts = lastMatchedSample.dts;
        } else {
          // no timing info at all for this NAL; log and drop it
          result.logs.push({
            level: 'warn',
            message: 'We\'ve encountered a nal unit without data at ' + i + ' for trackId ' + trackId + '. See mux.js#223.'
          });
          break;
        }

        result.seiNals.push(seiNal);
        break;
    }
  }

  return result;
};
/**
 * Parses sample information out of Track Run Boxes and calculates
 * the absolute presentation and decode timestamps of each sample.
 *
 * @param {Array<Uint8Array>} truns - The Trun Run boxes to be parsed
 * @param {Number|BigInt} baseMediaDecodeTime - base media decode time from tfdt
 * @see ISO-BMFF-12/2015, Section 8.8.12
 * @param {Object} tfhd - The parsed Track Fragment Header
 * @see inspect.parseTfhd
 * @return {Object[]} the parsed samples
 *
 * @see ISO-BMFF-12/2015, Section 8.8.8
 **/


var parseSamples = function parseSamples(truns, baseMediaDecodeTime, tfhd) {
  var currentDts = baseMediaDecodeTime;
  var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
  var defaultSampleSize = tfhd.defaultSampleSize || 0;
  var trackId = tfhd.trackId;
  var allSamples = [];

  truns.forEach(function (trun) {
    // Note: We currently do not parse the sample table as well
    // as the trun. It's possible some sources will require this.
    // moov > trak > mdia > minf > stbl
    var samples = parseTrun(trun).samples;

    samples.forEach(function (sample) {
      // fill in tfhd defaults for any fields the trun omitted
      if (sample.duration === undefined) {
        sample.duration = defaultSampleDuration;
      }

      if (sample.size === undefined) {
        sample.size = defaultSampleSize;
      }

      if (sample.compositionTimeOffset === undefined) {
        sample.compositionTimeOffset = 0;
      }

      sample.trackId = trackId;
      sample.dts = currentDts;

      // decode times beyond 2^53 arrive as BigInt, in which case all
      // of the timestamp arithmetic has to stay in BigInt
      if (typeof currentDts === 'bigint') {
        sample.pts = currentDts + window_1.BigInt(sample.compositionTimeOffset);
        currentDts += window_1.BigInt(sample.duration);
      } else {
        sample.pts = currentDts + sample.compositionTimeOffset;
        currentDts += sample.duration;
      }
    });

    allSamples = allSamples.concat(samples);
  });

  return allSamples;
};
/**
 * Parses out caption nals from an FMP4 segment's video tracks.
 *
 * @param {Uint8Array} segment - The bytes of a single segment
 * @param {Number} videoTrackId - The trackId of a video track in the segment
 * @return {Object.<Number, Object[]>} A mapping of video trackId to
 *   a list of seiNals found in that track
 **/


var parseCaptionNals = function parseCaptionNals(segment, videoTrackId) {
  // trafs provide the sample timing; mdats hold the SEI NAL units
  var trafs = findBox_1(segment, ['moof', 'traf']);
  var mdats = findBox_1(segment, ['mdat']);
  var captionNals = {};

  // moofs and mdats come in pairs, so pair each mdat with its traf
  var mdatTrafPairs = mdats.map(function (mdat, index) {
    return {
      mdat: mdat,
      traf: trafs[index]
    };
  });

  mdatTrafPairs.forEach(function (pair) {
    var traf = pair.traf;
    // Exactly 1 tfhd per traf
    var headerInfo = parseTfhd(findBox_1(traf, ['tfhd'])[0]);
    var trackId = headerInfo.trackId;
    // Either 0 or 1 tfdt per traf
    var tfdt = findBox_1(traf, ['tfdt']);
    var baseMediaDecodeTime = tfdt.length > 0 ? parseTfdt(tfdt[0]).baseMediaDecodeTime : 0;
    var truns = findBox_1(traf, ['trun']);

    // Only parse video data for the chosen video track
    if (videoTrackId !== trackId || truns.length === 0) {
      return;
    }

    var samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
    var result = findSeiNals(pair.mdat, samples, trackId);

    if (!captionNals[trackId]) {
      captionNals[trackId] = {
        seiNals: [],
        logs: []
      };
    }

    captionNals[trackId].seiNals = captionNals[trackId].seiNals.concat(result.seiNals);
    captionNals[trackId].logs = captionNals[trackId].logs.concat(result.logs);
  });

  return captionNals;
};
/**
 * Parses out inband captions from an MP4 container and returns
 * caption objects that can be used by WebVTT and the TextTrack API.
 * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
 * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
 * Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
 *
 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
 * @param {Number} trackId - The id of the video track to parse
 * @param {Number} timescale - The timescale for the video track from the init segment
 *
 * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
 * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
 * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
 * @return {String} parsedCaptions[].text - The visible content of the caption
 **/


var parseEmbeddedCaptions = function parseEmbeddedCaptions(segment, trackId, timescale) {
  // the ISO-BMFF spec says that trackId can't be zero, but there's
  // some broken content out there
  if (trackId === null) {
    return null;
  }

  var trackNals = parseCaptionNals(segment, trackId)[trackId] || {};

  return {
    seiNals: trackNals.seiNals,
    logs: trackNals.logs,
    timescale: timescale
  };
};
/**
 * Converts SEI NALUs into captions that can be used by video.js
 *
 * Stateful wrapper around a CaptionStream: buffers segments until a
 * video trackId and timescale are known, feeds parsed SEI NAL units
 * into the stream, and collects the dispatched caption cues.
 **/


var CaptionParser = function CaptionParser() {
  var isInitialized = false;
  // the underlying CaptionStream that turns SEI NALUs into cues
  var captionStream;
  // stores segments seen before trackId and timescale are set
  var segmentCache;
  // stores the video track ID of the track being parsed
  var trackId;
  // stores the timescale of the track being parsed
  var timescale;
  // stores captions parsed so far
  var parsedCaptions;
  // stores whether we are receiving partial data or not
  var parsingPartial;
  /**
   * A method to indicate whether a CaptionParser has been initalized
   * @returns {Boolean}
   **/

  this.isInitialized = function () {
    return isInitialized;
  };
  /**
   * Initializes the underlying CaptionStream, SEI NAL parsing
   * and management, and caption collection
   **/


  this.init = function (options) {
    captionStream = new CaptionStream();
    isInitialized = true;
    // partial mode changes how the stream is flushed (see flushStream)
    parsingPartial = options ? options.isPartial : false;

    // Collect dispatched captions
    captionStream.on('data', function (event) {
      // Convert to seconds in the source's timescale
      event.startTime = event.startPts / timescale;
      event.endTime = event.endPts / timescale;
      parsedCaptions.captions.push(event);
      parsedCaptions.captionStreams[event.stream] = true;
    });
    captionStream.on('log', function (log) {
      parsedCaptions.logs.push(log);
    });
  };
  /**
   * Determines if a new video track will be selected
   * or if the timescale changed
   * @return {Boolean}
   **/


  this.isNewInit = function (videoTrackIds, timescales) {
    // empty inputs never count as a new init
    if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
      return false;
    }

    return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
  };
  /**
   * Parses out SEI captions and interacts with underlying
   * CaptionStream to return dispatched captions
   *
   * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
   * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
   * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
   * @see parseEmbeddedCaptions
   * @see m2ts/caption-stream.js
   **/


  this.parse = function (segment, videoTrackIds, timescales) {
    var parsedData;

    if (!this.isInitialized()) {
      return null; // This is not likely to be a video segment
    } else if (!videoTrackIds || !timescales) {
      return null;
    } else if (this.isNewInit(videoTrackIds, timescales)) {
      // Use the first video track only as there is no
      // mechanism to switch to other video tracks
      trackId = videoTrackIds[0];
      timescale = timescales[trackId];
      // If an init segment has not been seen yet, hold onto segment
      // data until we have one.
      // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
    } else if (trackId === null || !timescale) {
      segmentCache.push(segment);
      return null;
    }

    // Now that a timescale and trackId is set, parse cached segments
    while (segmentCache.length > 0) {
      var cachedSegment = segmentCache.shift();
      this.parse(cachedSegment, videoTrackIds, timescales);
    }

    parsedData = parseEmbeddedCaptions(segment, trackId, timescale);

    if (parsedData && parsedData.logs) {
      parsedCaptions.logs = parsedCaptions.logs.concat(parsedData.logs);
    }

    if (parsedData === null || !parsedData.seiNals) {
      // surface any collected warnings even when no captions were found
      if (parsedCaptions.logs.length) {
        return {
          logs: parsedCaptions.logs,
          captions: [],
          captionStreams: []
        };
      }

      return null;
    }

    this.pushNals(parsedData.seiNals);
    // Force the parsed captions to be dispatched
    this.flushStream();
    return parsedCaptions;
  };
  /**
   * Pushes SEI NALUs onto CaptionStream
   * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
   * Assumes that `parseCaptionNals` has been called first
   * @see m2ts/caption-stream.js
   **/


  this.pushNals = function (nals) {
    if (!this.isInitialized() || !nals || nals.length === 0) {
      return null;
    }

    nals.forEach(function (nal) {
      captionStream.push(nal);
    });
  };
  /**
   * Flushes underlying CaptionStream to dispatch processed, displayable captions
   * @see m2ts/caption-stream.js
   **/


  this.flushStream = function () {
    if (!this.isInitialized()) {
      return null;
    }

    if (!parsingPartial) {
      captionStream.flush();
    } else {
      captionStream.partialFlush();
    }
  };
  /**
   * Reset caption buckets for new data
   **/


  this.clearParsedCaptions = function () {
    parsedCaptions.captions = [];
    parsedCaptions.captionStreams = {};
    parsedCaptions.logs = [];
  };
  /**
   * Resets underlying CaptionStream
   * @see m2ts/caption-stream.js
   **/


  this.resetCaptionStream = function () {
    if (!this.isInitialized()) {
      return null;
    }

    captionStream.reset();
  };
  /**
   * Convenience method to clear all captions flushed from the
   * CaptionStream and still being parsed
   * @see m2ts/caption-stream.js
   **/


  this.clearAllCaptions = function () {
    this.clearParsedCaptions();
    this.resetCaptionStream();
  };
  /**
   * Reset caption parser
   **/


  this.reset = function () {
    segmentCache = [];
    trackId = null;
    timescale = null;

    if (!parsedCaptions) {
      parsedCaptions = {
        captions: [],
        // CC1, CC2, CC3, CC4
        captionStreams: {},
        logs: []
      };
    } else {
      this.clearParsedCaptions();
    }

    this.resetCaptionStream();
  };

  // start from a clean state
  this.reset();
};
11374
11375 var captionParser = CaptionParser;
11376 var toUnsigned = bin.toUnsigned;
11377 var toHexString = bin.toHexString;
11378 var getUint64 = numbers.getUint64;
11379 var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks, getTimescaleFromMediaHeader;
/**
 * Parses an MP4 initialization segment and extracts the timescale
 * values for any declared tracks. Timescale values indicate the
 * number of clock ticks per second to assume for time-based values
 * elsewhere in the MP4.
 *
 * To determine the start time of an MP4, you need two pieces of
 * information: the timescale unit and the earliest base media decode
 * time. Multiple timescales can be specified within an MP4 but the
 * base media decode time is always expressed in the timescale from
 * the media header box for the track:
 * ```
 * moov > trak > mdia > mdhd.timescale
 * ```
 * @param init {Uint8Array} the bytes of the init segment
 * @return {object} a hash of track ids to timescale values or null if
 * the init segment is malformed.
 */

var timescale = function timescale(init) {
  var result = {},
      traks = findBox_1(init, ['moov', 'trak']); // mdhd timescale

  return traks.reduce(function (result, trak) {
    // Once any trak has proven malformed, keep propagating null.
    // (Previously a malformed trak followed by another trak caused a
    // TypeError here, because `null[id]` was assigned below instead of
    // returning the documented null.)
    if (result === null) {
      return null;
    }

    var tkhd, version, index, id, mdhd;
    tkhd = findBox_1(trak, ['tkhd'])[0];

    if (!tkhd) {
      return null;
    }

    // the track id's offset in the tkhd depends on the box version
    version = tkhd[0];
    index = version === 0 ? 12 : 20;
    id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);
    mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];

    if (!mdhd) {
      return null;
    }

    // likewise, the timescale's offset in the mdhd depends on version
    version = mdhd[0];
    index = version === 0 ? 12 : 20;
    result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
    return result;
  }, result);
};
/**
 * Determine the base media decode start time, in seconds, for an MP4
 * fragment. If multiple fragments are specified, the earliest time is
 * returned.
 *
 * The base media decode time can be parsed from track fragment
 * metadata:
 * ```
 * moof > traf > tfdt.baseMediaDecodeTime
 * ```
 * It requires the timescale value from the mdhd to interpret.
 *
 * @param timescale {object} a hash of track ids to timescale values.
 * @return {number} the earliest base media decode start time for the
 * fragment, in seconds
 */


startTime = function startTime(timescale, fragment) {
  // we need info from two childrend of each track fragment box
  var trafs = findBox_1(fragment, ['moof', 'traf']);

  // determine the start times for each track
  var lowestTime = trafs.reduce(function (acc, traf) {
    // get the track id from the tfhd
    var tfhd = findBox_1(traf, ['tfhd'])[0];
    var id = toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]);

    // assume a 90kHz clock if no timescale was specified
    var scale = timescale[id] || 90e3;

    // get the base media decode time from the tfdt
    var tfdt = findBox_1(traf, ['tfdt'])[0];
    var dv = new DataView(tfdt.buffer, tfdt.byteOffset, tfdt.byteLength);

    // version 1 is 64 bit
    var baseTime = tfdt[0] === 1 ? getUint64(tfdt.subarray(4, 12)) : dv.getUint32(4);

    // convert base time to seconds if it is a valid number.
    var seconds;

    if (typeof baseTime === 'bigint') {
      seconds = baseTime / window_1.BigInt(scale);
    } else if (typeof baseTime === 'number' && !isNaN(baseTime)) {
      seconds = baseTime / scale;
    }

    // downcast BigInt results that fit safely in a double
    if (seconds < Number.MAX_SAFE_INTEGER) {
      seconds = Number(seconds);
    }

    return seconds < acc ? seconds : acc;
  }, Infinity);

  return typeof lowestTime === 'bigint' || isFinite(lowestTime) ? lowestTime : 0;
};
/**
 * Determine the composition start, in seconds, for an MP4
 * fragment.
 *
 * The composition start time of a fragment can be calculated using the base
 * media decode time, composition time offset, and timescale, as follows:
 *
 * compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale
 *
 * All of the aforementioned information is contained within a media fragment's
 * `traf` box, except for timescale info, which comes from the initialization
 * segment, so a track id (also contained within a `traf`) is also necessary to
 * associate it with a timescale
 *
 *
 * @param timescales {object} - a hash of track ids to timescale values.
 * @param fragment {Unit8Array} - the bytes of a media segment
 * @return {number} the composition start time for the fragment, in seconds
 **/


compositionStartTime = function compositionStartTime(timescales, fragment) {
  var trafBoxes = findBox_1(fragment, ['moof', 'traf']);
  var baseMediaDecodeTime = 0;
  var compositionTimeOffset = 0;
  var trackId;

  if (trafBoxes && trafBoxes.length) {
    // The spec states that track run samples contained within a `traf` box
    // are contiguous, but it does not explicitly state whether the `traf`
    // boxes themselves are contiguous. We will assume that they are, so we
    // only need the first to calculate start time.
    var tfhd = findBox_1(trafBoxes[0], ['tfhd'])[0];
    var trun = findBox_1(trafBoxes[0], ['trun'])[0];
    var tfdt = findBox_1(trafBoxes[0], ['tfdt'])[0];

    if (tfhd) {
      trackId = parseTfhd(tfhd).trackId;
    }

    if (tfdt) {
      baseMediaDecodeTime = parseTfdt(tfdt).baseMediaDecodeTime;
    }

    if (trun) {
      var firstRunSamples = parseTrun(trun).samples;

      if (firstRunSamples && firstRunSamples.length) {
        compositionTimeOffset = firstRunSamples[0].compositionTimeOffset || 0;
      }
    }
  }

  // Get timescale for this specific track. Assume a 90kHz clock if no
  // timescale was specified.
  var timescale = timescales[trackId] || 90e3;

  // keep every operand in BigInt when the decode time exceeded 2^53
  if (typeof baseMediaDecodeTime === 'bigint') {
    compositionTimeOffset = window_1.BigInt(compositionTimeOffset);
    timescale = window_1.BigInt(timescale);
  }

  // return the composition start time, in seconds
  var result = (baseMediaDecodeTime + compositionTimeOffset) / timescale;

  if (typeof result === 'bigint' && result < Number.MAX_SAFE_INTEGER) {
    result = Number(result);
  }

  return result;
};
/**
 * Find the trackIds of the video tracks in this source.
 * Found by parsing the Handler Reference and Track Header Boxes:
 * moov > trak > mdia > hdlr
 * moov > trak > tkhd
 *
 * @param {Uint8Array} init - The bytes of the init segment for this source
 * @return {Number[]} A list of trackIds
 *
 * @see ISO-BMFF-12/2015, Section 8.4.3
 **/


getVideoTrackIds = function getVideoTrackIds(init) {
  var traks = findBox_1(init, ['moov', 'trak']);
  var videoTrackIds = [];

  traks.forEach(function (trak) {
    var hdlrs = findBox_1(trak, ['mdia', 'hdlr']);
    var tkhds = findBox_1(trak, ['tkhd']);

    hdlrs.forEach(function (hdlr, index) {
      // only 'vide' handlers mark video tracks
      if (parseType_1(hdlr.subarray(8, 12)) !== 'vide') {
        return;
      }

      var tkhd = tkhds[index];
      var view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
      var version = view.getUint8(0);

      // the track id's offset in the tkhd depends on the box version
      videoTrackIds.push(version === 0 ? view.getUint32(12) : view.getUint32(20));
    });
  });

  return videoTrackIds;
};
11594
/**
 * Reads the timescale out of a Media Header (mdhd) box payload.
 *
 * @param {Uint8Array} mdhd - the mdhd payload (bytes after the box header)
 * @return {number} the track's timescale in ticks per second
 */
getTimescaleFromMediaHeader = function getTimescaleFromMediaHeader(mdhd) {
  // mdhd is a FullBox, meaning it will have its own version as the first
  // byte; version 1 boxes use 64-bit times, pushing the timescale field
  // 8 bytes later
  var offset = mdhd[0] === 0 ? 12 : 20;
  return toUnsigned(mdhd[offset] << 24 | mdhd[offset + 1] << 16 | mdhd[offset + 2] << 8 | mdhd[offset + 3]);
};
/**
 * Get all the video, audio, and hint tracks from a non fragmented
 * mp4 segment.
 *
 * Each returned track carries whichever of `id`, `type`, `codec`, and
 * `timescale` could be parsed from its trak box.
 */


getTracks = function getTracks(init) {
  var traks = findBox_1(init, ['moov', 'trak']);
  var tracks = [];
  traks.forEach(function (trak) {
    var track = {};
    var tkhd = findBox_1(trak, ['tkhd'])[0];
    var view, tkhdVersion;

    // id: the track id's offset in the tkhd depends on the box version
    if (tkhd) {
      view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
      tkhdVersion = view.getUint8(0);
      track.id = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
    }

    // type: taken from the handler reference box
    var hdlr = findBox_1(trak, ['mdia', 'hdlr'])[0];

    if (hdlr) {
      var type = parseType_1(hdlr.subarray(8, 12));

      if (type === 'vide') {
        track.type = 'video';
      } else if (type === 'soun') {
        track.type = 'audio';
      } else {
        track.type = type;
      }
    }

    // codec: derived from the sample description box
    var stsd = findBox_1(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];

    if (stsd) {
      var sampleDescriptions = stsd.subarray(8); // gives the codec type string

      track.codec = parseType_1(sampleDescriptions.subarray(4, 8));
      var codecBox = findBox_1(sampleDescriptions, [track.codec])[0];
      var codecConfig, codecConfigType;

      if (codecBox) {
        // https://tools.ietf.org/html/rfc6381#section-3.3
        if (/^[asm]vc[1-9]$/i.test(track.codec)) {
          // we don't need anything but the "config" parameter of the
          // avc1 codecBox
          codecConfig = codecBox.subarray(78);
          codecConfigType = parseType_1(codecConfig.subarray(4, 8));

          if (codecConfigType === 'avcC' && codecConfig.length > 11) {
            track.codec += '.'; // left padded with zeroes for single digit hex
            // profile idc

            track.codec += toHexString(codecConfig[9]); // the byte containing the constraint_set flags

            track.codec += toHexString(codecConfig[10]); // level idc

            track.codec += toHexString(codecConfig[11]);
          } else {
            // TODO: show a warning that we couldn't parse the codec
            // and are using the default
            track.codec = 'avc1.4d400d';
          }
        } else if (/^mp4[a,v]$/i.test(track.codec)) {
          // we do not need anything but the streamDescriptor of the mp4a codecBox
          codecConfig = codecBox.subarray(28);
          codecConfigType = parseType_1(codecConfig.subarray(4, 8));

          if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {
            track.codec += '.' + toHexString(codecConfig[19]); // this value is only a single digit

            track.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');
          } else {
            // TODO: show a warning that we couldn't parse the codec
            // and are using the default
            track.codec = 'mp4a.40.2';
          }
        } else {
          // flac, opus, etc
          track.codec = track.codec.toLowerCase();
        }
      }
    }

    // timescale: from the media header box
    var mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];

    if (mdhd) {
      track.timescale = getTimescaleFromMediaHeader(mdhd);
    }

    tracks.push(track);
  });
  return tracks;
};
11698
// mp4 probing helpers collected into a single namespace object; this is the
// worker-side `probe$2` consumed by the MessageHandlers mp4 actions below.
var probe$2 = {
  // export mp4 inspector's findBox and parseType for backwards compatibility
  findBox: findBox_1,
  parseType: parseType_1,
  timescale: timescale,
  startTime: startTime,
  compositionStartTime: compositionStartTime,
  videoTrackIds: getVideoTrackIds,
  tracks: getTracks,
  getTimescaleFromMediaHeader: getTimescaleFromMediaHeader
};
11710
/**
 * Extract the 13-bit packet identifier (PID) from the header of a single
 * MPEG2-TS packet.
 *
 * @param {Uint8Array} packet - one 188-byte transport stream packet
 * @return {number} the PID carried in header bytes 1 and 2
 */
var parsePid = function parsePid(packet) {
  // the PID spans the low 5 bits of byte 1 and all 8 bits of byte 2
  return ((packet[1] & 0x1f) << 8) | packet[2];
};
11717
/**
 * Read the payload_unit_start_indicator flag from a TS packet header.
 *
 * @param {Uint8Array} packet - one transport stream packet
 * @return {boolean} true when this packet begins a new payload unit
 */
var parsePayloadUnitStartIndicator = function parsePayloadUnitStartIndicator(packet) {
  return (packet[1] & 0x40) !== 0;
};
11721
/**
 * Compute how many payload bytes are consumed by the adaptation field of a
 * TS packet. The adaptation field is used to add stuffing to PES packets
 * that don't fill a complete TS packet, and to specify some forms of timing
 * and control data that we do not currently use.
 *
 * @param {Uint8Array} packet - one transport stream packet
 * @return {number} the adaptation field size in bytes (0 when absent)
 */
var parseAdaptionField = function parseAdaptionField(packet) {
  // adaptation_field_control occupies bits 4-5 of header byte 3; values
  // 0x02/0x03 mean an adaptation field is present and its length (plus the
  // length byte itself) is given by byte 4
  var adaptationFieldControl = (packet[3] & 0x30) >>> 4;

  if (adaptationFieldControl > 0x01) {
    return packet[4] + 1;
  }

  return 0;
};
11735
/**
 * Classify a TS packet by its PID.
 *
 * @param {Uint8Array} packet - one transport stream packet
 * @param {number} pmtPid - the PID of the PMT, when already known
 * @return {string|null} 'pat', 'pmt', 'pes', or null when the PMT PID is
 * not yet known (so PES packets cannot be identified)
 */
var parseType = function parseType(packet, pmtPid) {
  var pid = parsePid(packet);

  if (pid === 0) {
    return 'pat';
  }

  if (pid === pmtPid) {
    return 'pmt';
  }

  // any other PID is assumed to carry PES data once the PMT is known
  if (pmtPid) {
    return 'pes';
  }

  return null;
};
11749
/**
 * Extract the program_map_PID from a Program Association Table packet.
 *
 * @param {Uint8Array} packet - a TS packet carrying the PAT
 * @return {number} the PID on which the PMT will arrive
 */
var parsePat = function parsePat(packet) {
  var offset = 4 + parseAdaptionField(packet);

  // when the payload unit starts here, a pointer field precedes the section
  if (parsePayloadUnitStartIndicator(packet)) {
    offset += packet[offset] + 1;
  }

  // program_map_PID: low 5 bits of byte offset+10 and all of byte offset+11
  return ((packet[offset + 10] & 0x1f) << 8) | packet[offset + 11];
};
11760
/**
 * Parse a Program Map Table packet into a mapping of elementary stream PIDs
 * to stream types.
 *
 * @param {Uint8Array} packet - a TS packet carrying the PMT
 * @return {Object|undefined} elementary_PID -> stream_type map, or undefined
 * when the PMT is a "future" declaration that should not take effect yet
 */
var parsePmt = function parsePmt(packet) {
  var entries = {};
  var payloadOffset = 4 + parseAdaptionField(packet);

  if (parsePayloadUnitStartIndicator(packet)) {
    // skip the pointer field that precedes the section
    payloadOffset += packet[payloadOffset] + 1;
  }

  // PMTs can be sent ahead of the time when they should actually take
  // effect. We don't believe this should ever be the case for HLS but we'll
  // ignore "forward" PMT declarations if we see them. Future PMT
  // declarations have the current_next_indicator set to zero.
  if (!(packet[payloadOffset + 5] & 0x01)) {
    return;
  }

  // the mapping table ends at the end of the current section
  var sectionLength = ((packet[payloadOffset + 1] & 0x0f) << 8) | packet[payloadOffset + 2];
  var tableEnd = 3 + sectionLength - 4;

  // to find the table start, figure out how long the program info
  // descriptors are and skip past them
  var programInfoLength = ((packet[payloadOffset + 10] & 0x0f) << 8) | packet[payloadOffset + 11];
  var cursor = 12 + programInfoLength;

  while (cursor < tableEnd) {
    var entryStart = payloadOffset + cursor;

    // add an entry that maps the elementary_pid to the stream_type
    entries[((packet[entryStart + 1] & 0x1f) << 8) | packet[entryStart + 2]] = packet[entryStart];

    // move to the next table entry, skipping past the elementary stream
    // descriptors if present
    cursor += (((packet[entryStart + 3] & 0x0f) << 8) | packet[entryStart + 4]) + 5;
  }

  return entries;
};
11800
/**
 * Determine what kind of elementary stream a PES packet belongs to, using
 * the previously-parsed program map table.
 *
 * @param {Uint8Array} packet - one transport stream packet
 * @param {Object} programMapTable - elementary_PID -> stream_type map
 * @return {string|null} 'video', 'audio', 'timed-metadata', or null
 */
var parsePesType = function parsePesType(packet, programMapTable) {
  var streamType = programMapTable[parsePid(packet)];

  if (streamType === streamTypes.H264_STREAM_TYPE) {
    return 'video';
  }

  if (streamType === streamTypes.ADTS_STREAM_TYPE) {
    return 'audio';
  }

  if (streamType === streamTypes.METADATA_STREAM_TYPE) {
    return 'timed-metadata';
  }

  return null;
};
11819
/**
 * Extract the PTS/DTS timestamps from the PES header carried at the start
 * of a payload unit.
 *
 * @param {Uint8Array} packet - one transport stream packet
 * @return {Object|null} {pts, dts} in 90khz clock ticks, or null when the
 * packet does not start a payload unit, is all stuffing, or carries no
 * timestamps
 */
var parsePesTime = function parsePesTime(packet) {
  // timestamps only appear at the start of a payload unit
  if (!parsePayloadUnitStartIndicator(packet)) {
    return null;
  }

  var offset = 4 + parseAdaptionField(packet);

  if (offset >= packet.byteLength) {
    // From the H 222.0 MPEG-TS spec: stuffing is accomplished by defining
    // an adaptation field longer than the sum of the lengths of the data
    // elements in it. An offset at or past the end of the packet therefore
    // means the packet carries no PES data, only stuffing bytes.
    return null;
  }

  // PES packets may be annotated with a PTS value, or a PTS value and a
  // DTS value; this flags byte tells us which combination is available
  var ptsDtsFlags = packet[offset + 7];

  if (!(ptsDtsFlags & 0xc0)) {
    return null;
  }

  var pes = {};

  // PTS and DTS are normally stored as 33-bit numbers. Javascript performs
  // all bitwise operations on 32-bit integers but supports a much greater
  // range (52 bits) using standard mathematical operations. We construct a
  // 31-bit value with bitwise operators over the 31 most significant bits,
  // multiply by 4 (a left shift of 2), then add the final 2 least
  // significant bits. Encoding layout:
  // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
  pes.pts = (packet[offset + 9] & 0x0E) << 27 | (packet[offset + 10] & 0xFF) << 20 | (packet[offset + 11] & 0xFE) << 12 | (packet[offset + 12] & 0xFF) << 5 | (packet[offset + 13] & 0xFE) >>> 3;
  pes.pts *= 4; // left shift by 2
  pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR in the two LSBs
  pes.dts = pes.pts;

  if (ptsDtsFlags & 0x40) {
    pes.dts = (packet[offset + 14] & 0x0E) << 27 | (packet[offset + 15] & 0xFF) << 20 | (packet[offset + 16] & 0xFE) << 12 | (packet[offset + 17] & 0xFF) << 5 | (packet[offset + 18] & 0xFE) >>> 3;
    pes.dts *= 4; // left shift by 2
    pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR in the two LSBs
  }

  return pes;
};
11879
/**
 * Map an H.264 nal_unit_type value to the subset of NAL unit names this
 * inspector cares about.
 *
 * @param {number} type - the 5-bit nal_unit_type field
 * @return {string|null} the NAL unit name, or null for types we ignore
 */
var parseNalUnitType = function parseNalUnitType(type) {
  var names = {
    0x05: 'slice_layer_without_partitioning_rbsp_idr',
    0x06: 'sei_rbsp',
    0x07: 'seq_parameter_set_rbsp',
    0x08: 'pic_parameter_set_rbsp',
    0x09: 'access_unit_delimiter_rbsp'
  };

  return names[type] || null;
};
11901
/**
 * Scans assembled video PES bytes for an IDR slice NAL unit, which marks a
 * key frame.
 *
 * @param {Uint8Array} packet - bytes beginning with a TS packet header; the
 * adaptation field is skipped before scanning
 * @return {boolean} true when an IDR slice NAL unit is found
 */
var videoPacketContainsKeyFrame = function videoPacketContainsKeyFrame(packet) {
  var offset = 4 + parseAdaptionField(packet);
  var frameBuffer = packet.subarray(offset);
  var frameI = 0;
  var frameSyncPoint = 0;
  var foundKeyFrame = false;
  var nalType; // advance the sync point to a NAL start (00 00 01), if necessary

  for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
    if (frameBuffer[frameSyncPoint + 2] === 1) {
      // the sync point is properly aligned
      frameI = frameSyncPoint + 5;
      break;
    }
  }

  while (frameI < frameBuffer.byteLength) {
    // look at the current byte to determine if we've hit the end of
    // a NAL unit boundary
    switch (frameBuffer[frameI]) {
      case 0:
        // skip past non-sync sequences
        if (frameBuffer[frameI - 1] !== 0) {
          frameI += 2;
          break;
        } else if (frameBuffer[frameI - 2] !== 0) {
          frameI++;
          break;
        }

        // a 00 00 00 run ends the previous NAL unit; check its type unless
        // it is the NAL unit we started the scan at
        if (frameSyncPoint + 3 !== frameI - 2) {
          nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);

          if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
            foundKeyFrame = true;
          }
        } // drop trailing zeroes


        do {
          frameI++;
        } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);

        frameSyncPoint = frameI - 2;
        frameI += 3;
        break;

      case 1:
        // skip past non-sync sequences
        if (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {
          frameI += 3;
          break;
        }

        // a 00 00 01 start code ends the previous NAL unit; check its type
        nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);

        if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
          foundKeyFrame = true;
        }

        frameSyncPoint = frameI - 2;
        frameI += 3;
        break;

      default:
        // the current byte isn't a one or zero, so it cannot be part
        // of a sync sequence
        frameI += 3;
        break;
    }
  }

  frameBuffer = frameBuffer.subarray(frameSyncPoint);
  frameI -= frameSyncPoint;
  frameSyncPoint = 0; // parse the final nal, which the loop above never reaches

  if (frameBuffer && frameBuffer.byteLength > 3) {
    nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);

    if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
      foundKeyFrame = true;
    }
  }

  return foundKeyFrame;
};
11988
// mpeg2-ts probing helpers collected into a namespace object; exposed below
// as probe.ts for the segment inspectors
var probe$1 = {
  parseType: parseType,
  parsePat: parsePat,
  parsePmt: parsePmt,
  parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
  parsePesType: parsePesType,
  parsePesTime: parsePesTime,
  videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
};
var handleRollover = timestampRolloverStream.handleRollover;
// namespace combining the ts and aac probing helpers used by the inspectors
var probe = {};
probe.ts = probe$1;
probe.aac = utils;
// number of 90khz clock ticks in one second
var ONE_SECOND_IN_TS = clock.ONE_SECOND_IN_TS;
var MP2T_PACKET_LENGTH = 188,
    // bytes
    SYNC_BYTE = 0x47;
12006 /**
12007 * walks through segment data looking for pat and pmt packets to parse out
12008 * program map table information
12009 */
12010
/**
 * @param {Uint8Array} bytes - the transport stream segment
 * @param {Object} pmt - accumulator; pmt.pid and pmt.table are filled in as
 * PAT/PMT packets are found
 */
var parsePsi_ = function parsePsi_(bytes, pmt) {
  var startIndex = 0;
  var endIndex = MP2T_PACKET_LENGTH;

  while (endIndex < bytes.byteLength) {
    // a packet is only trusted when sync bytes appear at both boundaries
    if (bytes[startIndex] !== SYNC_BYTE || bytes[endIndex] !== SYNC_BYTE) {
      // de-synchronized: step forward one byte at a time until a pair of
      // sync bytes that denote a packet is found
      startIndex++;
      endIndex++;
      continue;
    }

    var packet = bytes.subarray(startIndex, endIndex);
    var type = probe.ts.parseType(packet, pmt.pid);

    if (type === 'pat') {
      pmt.pid = probe.ts.parsePat(packet);
    } else if (type === 'pmt') {
      // merge the newly parsed table into any previously seen entries
      var table = probe.ts.parsePmt(packet);
      pmt.table = pmt.table || {};
      Object.keys(table).forEach(function (key) {
        pmt.table[key] = table[key];
      });
    }

    startIndex += MP2T_PACKET_LENGTH;
    endIndex += MP2T_PACKET_LENGTH;
  }
};
12050 /**
12051 * walks through the segment data from the start and end to get timing information
12052 * for the first and last audio pes packets
12053 */
12054
12055
/**
 * @param {Uint8Array} bytes - the transport stream segment
 * @param {Object} pmt - pid/table info produced by parsePsi_
 * @param {Object} result - accumulator; timing info for the first and last
 * audio PES packets is pushed onto result.audio
 */
var parseAudioPes_ = function parseAudioPes_(bytes, pmt, result) {
  var startIndex = 0,
      endIndex = MP2T_PACKET_LENGTH,
      packet,
      type,
      pesType,
      pusi,
      parsed;
  var endLoop = false; // Start walking from start of segment to get first audio packet

  while (endIndex <= bytes.byteLength) {
    // Look for a pair of start and end sync bytes in the data.. (the final
    // packet in the segment is allowed to end exactly at the buffer end)
    if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);

          // only packets that begin an audio payload unit carry timestamps
          if (pesType === 'audio' && pusi) {
            parsed = probe.ts.parsePesTime(packet);

            if (parsed) {
              parsed.type = 'audio';
              result.audio.push(parsed);
              endLoop = true;
            }
          }

          break;
      }

      if (endLoop) {
        break;
      }

      startIndex += MP2T_PACKET_LENGTH;
      endIndex += MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex++;
    endIndex++;
  } // Start walking from end of segment to get last audio packet


  endIndex = bytes.byteLength;
  startIndex = endIndex - MP2T_PACKET_LENGTH;
  endLoop = false;

  while (startIndex >= 0) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);

          if (pesType === 'audio' && pusi) {
            parsed = probe.ts.parsePesTime(packet);

            if (parsed) {
              parsed.type = 'audio';
              result.audio.push(parsed);
              endLoop = true;
            }
          }

          break;
      }

      if (endLoop) {
        break;
      }

      startIndex -= MP2T_PACKET_LENGTH;
      endIndex -= MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // backward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex--;
    endIndex--;
  }
};
12153 /**
12154 * walks through the segment data from the start and end to get timing information
12155 * for the first and last video pes packets as well as timing information for the first
12156 * key frame.
12157 */
12158
12159
/**
 * @param {Uint8Array} bytes - the transport stream segment
 * @param {Object} pmt - pid/table info produced by parsePsi_
 * @param {Object} result - accumulator; timing info for the first and last
 * video PES packets is pushed onto result.video and the first key frame's
 * timing info (when found) is set on result.firstKeyFrame
 */
var parseVideoPes_ = function parseVideoPes_(bytes, pmt, result) {
  var startIndex = 0,
      endIndex = MP2T_PACKET_LENGTH,
      packet,
      type,
      pesType,
      pusi,
      parsed,
      frame,
      i,
      pes;
  var endLoop = false;
  // packets for the in-progress video frame are accumulated here until the
  // next payload unit start, then scanned for a key frame
  var currentFrame = {
    data: [],
    size: 0
  }; // Start walking from start of segment to get first video packet

  while (endIndex < bytes.byteLength) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);

          if (pesType === 'video') {
            if (pusi && !endLoop) {
              parsed = probe.ts.parsePesTime(packet);

              if (parsed) {
                parsed.type = 'video';
                result.video.push(parsed);
                endLoop = true;
              }
            }

            if (!result.firstKeyFrame) {
              // a payload unit start means the previous frame is complete:
              // assemble its packets into one buffer and scan for an IDR NAL
              if (pusi) {
                if (currentFrame.size !== 0) {
                  frame = new Uint8Array(currentFrame.size);
                  i = 0;

                  while (currentFrame.data.length) {
                    pes = currentFrame.data.shift();
                    frame.set(pes, i);
                    i += pes.byteLength;
                  }

                  if (probe.ts.videoPacketContainsKeyFrame(frame)) {
                    var firstKeyFrame = probe.ts.parsePesTime(frame); // PTS/DTS may not be available. Simply *not* setting
                    // the keyframe seems to work fine with HLS playback
                    // and definitely preferable to a crash with TypeError...

                    if (firstKeyFrame) {
                      result.firstKeyFrame = firstKeyFrame;
                      result.firstKeyFrame.type = 'video';
                    } else {
                      // eslint-disable-next-line
                      console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
                    }
                  }

                  currentFrame.size = 0;
                }
              }

              currentFrame.data.push(packet);
              currentFrame.size += packet.byteLength;
            }
          }

          break;
      }

      // stop once both the first video time and the first key frame are set
      if (endLoop && result.firstKeyFrame) {
        break;
      }

      startIndex += MP2T_PACKET_LENGTH;
      endIndex += MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex++;
    endIndex++;
  } // Start walking from end of segment to get last video packet


  endIndex = bytes.byteLength;
  startIndex = endIndex - MP2T_PACKET_LENGTH;
  endLoop = false;

  while (startIndex >= 0) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);

          if (pesType === 'video' && pusi) {
            parsed = probe.ts.parsePesTime(packet);

            if (parsed) {
              parsed.type = 'video';
              result.video.push(parsed);
              endLoop = true;
            }
          }

          break;
      }

      if (endLoop) {
        break;
      }

      startIndex -= MP2T_PACKET_LENGTH;
      endIndex -= MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // backward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex--;
    endIndex--;
  }
};
12300 /**
12301 * Adjusts the timestamp information for the segment to account for
12302 * rollover and convert to seconds based on pes packet timescale (90khz clock)
12303 */
12304
12305
/**
 * @param {Object} segmentInfo - timing info collected by the inspectors;
 * mutated in place
 * @param {number} baseTimestamp - reference timestamp (90khz clock) used to
 * unroll rollover; when undefined/NaN the track's own first dts is used
 */
var adjustTimestamp_ = function adjustTimestamp_(segmentInfo, baseTimestamp) {
  // unroll rollover on a single timing entry (in place) and derive the
  // second-based values from the 90khz clock values
  var convert = function (info, reference) {
    info.dts = handleRollover(info.dts, reference);
    info.pts = handleRollover(info.pts, reference);
    info.dtsTime = info.dts / ONE_SECOND_IN_TS;
    info.ptsTime = info.pts / ONE_SECOND_IN_TS;
  };

  if (segmentInfo.audio && segmentInfo.audio.length) {
    var audioBaseTimestamp = baseTimestamp;

    if (typeof audioBaseTimestamp === 'undefined' || isNaN(audioBaseTimestamp)) {
      audioBaseTimestamp = segmentInfo.audio[0].dts;
    }

    segmentInfo.audio.forEach(function (info) {
      convert(info, audioBaseTimestamp);
    });
  }

  if (segmentInfo.video && segmentInfo.video.length) {
    var videoBaseTimestamp = baseTimestamp;

    if (typeof videoBaseTimestamp === 'undefined' || isNaN(videoBaseTimestamp)) {
      videoBaseTimestamp = segmentInfo.video[0].dts;
    }

    segmentInfo.video.forEach(function (info) {
      convert(info, videoBaseTimestamp);
    });

    if (segmentInfo.firstKeyFrame) {
      convert(segmentInfo.firstKeyFrame, videoBaseTimestamp);
    }
  }
};
12348 /**
12349 * inspects the aac data stream for start and end time information
12350 */
12351
12352
/**
 * @param {Uint8Array} bytes - the aac segment data
 * @return {Object|null} an object with two audio timing entries (segment
 * start and computed end), or null when a timestamp or sample rate could
 * not be determined
 */
var inspectAac_ = function inspectAac_(bytes) {
  var audioCount = 0;
  var sampleRate = null;
  var timestamp = null;
  var byteIndex = 0;

  while (bytes.length - byteIndex >= 3) {
    var type = probe.aac.parseType(bytes, byteIndex);

    if (type === 'timed-metadata') {
      // bail out: not enough data left to parse the ID3 tag header
      if (bytes.length - byteIndex < 10) {
        return null;
      }

      var tagSize = probe.aac.parseId3TagSize(bytes, byteIndex);

      // bail out: the buffer cannot hold a full packet
      if (tagSize > bytes.length) {
        return null;
      }

      if (timestamp === null) {
        timestamp = probe.aac.parseAacTimestamp(bytes.subarray(byteIndex, byteIndex + tagSize));
      }

      byteIndex += tagSize;
    } else if (type === 'audio') {
      // bail out: not enough data left to parse the ADTS frame header
      if (bytes.length - byteIndex < 7) {
        return null;
      }

      var adtsSize = probe.aac.parseAdtsSize(bytes, byteIndex);

      // bail out: the buffer cannot hold a full packet
      if (adtsSize > bytes.length) {
        return null;
      }

      if (sampleRate === null) {
        sampleRate = probe.aac.parseSampleRate(bytes.subarray(byteIndex, byteIndex + adtsSize));
      }

      audioCount++;
      byteIndex += adtsSize;
    } else {
      byteIndex++;
    }
  }

  if (sampleRate === null || timestamp === null) {
    return null;
  }

  // duration is derived from the frame count: 1024 samples per ADTS frame
  var audioTimescale = ONE_SECOND_IN_TS / sampleRate;
  var endTime = timestamp + audioCount * 1024 * audioTimescale;

  return {
    audio: [{
      type: 'audio',
      dts: timestamp,
      pts: timestamp
    }, {
      type: 'audio',
      dts: endTime,
      pts: endTime
    }]
  };
};
12443 /**
12444 * inspects the transport stream segment data for start and end time information
12445 * of the audio and video tracks (when present) as well as the first key frame's
12446 * start time.
12447 */
12448
12449
/**
 * @param {Uint8Array} bytes - the transport stream segment data
 * @return {Object} timing info keyed by track type; tracks with no parsed
 * timing entries are omitted entirely
 */
var inspectTs_ = function inspectTs_(bytes) {
  var pmt = {
    pid: null,
    table: null
  };
  var result = {};

  parsePsi_(bytes, pmt);

  for (var pid in pmt.table) {
    if (!pmt.table.hasOwnProperty(pid)) {
      continue;
    }

    var streamType = pmt.table[pid];

    if (streamType === streamTypes.H264_STREAM_TYPE) {
      result.video = [];
      parseVideoPes_(bytes, pmt, result);

      // drop the key when no video timing could be extracted
      if (result.video.length === 0) {
        delete result.video;
      }
    } else if (streamType === streamTypes.ADTS_STREAM_TYPE) {
      result.audio = [];
      parseAudioPes_(bytes, pmt, result);

      // drop the key when no audio timing could be extracted
      if (result.audio.length === 0) {
        delete result.audio;
      }
    }
  }

  return result;
};
12488 /**
12489 * Inspects segment byte data and returns an object with start and end timing information
12490 *
12491 * @param {Uint8Array} bytes The segment byte data
12492 * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
12493 * timestamps for rollover. This value must be in 90khz clock.
12494 * @return {Object} Object containing start and end frame timing info of segment.
12495 */
12496
12497
var inspect = function inspect(bytes, baseTimestamp) {
  // dispatch to the aac or ts inspector based on the container detected
  var result = probe.aac.isLikelyAacData(bytes) ? inspectAac_(bytes) : inspectTs_(bytes);

  // report nothing when no track yielded any timing information
  if (!result || (!result.audio && !result.video)) {
    return null;
  }

  adjustTimestamp_(result, baseTimestamp);
  return result;
};
12515
// public surface of the inspector; parseAudioPes_ is exposed for tests
var tsInspector = {
  inspect: inspect,
  parseAudioPes_: parseAudioPes_
};
12520 /* global self */
12521
12522 /**
12523 * Re-emits transmuxer events by converting them into messages to the
12524 * world outside the worker.
12525 *
12526 * @param {Object} transmuxer the transmuxer to wire events on
12527 * @private
12528 */
12529
var wireTransmuxerEvents = function wireTransmuxerEvents(self, transmuxer) {
  // convert a segment timing info event (90khz video clock) into the
  // seconds-based shape posted to the main thread; used for both the video
  // and audio variants, which share a structure
  var toSecondsTimingInfo = function (timingInfo) {
    var converted = {
      start: {
        decode: clock.videoTsToSeconds(timingInfo.start.dts),
        presentation: clock.videoTsToSeconds(timingInfo.start.pts)
      },
      end: {
        decode: clock.videoTsToSeconds(timingInfo.end.dts),
        presentation: clock.videoTsToSeconds(timingInfo.end.pts)
      },
      baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
    };

    if (timingInfo.prependedContentDuration) {
      converted.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
    }

    return converted;
  };

  transmuxer.on('data', function (segment) {
    // transfer ownership of the underlying ArrayBuffer instead of doing a
    // copy to save memory; ArrayBuffers are transferable but generic
    // TypedArrays are not
    // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
    var initArray = segment.initSegment;
    segment.initSegment = {
      data: initArray.buffer,
      byteOffset: initArray.byteOffset,
      byteLength: initArray.byteLength
    };
    var typedArray = segment.data;
    segment.data = typedArray.buffer;
    self.postMessage({
      action: 'data',
      segment: segment,
      byteOffset: typedArray.byteOffset,
      byteLength: typedArray.byteLength
    }, [segment.data]);
  });

  transmuxer.on('done', function (data) {
    self.postMessage({
      action: 'done'
    });
  });

  transmuxer.on('gopInfo', function (gopInfo) {
    self.postMessage({
      action: 'gopInfo',
      gopInfo: gopInfo
    });
  });

  transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {
    self.postMessage({
      action: 'videoSegmentTimingInfo',
      videoSegmentTimingInfo: toSecondsTimingInfo(timingInfo)
    });
  });

  transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {
    // all times for [audio/video]SegmentTimingInfo events are in video clock
    self.postMessage({
      action: 'audioSegmentTimingInfo',
      audioSegmentTimingInfo: toSecondsTimingInfo(timingInfo)
    });
  });

  transmuxer.on('id3Frame', function (id3Frame) {
    self.postMessage({
      action: 'id3Frame',
      id3Frame: id3Frame
    });
  });

  transmuxer.on('caption', function (caption) {
    self.postMessage({
      action: 'caption',
      caption: caption
    });
  });

  transmuxer.on('trackinfo', function (trackInfo) {
    self.postMessage({
      action: 'trackinfo',
      trackInfo: trackInfo
    });
  });

  transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
    // convert to seconds on the video clock since video time is prioritized
    self.postMessage({
      action: 'audioTimingInfo',
      audioTimingInfo: {
        start: clock.videoTsToSeconds(audioTimingInfo.start),
        end: clock.videoTsToSeconds(audioTimingInfo.end)
      }
    });
  });

  transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
    self.postMessage({
      action: 'videoTimingInfo',
      videoTimingInfo: {
        start: clock.videoTsToSeconds(videoTimingInfo.start),
        end: clock.videoTsToSeconds(videoTimingInfo.end)
      }
    });
  });

  transmuxer.on('log', function (log) {
    self.postMessage({
      action: 'log',
      log: log
    });
  });
};
12651 /**
12652 * All incoming messages route through this hash. If no function exists
12653 * to handle an incoming message, then we ignore the message.
12654 *
12655 * @class MessageHandlers
12656 * @param {Object} options the options to initialize with
12657 */
12658
12659
var MessageHandlers = /*#__PURE__*/function () {
  /**
   * @param {Object} self - the worker global scope that messages are posted to
   * @param {Object} options - transmuxer options, stored and used by init()
   */
  function MessageHandlers(self, options) {
    this.options = options || {};
    this.self = self;
    this.init();
  }
  /**
   * initialize our web worker and wire all the events.
   */


  var _proto = MessageHandlers.prototype;
12672
12673 _proto.init = function init() {
12674 if (this.transmuxer) {
12675 this.transmuxer.dispose();
12676 }
12677
12678 this.transmuxer = new transmuxer.Transmuxer(this.options);
12679 wireTransmuxerEvents(this.self, this.transmuxer);
12680 };
12681
12682 _proto.pushMp4Captions = function pushMp4Captions(data) {
12683 if (!this.captionParser) {
12684 this.captionParser = new captionParser();
12685 this.captionParser.init();
12686 }
12687
12688 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
12689 var parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);
12690 this.self.postMessage({
12691 action: 'mp4Captions',
12692 captions: parsed && parsed.captions || [],
12693 logs: parsed && parsed.logs || [],
12694 data: segment.buffer
12695 }, [segment.buffer]);
12696 };
12697
12698 _proto.probeMp4StartTime = function probeMp4StartTime(_ref) {
12699 var timescales = _ref.timescales,
12700 data = _ref.data;
12701 var startTime = probe$2.startTime(timescales, data);
12702 this.self.postMessage({
12703 action: 'probeMp4StartTime',
12704 startTime: startTime,
12705 data: data
12706 }, [data.buffer]);
12707 };
12708
/**
 * Probe an fmp4 init segment for its track list and post the result
 * (plus the transferred bytes) back to the main thread.
 */
_proto.probeMp4Tracks = function probeMp4Tracks(_ref2) {
  var data = _ref2.data;

  this.self.postMessage({
    action: 'probeMp4Tracks',
    tracks: probe$2.tracks(data),
    data: data
  }, [data.buffer]);
}
/**
 * Probe an mpeg2-ts segment to determine the start time of the segment in it's
 * internal "media time," as well as whether it contains video and/or audio.
 *
 * @private
 * @param {Uint8Array} bytes - segment bytes
 * @param {number} baseStartTime
 * Relative reference timestamp used when adjusting frame timestamps for rollover.
 * This value should be in seconds, as it's converted to a 90khz clock within the
 * function body.
 * @return {Object} The start time of the current segment in "media time" as well as
 * whether it contains video and/or audio
 */
;
12732
_proto.probeTs = function probeTs(_ref3) {
  var data = _ref3.data;
  var baseStartTime = _ref3.baseStartTime;
  // Convert the seconds-based reference time to the 90khz clock when present.
  var tsStartTime = typeof baseStartTime === 'number' && !isNaN(baseStartTime) ? baseStartTime * clock.ONE_SECOND_IN_TS : void 0;
  var timeInfo = tsInspector.inspect(data, tsStartTime);
  var result = null;

  if (timeInfo) {
    // each type's time info comes back as an array of 2 times, start and end
    var hasVideo = timeInfo.video && timeInfo.video.length === 2 || false;
    var hasAudio = timeInfo.audio && timeInfo.audio.length === 2 || false;
    result = {
      hasVideo: hasVideo,
      hasAudio: hasAudio
    };

    if (hasVideo) {
      result.videoStart = timeInfo.video[0].ptsTime;
    }

    if (hasAudio) {
      result.audioStart = timeInfo.audio[0].ptsTime;
    }
  }

  this.self.postMessage({
    action: 'probeTs',
    result: result,
    data: data
  }, [data.buffer]);
};
12762
/**
 * Drop every caption held by the caption parser, if one has been created.
 */
_proto.clearAllMp4Captions = function clearAllMp4Captions() {
  if (!this.captionParser) {
    return;
  }

  this.captionParser.clearAllCaptions();
};
12768
/**
 * Drop only the already-parsed captions, if a caption parser exists.
 */
_proto.clearParsedMp4Captions = function clearParsedMp4Captions() {
  if (!this.captionParser) {
    return;
  }

  this.captionParser.clearParsedCaptions();
}
/**
 * Adds data (a ts segment) to the start of the transmuxer pipeline for
 * processing.
 *
 * @param {ArrayBuffer} data data to push into the muxer
 */
;
12781
_proto.push = function push(data) {
  // Rebuild the typed-array view the transmuxer expects from the raw
  // ArrayBuffer (plus view offsets) that crossed the worker boundary.
  var view = new Uint8Array(data.data, data.byteOffset, data.byteLength);
  this.transmuxer.push(view);
}
/**
 * Recreate the transmuxer so that the next segment added via `push`
 * start with a fresh transmuxer.
 */
;
12792
// Reset the wrapped transmuxer's internal state without recreating it.
_proto.reset = function reset() {
  this.transmuxer.reset();
}
/**
 * Set the value that will be used as the `baseMediaDecodeTime` time for the
 * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
 * set relative to the first based on the PTS values.
 *
 * @param {Object} data used to set the timestamp offset in the muxer
 */
;
12804
/**
 * Translate the seconds-based timestamp offset into the 90khz clock and
 * hand it to the transmuxer as the base media decode time.
 */
_proto.setTimestampOffset = function setTimestampOffset(data) {
  var offsetSeconds = data.timestampOffset || 0;
  var offsetTs = Math.round(clock.secondsToVideoTs(offsetSeconds));
  this.transmuxer.setBaseMediaDecodeTime(offsetTs);
};
12809
/**
 * Tell the transmuxer where audio appending begins, converted from
 * seconds to the 90khz clock.
 */
_proto.setAudioAppendStart = function setAudioAppendStart(data) {
  var appendStartTs = Math.ceil(clock.secondsToVideoTs(data.appendStart));
  this.transmuxer.setAudioAppendStart(appendStartTs);
};
12813
// Forward the caller's remux preference to the transmuxer.
_proto.setRemux = function setRemux(data) {
  this.transmuxer.setRemux(data.remux);
}
/**
 * Forces the pipeline to finish processing the last segment and emit it's
 * results.
 *
 * @param {Object} data event data, not really used
 */
;
12824
_proto.flush = function flush(data) {
  this.transmuxer.flush(); // transmuxed done action is fired after both audio/video pipelines are flushed

  // Post through the scope handed to the constructor (`this.self`) like
  // every other handler in this class does, rather than relying on the
  // worker's global `self` (they are the same object in the worker, but
  // the explicit form keeps the handlers consistent and testable).
  this.self.postMessage({
    action: 'done',
    type: 'transmuxed'
  });
};
12833
_proto.endTimeline = function endTimeline() {
  this.transmuxer.endTimeline(); // transmuxed endedtimeline action is fired after both audio/video pipelines end their
  // timelines

  // Post through the constructor-supplied scope (`this.self`) for
  // consistency with the other handlers instead of the global `self`
  // (identical object in the worker).
  this.self.postMessage({
    action: 'endedtimeline',
    type: 'transmuxed'
  });
};
12843
/**
 * Provide the GOP list the transmuxer should align output with; a copy
 * is passed so the worker never shares the caller's array.
 */
_proto.alignGopsWith = function alignGopsWith(data) {
  var gops = data.gopsToAlignWith.slice();
  this.transmuxer.alignGopsWith(gops);
};
12847
12848 return MessageHandlers;
12849 }();
12850 /**
12851 * Our web worker interface so that things can talk to mux.js
12852 * that will be running in a web worker. the scope is passed to this by
12853 * webworkify.
12854 *
12855 * @param {Object} self the scope for the web worker
12856 */
12857
12858
self.onmessage = function (event) {
  var message = event.data;

  // An explicit 'init' with options (re)creates the handler set and stops.
  if (message.action === 'init' && message.options) {
    this.messageHandlers = new MessageHandlers(self, message.options);
    return;
  }

  // Lazily build a default handler set for workers that never saw 'init'.
  if (!this.messageHandlers) {
    this.messageHandlers = new MessageHandlers(self);
  }

  // Dispatch every other recognized action to its named handler method.
  if (message && message.action && message.action !== 'init' && this.messageHandlers[message.action]) {
    this.messageHandlers[message.action](message);
  }
};
12875}));
// Construct the transmux web worker from the inlined worker source above.
var TransmuxWorker = factory(workerCode$1);
/* rollup-plugin-worker-factory end for worker!/Users/ddashkevich/projects/vhs-release/src/transmuxer-worker.js */
12878
/**
 * Normalize a transmuxer 'data' event into the shape consumers expect and
 * forward it, buffering caption/metadata side-channel info as we go.
 *
 * @param {MessageEvent} event - worker message carrying `data.segment`
 * @param {Object} transmuxedData - accumulator whose `buffer` collects
 *        caption/metadata entries across data events
 * @param {Function} callback - receives the normalized result
 */
var handleData_ = function handleData_(event, transmuxedData, callback) {
  var segment = event.data.segment;
  var type = segment.type;
  var initSegment = segment.initSegment;

  transmuxedData.buffer.push({
    captions: segment.captions,
    captionStreams: segment.captionStreams,
    metadata: segment.metadata
  });

  // fmp4 output arrives under `boxes`; otherwise fall back to raw data.
  var boxes = segment.boxes || {
    data: segment.data
  };

  var result = {
    type: type,
    // cast ArrayBuffer to TypedArray
    data: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),
    initSegment: new Uint8Array(initSegment.data, initSegment.byteOffset, initSegment.byteLength)
  };

  if (typeof segment.videoFrameDtsTime !== 'undefined') {
    result.videoFrameDtsTime = segment.videoFrameDtsTime;
  }

  if (typeof segment.videoFramePtsTime !== 'undefined') {
    result.videoFramePtsTime = segment.videoFramePtsTime;
  }

  callback(result);
};
/**
 * Handle the transmuxer's 'done' event: clear the caption/metadata buffer
 * (data is only delivered on data events) and hand the accumulator to the
 * caller.
 */
var handleDone_ = function handleDone_(_ref) {
  var transmuxedData = _ref.transmuxedData;
  var callback = _ref.callback;

  // Previously we only returned data on data events,
  // not on done events. Clear out the buffer to keep that consistent.
  transmuxedData.buffer = [];

  // all buffers should have been flushed from the muxer, so start processing
  // anything we have received
  callback(transmuxedData);
};
// Record GOP info from a worker event on the accumulator for later use.
var handleGopInfo_ = function handleGopInfo_(event, transmuxedData) {
  transmuxedData.gopInfo = event.data.gopInfo;
};
/**
 * Run a single transmux job against the web worker: configure the worker
 * from `options`, transfer the segment bytes in, forward every worker
 * event to the matching `on*` callback, and report completion through
 * `onDone` once the final 'transmuxed' message arrives. When the job
 * completes, the next queued job is started via `dequeue`.
 *
 * @param {Object} options - the job description; expected to be
 *        `options.transmuxer.currentTransmux` while the job is live
 */
var processTransmux = function processTransmux(options) {
  var transmuxer = options.transmuxer,
      bytes = options.bytes,
      audioAppendStart = options.audioAppendStart,
      gopsToAlignWith = options.gopsToAlignWith,
      remux = options.remux,
      onData = options.onData,
      onTrackInfo = options.onTrackInfo,
      onAudioTimingInfo = options.onAudioTimingInfo,
      onVideoTimingInfo = options.onVideoTimingInfo,
      onVideoSegmentTimingInfo = options.onVideoSegmentTimingInfo,
      onAudioSegmentTimingInfo = options.onAudioSegmentTimingInfo,
      onId3 = options.onId3,
      onCaptions = options.onCaptions,
      onDone = options.onDone,
      onEndedTimeline = options.onEndedTimeline,
      onTransmuxerLog = options.onTransmuxerLog,
      isEndOfTimeline = options.isEndOfTimeline;
  // accumulates caption/metadata entries across 'data' events
  var transmuxedData = {
    buffer: []
  };
  // when this segment ends a timeline, completion additionally requires an
  // 'endedtimeline' message before 'done' is reported
  var waitForEndedTimelineEvent = isEndOfTimeline;

  // Dispatch each worker message for this job to the right callback.
  var handleMessage = function handleMessage(event) {
    if (transmuxer.currentTransmux !== options) {
      // disposed
      return;
    }

    if (event.data.action === 'data') {
      handleData_(event, transmuxedData, onData);
    }

    if (event.data.action === 'trackinfo') {
      onTrackInfo(event.data.trackInfo);
    }

    if (event.data.action === 'gopInfo') {
      handleGopInfo_(event, transmuxedData);
    }

    if (event.data.action === 'audioTimingInfo') {
      onAudioTimingInfo(event.data.audioTimingInfo);
    }

    if (event.data.action === 'videoTimingInfo') {
      onVideoTimingInfo(event.data.videoTimingInfo);
    }

    if (event.data.action === 'videoSegmentTimingInfo') {
      onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
    }

    if (event.data.action === 'audioSegmentTimingInfo') {
      onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
    }

    if (event.data.action === 'id3Frame') {
      onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
    }

    if (event.data.action === 'caption') {
      onCaptions(event.data.caption);
    }

    if (event.data.action === 'endedtimeline') {
      waitForEndedTimelineEvent = false;
      onEndedTimeline();
    }

    if (event.data.action === 'log') {
      onTransmuxerLog(event.data.log);
    } // wait for the transmuxed event since we may have audio and video


    if (event.data.type !== 'transmuxed') {
      return;
    } // If the "endedtimeline" event has not yet fired, and this segment represents the end
    // of a timeline, that means there may still be data events before the segment
    // processing can be considerred complete. In that case, the final event should be
    // an "endedtimeline" event with the type "transmuxed."


    if (waitForEndedTimelineEvent) {
      return;
    }

    // Detach before reporting done so stale messages can't re-enter.
    transmuxer.onmessage = null;
    handleDone_({
      transmuxedData: transmuxedData,
      callback: onDone
    });
    /* eslint-disable no-use-before-define */

    dequeue(transmuxer);
    /* eslint-enable */
  };

  transmuxer.onmessage = handleMessage;

  if (audioAppendStart) {
    transmuxer.postMessage({
      action: 'setAudioAppendStart',
      appendStart: audioAppendStart
    });
  } // allow empty arrays to be passed to clear out GOPs


  if (Array.isArray(gopsToAlignWith)) {
    transmuxer.postMessage({
      action: 'alignGopsWith',
      gopsToAlignWith: gopsToAlignWith
    });
  }

  if (typeof remux !== 'undefined') {
    transmuxer.postMessage({
      action: 'setRemux',
      remux: remux
    });
  }

  if (bytes.byteLength) {
    var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
    var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
    transmuxer.postMessage({
      action: 'push',
      // Send the typed-array of data as an ArrayBuffer so that
      // it can be sent as a "Transferable" and avoid the costly
      // memory copy
      data: buffer,
      // To recreate the original typed-array, we need information
      // about what portion of the ArrayBuffer it was a view into
      byteOffset: byteOffset,
      byteLength: bytes.byteLength
    }, [buffer]);
  }

  if (isEndOfTimeline) {
    transmuxer.postMessage({
      action: 'endTimeline'
    });
  } // even if we didn't push any bytes, we have to make sure we flush in case we reached
  // the end of the segment


  transmuxer.postMessage({
    action: 'flush'
  });
};
/**
 * Pop the next queued transmux job off the transmuxer's queue and start
 * it. Queued functions are invoked directly; queued option objects are
 * handed to processTransmux.
 */
var dequeue = function dequeue(transmuxer) {
  transmuxer.currentTransmux = null;

  if (!transmuxer.transmuxQueue.length) {
    return;
  }

  var next = transmuxer.transmuxQueue.shift();
  transmuxer.currentTransmux = next;

  if (typeof next === 'function') {
    next();
  } else {
    processTransmux(next);
  }
};
/**
 * Post a simple named action to the worker, then immediately start
 * whatever job is queued next (named actions need no response).
 */
var processAction = function processAction(transmuxer, action) {
  transmuxer.postMessage({
    action: action
  });
  dequeue(transmuxer);
};
/**
 * Run a named action immediately when the transmuxer is idle; otherwise
 * queue it to run once the current job finishes.
 */
var enqueueAction = function enqueueAction(action, transmuxer) {
  if (transmuxer.currentTransmux) {
    transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
    return;
  }

  transmuxer.currentTransmux = action;
  processAction(transmuxer, action);
};
// Queue a 'reset' action on the transmuxer worker.
var reset = function reset(transmuxer) {
  enqueueAction('reset', transmuxer);
};
// Queue an 'endTimeline' action on the transmuxer worker.
var endTimeline = function endTimeline(transmuxer) {
  enqueueAction('endTimeline', transmuxer);
};
/**
 * Start a transmux job immediately when the worker is idle; otherwise
 * queue the job options to run once the current job completes.
 */
var transmux = function transmux(options) {
  var transmuxer = options.transmuxer;

  if (transmuxer.currentTransmux) {
    transmuxer.transmuxQueue.push(options);
    return;
  }

  transmuxer.currentTransmux = options;
  processTransmux(options);
};
/**
 * Construct the transmux web worker, attach the job-queue bookkeeping the
 * helpers above rely on, wrap terminate so queued work is dropped, and
 * send the initial 'init' message.
 *
 * @param {Object} options - transmuxer options forwarded to the worker
 * @return {Worker} the configured worker
 */
var createTransmuxer = function createTransmuxer(options) {
  var transmuxer = new TransmuxWorker();
  transmuxer.currentTransmux = null;
  transmuxer.transmuxQueue = [];
  var originalTerminate = transmuxer.terminate;

  // Drop any queued jobs before terminating so callbacks never fire
  // against a dead worker.
  transmuxer.terminate = function () {
    transmuxer.currentTransmux = null;
    transmuxer.transmuxQueue.length = 0;
    return originalTerminate.call(transmuxer);
  };

  transmuxer.postMessage({
    action: 'init',
    options: options
  });
  return transmuxer;
};
// Public facade bundling the queue-aware transmuxer helpers above.
var segmentTransmuxer = {
  reset: reset,
  endTimeline: endTimeline,
  transmux: transmux,
  createTransmuxer: createTransmuxer
};
13143
/**
 * Round-trip a request to the transmux worker: post `action` (with bytes
 * as a Transferable when present) and invoke `callback` with the event
 * payload once the matching end action arrives.
 *
 * @param {Object} options
 * @param {Worker} options.transmuxer - target worker
 * @param {string} options.action - action to post
 * @param {string} [options.endAction] - action to wait for (defaults to `action`)
 * @param {Function} options.callback - receives the end event's data
 * @param {Uint8Array|ArrayBuffer} [options.data] - bytes to transfer
 */
var workerCallback = function workerCallback(options) {
  var transmuxer = options.transmuxer;
  var endAction = options.endAction || options.action;
  var callback = options.callback;

  // Strip the non-cloneable fields before posting to the worker.
  var message = _extends({}, options, {
    endAction: null,
    transmuxer: null,
    callback: null
  });

  var listenForEndEvent = function listenForEndEvent(event) {
    if (event.data.action !== endAction) {
      return;
    }

    transmuxer.removeEventListener('message', listenForEndEvent);

    // transfer ownership of bytes back to us.
    if (event.data.data) {
      event.data.data = new Uint8Array(event.data.data, options.byteOffset || 0, options.byteLength || event.data.data.byteLength);

      if (options.data) {
        options.data = event.data.data;
      }
    }

    callback(event.data);
  };

  transmuxer.addEventListener('message', listenForEndEvent);

  if (!options.data) {
    transmuxer.postMessage(message);
    return;
  }

  // Send the bytes as a Transferable, recording how to rebuild the view.
  var isArrayBuffer = options.data instanceof ArrayBuffer;
  message.byteOffset = isArrayBuffer ? 0 : options.data.byteOffset;
  message.byteLength = options.data.byteLength;
  transmuxer.postMessage(message, [isArrayBuffer ? options.data : options.data.buffer]);
};
13185
// Error codes attached to failed key/init/segment requests. TIMEOUT and
// ABORTED are negative sentinels; FAILURE is a small positive code.
var REQUEST_ERRORS = {
  FAILURE: 2,
  TIMEOUT: -101,
  ABORTED: -102
};
13191/**
13192 * Abort all requests
13193 *
13194 * @param {Object} activeXhrs - an object that tracks all XHR requests
13195 */
13196
var abortAll = function abortAll(activeXhrs) {
  // Abort each in-flight request in order.
  for (var i = 0; i < activeXhrs.length; i++) {
    activeXhrs[i].abort();
  }
};
13202/**
13203 * Gather important bandwidth stats once a request has completed
13204 *
13205 * @param {Object} request - the XHR request from which to gather stats
13206 */
13207
13208
/**
 * Gather bandwidth stats from a completed request, defaulting missing
 * byte/time counters to zero.
 */
var getRequestStats = function getRequestStats(request) {
  var bytesReceived = request.bytesReceived || 0;
  var roundTripTime = request.roundTripTime || 0;

  return {
    bandwidth: request.bandwidth,
    bytesReceived: bytesReceived,
    roundTripTime: roundTripTime
  };
};
13216/**
13217 * If possible gather bandwidth stats as a request is in
13218 * progress
13219 *
13220 * @param {Event} progressEvent - an event object from an XHR's progress event
13221 */
13222
13223
var getProgressStats = function getProgressStats(progressEvent) {
  var request = progressEvent.target;
  var roundTripTime = Date.now() - request.requestTime || 0;
  var bytesReceived = progressEvent.loaded;

  // Division by a zero roundTripTime yields Infinity, which is acceptable:
  // progress-time bandwidth is only used to decide whether to abort a
  // request early due to insufficient bandwidth.
  return {
    bandwidth: Math.floor(bytesReceived / roundTripTime * 8 * 1000),
    bytesReceived: bytesReceived,
    roundTripTime: roundTripTime
  };
};
13239/**
13240 * Handle all error conditions in one place and return an object
13241 * with all the information
13242 *
13243 * @param {Error|null} error - if non-null signals an error occured with the XHR
13244 * @param {Object} request - the XHR request that possibly generated the error
13245 */
13246
13247
/**
 * Handle all error conditions in one place and return an object
 * with all the information
 *
 * @param {Error|null} error - if non-null signals an error occured with the XHR
 * @param {Object} request - the XHR request that possibly generated the error
 * @return {Object|null} an error descriptor ({status, message, code, xhr}),
 *         or null when the request completed successfully
 */
var handleErrors = function handleErrors(error, request) {
  if (request.timedout) {
    return {
      status: request.status,
      message: 'HLS request timed-out at URL: ' + request.uri,
      code: REQUEST_ERRORS.TIMEOUT,
      xhr: request
    };
  }

  if (request.aborted) {
    return {
      status: request.status,
      message: 'HLS request aborted at URL: ' + request.uri,
      code: REQUEST_ERRORS.ABORTED,
      xhr: request
    };
  }

  if (error) {
    return {
      status: request.status,
      message: 'HLS request errored at URL: ' + request.uri,
      code: REQUEST_ERRORS.FAILURE,
      xhr: request
    };
  }

  // Treat a missing response the same as a zero-length one; previously a
  // null response here would throw a TypeError instead of reporting a
  // failure to the caller.
  if (request.responseType === 'arraybuffer' && (!request.response || request.response.byteLength === 0)) {
    return {
      status: request.status,
      message: 'Empty HLS response at URL: ' + request.uri,
      code: REQUEST_ERRORS.FAILURE,
      xhr: request
    };
  }

  return null;
};
13287/**
13288 * Handle responses for key data and convert the key data to the correct format
13289 * for the decryption step later
13290 *
13291 * @param {Object} segment - a simplified copy of the segmentInfo object
13292 * from SegmentLoader
13293 * @param {Array} objects - objects to add the key bytes to.
13294 * @param {Function} finishProcessingFn - a callback to execute to continue processing
13295 * this request
13296 */
13297
13298
/**
 * Build an xhr completion handler for key requests: validates the
 * 16-byte key, converts it to a Uint32Array, attaches it to each pending
 * key object, then continues processing via `finishProcessingFn`.
 */
var handleKeyResponse = function handleKeyResponse(segment, objects, finishProcessingFn) {
  return function (error, request) {
    var response = request.response;
    var errorObj = handleErrors(error, request);

    if (errorObj) {
      return finishProcessingFn(errorObj, segment);
    }

    // AES-128 keys are always exactly 16 bytes.
    if (response.byteLength !== 16) {
      return finishProcessingFn({
        status: request.status,
        message: 'Invalid HLS key at URL: ' + request.uri,
        code: REQUEST_ERRORS.FAILURE,
        xhr: request
      }, segment);
    }

    var view = new DataView(response);
    var bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);

    // Every pending key object shares the same decoded key words.
    objects.forEach(function (object) {
      object.bytes = bytes;
    });

    return finishProcessingFn(null, segment);
  };
};
13327
/**
 * Detect the container of downloaded init-segment bytes and, for mp4,
 * probe the worker for track/timescale info which is recorded on
 * `segment.map`. Calls `_callback` with an error descriptor for non-mp4
 * containers, or null on success.
 */
var parseInitSegment = function parseInitSegment(segment, _callback) {
  var type = detectContainerForBytes(segment.map.bytes);

  // TODO: We should also handle ts init segments here, but we
  // only know how to parse mp4 init segments at the moment
  if (type !== 'mp4') {
    var uri = segment.map.resolvedUri || segment.map.uri;
    return _callback({
      internal: true,
      message: "Found unsupported " + (type || 'unknown') + " container for initialization segment at URL: " + uri,
      code: REQUEST_ERRORS.FAILURE
    });
  }

  workerCallback({
    action: 'probeMp4Tracks',
    data: segment.map.bytes,
    transmuxer: segment.transmuxer,
    callback: function callback(_ref) {
      var tracks = _ref.tracks;

      // transfer bytes back to us
      segment.map.bytes = _ref.data;

      tracks.forEach(function (track) {
        segment.map.tracks = segment.map.tracks || {};

        // only support one track of each type for now
        if (segment.map.tracks[track.type]) {
          return;
        }

        segment.map.tracks[track.type] = track;

        if (typeof track.id === 'number' && track.timescale) {
          segment.map.timescales = segment.map.timescales || {};
          segment.map.timescales[track.id] = track.timescale;
        }
      });
      return _callback(null);
    }
  });
};
13368/**
13369 * Handle init-segment responses
13370 *
13371 * @param {Object} segment - a simplified copy of the segmentInfo object
13372 * from SegmentLoader
13373 * @param {Function} finishProcessingFn - a callback to execute to continue processing
13374 * this request
13375 */
13376
13377
/**
 * Handle init-segment responses
 *
 * @param {Object} _ref2
 * @param {Object} _ref2.segment - a simplified copy of the segmentInfo
 *        object from SegmentLoader
 * @param {Function} _ref2.finishProcessingFn - callback to continue
 *        processing this request
 * @return {Function} an xhr completion handler
 */
var handleInitSegmentResponse = function handleInitSegmentResponse(_ref2) {
  var segment = _ref2.segment;
  var finishProcessingFn = _ref2.finishProcessingFn;

  return function (error, request) {
    var errorObj = handleErrors(error, request);

    if (errorObj) {
      return finishProcessingFn(errorObj, segment);
    }

    var bytes = new Uint8Array(request.response);

    // init segment is encypted, we will have to wait
    // until the key request is done to decrypt.
    if (segment.map.key) {
      segment.map.encryptedBytes = bytes;
      return finishProcessingFn(null, segment);
    }

    segment.map.bytes = bytes;
    parseInitSegment(segment, function (parseError) {
      if (!parseError) {
        finishProcessingFn(null, segment);
        return;
      }

      parseError.xhr = request;
      parseError.status = request.status;
      finishProcessingFn(parseError, segment);
    });
  };
};
13408/**
13409 * Response handler for segment-requests being sure to set the correct
13410 * property depending on whether the segment is encryped or not
13411 * Also records and keeps track of stats that are used for ABR purposes
13412 *
13413 * @param {Object} segment - a simplified copy of the segmentInfo object
13414 * from SegmentLoader
13415 * @param {Function} finishProcessingFn - a callback to execute to continue processing
13416 * this request
13417 */
13418
13419
/**
 * Build an xhr completion handler for media-segment requests. Stores the
 * downloaded bytes on `segment.bytes` (or `segment.encryptedBytes` when a
 * key is present), records request stats for ABR, and continues
 * processing via `finishProcessingFn`.
 */
var handleSegmentResponse = function handleSegmentResponse(_ref3) {
  var segment = _ref3.segment;
  var finishProcessingFn = _ref3.finishProcessingFn;
  var responseType = _ref3.responseType;

  return function (error, request) {
    var errorObj = handleErrors(error, request);

    if (errorObj) {
      return finishProcessingFn(errorObj, segment);
    }

    // although responseText "should" exist, this guard serves to prevent an error being
    // thrown for two primary cases:
    // 1. the mime type override stops working, or is not implemented for a specific
    //    browser
    // 2. when using mock XHR libraries like sinon that do not allow the override behavior
    var useRawResponse = responseType === 'arraybuffer' || !request.responseText;
    var newBytes = useRawResponse ? request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));

    segment.stats = getRequestStats(request);

    if (segment.key) {
      segment.encryptedBytes = new Uint8Array(newBytes);
    } else {
      segment.bytes = new Uint8Array(newBytes);
    }

    return finishProcessingFn(null, segment);
  };
};
13448
/**
 * Transmux segment bytes via the web worker, first probing the ts data so
 * start times and track info come from the probe rather than the
 * transmuxer, and notify the caller through the supplied callbacks as
 * results arrive.
 *
 * NOTE(review): callers appear to route only ts/aac bytes here (see
 * handleSegmentBytes) — confirm before reusing elsewhere.
 */
var transmuxAndNotify = function transmuxAndNotify(_ref4) {
  var segment = _ref4.segment,
      bytes = _ref4.bytes,
      trackInfoFn = _ref4.trackInfoFn,
      timingInfoFn = _ref4.timingInfoFn,
      videoSegmentTimingInfoFn = _ref4.videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn = _ref4.audioSegmentTimingInfoFn,
      id3Fn = _ref4.id3Fn,
      captionsFn = _ref4.captionsFn,
      isEndOfTimeline = _ref4.isEndOfTimeline,
      endedTimelineFn = _ref4.endedTimelineFn,
      dataFn = _ref4.dataFn,
      doneFn = _ref4.doneFn,
      onTransmuxerLog = _ref4.onTransmuxerLog;
  var fmp4Tracks = segment.map && segment.map.tracks || {};
  var isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.
  // One reason for this is that in the case of full segments, we want to trust start
  // times from the probe, rather than the transmuxer.

  var audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
  var audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
  var videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
  var videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end');

  // Kick off the actual transmux job; invoked only after the ts probe
  // below has had a chance to report start times and track info.
  var finish = function finish() {
    return transmux({
      bytes: bytes,
      transmuxer: segment.transmuxer,
      audioAppendStart: segment.audioAppendStart,
      gopsToAlignWith: segment.gopsToAlignWith,
      remux: isMuxed,
      onData: function onData(result) {
        result.type = result.type === 'combined' ? 'video' : result.type;
        dataFn(segment, result);
      },
      onTrackInfo: function onTrackInfo(trackInfo) {
        if (trackInfoFn) {
          if (isMuxed) {
            trackInfo.isMuxed = true;
          }

          trackInfoFn(segment, trackInfo);
        }
      },
      onAudioTimingInfo: function onAudioTimingInfo(audioTimingInfo) {
        // we only want the first start value we encounter
        if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
          audioStartFn(audioTimingInfo.start);
          audioStartFn = null;
        } // we want to continually update the end time


        if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
          audioEndFn(audioTimingInfo.end);
        }
      },
      onVideoTimingInfo: function onVideoTimingInfo(videoTimingInfo) {
        // we only want the first start value we encounter
        if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
          videoStartFn(videoTimingInfo.start);
          videoStartFn = null;
        } // we want to continually update the end time


        if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
          videoEndFn(videoTimingInfo.end);
        }
      },
      onVideoSegmentTimingInfo: function onVideoSegmentTimingInfo(videoSegmentTimingInfo) {
        videoSegmentTimingInfoFn(videoSegmentTimingInfo);
      },
      onAudioSegmentTimingInfo: function onAudioSegmentTimingInfo(audioSegmentTimingInfo) {
        audioSegmentTimingInfoFn(audioSegmentTimingInfo);
      },
      onId3: function onId3(id3Frames, dispatchType) {
        id3Fn(segment, id3Frames, dispatchType);
      },
      onCaptions: function onCaptions(captions) {
        captionsFn(segment, [captions]);
      },
      isEndOfTimeline: isEndOfTimeline,
      onEndedTimeline: function onEndedTimeline() {
        endedTimelineFn();
      },
      onTransmuxerLog: onTransmuxerLog,
      onDone: function onDone(result) {
        if (!doneFn) {
          return;
        }

        result.type = result.type === 'combined' ? 'video' : result.type;
        doneFn(null, segment, result);
      }
    });
  }; // In the transmuxer, we don't yet have the ability to extract a "proper" start time.
  // Meaning cached frame data may corrupt our notion of where this segment
  // really starts. To get around this, probe for the info needed.


  workerCallback({
    action: 'probeTs',
    transmuxer: segment.transmuxer,
    data: bytes,
    baseStartTime: segment.baseStartTime,
    callback: function callback(data) {
      // the worker transfers the bytes back to us
      segment.bytes = bytes = data.data;
      var probeResult = data.result;

      if (probeResult) {
        trackInfoFn(segment, {
          hasAudio: probeResult.hasAudio,
          hasVideo: probeResult.hasVideo,
          isMuxed: isMuxed
        });
        // null the callbacks the probe has satisfied so the transmuxer's
        // own events don't overwrite the probed values
        trackInfoFn = null;

        if (probeResult.hasAudio && !isMuxed) {
          audioStartFn(probeResult.audioStart);
        }

        if (probeResult.hasVideo) {
          videoStartFn(probeResult.videoStart);
        }

        audioStartFn = null;
        videoStartFn = null;
      }

      finish();
    }
  });
};
13581
/**
 * Route downloaded segment bytes to the right processing path: fmp4
 * segments are probed for start time and captions and passed through
 * without transmuxing; ts/aac segments are transmuxed; anything else
 * (e.g. segments with no transmuxer, or unknown containers) is reported
 * and completed immediately.
 */
var handleSegmentBytes = function handleSegmentBytes(_ref5) {
  var segment = _ref5.segment,
      bytes = _ref5.bytes,
      trackInfoFn = _ref5.trackInfoFn,
      timingInfoFn = _ref5.timingInfoFn,
      videoSegmentTimingInfoFn = _ref5.videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn = _ref5.audioSegmentTimingInfoFn,
      id3Fn = _ref5.id3Fn,
      captionsFn = _ref5.captionsFn,
      isEndOfTimeline = _ref5.isEndOfTimeline,
      endedTimelineFn = _ref5.endedTimelineFn,
      dataFn = _ref5.dataFn,
      doneFn = _ref5.doneFn,
      onTransmuxerLog = _ref5.onTransmuxerLog;
  var bytesAsUint8Array = new Uint8Array(bytes); // TODO:
  // We should have a handler that fetches the number of bytes required
  // to check if something is fmp4. This will allow us to save bandwidth
  // because we can only blacklist a playlist and abort requests
  // by codec after trackinfo triggers.

  if (isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
    segment.isFmp4 = true;
    var tracks = segment.map.tracks;
    var trackInfo = {
      isFmp4: true,
      hasVideo: !!tracks.video,
      hasAudio: !!tracks.audio
    }; // if we have a audio track, with a codec that is not set to
    // encrypted audio

    if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
      trackInfo.audioCodec = tracks.audio.codec;
    } // if we have a video track, with a codec that is not set to
    // encrypted video


    if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
      trackInfo.videoCodec = tracks.video.codec;
    }

    if (tracks.video && tracks.audio) {
      trackInfo.isMuxed = true;
    } // since we don't support appending fmp4 data on progress, we know we have the full
    // segment here


    trackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start
    // time. The end time can be roughly calculated by the receiver using the duration.
    //
    // Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
    // that is the true start of the segment (where the playback engine should begin
    // decoding).

    // Deliver the (untransmuxed) fmp4 bytes plus any parsed captions and
    // signal completion.
    var finishLoading = function finishLoading(captions) {
      // if the track still has audio at this point it is only possible
      // for it to be audio only. See `tracks.video && tracks.audio` if statement
      // above.
      // we make sure to use segment.bytes here as that
      dataFn(segment, {
        data: bytesAsUint8Array,
        type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
      });

      if (captions && captions.length) {
        captionsFn(segment, captions);
      }

      doneFn(null, segment, {});
    };

    workerCallback({
      action: 'probeMp4StartTime',
      timescales: segment.map.timescales,
      data: bytesAsUint8Array,
      transmuxer: segment.transmuxer,
      callback: function callback(_ref6) {
        var data = _ref6.data,
            startTime = _ref6.startTime;
        // transfer bytes back to us
        bytes = data.buffer;
        segment.bytes = bytesAsUint8Array = data;

        if (trackInfo.hasAudio && !trackInfo.isMuxed) {
          timingInfoFn(segment, 'audio', 'start', startTime);
        }

        if (trackInfo.hasVideo) {
          timingInfoFn(segment, 'video', 'start', startTime);
        } // Run through the CaptionParser in case there are captions.
        // Initialize CaptionParser if it hasn't been yet


        if (!tracks.video || !data.byteLength || !segment.transmuxer) {
          finishLoading();
          return;
        }

        workerCallback({
          action: 'pushMp4Captions',
          endAction: 'mp4Captions',
          transmuxer: segment.transmuxer,
          data: bytesAsUint8Array,
          timescales: segment.map.timescales,
          trackIds: [tracks.video.id],
          callback: function callback(message) {
            // transfer bytes back to us
            bytes = message.data.buffer;
            segment.bytes = bytesAsUint8Array = message.data;
            message.logs.forEach(function (log) {
              onTransmuxerLog(videojs.mergeOptions(log, {
                stream: 'mp4CaptionParser'
              }));
            });
            finishLoading(message.captions);
          }
        });
      }
    });
    return;
  } // VTT or other segments that don't need processing


  if (!segment.transmuxer) {
    doneFn(null, segment, {});
    return;
  }

  if (typeof segment.container === 'undefined') {
    segment.container = detectContainerForBytes(bytesAsUint8Array);
  }

  // Containers other than ts/aac cannot be transmuxed; report no tracks
  // and complete immediately.
  if (segment.container !== 'ts' && segment.container !== 'aac') {
    trackInfoFn(segment, {
      hasAudio: false,
      hasVideo: false
    });
    doneFn(null, segment, {});
    return;
  } // ts or aac


  transmuxAndNotify({
    segment: segment,
    bytes: bytes,
    trackInfoFn: trackInfoFn,
    timingInfoFn: timingInfoFn,
    videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
    audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
    id3Fn: id3Fn,
    captionsFn: captionsFn,
    isEndOfTimeline: isEndOfTimeline,
    endedTimelineFn: endedTimelineFn,
    dataFn: dataFn,
    doneFn: doneFn,
    onTransmuxerLog: onTransmuxerLog
  });
};
13739
/**
 * Decrypts a set of encrypted bytes by handing them to the decryption web
 * worker and invokes `callback` with the decrypted result once the worker
 * replies with a message matching this request's id.
 *
 * @param {Object} options
 * @param {string} options.id - unique id used to match the worker's response
 * @param {Object} options.key - decryption key with `bytes` and `iv` properties
 * @param {Uint8Array} options.encryptedBytes - the bytes to decrypt
 * @param {WebWorker} options.decryptionWorker - worker exposing AES-128 routines
 * @param {Function} callback - called with a Uint8Array of decrypted bytes
 */
var decrypt = function decrypt(options, callback) {
  var id = options.id;
  var key = options.key;
  var encryptedBytes = options.encryptedBytes;
  var decryptionWorker = options.decryptionWorker;

  var onWorkerMessage = function onWorkerMessage(event) {
    // ignore responses that belong to other in-flight decryption requests
    if (event.data.source !== id) {
      return;
    }

    decryptionWorker.removeEventListener('message', onWorkerMessage);
    var decrypted = event.data.decrypted;
    callback(new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength));
  };

  decryptionWorker.addEventListener('message', onWorkerMessage);

  // copy the key into a transferable typed array so the caller's buffer is
  // not detached by the postMessage transfer below
  var keyBytes = key.bytes.slice ? key.bytes.slice() : new Uint32Array(Array.prototype.slice.call(key.bytes));

  // incrementally decrypt the bytes; both buffers are transferred to the worker
  decryptionWorker.postMessage(createTransferableMessage({
    source: id,
    encrypted: encryptedBytes,
    key: keyBytes,
    iv: key.iv
  }), [encryptedBytes.buffer, keyBytes.buffer]);
};
13771/**
13772 * Decrypt the segment via the decryption web worker
13773 *
13774 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
13775 * routines
13776 * @param {Object} segment - a simplified copy of the segmentInfo object
13777 * from SegmentLoader
13778 * @param {Function} trackInfoFn - a callback that receives track info
13779 * @param {Function} timingInfoFn - a callback that receives timing info
13780 * @param {Function} videoSegmentTimingInfoFn
13781 * a callback that receives video timing info based on media times and
13782 * any adjustments made by the transmuxer
13783 * @param {Function} audioSegmentTimingInfoFn
13784 * a callback that receives audio timing info based on media times and
13785 * any adjustments made by the transmuxer
13786 * @param {boolean} isEndOfTimeline
13787 * true if this segment represents the last segment in a timeline
13788 * @param {Function} endedTimelineFn
13789 * a callback made when a timeline is ended, will only be called if
13790 * isEndOfTimeline is true
13791 * @param {Function} dataFn - a callback that is executed when segment bytes are available
13792 * and ready to use
13793 * @param {Function} doneFn - a callback that is executed after decryption has completed
13794 */
13795
13796
/**
 * Decrypts a segment's bytes via the decryption web worker, then forwards
 * the decrypted bytes through handleSegmentBytes for probing/transmuxing.
 * See the JSDoc block above for the full parameter description.
 */
var decryptSegment = function decryptSegment(options) {
  var decryptionWorker = options.decryptionWorker;
  var segment = options.segment;

  decrypt({
    id: segment.requestId,
    key: segment.key,
    encryptedBytes: segment.encryptedBytes,
    decryptionWorker: decryptionWorker
  }, function (decryptedBytes) {
    // stash the cleartext on the segment, then continue normal processing
    segment.bytes = decryptedBytes;
    handleSegmentBytes({
      segment: segment,
      bytes: segment.bytes,
      trackInfoFn: options.trackInfoFn,
      timingInfoFn: options.timingInfoFn,
      videoSegmentTimingInfoFn: options.videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn: options.audioSegmentTimingInfoFn,
      id3Fn: options.id3Fn,
      captionsFn: options.captionsFn,
      isEndOfTimeline: options.isEndOfTimeline,
      endedTimelineFn: options.endedTimelineFn,
      dataFn: options.dataFn,
      doneFn: options.doneFn,
      onTransmuxerLog: options.onTransmuxerLog
    });
  });
};
13835/**
13836 * This function waits for all XHRs to finish (with either success or failure)
 * before continuing processing via its callback. The function gathers errors
13838 * from each request into a single errors array so that the error status for
13839 * each request can be examined later.
13840 *
13841 * @param {Object} activeXhrs - an object that tracks all XHR requests
13842 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
13843 * routines
13844 * @param {Function} trackInfoFn - a callback that receives track info
13845 * @param {Function} timingInfoFn - a callback that receives timing info
13846 * @param {Function} videoSegmentTimingInfoFn
13847 * a callback that receives video timing info based on media times and
13848 * any adjustments made by the transmuxer
13849 * @param {Function} audioSegmentTimingInfoFn
13850 * a callback that receives audio timing info based on media times and
13851 * any adjustments made by the transmuxer
13852 * @param {Function} id3Fn - a callback that receives ID3 metadata
13853 * @param {Function} captionsFn - a callback that receives captions
13854 * @param {boolean} isEndOfTimeline
13855 * true if this segment represents the last segment in a timeline
13856 * @param {Function} endedTimelineFn
13857 * a callback made when a timeline is ended, will only be called if
13858 * isEndOfTimeline is true
13859 * @param {Function} dataFn - a callback that is executed when segment bytes are available
13860 * and ready to use
13861 * @param {Function} doneFn - a callback that is executed after all resources have been
13862 * downloaded and any decryption completed
13863 */
13864
13865
var waitForCompletion = function waitForCompletion(_ref9) {
  var activeXhrs = _ref9.activeXhrs,
      decryptionWorker = _ref9.decryptionWorker,
      trackInfoFn = _ref9.trackInfoFn,
      timingInfoFn = _ref9.timingInfoFn,
      videoSegmentTimingInfoFn = _ref9.videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn = _ref9.audioSegmentTimingInfoFn,
      id3Fn = _ref9.id3Fn,
      captionsFn = _ref9.captionsFn,
      isEndOfTimeline = _ref9.isEndOfTimeline,
      endedTimelineFn = _ref9.endedTimelineFn,
      dataFn = _ref9.dataFn,
      doneFn = _ref9.doneFn,
      onTransmuxerLog = _ref9.onTransmuxerLog;
  // number of requests in the batch that have completed successfully so far
  var count = 0;
  // once any request errors, all subsequent callbacks are ignored
  var didError = false;
  // The returned function is the shared completion callback installed on every
  // xhr in the batch. It is called once per request with (error, segment).
  return function (error, segment) {
    if (didError) {
      return;
    }

    if (error) {
      didError = true; // If there are errors, we have to abort any outstanding requests

      abortAll(activeXhrs); // Even though the requests above are aborted, and in theory we could wait until we
      // handle the aborted events from those requests, there are some cases where we may
      // never get an aborted event. For instance, if the network connection is lost and
      // there were two requests, the first may have triggered an error immediately, while
      // the second request remains unsent. In that case, the aborted algorithm will not
      // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
      //
      // We also can't rely on the ready state of the XHR, since the request that
      // triggered the connection error may also show as a ready state of 0 (unsent).
      // Therefore, we have to finish this group of requests immediately after the first
      // seen error.

      return doneFn(error, segment);
    }

    count += 1;

    // only proceed once every request in the batch (key, init segment, media
    // segment) has reported success
    if (count === activeXhrs.length) {
      // completes processing of the media segment: decrypt if needed,
      // otherwise hand the bytes off for probing/transmuxing
      var segmentFinish = function segmentFinish() {
        if (segment.encryptedBytes) {
          return decryptSegment({
            decryptionWorker: decryptionWorker,
            segment: segment,
            trackInfoFn: trackInfoFn,
            timingInfoFn: timingInfoFn,
            videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
            audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
            id3Fn: id3Fn,
            captionsFn: captionsFn,
            isEndOfTimeline: isEndOfTimeline,
            endedTimelineFn: endedTimelineFn,
            dataFn: dataFn,
            doneFn: doneFn,
            onTransmuxerLog: onTransmuxerLog
          });
        } // Otherwise, everything is ready just continue


        handleSegmentBytes({
          segment: segment,
          bytes: segment.bytes,
          trackInfoFn: trackInfoFn,
          timingInfoFn: timingInfoFn,
          videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
          audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
          id3Fn: id3Fn,
          captionsFn: captionsFn,
          isEndOfTimeline: isEndOfTimeline,
          endedTimelineFn: endedTimelineFn,
          dataFn: dataFn,
          doneFn: doneFn,
          onTransmuxerLog: onTransmuxerLog
        });
      }; // Keep track of when *all* of the requests have completed


      segment.endOfAllRequests = Date.now();

      // an encrypted init segment must be decrypted and parsed before the
      // media segment itself can be processed
      if (segment.map && segment.map.encryptedBytes && !segment.map.bytes) {
        return decrypt({
          decryptionWorker: decryptionWorker,
          // add -init to the "id" to differentiate between segment
          // and init segment decryption, just in case they happen
          // at the same time at some point in the future.
          id: segment.requestId + '-init',
          encryptedBytes: segment.map.encryptedBytes,
          key: segment.map.key
        }, function (decryptedBytes) {
          segment.map.bytes = decryptedBytes;
          parseInitSegment(segment, function (parseError) {
            if (parseError) {
              // a bad init segment means the whole batch fails
              abortAll(activeXhrs);
              return doneFn(parseError, segment);
            }

            segmentFinish();
          });
        });
      }

      segmentFinish();
    }
  };
};
13974/**
13975 * Calls the abort callback if any request within the batch was aborted. Will only call
13976 * the callback once per batch of requests, even if multiple were aborted.
13977 *
13978 * @param {Object} loadendState - state to check to see if the abort function was called
13979 * @param {Function} abortFn - callback to call for abort
13980 */
13981
13982
/**
 * Builds a `loadend` listener that invokes `abortFn` at most once per batch
 * of requests when any request in the batch was aborted.
 *
 * @param {Object} options
 * @param {Object} options.loadendState - state shared across the batch,
 *        tracking whether the abort callback already ran
 * @param {Function} options.abortFn - callback invoked on the first aborted request
 * @return {Function} an event listener suitable for the `loadend` event
 */
var handleLoadEnd = function handleLoadEnd(options) {
  var loadendState = options.loadendState;
  var abortFn = options.abortFn;
  return function (event) {
    // only the first aborted request in the batch should trigger the callback
    if (!event.target.aborted || !abortFn || loadendState.calledAbortFn) {
      return;
    }

    abortFn();
    loadendState.calledAbortFn = true;
  };
};
13995/**
13996 * Simple progress event callback handler that gathers some stats before
13997 * executing a provided callback with the `segment` object
13998 *
13999 * @param {Object} segment - a simplified copy of the segmentInfo object
14000 * from SegmentLoader
14001 * @param {Function} progressFn - a callback that is executed each time a progress event
14002 * is received
14003 * @param {Function} trackInfoFn - a callback that receives track info
14004 * @param {Function} timingInfoFn - a callback that receives timing info
14005 * @param {Function} videoSegmentTimingInfoFn
14006 * a callback that receives video timing info based on media times and
14007 * any adjustments made by the transmuxer
14008 * @param {Function} audioSegmentTimingInfoFn
14009 * a callback that receives audio timing info based on media times and
14010 * any adjustments made by the transmuxer
14011 * @param {boolean} isEndOfTimeline
14012 * true if this segment represents the last segment in a timeline
14013 * @param {Function} endedTimelineFn
14014 * a callback made when a timeline is ended, will only be called if
14015 * isEndOfTimeline is true
14016 * @param {Function} dataFn - a callback that is executed when segment bytes are available
14017 * and ready to use
14018 * @param {Event} event - the progress event object from XMLHttpRequest
14019 */
14020
14021
/**
 * Creates a `progress` event handler that records transfer stats on
 * `segment.stats` before forwarding the event to `progressFn`.
 *
 * @param {Object} options
 * @param {Object} options.segment - simplified segmentInfo object; its `stats`
 *        property is replaced with a merged copy on each progress event
 * @param {Function} options.progressFn - called with (event, segment) for each
 *        non-aborted progress event
 * @return {Function} an XMLHttpRequest `progress` event listener
 */
var handleProgress = function handleProgress(_ref11) {
  var segment = _ref11.segment,
      progressFn = _ref11.progressFn;
  // The remaining options (trackInfoFn, timingInfoFn, etc.) are accepted for
  // interface symmetry with the other handlers but are unused here; the
  // transpiler's no-op property reads for them have been removed as dead code.
  return function (event) {
    var request = event.target;

    if (request.aborted) {
      return;
    }

    segment.stats = videojs.mergeOptions(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of data

    if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
      segment.stats.firstBytesReceivedAt = Date.now();
    }

    return progressFn(event, segment);
  };
};
14050/**
14051 * Load all resources and does any processing necessary for a media-segment
14052 *
14053 * Features:
14054 * decrypts the media-segment if it has a key uri and an iv
14055 * aborts *all* requests if *any* one request fails
14056 *
14057 * The segment object, at minimum, has the following format:
14058 * {
14059 * resolvedUri: String,
14060 * [transmuxer]: Object,
14061 * [byterange]: {
14062 * offset: Number,
14063 * length: Number
14064 * },
14065 * [key]: {
14066 * resolvedUri: String
14067 * [byterange]: {
14068 * offset: Number,
14069 * length: Number
14070 * },
14071 * iv: {
14072 * bytes: Uint32Array
14073 * }
14074 * },
14075 * [map]: {
14076 * resolvedUri: String,
14077 * [byterange]: {
14078 * offset: Number,
14079 * length: Number
14080 * },
14081 * [bytes]: Uint8Array
14082 * }
14083 * }
14084 * ...where [name] denotes optional properties
14085 *
14086 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
14087 * @param {Object} xhrOptions - the base options to provide to all xhr requests
14088 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
14089 * decryption routines
14090 * @param {Object} segment - a simplified copy of the segmentInfo object
14091 * from SegmentLoader
14092 * @param {Function} abortFn - a callback called (only once) if any piece of a request was
14093 * aborted
14094 * @param {Function} progressFn - a callback that receives progress events from the main
14095 * segment's xhr request
14096 * @param {Function} trackInfoFn - a callback that receives track info
14097 * @param {Function} timingInfoFn - a callback that receives timing info
14098 * @param {Function} videoSegmentTimingInfoFn
14099 * a callback that receives video timing info based on media times and
14100 * any adjustments made by the transmuxer
14101 * @param {Function} audioSegmentTimingInfoFn
14102 * a callback that receives audio timing info based on media times and
14103 * any adjustments made by the transmuxer
14104 * @param {Function} id3Fn - a callback that receives ID3 metadata
14105 * @param {Function} captionsFn - a callback that receives captions
14106 * @param {boolean} isEndOfTimeline
14107 * true if this segment represents the last segment in a timeline
14108 * @param {Function} endedTimelineFn
14109 * a callback made when a timeline is ended, will only be called if
14110 * isEndOfTimeline is true
14111 * @param {Function} dataFn - a callback that receives data from the main segment's xhr
14112 * request, transmuxed if needed
14113 * @param {Function} doneFn - a callback that is executed only once all requests have
14114 * succeeded or failed
14115 * @return {Function} a function that, when invoked, immediately aborts all
14116 * outstanding requests
14117 */
14118
14119
var mediaSegmentRequest = function mediaSegmentRequest(_ref12) {
  var xhr = _ref12.xhr,
      xhrOptions = _ref12.xhrOptions,
      decryptionWorker = _ref12.decryptionWorker,
      segment = _ref12.segment,
      abortFn = _ref12.abortFn,
      progressFn = _ref12.progressFn,
      trackInfoFn = _ref12.trackInfoFn,
      timingInfoFn = _ref12.timingInfoFn,
      videoSegmentTimingInfoFn = _ref12.videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn = _ref12.audioSegmentTimingInfoFn,
      id3Fn = _ref12.id3Fn,
      captionsFn = _ref12.captionsFn,
      isEndOfTimeline = _ref12.isEndOfTimeline,
      endedTimelineFn = _ref12.endedTimelineFn,
      dataFn = _ref12.dataFn,
      doneFn = _ref12.doneFn,
      onTransmuxerLog = _ref12.onTransmuxerLog;
  // every xhr issued for this segment (key, map key, init segment, media
  // segment) so the whole batch can be aborted together
  var activeXhrs = [];
  // shared completion callback: fires doneFn only after all requests in
  // activeXhrs have succeeded, or immediately on the first error
  var finishProcessingFn = waitForCompletion({
    activeXhrs: activeXhrs,
    decryptionWorker: decryptionWorker,
    trackInfoFn: trackInfoFn,
    timingInfoFn: timingInfoFn,
    videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
    audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
    id3Fn: id3Fn,
    captionsFn: captionsFn,
    isEndOfTimeline: isEndOfTimeline,
    endedTimelineFn: endedTimelineFn,
    dataFn: dataFn,
    doneFn: doneFn,
    onTransmuxerLog: onTransmuxerLog
  }); // optionally, request the decryption key

  if (segment.key && !segment.key.bytes) {
    var objects = [segment.key];

    // when the init segment shares the media segment's key uri, a single key
    // request can populate both key objects
    if (segment.map && !segment.map.bytes && segment.map.key && segment.map.key.resolvedUri === segment.key.resolvedUri) {
      objects.push(segment.map.key);
    }

    var keyRequestOptions = videojs.mergeOptions(xhrOptions, {
      uri: segment.key.resolvedUri,
      responseType: 'arraybuffer'
    });
    var keyRequestCallback = handleKeyResponse(segment, objects, finishProcessingFn);
    var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
    activeXhrs.push(keyXhr);
  } // optionally, request the associated media init segment


  if (segment.map && !segment.map.bytes) {
    // the init segment may use its own key, distinct from the media segment's
    var differentMapKey = segment.map.key && (!segment.key || segment.key.resolvedUri !== segment.map.key.resolvedUri);

    if (differentMapKey) {
      var mapKeyRequestOptions = videojs.mergeOptions(xhrOptions, {
        uri: segment.map.key.resolvedUri,
        responseType: 'arraybuffer'
      });
      var mapKeyRequestCallback = handleKeyResponse(segment, [segment.map.key], finishProcessingFn);
      var mapKeyXhr = xhr(mapKeyRequestOptions, mapKeyRequestCallback);
      activeXhrs.push(mapKeyXhr);
    }

    var initSegmentOptions = videojs.mergeOptions(xhrOptions, {
      uri: segment.map.resolvedUri,
      responseType: 'arraybuffer',
      headers: segmentXhrHeaders(segment.map)
    });
    var initSegmentRequestCallback = handleInitSegmentResponse({
      segment: segment,
      finishProcessingFn: finishProcessingFn
    });
    var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
    activeXhrs.push(initSegmentXhr);
  }

  // the media segment itself; for LL-HLS a part's uri takes precedence over
  // the full segment uri
  var segmentRequestOptions = videojs.mergeOptions(xhrOptions, {
    uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,
    responseType: 'arraybuffer',
    headers: segmentXhrHeaders(segment)
  });
  var segmentRequestCallback = handleSegmentResponse({
    segment: segment,
    finishProcessingFn: finishProcessingFn,
    responseType: segmentRequestOptions.responseType
  });
  var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
  segmentXhr.addEventListener('progress', handleProgress({
    segment: segment,
    progressFn: progressFn,
    trackInfoFn: trackInfoFn,
    timingInfoFn: timingInfoFn,
    videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
    audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
    id3Fn: id3Fn,
    captionsFn: captionsFn,
    isEndOfTimeline: isEndOfTimeline,
    endedTimelineFn: endedTimelineFn,
    dataFn: dataFn
  }));
  activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but should not make callbacks
  // multiple times, provide a shared state object

  var loadendState = {};
  activeXhrs.forEach(function (activeXhr) {
    activeXhr.addEventListener('loadend', handleLoadEnd({
      loadendState: loadendState,
      abortFn: abortFn
    }));
  });
  // returns an abort function covering every request in the batch
  return function () {
    return abortAll(activeXhrs);
  };
};
14236
14237/**
14238 * @file - codecs.js - Handles tasks regarding codec strings such as translating them to
14239 * codec strings, or translating codec strings into objects that can be examined.
14240 */
// module-scoped logger for codec-related warnings (e.g. ambiguous CODECS attributes)
var logFn$1 = logger('CodecUtils');
14242/**
14243 * Returns a set of codec strings parsed from the playlist or the default
14244 * codec strings if no codecs were specified in the playlist
14245 *
14246 * @param {Playlist} media the current media playlist
14247 * @return {Object} an object with the video and audio codecs
14248 */
14249
/**
 * Returns the parsed CODECS attribute from a media playlist, or `undefined`
 * when the playlist does not explicitly declare codecs (callers fall back to
 * defaults in that case).
 *
 * @param {Playlist} media - the current media playlist
 * @return {Object|undefined} parsed codec information
 */
var getCodecs = function getCodecs(media) {
  var attrs = media.attributes || {};

  // only explicitly specified codecs are used; otherwise return undefined
  return attrs.CODECS ? parseCodecs(attrs.CODECS) : undefined;
};
14259
/**
 * Whether the content uses "multiple audio alternative tracks": the media
 * playlist references an AUDIO media group that exists on the master playlist.
 *
 * @param {Object} master - the parsed master playlist
 * @param {Object} media - the current media playlist
 * @return {Object|*} the matching audio media group when present, otherwise a
 *         falsy value (null/undefined/false depending on which check failed)
 */
var isMaat = function isMaat(master, media) {
  var attrs = media.attributes || {};
  var audioGroupId = attrs.AUDIO;

  return master &&
    master.mediaGroups &&
    master.mediaGroups.AUDIO &&
    audioGroupId &&
    master.mediaGroups.AUDIO[audioGroupId];
};
/**
 * Whether audio and video are muxed into the same segments. Content without
 * alternative audio tracks is always considered muxed; otherwise the audio
 * group entries determine it.
 *
 * @param {Object} master - the parsed master playlist
 * @param {Object} media - the current media playlist
 * @return {boolean} true when audio is muxed with video
 */
var isMuxed = function isMuxed(master, media) {
  if (!isMaat(master, media)) {
    return true;
  }

  var attrs = media.attributes || {};
  var audioGroup = master.mediaGroups.AUDIO[attrs.AUDIO];

  for (var groupId in audioGroup) {
    var entry = audioGroup[groupId];

    // If an audio group has a URI (the case for HLS, as HLS will use external
    // playlists), or there are listed playlists (the case for DASH, as the
    // manifest will have already provided all of the details necessary to
    // generate the audio playlist), then the content is demuxed. An entry
    // with neither means the audio rides along inside the video segments.
    if (!entry.uri && !entry.playlists) {
      return true;
    }
  }

  return false;
};
/**
 * Collapses a parsed codec list into a map of codec strings keyed by media
 * type. When more than one codec of the same media type is declared, that
 * type is set to `null` so that segment probing determines the real codec.
 *
 * @param {Array} codecList - parsed codec descriptors ({ mediaType, type, details })
 * @return {Object} codec string (or null) per media type
 */
var unwrapCodecList = function unwrapCodecList(codecList) {
  var codecs = {};

  // group the translated codec strings by media type
  codecList.forEach(function (codec) {
    var mediaType = codec.mediaType;

    codecs[mediaType] = codecs[mediaType] || [];
    codecs[mediaType].push(translateLegacyCodec("" + codec.type + codec.details));
  });

  // collapse each group to a single string, or null when ambiguous
  Object.keys(codecs).forEach(function (mediaType) {
    var list = codecs[mediaType];

    if (list.length > 1) {
      logFn$1("multiple " + mediaType + " codecs found as attributes: " + list.join(', ') + ". Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.");
      codecs[mediaType] = null;
      return;
    }

    codecs[mediaType] = list[0];
  });

  return codecs;
};
/**
 * Counts how many media types (audio, video) have a codec entry.
 *
 * @param {Object} codecObj - object with optional `audio`/`video` codec strings
 * @return {number} 0, 1, or 2
 */
var codecCount = function codecCount(codecObj) {
  // each truthy media type contributes one to the total
  return (codecObj.audio ? 1 : 0) + (codecObj.video ? 1 : 0);
};
14317/**
14318 * Calculates the codec strings for a working configuration of
14319 * SourceBuffers to play variant streams in a master playlist. If
14320 * there is no possible working configuration, an empty object will be
14321 * returned.
14322 *
14323 * @param master {Object} the m3u8 object for the master playlist
14324 * @param media {Object} the m3u8 object for the variant playlist
14325 * @return {Object} the codec strings.
14326 *
14327 * @private
14328 */
14329
/**
 * Calculates the codec strings for a working configuration of SourceBuffers
 * for the given variant playlist. Returns an object with `audio`/`video`
 * codec strings where they could be determined.
 *
 * @param {Object} master - the m3u8 object for the master playlist
 * @param {Object} media - the m3u8 object for the variant playlist
 * @return {Object} the codec strings
 * @private
 */
var codecsForPlaylist = function codecsForPlaylist(master, media) {
  var attrs = media.attributes || {};
  var codecInfo = unwrapCodecList(getCodecs(media) || []);

  // HLS with multiple-audio tracks must always get an audio codec — there is
  // no video-only multiple-audio HLS. For demuxed content the audio codec may
  // be declared on the audio media group playlist rather than the rendition
  // playlist (the common case for DASH, where audio and video are always
  // separate and separately specified).
  if (isMaat(master, media) && !codecInfo.audio && !isMuxed(master, media)) {
    var defaultCodecs = unwrapCodecList(codecsFromDefault(master, attrs.AUDIO) || []);

    if (defaultCodecs.audio) {
      codecInfo.audio = defaultCodecs.audio;
    }
  }

  return codecInfo;
};
14350
// module-scoped logger for playlist-selection debug output
var logFn = logger('PlaylistSelector');
14352
/**
 * Serializes a representation to a compact JSON string for log output.
 *
 * @param {Object} representation - representation wrapper holding a `playlist`
 * @return {string|undefined} JSON description, or undefined when there is no playlist
 */
var representationToString = function representationToString(representation) {
  if (!representation || !representation.playlist) {
    return;
  }

  var playlist = representation.playlist;
  var attributes = playlist.attributes;

  return JSON.stringify({
    id: playlist.id,
    bandwidth: representation.bandwidth,
    width: representation.width,
    height: representation.height,
    codecs: attributes && attributes.CODECS || ''
  });
}; // Utilities
14367
14368/**
14369 * Returns the CSS value for the specified property on an element
14370 * using `getComputedStyle`. Firefox has a long-standing issue where
14371 * getComputedStyle() may return null when running in an iframe with
14372 * `display: none`.
14373 *
14374 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
14375 * @param {HTMLElement} el the htmlelement to work on
 * @param {string} property the property to get the style for
14377 */
14378
14379
/**
 * Safely reads a computed CSS property for an element, returning '' when the
 * element is missing or when getComputedStyle yields null (a long-standing
 * Firefox issue inside `display: none` iframes, see
 * https://bugzilla.mozilla.org/show_bug.cgi?id=548397).
 *
 * @param {HTMLElement} el - the element to inspect
 * @param {string} property - the CSS property name
 * @return {string} the computed value, or '' when unavailable
 */
var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
  if (!el) {
    return '';
  }

  var style = window$1.getComputedStyle(el);
  return style ? style[property] : '';
};
14393/**
 * Reusable stable sort function
14395 *
14396 * @param {Playlists} array
14397 * @param {Function} sortFn Different comparators
14398 * @function stableSort
14399 */
14400
14401
/**
 * Sorts `array` in place with `sortFn`, breaking comparator ties by the
 * elements' original positions so that equal items keep their relative order.
 *
 * @param {Playlists} array - mutated in place
 * @param {Function} sortFn - comparator function
 * @function stableSort
 */
var stableSort = function stableSort(array, sortFn) {
  // snapshot of the original ordering, used to break comparator ties
  var original = array.slice();

  array.sort(function (a, b) {
    var result = sortFn(a, b);
    return result === 0 ? original.indexOf(a) - original.indexOf(b) : result;
  });
};
14414/**
14415 * A comparator function to sort two playlist object by bandwidth.
14416 *
14417 * @param {Object} left a media playlist object
14418 * @param {Object} right a media playlist object
14419 * @return {number} Greater than zero if the bandwidth attribute of
14420 * left is greater than the corresponding attribute of right. Less
14421 * than zero if the bandwidth of right is greater than left and
14422 * exactly zero if the two are equal.
14423 */
14424
14425
/**
 * Comparator sorting two playlists by their BANDWIDTH attribute. Playlists
 * without a (truthy) declared bandwidth are treated as Number.MAX_VALUE and
 * therefore sort last.
 *
 * @param {Object} left a media playlist object
 * @param {Object} right a media playlist object
 * @return {number} positive when left's bandwidth is greater, negative when
 *         right's is greater, zero when equal
 */
var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
  var leftBandwidth = left.attributes.BANDWIDTH || window$1.Number.MAX_VALUE;
  var rightBandwidth = right.attributes.BANDWIDTH || window$1.Number.MAX_VALUE;

  return leftBandwidth - rightBandwidth;
};
14443/**
14444 * A comparator function to sort two playlist object by resolution (width).
14445 *
14446 * @param {Object} left a media playlist object
14447 * @param {Object} right a media playlist object
14448 * @return {number} Greater than zero if the resolution.width attribute of
14449 * left is greater than the corresponding attribute of right. Less
14450 * than zero if the resolution.width of right is greater than left and
14451 * exactly zero if the two are equal.
14452 */
14453
/**
 * Comparator sorting two playlists by RESOLUTION width. Playlists without a
 * (truthy) declared width are treated as Number.MAX_VALUE and sort last; when
 * widths match and both sides declare BANDWIDTH, bandwidth breaks the tie.
 *
 * @param {Object} left a media playlist object
 * @param {Object} right a media playlist object
 * @return {number} positive when left sorts after right, negative when before,
 *         zero when equivalent
 */
var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
  var leftWidth = left.attributes.RESOLUTION && left.attributes.RESOLUTION.width || window$1.Number.MAX_VALUE;
  var rightWidth = right.attributes.RESOLUTION && right.attributes.RESOLUTION.width || window$1.Number.MAX_VALUE;

  // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple
  // renditions share the same media dimensions / resolution
  if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
    return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
  }

  return leftWidth - rightWidth;
};
14477/**
14478 * Chooses the appropriate media playlist based on bandwidth and player size
14479 *
14480 * @param {Object} master
14481 * Object representation of the master manifest
14482 * @param {number} playerBandwidth
14483 * Current calculated bandwidth of the player
14484 * @param {number} playerWidth
14485 * Current width of the player element (should account for the device pixel ratio)
14486 * @param {number} playerHeight
14487 * Current height of the player element (should account for the device pixel ratio)
14488 * @param {boolean} limitRenditionByPlayerDimensions
14489 * True if the player width and height should be used during the selection, false otherwise
14490 * @param {Object} masterPlaylistController
14491 * the current masterPlaylistController object
14492 * @return {Playlist} the highest bitrate playlist less than the
14493 * currently detected bandwidth, accounting for some amount of
14494 * bandwidth variance
14495 */
14496
var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions, masterPlaylistController) {
  // If we end up getting called before `master` is available, exit early
  if (!master) {
    return;
  }

  // gathered so the logFn calls at the bottom of this function can report
  // exactly which inputs drove the selection
  var options = {
    bandwidth: playerBandwidth,
    width: playerWidth,
    height: playerHeight,
    limitRenditionByPlayerDimensions: limitRenditionByPlayerDimensions
  };
  var playlists = master.playlists; // if playlist is audio only, select between currently active audio group playlists.

  if (Playlist.isAudioOnly(master)) {
    playlists = masterPlaylistController.getAudioTrackPlaylists_(); // add audioOnly to options so that we log audioOnly: true
    // at the bottom of this function for debugging.

    options.audioOnly = true;
  } // convert the playlists to an intermediary representation to make comparisons easier


  var sortedPlaylistReps = playlists.map(function (playlist) {
    var bandwidth;
    var width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
    var height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
    bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
    // a playlist without a BANDWIDTH attribute is treated as "infinitely
    // expensive" so it sorts last and is only used as a final fallback
    bandwidth = bandwidth || window$1.Number.MAX_VALUE;
    return {
      bandwidth: bandwidth,
      width: width,
      height: height,
      playlist: playlist
    };
  });
  stableSort(sortedPlaylistReps, function (left, right) {
    return left.bandwidth - right.bandwidth;
  }); // filter out any playlists that have been excluded due to
  // incompatible configurations

  sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
    return !Playlist.isIncompatible(rep.playlist);
  }); // filter out any playlists that have been disabled manually through the representations
  // api or blacklisted temporarily due to playback errors.

  var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
    return Playlist.isEnabled(rep.playlist);
  });

  if (!enabledPlaylistReps.length) {
    // if there are no enabled playlists, then they have all been blacklisted or disabled
    // by the user through the representations api. In this case, ignore blacklisting and
    // fallback to what the user wants by using playlists the user has not disabled.
    enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
      return !Playlist.isDisabled(rep.playlist);
    });
  } // filter out any variant that has greater effective bitrate
  // than the current estimated bandwidth


  var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
    return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
  });
  var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth
  // and then taking the very first element
  // (note: when bandwidthPlaylistReps is empty, the filter callback never
  // runs, so the undefined highestRemainingBandwidthRep is never dereferenced)

  var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
    return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
  })[0]; // if we're not going to limit renditions by player size, make an early decision.

  if (limitRenditionByPlayerDimensions === false) {
    var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];

    if (_chosenRep && _chosenRep.playlist) {
      var type = 'sortedPlaylistReps';

      if (bandwidthBestRep) {
        type = 'bandwidthBestRep';
      }

      if (enabledPlaylistReps[0]) {
        type = 'enabledPlaylistReps';
      }

      logFn("choosing " + representationToString(_chosenRep) + " using " + type + " with options", options);
      return _chosenRep.playlist;
    }

    logFn('could not choose a playlist with options', options);
    return null;
  } // filter out playlists without resolution information


  var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
    return rep.width && rep.height;
  }); // sort variants by resolution

  stableSort(haveResolution, function (left, right) {
    return left.width - right.width;
  }); // if we have the exact resolution as the player use it

  var resolutionBestRepList = haveResolution.filter(function (rep) {
    return rep.width === playerWidth && rep.height === playerHeight;
  });
  highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that have exact resolution

  var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
    return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
  })[0];
  var resolutionPlusOneList;
  var resolutionPlusOneSmallest;
  var resolutionPlusOneRep; // find the smallest variant that is larger than the player
  // if there is no match of exact resolution

  if (!resolutionBestRep) {
    resolutionPlusOneList = haveResolution.filter(function (rep) {
      return rep.width > playerWidth || rep.height > playerHeight;
    }); // find all the variants have the same smallest resolution

    resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
      return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
    }); // ensure that we also pick the highest bandwidth variant that
    // is just-larger-than the video player

    highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
    resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
      return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
    })[0];
  }

  var leastPixelDiffRep; // If this selector proves to be better than others,
  // resolutionPlusOneRep and resolutionBestRep and all
  // the code involving them should be removed.

  if (masterPlaylistController.experimentalLeastPixelDiffSelector) {
    // find the variant that is closest to the player's pixel size
    // (note: this mutates the rep objects by attaching a pixelDiff property)
    var leastPixelDiffList = haveResolution.map(function (rep) {
      rep.pixelDiff = Math.abs(rep.width - playerWidth) + Math.abs(rep.height - playerHeight);
      return rep;
    }); // get the highest bandwidth, closest resolution playlist

    stableSort(leastPixelDiffList, function (left, right) {
      // sort by highest bandwidth if pixelDiff is the same
      if (left.pixelDiff === right.pixelDiff) {
        return right.bandwidth - left.bandwidth;
      }

      return left.pixelDiff - right.pixelDiff;
    });
    leastPixelDiffRep = leastPixelDiffList[0];
  } // fallback chain of variants


  var chosenRep = leastPixelDiffRep || resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];

  if (chosenRep && chosenRep.playlist) {
    var _type = 'sortedPlaylistReps';

    if (leastPixelDiffRep) {
      _type = 'leastPixelDiffRep';
    } else if (resolutionPlusOneRep) {
      _type = 'resolutionPlusOneRep';
    } else if (resolutionBestRep) {
      _type = 'resolutionBestRep';
    } else if (bandwidthBestRep) {
      _type = 'bandwidthBestRep';
    } else if (enabledPlaylistReps[0]) {
      _type = 'enabledPlaylistReps';
    }

    logFn("choosing " + representationToString(chosenRep) + " using " + _type + " with options", options);
    return chosenRep.playlist;
  }

  logFn('could not choose a playlist with options', options);
  return null;
};
14674
/**
 * Picks a media playlist using the most recent bandwidth estimate and the
 * player's current dimensions.
 *
 * Expects to be called within the context of an instance of VhsHandler
 *
 * @return {Playlist} the highest bitrate playlist less than the
 * currently detected bandwidth, accounting for some amount of
 * bandwidth variance
 */

var lastBandwidthSelector = function lastBandwidthSelector() {
  // scale the CSS dimensions by the device pixel ratio when that option is on
  var pixelRatio = 1;

  if (this.useDevicePixelRatio) {
    pixelRatio = window$1.devicePixelRatio || 1;
  }

  var scaledWidth = parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio;
  var scaledHeight = parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio;

  return simpleSelector(this.playlists.master, this.systemBandwidth, scaledWidth, scaledHeight, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
};
/**
 * Builds a playlist selector that smooths the bandwidth estimate with an
 * exponential-weighted moving average before filtering for player size.
 *
 * The returned function expects to be called within the context of an
 * instance of VhsHandler.
 *
 * @param {number} decay - a number between 0 and 1. Higher values of
 * this parameter will cause previous bandwidth estimates to lose
 * significance more quickly.
 * @return {Function} a function which can be invoked to create a new
 * playlist selector function.
 * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
 */

var movingAverageBandwidthSelector = function movingAverageBandwidthSelector(decay) {
  if (decay < 0 || decay > 1) {
    throw new Error('Moving average bandwidth decay must be between 0 and 1.');
  }

  // closure state: the running average and the last raw measurement folded in
  var ewma = -1;
  var previousBandwidth = -1;

  return function () {
    var pixelRatio = 1;

    if (this.useDevicePixelRatio) {
      pixelRatio = window$1.devicePixelRatio || 1;
    }

    // seed the average with the first observed measurement
    if (ewma < 0) {
      ewma = this.systemBandwidth;
      previousBandwidth = this.systemBandwidth;
    }

    // only fold in a new sample when the measurement actually changed; this
    // stops the average decaying every 250ms on a constant systemBandwidth,
    // and ignores a systemBandwidth of 0 caused by chunk cancellation
    if (this.systemBandwidth > 0 && this.systemBandwidth !== previousBandwidth) {
      ewma = decay * this.systemBandwidth + (1 - decay) * ewma;
      previousBandwidth = this.systemBandwidth;
    }

    return simpleSelector(this.playlists.master, ewma, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
  };
};
/**
 * Chooses the media playlist that maximizes bandwidth while minimizing the
 * potential to rebuffer.
 *
 * @param {Object} settings
 *        Object of information required to use this selector
 * @param {Object} settings.master
 *        Object representation of the master manifest
 * @param {number} settings.currentTime
 *        The current time of the player
 * @param {number} settings.bandwidth
 *        Current measured bandwidth
 * @param {number} settings.duration
 *        Duration of the media
 * @param {number} settings.segmentDuration
 *        Segment duration to be used in round trip time calculations
 * @param {number} settings.timeUntilRebuffer
 *        Time left in seconds until the player has to rebuffer
 * @param {number} settings.currentTimeline
 *        The current timeline segments are being loaded from
 * @param {SyncController} settings.syncController
 *        SyncController for determining if we have a sync point for a given playlist
 * @return {Object|null}
 *         {Object} return.playlist
 *         The highest bandwidth playlist with the least amount of rebuffering
 *         {Number} return.rebufferingImpact
 *         The amount of time in seconds switching to this playlist will rebuffer. A
 *         negative value means that switching will cause zero rebuffering.
 */

var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
  var master = settings.master;
  var currentTime = settings.currentTime;
  var bandwidth = settings.bandwidth;
  var duration = settings.duration;
  var segmentDuration = settings.segmentDuration;
  var timeUntilRebuffer = settings.timeUntilRebuffer;
  var currentTimeline = settings.currentTimeline;
  var syncController = settings.syncController;

  // drop playlists excluded for incompatible configurations
  var compatiblePlaylists = master.playlists.filter(function (playlist) {
    return !Playlist.isIncompatible(playlist);
  });

  // drop playlists disabled via the representations api or temporarily
  // blacklisted due to playback errors
  var enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);

  if (!enabledPlaylists.length) {
    // everything is blacklisted or disabled; ignore blacklisting and honor
    // only the user's explicit disables
    enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
      return !Playlist.isDisabled(playlist);
    });
  }

  var bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));

  // estimate, per playlist, how much rebuffering a switch would cause
  var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
    var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime);

    // without a sync point, switching requires an extra sync request first,
    // doubling the request time
    var requestCount = syncPoint ? 1 : 2;
    var estimatedRequestTime = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);

    return {
      playlist: playlist,
      rebufferingImpact: estimatedRequestTime * requestCount - timeUntilRebuffer
    };
  });

  var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
    return estimate.rebufferingImpact <= 0;
  });

  // among the switches that cause no rebuffering, prefer the highest bandwidth
  stableSort(noRebufferingPlaylists, function (a, b) {
    return comparePlaylistBandwidth(b.playlist, a.playlist);
  });

  if (noRebufferingPlaylists.length) {
    return noRebufferingPlaylists[0];
  }

  // otherwise, take the switch with the smallest rebuffering impact
  stableSort(rebufferingEstimates, function (a, b) {
    return a.rebufferingImpact - b.rebufferingImpact;
  });
  return rebufferingEstimates[0] || null;
};
/**
 * Chooses the appropriate media playlist, which in this case is the lowest
 * bitrate rendition with video. If no renditions with video exist, returns
 * null (and in practice the lowest audio rendition via other selectors).
 *
 * Expects to be called within the context of an instance of VhsHandler
 *
 * @return {Object|null}
 *         {Object} return.playlist
 *         The lowest bitrate playlist that contains a video codec, or null
 *         when none of the enabled playlists advertise video.
 */

var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
  var self = this;

  // drop playlists excluded for incompatible configurations or playback errors
  var candidates = this.playlists.master.playlists.filter(Playlist.isEnabled);

  // ascending bitrate order
  stableSort(candidates, function (a, b) {
    return comparePlaylistBandwidth(a, b);
  });

  // Parse and assume that playlists with no video codec have no video
  // (this is not necessarily true, although it is generally true).
  //
  // If an entire manifest has no valid videos everything will get filtered
  // out.
  var candidatesWithVideo = candidates.filter(function (playlist) {
    return !!codecsForPlaylist(self.playlists.master, playlist).video;
  });

  return candidatesWithVideo[0] || null;
};
14852
14853/**
14854 * Combine all segments into a single Uint8Array
14855 *
14856 * @param {Object} segmentObj
14857 * @return {Uint8Array} concatenated bytes
14858 * @private
14859 */
14860var concatSegments = function concatSegments(segmentObj) {
14861 var offset = 0;
14862 var tempBuffer;
14863
14864 if (segmentObj.bytes) {
14865 tempBuffer = new Uint8Array(segmentObj.bytes); // combine the individual segments into one large typed-array
14866
14867 segmentObj.segments.forEach(function (segment) {
14868 tempBuffer.set(segment, offset);
14869 offset += segment.byteLength;
14870 });
14871 }
14872
14873 return tempBuffer;
14874};
14875
14876/**
14877 * @file text-tracks.js
14878 */
14879/**
14880 * Create captions text tracks on video.js if they do not exist
14881 *
14882 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
14883 * @param {Object} tech the video.js tech
14884 * @param {Object} captionStream the caption stream to create
14885 * @private
14886 */
14887
14888var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStream) {
14889 if (!inbandTextTracks[captionStream]) {
14890 tech.trigger({
14891 type: 'usage',
14892 name: 'vhs-608'
14893 });
14894 tech.trigger({
14895 type: 'usage',
14896 name: 'hls-608'
14897 });
14898 var instreamId = captionStream; // we need to translate SERVICEn for 708 to how mux.js currently labels them
14899
14900 if (/^cc708_/.test(captionStream)) {
14901 instreamId = 'SERVICE' + captionStream.split('_')[1];
14902 }
14903
14904 var track = tech.textTracks().getTrackById(instreamId);
14905
14906 if (track) {
14907 // Resuse an existing track with a CC# id because this was
14908 // very likely created by videojs-contrib-hls from information
14909 // in the m3u8 for us to use
14910 inbandTextTracks[captionStream] = track;
14911 } else {
14912 // This section gets called when we have caption services that aren't specified in the manifest.
14913 // Manifest level caption services are handled in media-groups.js under CLOSED-CAPTIONS.
14914 var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
14915 var label = captionStream;
14916 var language = captionStream;
14917 var def = false;
14918 var captionService = captionServices[instreamId];
14919
14920 if (captionService) {
14921 label = captionService.label;
14922 language = captionService.language;
14923 def = captionService.default;
14924 } // Otherwise, create a track with the default `CC#` label and
14925 // without a language
14926
14927
14928 inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
14929 kind: 'captions',
14930 id: instreamId,
14931 // TODO: investigate why this doesn't seem to turn the caption on by default
14932 default: def,
14933 label: label,
14934 language: language
14935 }, false).track;
14936 }
14937 }
14938};
14939/**
14940 * Add caption text track data to a source handler given an array of captions
14941 *
14942 * @param {Object}
14943 * @param {Object} inbandTextTracks the inband text tracks
14944 * @param {number} timestampOffset the timestamp offset of the source buffer
14945 * @param {Array} captionArray an array of caption data
14946 * @private
14947 */
14948
14949var addCaptionData = function addCaptionData(_ref) {
14950 var inbandTextTracks = _ref.inbandTextTracks,
14951 captionArray = _ref.captionArray,
14952 timestampOffset = _ref.timestampOffset;
14953
14954 if (!captionArray) {
14955 return;
14956 }
14957
14958 var Cue = window$1.WebKitDataCue || window$1.VTTCue;
14959 captionArray.forEach(function (caption) {
14960 var track = caption.stream;
14961 inbandTextTracks[track].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
14962 });
14963};
14964/**
14965 * Define properties on a cue for backwards compatability,
14966 * but warn the user that the way that they are using it
14967 * is depricated and will be removed at a later date.
14968 *
14969 * @param {Cue} cue the cue to add the properties on
14970 * @private
14971 */
14972
14973var deprecateOldCue = function deprecateOldCue(cue) {
14974 Object.defineProperties(cue.frame, {
14975 id: {
14976 get: function get() {
14977 videojs.log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
14978 return cue.value.key;
14979 }
14980 },
14981 value: {
14982 get: function get() {
14983 videojs.log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
14984 return cue.value.data;
14985 }
14986 },
14987 privateData: {
14988 get: function get() {
14989 videojs.log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
14990 return cue.value.data;
14991 }
14992 }
14993 });
14994};
14995/**
14996 * Add metadata text track data to a source handler given an array of metadata
14997 *
14998 * @param {Object}
14999 * @param {Object} inbandTextTracks the inband text tracks
15000 * @param {Array} metadataArray an array of meta data
15001 * @param {number} timestampOffset the timestamp offset of the source buffer
15002 * @param {number} videoDuration the duration of the video
15003 * @private
15004 */
15005
15006
15007var addMetadata = function addMetadata(_ref2) {
15008 var inbandTextTracks = _ref2.inbandTextTracks,
15009 metadataArray = _ref2.metadataArray,
15010 timestampOffset = _ref2.timestampOffset,
15011 videoDuration = _ref2.videoDuration;
15012
15013 if (!metadataArray) {
15014 return;
15015 }
15016
15017 var Cue = window$1.WebKitDataCue || window$1.VTTCue;
15018 var metadataTrack = inbandTextTracks.metadataTrack_;
15019
15020 if (!metadataTrack) {
15021 return;
15022 }
15023
15024 metadataArray.forEach(function (metadata) {
15025 var time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,
15026 // ignore this bit of metadata.
15027 // This likely occurs when you have an non-timed ID3 tag like TIT2,
15028 // which is the "Title/Songname/Content description" frame
15029
15030 if (typeof time !== 'number' || window$1.isNaN(time) || time < 0 || !(time < Infinity)) {
15031 return;
15032 }
15033
15034 metadata.frames.forEach(function (frame) {
15035 var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
15036 cue.frame = frame;
15037 cue.value = frame;
15038 deprecateOldCue(cue);
15039 metadataTrack.addCue(cue);
15040 });
15041 });
15042
15043 if (!metadataTrack.cues || !metadataTrack.cues.length) {
15044 return;
15045 } // Updating the metadeta cues so that
15046 // the endTime of each cue is the startTime of the next cue
15047 // the endTime of last cue is the duration of the video
15048
15049
15050 var cues = metadataTrack.cues;
15051 var cuesArray = []; // Create a copy of the TextTrackCueList...
15052 // ...disregarding cues with a falsey value
15053
15054 for (var i = 0; i < cues.length; i++) {
15055 if (cues[i]) {
15056 cuesArray.push(cues[i]);
15057 }
15058 } // Group cues by their startTime value
15059
15060
15061 var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
15062 var timeSlot = obj[cue.startTime] || [];
15063 timeSlot.push(cue);
15064 obj[cue.startTime] = timeSlot;
15065 return obj;
15066 }, {}); // Sort startTimes by ascending order
15067
15068 var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
15069 return Number(a) - Number(b);
15070 }); // Map each cue group's endTime to the next group's startTime
15071
15072 sortedStartTimes.forEach(function (startTime, idx) {
15073 var cueGroup = cuesGroupedByStartTime[startTime];
15074 var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration; // Map each cue's endTime the next group's startTime
15075
15076 cueGroup.forEach(function (cue) {
15077 cue.endTime = nextTime;
15078 });
15079 });
15080};
15081/**
15082 * Create metadata text track on video.js if it does not exist
15083 *
15084 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
15085 * @param {string} dispatchType the inband metadata track dispatch type
15086 * @param {Object} tech the video.js tech
15087 * @private
15088 */
15089
15090var createMetadataTrackIfNotExists = function createMetadataTrackIfNotExists(inbandTextTracks, dispatchType, tech) {
15091 if (inbandTextTracks.metadataTrack_) {
15092 return;
15093 }
15094
15095 inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
15096 kind: 'metadata',
15097 label: 'Timed Metadata'
15098 }, false).track;
15099 inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
15100};
15101/**
15102 * Remove cues from a track on video.js.
15103 *
15104 * @param {Double} start start of where we should remove the cue
15105 * @param {Double} end end of where the we should remove the cue
15106 * @param {Object} track the text track to remove the cues from
15107 * @private
15108 */
15109
15110var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
15111 var i;
15112 var cue;
15113
15114 if (!track) {
15115 return;
15116 }
15117
15118 if (!track.cues) {
15119 return;
15120 }
15121
15122 i = track.cues.length;
15123
15124 while (i--) {
15125 cue = track.cues[i]; // Remove any cue within the provided start and end time
15126
15127 if (cue.startTime >= start && cue.endTime <= end) {
15128 track.removeCue(cue);
15129 }
15130 }
15131};
15132/**
15133 * Remove duplicate cues from a track on video.js (a cue is considered a
15134 * duplicate if it has the same time interval and text as another)
15135 *
15136 * @param {Object} track the text track to remove the duplicate cues from
15137 * @private
15138 */
15139
15140var removeDuplicateCuesFromTrack = function removeDuplicateCuesFromTrack(track) {
15141 var cues = track.cues;
15142
15143 if (!cues) {
15144 return;
15145 }
15146
15147 for (var i = 0; i < cues.length; i++) {
15148 var duplicates = [];
15149 var occurrences = 0;
15150
15151 for (var j = 0; j < cues.length; j++) {
15152 if (cues[i].startTime === cues[j].startTime && cues[i].endTime === cues[j].endTime && cues[i].text === cues[j].text) {
15153 occurrences++;
15154
15155 if (occurrences > 1) {
15156 duplicates.push(cues[j]);
15157 }
15158 }
15159 }
15160
15161 if (duplicates.length) {
15162 duplicates.forEach(function (dupe) {
15163 return track.removeCue(dupe);
15164 });
15165 }
15166 }
15167};
15168
15169/**
15170 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
15171 * front of current time.
15172 *
15173 * @param {Array} buffer
15174 * The current buffer of gop information
15175 * @param {number} currentTime
15176 * The current time
15177 * @param {Double} mapping
15178 * Offset to map display time to stream presentation time
15179 * @return {Array}
15180 * List of gops considered safe to append over
15181 */
15182
15183var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
15184 if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
15185 return [];
15186 } // pts value for current time + 3 seconds to give a bit more wiggle room
15187
15188
15189 var currentTimePts = Math.ceil((currentTime - mapping + 3) * ONE_SECOND_IN_TS);
15190 var i;
15191
15192 for (i = 0; i < buffer.length; i++) {
15193 if (buffer[i].pts > currentTimePts) {
15194 break;
15195 }
15196 }
15197
15198 return buffer.slice(i);
15199};
15200/**
15201 * Appends gop information (timing and byteLength) received by the transmuxer for the
15202 * gops appended in the last call to appendBuffer
15203 *
15204 * @param {Array} buffer
15205 * The current buffer of gop information
15206 * @param {Array} gops
15207 * List of new gop information
15208 * @param {boolean} replace
15209 * If true, replace the buffer with the new gop information. If false, append the
15210 * new gop information to the buffer in the right location of time.
15211 * @return {Array}
15212 * Updated list of gop information
15213 */
15214
15215var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
15216 if (!gops.length) {
15217 return buffer;
15218 }
15219
15220 if (replace) {
15221 // If we are in safe append mode, then completely overwrite the gop buffer
15222 // with the most recent appeneded data. This will make sure that when appending
15223 // future segments, we only try to align with gops that are both ahead of current
15224 // time and in the last segment appended.
15225 return gops.slice();
15226 }
15227
15228 var start = gops[0].pts;
15229 var i = 0;
15230
15231 for (i; i < buffer.length; i++) {
15232 if (buffer[i].pts >= start) {
15233 break;
15234 }
15235 }
15236
15237 return buffer.slice(0, i).concat(gops);
15238};
15239/**
15240 * Removes gop information in buffer that overlaps with provided start and end
15241 *
15242 * @param {Array} buffer
15243 * The current buffer of gop information
15244 * @param {Double} start
15245 * position to start the remove at
15246 * @param {Double} end
15247 * position to end the remove at
15248 * @param {Double} mapping
15249 * Offset to map display time to stream presentation time
15250 */
15251
15252var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
15253 var startPts = Math.ceil((start - mapping) * ONE_SECOND_IN_TS);
15254 var endPts = Math.ceil((end - mapping) * ONE_SECOND_IN_TS);
15255 var updatedBuffer = buffer.slice();
15256 var i = buffer.length;
15257
15258 while (i--) {
15259 if (buffer[i].pts <= endPts) {
15260 break;
15261 }
15262 }
15263
15264 if (i === -1) {
15265 // no removal because end of remove range is before start of buffer
15266 return updatedBuffer;
15267 }
15268
15269 var j = i + 1;
15270
15271 while (j--) {
15272 if (buffer[j].pts <= startPts) {
15273 break;
15274 }
15275 } // clamp remove range start to 0 index
15276
15277
15278 j = Math.max(j, 0);
15279 updatedBuffer.splice(j, i - j + 1);
15280 return updatedBuffer;
15281};
15282
var shallowEqual = function shallowEqual(a, b) {
  // if either side is missing (including both missing), they are not equal
  if (!a || !b) {
    return false;
  }

  // identical references are trivially equal
  if (a === b) {
    return true;
  }

  // sort keys so they can be compared pairwise below
  var aKeys = Object.keys(a).sort();
  var bKeys = Object.keys(b).sort();

  // a different number of keys means not equal
  if (aKeys.length !== bKeys.length) {
    return false;
  }

  for (var i = 0; i < aKeys.length; i++) {
    var key = aKeys[i];

    // keys must match pairwise after sorting, and values must be strictly equal
    if (key !== bKeys[i] || a[key] !== b[key]) {
      return false;
    }
  }

  return true;
};
15320
// https://www.w3.org/TR/WebIDL-1/#quotaexceedederror
// Legacy numeric DOMException code for QuotaExceededError (see the WebIDL
// spec linked above).
var QUOTA_EXCEEDED_ERR = 22;
15323
15324/**
15325 * The segment loader has no recourse except to fetch a segment in the
15326 * current playlist and use the internal timestamps in that segment to
15327 * generate a syncPoint. This function returns a good candidate index
15328 * for that process.
15329 *
15330 * @param {Array} segments - the segments array from a playlist.
15331 * @return {number} An index of a segment from the playlist to load
15332 */
15333
15334var getSyncSegmentCandidate = function getSyncSegmentCandidate(currentTimeline, segments, targetTime) {
15335 segments = segments || [];
15336 var timelineSegments = [];
15337 var time = 0;
15338
15339 for (var i = 0; i < segments.length; i++) {
15340 var segment = segments[i];
15341
15342 if (currentTimeline === segment.timeline) {
15343 timelineSegments.push(i);
15344 time += segment.duration;
15345
15346 if (time > targetTime) {
15347 return i;
15348 }
15349 }
15350 }
15351
15352 if (timelineSegments.length === 0) {
15353 return 0;
15354 } // default to the last timeline segment
15355
15356
15357 return timelineSegments[timelineSegments.length - 1];
15358}; // In the event of a quota exceeded error, keep at least one second of back buffer. This
// number was arbitrarily chosen and may be updated in the future, but seemed reasonable
// as a start to prevent any potential issues with removing content too close to the
// playhead.

var MIN_BACK_BUFFER = 1; // in seconds (see the back-buffer comment above)

// in ms
var CHECK_BUFFER_DELAY = 500;
15366
// true only for real, finite numbers: rejects non-numbers, NaN and +/-Infinity
var finite = function finite(num) {
  if (typeof num !== 'number') {
    return false;
  }

  return isFinite(num);
}; // With most content hovering around 30fps, if a segment has a duration less than a half
// frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
// not accurately reflect the rest of the content.


var MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
// Returns an error message when a rendition switch would illegally change the
// media configuration (e.g. video -> audio-only), or null when the switch is
// acceptable or cannot be evaluated.
var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, trackInfo) {
  // Although these checks should most likely cover non 'main' types, for now
  // it narrows the scope of our checks.
  if (loaderType !== 'main' || !startingMedia || !trackInfo) {
    return null;
  }

  var hadVideo = startingMedia.hasVideo;
  var hasVideo = trackInfo.hasVideo;

  if (!trackInfo.hasAudio && !hasVideo) {
    return 'Neither audio nor video found in segment.';
  }

  if (hadVideo && !hasVideo) {
    return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
  }

  if (!hadVideo && hasVideo) {
    return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
  }

  return null;
};
15396/**
15397 * Calculates a time value that is safe to remove from the back buffer without interrupting
15398 * playback.
15399 *
15400 * @param {TimeRange} seekable
15401 * The current seekable range
15402 * @param {number} currentTime
15403 * The current time of the player
15404 * @param {number} targetDuration
15405 * The target duration of the current playlist
15406 * @return {number}
15407 * Time that is safe to remove from the back buffer without interrupting playback
15408 */
15409
var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable, currentTime, targetDuration) {
  // Start from the configured back-buffer length behind the playhead. A
  // reasonable default here matters for high-bitrate content and large VOD /
  // live windows, where an overfilled buffer can throw an APPEND_BUFFER_ERR.
  var candidateTrimTime = currentTime - Config.BACK_BUFFER_LENGTH;

  if (seekable.length) {
    // A live window may be shorter than the full allowed back buffer; never
    // keep content that has already fallen out of the seekable range.
    candidateTrimTime = Math.max(candidateTrimTime, seekable.start(0));
  }

  // Never trim within one target duration of the playhead, to avoid removing
  // the GOP currently being played (which can cause playback stalls).
  var playheadSafeLimit = currentTime - targetDuration;
  return Math.min(playheadSafeLimit, candidateTrimTime);
};
var segmentInfoString = function segmentInfoString(segmentInfo) {
  var playlist = segmentInfo.playlist;
  var seq = playlist.mediaSequence;
  var segments = playlist.segments === void 0 ? [] : playlist.segments;
  var segment = segmentInfo.segment;
  var part = segmentInfo.part;
  var index = segmentInfo.mediaIndex;
  var partIndex = segmentInfo.partIndex;
  var lastSegmentIndex = segments.length - 1;

  // Describe how this segment was chosen by the loader.
  var selection = 'mediaIndex/partIndex increment';

  if (segmentInfo.getMediaInfoForTime) {
    selection = "getMediaInfoForTime (" + segmentInfo.getMediaInfoForTime + ")";
  } else if (segmentInfo.isSyncRequest) {
    selection = 'getSyncSegmentCandidate (isSyncRequest)';
  }

  if (segmentInfo.independent) {
    selection += " with independent " + segmentInfo.independent;
  }

  var hasPartIndex = typeof partIndex === 'number';
  var name = segment.uri ? 'segment' : 'pre-segment';
  // Part count is reported zero-based so it lines up with partIndex.
  var zeroBasedPartCount = hasPartIndex ? getKnownPartCount({
    preloadSegment: segment
  }) - 1 : 0;

  var pieces = [
    name + " [" + (seq + index) + "/" + (seq + lastSegmentIndex) + "]",
    hasPartIndex ? " part [" + partIndex + "/" + zeroBasedPartCount + "]" : '',
    " segment start/end [" + segment.start + " => " + segment.end + "]",
    hasPartIndex ? " part start/end [" + part.start + " => " + part.end + "]" : '',
    " startOfSegment [" + segmentInfo.startOfSegment + "]",
    " duration [" + segmentInfo.duration + "]",
    " timeline [" + segmentInfo.timeline + "]",
    " selected by [" + selection + "]",
    " playlist [" + playlist.id + "]"
  ];
  return pieces.join('');
};
15462
var timingInfoPropertyForMedia = function timingInfoPropertyForMedia(mediaType) {
  // e.g. 'video' -> 'videoTimingInfo'
  return [mediaType, 'TimingInfo'].join('');
};
15466/**
15467 * Returns the timestamp offset to use for the segment.
15468 *
15469 * @param {number} segmentTimeline
15470 * The timeline of the segment
15471 * @param {number} currentTimeline
15472 * The timeline currently being followed by the loader
15473 * @param {number} startOfSegment
15474 * The estimated segment start
15475 * @param {TimeRange[]} buffered
15476 * The loader's buffer
15477 * @param {boolean} overrideCheck
15478 * If true, no checks are made to see if the timestamp offset value should be set,
15479 * but sets it directly to a value.
15480 *
15481 * @return {number|null}
15482 * Either a number representing a new timestamp offset, or null if the segment is
15483 * part of the same timeline
15484 */
15485
15486
var timestampOffsetForSegment = function timestampOffsetForSegment(_ref) {
  var segmentTimeline = _ref.segmentTimeline,
      currentTimeline = _ref.currentTimeline,
      startOfSegment = _ref.startOfSegment,
      buffered = _ref.buffered,
      overrideCheck = _ref.overrideCheck;

  // Staying on the same timeline (and not forced by overrideCheck) means no
  // new timestamp offset is needed. Note: we deliberately do NOT compare the
  // segment start against the current offset; that older approach produced
  // bad behavior, especially on long-running live streams.
  var sameTimeline = segmentTimeline === currentTimeline;

  if (sameTimeline && !overrideCheck) {
    return null;
  }

  // A rendition switch can request a segment from an *earlier* timeline (the
  // new rendition must refill time the old one already buffered). Using the
  // buffered end there would duplicate content, so prefer the estimated
  // segment start, which should better reflect the segment's actual start.
  if (segmentTimeline < currentTimeline) {
    return startOfSegment;
  }

  // Crossing forward into a new timeline: anchor the new content to the end
  // of the buffer, as content after a discontinuity behaves as if it were
  // time 0. Fall back to the estimated start when nothing is buffered yet.
  // (startOfSegment alone is less reliable: it may use the previous segment's
  // end rather than the true buffered end.)
  if (buffered.length) {
    return buffered.end(buffered.length - 1);
  }

  return startOfSegment;
};
15539/**
15540 * Returns whether or not the loader should wait for a timeline change from the timeline
15541 * change controller before processing the segment.
15542 *
15543 * Primary timing in VHS goes by video. This is different from most media players, as
15544 * audio is more often used as the primary timing source. For the foreseeable future, VHS
15545 * will continue to use video as the primary timing source, due to the current logic and
15546 * expectations built around it.
15547
15548 * Since the timing follows video, in order to maintain sync, the video loader is
15549 * responsible for setting both audio and video source buffer timestamp offsets.
15550 *
15551 * Setting different values for audio and video source buffers could lead to
15552 * desyncing. The following examples demonstrate some of the situations where this
15553 * distinction is important. Note that all of these cases involve demuxed content. When
15554 * content is muxed, the audio and video are packaged together, therefore syncing
15555 * separate media playlists is not an issue.
15556 *
15557 * CASE 1: Audio prepares to load a new timeline before video:
15558 *
15559 * Timeline: 0 1
15560 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15561 * Audio Loader: ^
15562 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15563 * Video Loader ^
15564 *
15565 * In the above example, the audio loader is preparing to load the 6th segment, the first
15566 * after a discontinuity, while the video loader is still loading the 5th segment, before
15567 * the discontinuity.
15568 *
15569 * If the audio loader goes ahead and loads and appends the 6th segment before the video
15570 * loader crosses the discontinuity, then when appended, the 6th audio segment will use
15571 * the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
15572 * the audio loader must provide the audioAppendStart value to trim the content in the
15573 * transmuxer, and that value relies on the audio timestamp offset. Since the audio
15574 * timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
15575 * segment until that value is provided.
15576 *
15577 * CASE 2: Video prepares to load a new timeline before audio:
15578 *
15579 * Timeline: 0 1
15580 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15581 * Audio Loader: ^
15582 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15583 * Video Loader ^
15584 *
15585 * In the above example, the video loader is preparing to load the 6th segment, the first
15586 * after a discontinuity, while the audio loader is still loading the 5th segment, before
15587 * the discontinuity.
15588 *
15589 * If the video loader goes ahead and loads and appends the 6th segment, then once the
15590 * segment is loaded and processed, both the video and audio timestamp offsets will be
15591 * set, since video is used as the primary timing source. This is to ensure content lines
15592 * up appropriately, as any modifications to the video timing are reflected by audio when
15593 * the video loader sets the audio and video timestamp offsets to the same value. However,
15594 * setting the timestamp offset for audio before audio has had a chance to change
15595 * timelines will likely lead to desyncing, as the audio loader will append segment 5 with
15596 * a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
15597 *
15598 * CASE 3: When seeking, audio prepares to load a new timeline before video
15599 *
15600 * Timeline: 0 1
15601 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15602 * Audio Loader: ^
15603 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15604 * Video Loader ^
15605 *
15606 * In the above example, both audio and video loaders are loading segments from timeline
15607 * 0, but imagine that the seek originated from timeline 1.
15608 *
15609 * When seeking to a new timeline, the timestamp offset will be set based on the expected
15610 * segment start of the loaded video segment. In order to maintain sync, the audio loader
15611 * must wait for the video loader to load its segment and update both the audio and video
15612 * timestamp offsets before it may load and append its own segment. This is the case
15613 * whether the seek results in a mismatched segment request (e.g., the audio loader
15614 * chooses to load segment 3 and the video loader chooses to load segment 4) or the
15615 * loaders choose to load the same segment index from each playlist, as the segments may
15616 * not be aligned perfectly, even for matching segment indexes.
15617 *
15618 * @param {Object} timelinechangeController
15619 * @param {number} currentTimeline
15620 * The timeline currently being followed by the loader
15621 * @param {number} segmentTimeline
15622 * The timeline of the segment being loaded
15623 * @param {('main'|'audio')} loaderType
15624 * The loader type
15625 * @param {boolean} audioDisabled
15626 * Whether the audio is disabled for the loader. This should only be true when the
15627 * loader may have muxed audio in its segment, but should not append it, e.g., for
15628 * the main loader when an alternate audio playlist is active.
15629 *
15630 * @return {boolean}
15631 * Whether the loader should wait for a timeline change from the timeline change
15632 * controller before processing the segment
15633 */
15634
var shouldWaitForTimelineChange = function shouldWaitForTimelineChange(_ref2) {
  var timelineChangeController = _ref2.timelineChangeController,
      currentTimeline = _ref2.currentTimeline,
      segmentTimeline = _ref2.segmentTimeline,
      loaderType = _ref2.loaderType,
      audioDisabled = _ref2.audioDisabled;

  // No timeline crossing, nothing to coordinate.
  if (currentTimeline === segmentTimeline) {
    return false;
  }

  if (loaderType === 'audio') {
    // Audio must wait until main has *completed* a timeline change to the
    // timeline audio wants to enter: if main has never changed timelines
    // (no first segment yet) or hasn't reached this timeline, wait.
    var lastMainTimelineChange = timelineChangeController.lastTimelineChange({
      type: 'main'
    });
    return !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;
  }

  // The main loader only coordinates with audio when audio is demuxed (its
  // own audio is disabled). With muxed or audio/video-only content there is
  // nothing to wait for.
  if (loaderType !== 'main' || !audioDisabled) {
    return false;
  }

  // Main sets the timestamp offset for both source buffers, so it must not
  // cross a timeline until audio is *about* to cross the same one; otherwise
  // audio segments still on the previous timeline would be shifted by the new
  // offset. This also applies to the very first change (timeline -1 -> 0) so
  // main can never get two timelines ahead of audio.
  var pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
    type: 'audio'
  });
  var audioIsReady = pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline;
  return !audioIsReady;
};
var mediaDuration = function mediaDuration(timingInfos) {
  var maxDuration = 0;
  var mediaTypes = ['video', 'audio'];

  for (var i = 0; i < mediaTypes.length; i++) {
    var typeTimingInfo = timingInfos[mediaTypes[i] + "TimingInfo"];

    if (!typeTimingInfo) {
      continue;
    }

    var start = typeTimingInfo.start;
    var end = typeTimingInfo.end;
    var duration = void 0;

    // Use BigInt math when either bound exceeds Number precision; otherwise
    // plain number subtraction. Missing/invalid bounds leave duration
    // undefined and the entry is skipped.
    if (typeof start === 'bigint' || typeof end === 'bigint') {
      duration = window$1.BigInt(end) - window$1.BigInt(start);
    } else if (typeof start === 'number' && typeof end === 'number') {
      duration = end - start;
    }

    if (typeof duration !== 'undefined' && duration > maxDuration) {
      maxDuration = duration;
    }
  }

  // convert back to a number if it is lower than MAX_SAFE_INTEGER
  // as we only need BigInt when we are above that.
  if (typeof maxDuration === 'bigint' && maxDuration < Number.MAX_SAFE_INTEGER) {
    maxDuration = Number(maxDuration);
  }

  return maxDuration;
};
var segmentTooLong = function segmentTooLong(_ref3) {
  var segmentDuration = _ref3.segmentDuration;
  var maxDuration = _ref3.maxDuration;

  // 0/undefined durations mean metadata-only segments or a lack of
  // information; never flag those.
  if (!segmentDuration) {
    return false;
  }

  // HLS (draft-pantos-http-live-streaming-23 §4.3.3.1): each segment's EXTINF
  // duration, rounded to the nearest integer, MUST be <= the target duration;
  // longer segments can trigger playback stalls or other errors. For DASH the
  // mpd-parser uses the largest reported segment duration as the target, and
  // that value is occasionally approximate, so a fudge factor keeps this
  // check from firing too often.
  var roundedDuration = Math.round(segmentDuration);
  return roundedDuration > maxDuration + TIME_FUDGE_FACTOR;
};
var getTroublesomeSegmentDurationMessage = function getTroublesomeSegmentDurationMessage(segmentInfo, sourceType) {
  // DASH timing isn't modeled exactly yet, so only HLS content is checked.
  if (sourceType !== 'hls') {
    return null;
  }

  var segmentDuration = mediaDuration({
    audioTimingInfo: segmentInfo.audioTimingInfo,
    videoTimingInfo: segmentInfo.videoTimingInfo
  });

  // A duration of 0 is either missing information or a metadata-only
  // segment; neither should be reported.
  if (!segmentDuration) {
    return null;
  }

  var targetDuration = segmentInfo.playlist.targetDuration;
  var isSegmentWayTooLong = segmentTooLong({
    segmentDuration: segmentDuration,
    maxDuration: targetDuration * 2
  });
  var isSegmentSlightlyTooLong = segmentTooLong({
    segmentDuration: segmentDuration,
    maxDuration: targetDuration
  });

  if (!isSegmentWayTooLong && !isSegmentSlightlyTooLong) {
    return null;
  }

  var segmentTooLongMessage = "Segment with index " + segmentInfo.mediaIndex + " " + ("from playlist " + segmentInfo.playlist.id + " ") + ("has a duration of " + segmentDuration + " ") + ("when the reported duration is " + segmentInfo.duration + " ") + ("and the target duration is " + targetDuration + ". ") + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1';

  return {
    // More than double the target duration is a warning; merely exceeding it
    // is informational.
    severity: isSegmentWayTooLong ? 'warn' : 'info',
    message: segmentTooLongMessage
  };
};
15785/**
15786 * An object that manages segment loading and appending.
15787 *
15788 * @class SegmentLoader
15789 * @param {Object} options required and optional options
15790 * @extends videojs.EventTarget
15791 */
15792
15793var SegmentLoader = /*#__PURE__*/function (_videojs$EventTarget) {
15794 _inheritsLoose(SegmentLoader, _videojs$EventTarget);
15795
  /**
   * SegmentLoader constructor.
   *
   * Validates required settings, initializes public stats, copies private
   * settings, sets up the internal queues/caches, creates the transmuxer,
   * and wires the event listeners that drive the load/append state machine.
   *
   * @param {Object} settings - required loader configuration (validated below)
   * @param {Object} [options] - additional options (consumed by subclasses)
   */
  function SegmentLoader(settings, options) {
    var _this;

    _this = _videojs$EventTarget.call(this) || this; // check pre-conditions

    if (!settings) {
      throw new TypeError('Initialization settings are required');
    }

    if (typeof settings.currentTime !== 'function') {
      throw new TypeError('No currentTime getter specified');
    }

    if (!settings.mediaSource) {
      throw new TypeError('No MediaSource specified');
    } // public properties


    _this.bandwidth = settings.bandwidth;
    _this.throughput = {
      rate: 0,
      count: 0
    };
    _this.roundTrip = NaN;

    _this.resetStats_();

    _this.mediaIndex = null;
    _this.partIndex = null; // private settings

    _this.hasPlayed_ = settings.hasPlayed;
    _this.currentTime_ = settings.currentTime;
    _this.seekable_ = settings.seekable;
    _this.seeking_ = settings.seeking;
    _this.duration_ = settings.duration;
    _this.mediaSource_ = settings.mediaSource;
    _this.vhs_ = settings.vhs;
    _this.loaderType_ = settings.loaderType;
    _this.currentMediaInfo_ = void 0;
    _this.startingMediaInfo_ = void 0;
    _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
    _this.goalBufferLength_ = settings.goalBufferLength;
    _this.sourceType_ = settings.sourceType;
    _this.sourceUpdater_ = settings.sourceUpdater;
    _this.inbandTextTracks_ = settings.inbandTextTracks;
    _this.state_ = 'INIT';
    _this.timelineChangeController_ = settings.timelineChangeController;
    _this.shouldSaveSegmentTimingInfo_ = true;
    _this.parse708captions_ = settings.parse708captions;
    _this.useDtsForTimestampOffset_ = settings.useDtsForTimestampOffset;
    _this.captionServices_ = settings.captionServices;
    _this.experimentalExactManifestTimings = settings.experimentalExactManifestTimings; // private instance variables

    _this.checkBufferTimeout_ = null;
    _this.error_ = void 0;
    _this.currentTimeline_ = -1;
    _this.pendingSegment_ = null;
    _this.xhrOptions_ = null;
    _this.pendingSegments_ = [];
    _this.audioDisabled_ = false;
    _this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controller

    _this.gopBuffer_ = [];
    _this.timeMapping_ = 0;
    _this.safeAppend_ = videojs.browser.IE_VERSION >= 11;
    _this.appendInitSegment_ = {
      audio: true,
      video: true
    };
    _this.playlistOfLastInitSegment_ = {
      audio: null,
      video: null
    };
    _this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough
    // information yet to start the loading process (e.g., if the audio loader wants to
    // load a segment from the next timeline but the main loader hasn't yet crossed that
    // timeline), then the load call will be added to the queue until it is ready to be
    // processed.

    _this.loadQueue_ = [];
    _this.metadataQueue_ = {
      id3: [],
      caption: []
    };
    _this.waitingOnRemove_ = false;
    _this.quotaExceededErrorRetryTimeout_ = null; // Fragmented mp4 playback

    _this.activeInitSegmentId_ = null;
    _this.initSegments_ = {}; // HLSe playback

    _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
    _this.keyCache_ = {};
    _this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings
    // between a time in the display time and a segment index within
    // a playlist

    _this.syncController_ = settings.syncController;
    _this.syncPoint_ = {
      segmentIndex: 0,
      time: 0
    };
    _this.transmuxer_ = _this.createTransmuxer_();

    // Re-emit the sync controller's updates as this loader's own event.
    _this.triggerSyncInfoUpdate_ = function () {
      return _this.trigger('syncinfoupdate');
    };

    _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);

    _this.mediaSource_.addEventListener('sourceopen', function () {
      if (!_this.isEndOfStream_()) {
        _this.ended_ = false;
      }
    }); // ...for determining the fetch location


    _this.fetchAtBuffer_ = false;
    _this.logger_ = logger("SegmentLoader[" + _this.loaderType_ + "]");
    // `state` wraps state_ so every transition is logged and emits
    // 'statechange' for listeners.
    Object.defineProperty(_assertThisInitialized(_this), 'state', {
      get: function get() {
        return this.state_;
      },
      set: function set(newState) {
        if (newState !== this.state_) {
          this.logger_(this.state_ + " -> " + newState);
          this.state_ = newState;
          this.trigger('statechange');
        }
      }
    });

    _this.sourceUpdater_.on('ready', function () {
      if (_this.hasEnoughInfoToAppend_()) {
        _this.processCallQueue_();
      }
    }); // Only the main loader needs to listen for pending timeline changes, as the main
    // loader should wait for audio to be ready to change its timeline so that both main
    // and audio timelines change together. For more details, see the
    // shouldWaitForTimelineChange function.


    if (_this.loaderType_ === 'main') {
      _this.timelineChangeController_.on('pendingtimelinechange', function () {
        if (_this.hasEnoughInfoToAppend_()) {
          _this.processCallQueue_();
        }
      });
    } // The main loader only listens on pending timeline changes, but the audio loader,
    // since its loads follow main, needs to listen on timeline changes. For more details,
    // see the shouldWaitForTimelineChange function.


    if (_this.loaderType_ === 'audio') {
      _this.timelineChangeController_.on('timelinechange', function () {
        if (_this.hasEnoughInfoToLoad_()) {
          _this.processLoadQueue_();
        }

        if (_this.hasEnoughInfoToAppend_()) {
          _this.processCallQueue_();
        }
      });
    }

    return _this;
  }
15962
  // Instance methods are attached to the prototype below.
  var _proto = SegmentLoader.prototype;
15964
15965 _proto.createTransmuxer_ = function createTransmuxer_() {
15966 return segmentTransmuxer.createTransmuxer({
15967 remux: false,
15968 alignGopsAtEnd: this.safeAppend_,
15969 keepOriginalTimestamps: true,
15970 parse708captions: this.parse708captions_,
15971 captionServices: this.captionServices_
15972 });
15973 }
15974 /**
15975 * reset all of our media stats
15976 *
15977 * @private
15978 */
15979 ;
15980
15981 _proto.resetStats_ = function resetStats_() {
15982 this.mediaBytesTransferred = 0;
15983 this.mediaRequests = 0;
15984 this.mediaRequestsAborted = 0;
15985 this.mediaRequestsTimedout = 0;
15986 this.mediaRequestsErrored = 0;
15987 this.mediaTransferDuration = 0;
15988 this.mediaSecondsLoaded = 0;
15989 this.mediaAppends = 0;
15990 }
15991 /**
15992 * dispose of the SegmentLoader and reset to the default state
15993 */
15994 ;
15995
15996 _proto.dispose = function dispose() {
15997 this.trigger('dispose');
15998 this.state = 'DISPOSED';
15999 this.pause();
16000 this.abort_();
16001
16002 if (this.transmuxer_) {
16003 this.transmuxer_.terminate();
16004 }
16005
16006 this.resetStats_();
16007
16008 if (this.checkBufferTimeout_) {
16009 window$1.clearTimeout(this.checkBufferTimeout_);
16010 }
16011
16012 if (this.syncController_ && this.triggerSyncInfoUpdate_) {
16013 this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
16014 }
16015
16016 this.off();
16017 };
16018
16019 _proto.setAudio = function setAudio(enable) {
16020 this.audioDisabled_ = !enable;
16021
16022 if (enable) {
16023 this.appendInitSegment_.audio = true;
16024 } else {
16025 // remove current track audio if it gets disabled
16026 this.sourceUpdater_.removeAudio(0, this.duration_());
16027 }
16028 }
16029 /**
16030 * abort anything that is currently doing on with the SegmentLoader
16031 * and reset to a default state
16032 */
16033 ;
16034
16035 _proto.abort = function abort() {
16036 if (this.state !== 'WAITING') {
16037 if (this.pendingSegment_) {
16038 this.pendingSegment_ = null;
16039 }
16040
16041 return;
16042 }
16043
16044 this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY
16045 // since we are no longer "waiting" on any requests. XHR callback is not always run
16046 // when the request is aborted. This will prevent the loader from being stuck in the
16047 // WAITING state indefinitely.
16048
16049 this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the
16050 // next segment
16051
16052 if (!this.paused()) {
16053 this.monitorBuffer_();
16054 }
16055 }
16056 /**
16057 * abort all pending xhr requests and null any pending segements
16058 *
16059 * @private
16060 */
16061 ;
16062
16063 _proto.abort_ = function abort_() {
16064 if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
16065 this.pendingSegment_.abortRequests();
16066 } // clear out the segment being processed
16067
16068
16069 this.pendingSegment_ = null;
16070 this.callQueue_ = [];
16071 this.loadQueue_ = [];
16072 this.metadataQueue_.id3 = [];
16073 this.metadataQueue_.caption = [];
16074 this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
16075 this.waitingOnRemove_ = false;
16076 window$1.clearTimeout(this.quotaExceededErrorRetryTimeout_);
16077 this.quotaExceededErrorRetryTimeout_ = null;
16078 };
16079
16080 _proto.checkForAbort_ = function checkForAbort_(requestId) {
16081 // If the state is APPENDING, then aborts will not modify the state, meaning the first
16082 // callback that happens should reset the state to READY so that loading can continue.
16083 if (this.state === 'APPENDING' && !this.pendingSegment_) {
16084 this.state = 'READY';
16085 return true;
16086 }
16087
16088 if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
16089 return true;
16090 }
16091
16092 return false;
16093 }
16094 /**
16095 * set an error on the segment loader and null out any pending segements
16096 *
16097 * @param {Error} error the error to set on the SegmentLoader
16098 * @return {Error} the error that was set or that is currently set
16099 */
16100 ;
16101
16102 _proto.error = function error(_error) {
16103 if (typeof _error !== 'undefined') {
16104 this.logger_('error occurred:', _error);
16105 this.error_ = _error;
16106 }
16107
16108 this.pendingSegment_ = null;
16109 return this.error_;
16110 };
16111
16112 _proto.endOfStream = function endOfStream() {
16113 this.ended_ = true;
16114
16115 if (this.transmuxer_) {
16116 // need to clear out any cached data to prepare for the new segment
16117 segmentTransmuxer.reset(this.transmuxer_);
16118 }
16119
16120 this.gopBuffer_.length = 0;
16121 this.pause();
16122 this.trigger('ended');
16123 }
16124 /**
16125 * Indicates which time ranges are buffered
16126 *
16127 * @return {TimeRange}
16128 * TimeRange object representing the current buffered ranges
16129 */
16130 ;
16131
16132 _proto.buffered_ = function buffered_() {
16133 var trackInfo = this.getMediaInfo_();
16134
16135 if (!this.sourceUpdater_ || !trackInfo) {
16136 return videojs.createTimeRanges();
16137 }
16138
16139 if (this.loaderType_ === 'main') {
16140 var hasAudio = trackInfo.hasAudio,
16141 hasVideo = trackInfo.hasVideo,
16142 isMuxed = trackInfo.isMuxed;
16143
16144 if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
16145 return this.sourceUpdater_.buffered();
16146 }
16147
16148 if (hasVideo) {
16149 return this.sourceUpdater_.videoBuffered();
16150 }
16151 } // One case that can be ignored for now is audio only with alt audio,
16152 // as we don't yet have proper support for that.
16153
16154
16155 return this.sourceUpdater_.audioBuffered();
16156 }
16157 /**
16158 * Gets and sets init segment for the provided map
16159 *
16160 * @param {Object} map
16161 * The map object representing the init segment to get or set
16162 * @param {boolean=} set
16163 * If true, the init segment for the provided map should be saved
16164 * @return {Object}
16165 * map object for desired init segment
16166 */
16167 ;
16168
_proto.initSegmentForMap = function initSegmentForMap(map, set) {
  if (set === void 0) {
    set = false;
  }

  if (!map) {
    return null;
  }

  var id = initSegmentId(map);
  var cached = this.initSegments_[id];

  // Only cache once we actually have the bytes for this init segment; the
  // relevant fields are copied rather than keeping the whole map object.
  if (set && !cached && map.bytes) {
    cached = {
      resolvedUri: map.resolvedUri,
      byterange: map.byterange,
      bytes: map.bytes,
      tracks: map.tracks,
      timescales: map.timescales
    };
    this.initSegments_[id] = cached;
  }

  // Fall back to the caller's map when nothing has been stored yet.
  return cached || map;
}
16193 /**
16194 * Gets and sets key for the provided key
16195 *
16196 * @param {Object} key
16197 * The key object representing the key to get or set
16198 * @param {boolean=} set
16199 * If true, the key for the provided key should be saved
16200 * @return {Object}
16201 * Key object for desired key
16202 */
16203 ;
16204
_proto.segmentKey = function segmentKey(key, set) {
  if (set === void 0) {
    set = false;
  }

  if (!key) {
    return null;
  }

  var id = segmentKeyId(key);
  var cachedKey = this.keyCache_[id];

  // TODO: We should use the HTTP Expires header to invalidate our cache per
  // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
  if (this.cacheEncryptionKeys_ && set && !cachedKey && key.bytes) {
    cachedKey = {
      resolvedUri: key.resolvedUri,
      bytes: key.bytes
    };
    this.keyCache_[id] = cachedKey;
  }

  // Always hand back a fresh object; bytes are only included when we have a
  // cached copy.
  var result = {
    resolvedUri: (cachedKey || key).resolvedUri
  };

  if (cachedKey) {
    result.bytes = cachedKey.bytes;
  }

  return result;
}
16235 /**
16236 * Returns true if all configuration required for loading is present, otherwise false.
16237 *
16238 * @return {boolean} True if the all configuration is ready for loading
16239 * @private
16240 */
16241 ;
16242
_proto.couldBeginLoading_ = function couldBeginLoading_() {
  // Loading requires both a playlist and an unpaused loader. Note this
  // returns the raw `&&` result (the falsy playlist value itself when
  // unset), which callers only use in boolean context.
  return this.playlist_ && !this.paused();
}
16246 /**
16247 * load a playlist and start to fill the buffer
16248 */
16249 ;
16250
_proto.load = function load() {
  // un-pause and schedule a near-immediate buffer check
  this.monitorBuffer_();

  // without a playlist there is nothing to load; keep waiting for one to
  // be specified
  if (!this.playlist_) {
    return;
  }

  var canLoad = this.couldBeginLoading_();

  // once all the configuration is ready, initialize and begin loading
  if (this.state === 'INIT' && canLoad) {
    return this.init_();
  }

  // if we're in the middle of processing a segment already, don't kick off
  // an additional segment request
  if (!canLoad || this.state !== 'READY' && this.state !== 'INIT') {
    return;
  }

  this.state = 'READY';
}
16273 /**
16274 * Once all the starting parameters have been specified, begin
16275 * operation. This method should only be invoked from the INIT
16276 * state.
16277 *
16278 * @private
16279 */
16280 ;
16281
_proto.init_ = function init_() {
  this.state = 'READY';

  // For an audio loader that has never been inited, any stale audio data
  // from previously muxed content needs to be cleared out first.
  this.resetEverything();
  return this.monitorBuffer_();
}
16289 /**
16290 * set a playlist on the segment loader
16291 *
16292 * @param {PlaylistLoader} media the playlist to set on the segment loader
16293 */
16294 ;
16295
_proto.playlist = function playlist(newPlaylist, options) {
  if (options === void 0) {
    options = {};
  }

  if (!newPlaylist) {
    return;
  }

  var oldPlaylist = this.playlist_;
  var segmentInfo = this.pendingSegment_;
  this.playlist_ = newPlaylist;
  this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist
  // is always our zero-time so force a sync update each time the playlist
  // is refreshed from the server
  //
  // Use the INIT state to determine if playback has started, as the playlist sync info
  // should be fixed once requests begin (as sync points are generated based on sync
  // info), but not before then.

  if (this.state === 'INIT') {
    newPlaylist.syncInfo = {
      mediaSequence: newPlaylist.mediaSequence,
      time: 0
    }; // Setting the date time mapping means mapping the program date time (if available)
    // to time 0 on the player's timeline. The playlist's syncInfo serves a similar
    // purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
    // be updated as the playlist is refreshed before the loader starts loading, the
    // program date time mapping needs to be updated as well.
    //
    // This mapping is only done for the main loader because a program date time should
    // map equivalently between playlists.

    if (this.loaderType_ === 'main') {
      this.syncController_.setDateTimeMappingForStart(newPlaylist);
    }
  }

  // prefer the playlist id for logging, falling back to the uri
  var oldId = null;

  if (oldPlaylist) {
    if (oldPlaylist.id) {
      oldId = oldPlaylist.id;
    } else if (oldPlaylist.uri) {
      oldId = oldPlaylist.uri;
    }
  }

  this.logger_("playlist update [" + oldId + " => " + (newPlaylist.id || newPlaylist.uri) + "]"); // in VOD, this is always a rendition switch (or we updated our syncInfo above)
  // in LIVE, we always want to update with new playlists (including refreshes)

  this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start
  // buffering now

  if (this.state === 'INIT' && this.couldBeginLoading_()) {
    return this.init_();
  }

  if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
    if (this.mediaIndex !== null) {
      // we must reset/resync the segment loader when we switch renditions and
      // the segment loader is already synced to the previous rendition
      // on playlist changes we want it to be possible to fetch
      // at the buffer for vod but not for live. So we use resetLoader
      // for live and resyncLoader for vod. We want this because
      // if a playlist uses independent and non-independent segments/parts the
      // buffer may not accurately reflect the next segment that we should try
      // downloading.
      if (!newPlaylist.endList) {
        this.resetLoader();
      } else {
        this.resyncLoader();
      }
    }

    this.currentMediaInfo_ = void 0;
    this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being defined

    return;
  } // we reloaded the same playlist so we are in a live scenario
  // and we will likely need to adjust the mediaIndex


  var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
  this.logger_("live window shift [" + mediaSequenceDiff + "]"); // update the mediaIndex on the SegmentLoader
  // this is important because we can abort a request and this value must be
  // equal to the last appended mediaIndex

  if (this.mediaIndex !== null) {
    this.mediaIndex -= mediaSequenceDiff; // this can happen if we are going to load the first segment, but get a playlist
    // update during that. mediaIndex would go from 0 to -1 if mediaSequence in the
    // new playlist was incremented by 1.

    if (this.mediaIndex < 0) {
      this.mediaIndex = null;
      this.partIndex = null;
    } else {
      var segment = this.playlist_.segments[this.mediaIndex]; // partIndex should remain the same for the same segment
      // unless parts fell off of the playlist for this segment.
      // In that case we need to reset partIndex and resync

      if (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {
        var mediaIndex = this.mediaIndex;
        this.logger_("currently processing part (index " + this.partIndex + ") no longer exists.");
        this.resetLoader(); // We want to throw away the partIndex and the data associated with it,
        // as the part was dropped from our current playlists segment.
        // The mediaIndex will still be valid so keep that around.

        this.mediaIndex = mediaIndex;
      }
    }
  } // update the mediaIndex on the SegmentInfo object
  // this is important because we will update this.mediaIndex with this value
  // in `handleAppendsDone_` after the segment has been successfully appended


  if (segmentInfo) {
    segmentInfo.mediaIndex -= mediaSequenceDiff;

    if (segmentInfo.mediaIndex < 0) {
      segmentInfo.mediaIndex = null;
      segmentInfo.partIndex = null;
    } else {
      // we need to update the referenced segment so that timing information is
      // saved for the new playlist's segment, however, if the segment fell off the
      // playlist, we can leave the old reference and just lose the timing info
      if (segmentInfo.mediaIndex >= 0) {
        segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
      }

      if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {
        segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];
      }
    }
  }

  // remember timing info for segments that slid out of the live window so
  // sync points remain usable
  this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
}
16434 /**
16435 * Prevent the loader from fetching additional segments. If there
16436 * is a segment request outstanding, it will finish processing
16437 * before the loader halts. A segment loader can be unpaused by
16438 * calling load().
16439 */
16440 ;
16441
_proto.pause = function pause() {
  // Cancelling the pending buffer check stops the fetch loop; paused()
  // reports based on this timeout handle being null.
  if (!this.checkBufferTimeout_) {
    return;
  }

  window$1.clearTimeout(this.checkBufferTimeout_);
  this.checkBufferTimeout_ = null;
}
16448 /**
16449 * Returns whether the segment loader is fetching additional
16450 * segments when given the opportunity. This property can be
16451 * modified through calls to pause() and load().
16452 */
16453 ;
16454
_proto.paused = function paused() {
  // The loader is paused exactly when no buffer check is scheduled.
  var hasScheduledCheck = this.checkBufferTimeout_ !== null;
  return !hasScheduledCheck;
}
16458 /**
16459 * Delete all the buffered data and reset the SegmentLoader
16460 *
16461 * @param {Function} [done] an optional callback to be executed when the remove
16462 * operation is complete
16463 */
16464 ;
16465
_proto.resetEverything = function resetEverything(done) {
  this.ended_ = false;
  // both init segments must be re-appended after a full reset
  this.appendInitSegment_ = {
    audio: true,
    video: true
  };
  this.resetLoader();

  // Remove from 0 (the earliest point) to Infinity to signify removal of
  // everything. The VTT segment loader doesn't need to do anything here,
  // but the regular SegmentLoader clamps the value to duration if needed.
  this.remove(0, Infinity, done);

  if (this.transmuxer_) {
    // clear fmp4 captions, then reset the transmuxer's internal cache
    this.transmuxer_.postMessage({
      action: 'clearAllMp4Captions'
    });
    this.transmuxer_.postMessage({
      action: 'reset'
    });
  }
}
16488 /**
16489 * Force the SegmentLoader to resync and start loading around the currentTime instead
16490 * of starting at the end of the buffer
16491 *
16492 * Useful for fast quality changes
16493 */
16494 ;
16495
_proto.resetLoader = function resetLoader() {
  // Clearing fetchAtBuffer_ makes the next request start around currentTime
  // instead of the end of the buffer; then do a full resync.
  this.fetchAtBuffer_ = false;
  this.resyncLoader();
}
16500 /**
16501 * Force the SegmentLoader to restart synchronization and make a conservative guess
16502 * before returning to the simple walk-forward method
16503 */
16504 ;
16505
_proto.resyncLoader = function resyncLoader() {
  if (this.transmuxer_) {
    // need to clear out any cached data to prepare for the new segment
    segmentTransmuxer.reset(this.transmuxer_);
  }

  // drop all position/sync state so the next request is chosen from a
  // freshly computed sync point
  this.mediaIndex = null;
  this.partIndex = null;
  this.syncPoint_ = null;
  this.isPendingTimestampOffset_ = false;
  // discard queued work tied to the old timeline state before aborting
  this.callQueue_ = [];
  this.loadQueue_ = [];
  this.metadataQueue_.id3 = [];
  this.metadataQueue_.caption = [];
  this.abort();

  if (this.transmuxer_) {
    this.transmuxer_.postMessage({
      action: 'clearParsedMp4Captions'
    });
  }
}
16528 /**
16529 * Remove any data in the source buffer between start and end times
16530 *
16531 * @param {number} start - the start time of the region to remove from the buffer
16532 * @param {number} end - the end time of the region to remove from the buffer
16533 * @param {Function} [done] - an optional callback to be executed when the remove
16534 * @param {boolean} force - force all remove operations to happen
16535 * operation is complete
16536 */
16537 ;
16538
_proto.remove = function remove(start, end, done, force) {
  if (done === void 0) {
    done = function done() {};
  }

  if (force === void 0) {
    force = false;
  }

  // clamp end to duration if we need to remove everything.
  // This is due to a browser bug that causes issues if we remove to Infinity.
  // videojs/videojs-contrib-hls#1225
  if (end === Infinity) {
    end = this.duration_();
  }

  // skip removes that would throw an error
  // commonly happens during a rendition switch at the start of a video
  // from start 0 to end 0
  if (end <= start) {
    // fix: this previously logged a single-quoted string containing
    // `${end}`/`${start}` template placeholders, so the values were never
    // interpolated into the message
    this.logger_("skipping remove because end " + end + " is <= start " + start);
    return;
  }

  if (!this.sourceUpdater_ || !this.getMediaInfo_()) {
    // nothing to remove if we haven't processed any media
    this.logger_('skipping remove because no source updater or starting media info');
    return;
  }

  // start the count at one so this function's own call to removeFinished
  // (after scheduling the async removes) completes the set
  var removesRemaining = 1;

  var removeFinished = function removeFinished() {
    removesRemaining--;

    if (removesRemaining === 0) {
      done();
    }
  };

  if (force || !this.audioDisabled_) {
    removesRemaining++;
    this.sourceUpdater_.removeAudio(start, end, removeFinished);
  }

  // While it would be better to only remove video if the main loader has video, this
  // should be safe with audio only as removeVideo will call back even if there's no
  // video buffer.
  //
  // In theory we can check to see if there's video before calling the remove, but in
  // the event that we're switching between renditions and from video to audio only
  // (when we add support for that), we may need to clear the video contents despite
  // what the new media will contain.
  if (force || this.loaderType_ === 'main') {
    this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
    removesRemaining++;
    this.sourceUpdater_.removeVideo(start, end, removeFinished);
  }

  // remove any captions and ID3 tags
  for (var track in this.inbandTextTracks_) {
    removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
  }

  removeCuesFromTrack(start, end, this.segmentMetadataTrack_); // finished this function's removes

  removeFinished();
}
16608 /**
16609 * (re-)schedule monitorBufferTick_ to run as soon as possible
16610 *
16611 * @private
16612 */
16613 ;
16614
_proto.monitorBuffer_ = function monitorBuffer_() {
  // Replace any pending check so at most one timer is ever scheduled, then
  // arrange for monitorBufferTick_ to run as soon as possible.
  if (this.checkBufferTimeout_) {
    window$1.clearTimeout(this.checkBufferTimeout_);
  }

  this.checkBufferTimeout_ = window$1.setTimeout(this.monitorBufferTick_.bind(this), 1);
}
16622 /**
16623 * As long as the SegmentLoader is in the READY state, periodically
16624 * invoke fillBuffer_().
16625 *
16626 * @private
16627 */
16628 ;
16629
_proto.monitorBufferTick_ = function monitorBufferTick_() {
  // Only attempt to fill the buffer while in the READY state.
  if (this.state === 'READY') {
    this.fillBuffer_();
  }

  // Reschedule the periodic check, replacing any timer that is pending.
  if (this.checkBufferTimeout_) {
    window$1.clearTimeout(this.checkBufferTimeout_);
  }

  this.checkBufferTimeout_ = window$1.setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
}
16641 /**
16642 * fill the buffer with segements unless the sourceBuffers are
16643 * currently updating
16644 *
16645 * Note: this function should only ever be called by monitorBuffer_
16646 * and never directly
16647 *
16648 * @private
16649 */
16650 ;
16651
_proto.fillBuffer_ = function fillBuffer_() {
  // TODO: since the source buffer maintains a queue and this should only
  // ever run when we're ready for the next segment, this check can most
  // likely be removed
  if (this.sourceUpdater_.updating()) {
    return;
  }

  // see if we need to begin loading immediately
  var segmentInfo = this.chooseNextRequest_();

  if (!segmentInfo) {
    return;
  }

  // A numeric timestampOffset means the chosen segment crosses into a new
  // timeline, so register the pending timeline change before loading it.
  if (typeof segmentInfo.timestampOffset === 'number') {
    this.isPendingTimestampOffset_ = false;
    this.timelineChangeController_.pendingTimelineChange({
      type: this.loaderType_,
      from: this.currentTimeline_,
      to: segmentInfo.timeline
    });
  }

  this.loadSegment_(segmentInfo);
}
16677 /**
16678 * Determines if we should call endOfStream on the media source based
16679 * on the state of the buffer or if appened segment was the final
16680 * segment in the playlist.
16681 *
16682 * @param {number} [mediaIndex] the media index of segment we last appended
16683 * @param {Object} [playlist] a media playlist object
16684 * @return {boolean} do we need to call endOfStream on the MediaSource
16685 */
16686 ;
16687
_proto.isEndOfStream_ = function isEndOfStream_(mediaIndex, playlist, partIndex) {
  // All three arguments default to the loader's current position/playlist.
  if (mediaIndex === void 0) {
    mediaIndex = this.mediaIndex;
  }

  if (playlist === void 0) {
    playlist = this.playlist_;
  }

  if (partIndex === void 0) {
    partIndex = this.partIndex;
  }

  if (!playlist || !this.mediaSource_) {
    return false;
  }

  var segment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex];
  // mediaIndex is zero based while length is 1 based
  var appendedLastSegment = mediaIndex + 1 === playlist.segments.length;
  // true when there are no parts, or this is the last part
  var appendedLastPart = !segment || !segment.parts || partIndex + 1 === segment.parts.length;

  // Once the end of the video is buffered, endOfStream must be called so the
  // MediaSource can trigger `ended` when it runs out of buffered data
  // instead of waiting indefinitely.
  return playlist.endList && this.mediaSource_.readyState === 'open' && appendedLastSegment && appendedLastPart;
}
16715 /**
16716 * Determines what request should be made given current segment loader state.
16717 *
16718 * @return {Object} a request object that describes the segment/part to load
16719 */
16720 ;
16721
_proto.chooseNextRequest_ = function chooseNextRequest_() {
  var buffered = this.buffered_();
  var bufferedEnd = lastBufferedEnd(buffered) || 0;
  var bufferedTime = timeAheadOf(buffered, this.currentTime_());
  var preloaded = !this.hasPlayed_() && bufferedTime >= 1;
  var haveEnoughBuffer = bufferedTime >= this.goalBufferLength_();
  var segments = this.playlist_.segments; // return no segment if:
  // 1. we don't have segments
  // 2. The video has not yet played and we already downloaded a segment
  // 3. we already have enough buffered time

  if (!segments.length || preloaded || haveEnoughBuffer) {
    return null;
  }

  this.syncPoint_ = this.syncPoint_ || this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
  var next = {
    partIndex: null,
    mediaIndex: null,
    startOfSegment: null,
    playlist: this.playlist_,
    isSyncRequest: Boolean(!this.syncPoint_)
  };

  if (next.isSyncRequest) {
    // no sync point yet: request a candidate segment purely to establish one
    next.mediaIndex = getSyncSegmentCandidate(this.currentTimeline_, segments, bufferedEnd);
  } else if (this.mediaIndex !== null) {
    // normal walk-forward: advance to the next part of the current segment,
    // or the next segment when its parts are exhausted
    var segment = segments[this.mediaIndex];
    var partIndex = typeof this.partIndex === 'number' ? this.partIndex : -1;
    next.startOfSegment = segment.end ? segment.end : bufferedEnd;

    if (segment.parts && segment.parts[partIndex + 1]) {
      next.mediaIndex = this.mediaIndex;
      next.partIndex = partIndex + 1;
    } else {
      next.mediaIndex = this.mediaIndex + 1;
    }
  } else {
    // Find the segment containing the end of the buffer or current time.
    var _Playlist$getMediaInf = Playlist.getMediaInfoForTime({
      experimentalExactManifestTimings: this.experimentalExactManifestTimings,
      playlist: this.playlist_,
      currentTime: this.fetchAtBuffer_ ? bufferedEnd : this.currentTime_(),
      startingPartIndex: this.syncPoint_.partIndex,
      startingSegmentIndex: this.syncPoint_.segmentIndex,
      startTime: this.syncPoint_.time
    }),
        segmentIndex = _Playlist$getMediaInf.segmentIndex,
        startTime = _Playlist$getMediaInf.startTime,
        _partIndex = _Playlist$getMediaInf.partIndex;

    next.getMediaInfoForTime = this.fetchAtBuffer_ ? "bufferedEnd " + bufferedEnd : "currentTime " + this.currentTime_();
    next.mediaIndex = segmentIndex;
    next.startOfSegment = startTime;
    next.partIndex = _partIndex;
  }

  var nextSegment = segments[next.mediaIndex];
  var nextPart = nextSegment && typeof next.partIndex === 'number' && nextSegment.parts && nextSegment.parts[next.partIndex]; // if the next segment index is invalid or
  // the next partIndex is invalid do not choose a next segment.

  if (!nextSegment || typeof next.partIndex === 'number' && !nextPart) {
    return null;
  } // if the next segment has parts, and we don't have a partIndex.
  // Set partIndex to 0


  if (typeof next.partIndex !== 'number' && nextSegment.parts) {
    next.partIndex = 0;
    nextPart = nextSegment.parts[0];
  } // if we have no buffered data then we need to make sure
  // that the next part we append is "independent" if possible.
  // So we check if the previous part is independent, and request
  // it if it is.


  if (!bufferedTime && nextPart && !nextPart.independent) {
    if (next.partIndex === 0) {
      // first part of a segment: look at the last part of the previous segment
      var lastSegment = segments[next.mediaIndex - 1];
      var lastSegmentLastPart = lastSegment.parts && lastSegment.parts.length && lastSegment.parts[lastSegment.parts.length - 1];

      if (lastSegmentLastPart && lastSegmentLastPart.independent) {
        next.mediaIndex -= 1;
        next.partIndex = lastSegment.parts.length - 1;
        next.independent = 'previous segment';
      }
    } else if (nextSegment.parts[next.partIndex - 1].independent) {
      next.partIndex -= 1;
      next.independent = 'previous part';
    }
  }

  var ended = this.mediaSource_ && this.mediaSource_.readyState === 'ended'; // do not choose a next segment if all of the following:
  // 1. this is the last segment in the playlist
  // 2. end of stream has been called on the media source already
  // 3. the player is not seeking

  if (next.mediaIndex >= segments.length - 1 && ended && !this.seeking_()) {
    return null;
  }

  return this.generateSegmentInfo_(next);
};
16825
_proto.generateSegmentInfo_ = function generateSegmentInfo_(options) {
  var independent = options.independent,
      playlist = options.playlist,
      mediaIndex = options.mediaIndex,
      startOfSegment = options.startOfSegment,
      isSyncRequest = options.isSyncRequest,
      partIndex = options.partIndex,
      forceTimestampOffset = options.forceTimestampOffset,
      getMediaInfoForTime = options.getMediaInfoForTime;
  var segment = playlist.segments[mediaIndex];
  // part is false when no numeric partIndex was chosen
  var part = typeof partIndex === 'number' && segment.parts[partIndex];
  var segmentInfo = {
    requestId: 'segment-loader-' + Math.random(),
    // resolve the segment URL relative to the playlist
    uri: part && part.resolvedUri || segment.resolvedUri,
    // the segment's mediaIndex at the time it was requested
    mediaIndex: mediaIndex,
    partIndex: part ? partIndex : null,
    // whether or not to update the SegmentLoader's state with this
    // segment's mediaIndex
    isSyncRequest: isSyncRequest,
    startOfSegment: startOfSegment,
    // the segment's playlist
    playlist: playlist,
    // unencrypted bytes of the segment
    bytes: null,
    // when a key is defined for this segment, the encrypted bytes
    encryptedBytes: null,
    // The target timestampOffset for this segment when we append it
    // to the source buffer
    timestampOffset: null,
    // The timeline that the segment is in
    timeline: segment.timeline,
    // The expected duration of the segment in seconds
    duration: part && part.duration || segment.duration,
    // retain the segment in case the playlist updates while doing an async process
    segment: segment,
    part: part,
    byteLength: 0,
    transmuxer: this.transmuxer_,
    // type of getMediaInfoForTime that was used to get this segment
    getMediaInfoForTime: getMediaInfoForTime,
    independent: independent
  };
  // an explicit forceTimestampOffset takes precedence over the loader's
  // pending-timestamp-offset flag
  var overrideCheck = typeof forceTimestampOffset !== 'undefined' ? forceTimestampOffset : this.isPendingTimestampOffset_;
  segmentInfo.timestampOffset = this.timestampOffsetForSegment_({
    segmentTimeline: segment.timeline,
    currentTimeline: this.currentTimeline_,
    startOfSegment: startOfSegment,
    buffered: this.buffered_(),
    overrideCheck: overrideCheck
  });
  var audioBufferedEnd = lastBufferedEnd(this.sourceUpdater_.audioBuffered());

  if (typeof audioBufferedEnd === 'number') {
    // since the transmuxer is using the actual timing values, but the buffer is
    // adjusted by the timestamp offset, we must adjust the value here
    segmentInfo.audioAppendStart = audioBufferedEnd - this.sourceUpdater_.audioTimestampOffset();
  }

  if (this.sourceUpdater_.videoBuffered().length) {
    segmentInfo.gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_, // since the transmuxer is using the actual timing values, but the time is
    // adjusted by the timestmap offset, we must adjust the value here
    this.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);
  }

  return segmentInfo;
} // get the timestampoffset for a segment,
16894 // added so that vtt segment loader can override and prevent
16895 // adding timestamp offsets.
16896 ;
16897
_proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_(options) {
  // Delegate to the shared helper; the VTT segment loader overrides this
  // method to avoid adding timestamp offsets.
  var offset = timestampOffsetForSegment(options);
  return offset;
}
16901 /**
16902 * Determines if the network has enough bandwidth to complete the current segment
16903 * request in a timely manner. If not, the request will be aborted early and bandwidth
16904 * updated to trigger a playlist switch.
16905 *
16906 * @param {Object} stats
16907 * Object containing stats about the request timing and size
16908 * @private
16909 */
16910 ;
16911
_proto.earlyAbortWhenNeeded_ = function earlyAbortWhenNeeded_(stats) {
  if (this.vhs_.tech_.paused() || // Don't abort if the current playlist is on the lowestEnabledRendition
  // TODO: Replace using timeout with a boolean indicating whether this playlist is
  // the lowestEnabledRendition.
  !this.xhrOptions_.timeout || // Don't abort if we have no bandwidth information to estimate segment sizes
  !this.playlist_.attributes.BANDWIDTH) {
    return;
  } // Wait at least 1 second since the first byte of data has been received before
  // using the calculated bandwidth from the progress event to allow the bitrate
  // to stabilize


  if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
    return;
  }

  // estimate how long the in-flight request still needs versus how much
  // forward buffer remains
  var currentTime = this.currentTime_();
  var measuredBandwidth = stats.bandwidth;
  var segmentDuration = this.pendingSegment_.duration;
  var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
  // if we are only left with less than 1 second when the request completes.
  // A negative timeUntilRebuffering indicates we are already rebuffering

  var timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download
  // is larger than the estimated time until the player runs out of forward buffer

  if (requestTimeRemaining <= timeUntilRebuffer$1) {
    return;
  }

  var switchCandidate = minRebufferMaxBandwidthSelector({
    master: this.vhs_.playlists.master,
    currentTime: currentTime,
    bandwidth: measuredBandwidth,
    duration: this.duration_(),
    segmentDuration: segmentDuration,
    timeUntilRebuffer: timeUntilRebuffer$1,
    currentTimeline: this.currentTimeline_,
    syncController: this.syncController_
  });

  if (!switchCandidate) {
    return;
  }

  var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;
  var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
  var minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the
  // potential round trip time of the new request so that we are not too aggressive
  // with switching to a playlist that might save us a fraction of a second.

  if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {
    minimumTimeSaving = 1;
  }

  if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
    return;
  } // set the bandwidth to that of the desired playlist being sure to scale by
  // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
  // don't trigger a bandwidthupdate as the bandwidth is artifial


  this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
  this.trigger('earlyabort');
};
16977
_proto.handleAbort_ = function handleAbort_(segmentInfo) {
  // Leave a trace in the log and record the abort for request stats.
  this.logger_("Aborting " + segmentInfoString(segmentInfo));
  this.mediaRequestsAborted += 1;
}
16982 /**
16983 * XHR `progress` event handler
16984 *
16985 * @param {Event}
16986 * The XHR `progress` event
16987 * @param {Object} simpleSegment
16988 * A simplified segment object copy
16989 * @private
16990 */
16991 ;
16992
_proto.handleProgress_ = function handleProgress_(event, simpleSegment) {
  this.earlyAbortWhenNeeded_(simpleSegment.stats);

  // Nothing to report when the request was aborted in the meantime.
  if (this.checkForAbort_(simpleSegment.requestId)) {
    return;
  }

  this.trigger('progress');
};
17002
_proto.handleTrackInfo_ = function handleTrackInfo_(simpleSegment, trackInfo) {
  this.earlyAbortWhenNeeded_(simpleSegment.stats);

  if (this.checkForAbort_(simpleSegment.requestId)) {
    return;
  }

  if (this.checkForIllegalMediaSwitch(trackInfo)) {
    return;
  }

  trackInfo = trackInfo || {}; // When we have track info, determine what media types this loader is dealing with.
  // Guard against cases where we're not getting track info at all until we are
  // certain that all streams will provide it.

  if (!shallowEqual(this.currentMediaInfo_, trackInfo)) {
    // changed track info means the init segments must be (re)appended for
    // both media types
    this.appendInitSegment_ = {
      audio: true,
      video: true
    };
    this.startingMediaInfo_ = trackInfo;
    this.currentMediaInfo_ = trackInfo;
    this.logger_('trackinfo update', trackInfo);
    this.trigger('trackinfo');
  } // trackinfo may cause an abort if the trackinfo
  // causes a codec change to an unsupported codec.


  if (this.checkForAbort_(simpleSegment.requestId)) {
    return;
  } // set trackinfo on the pending segment so that
  // it can append.


  this.pendingSegment_.trackInfo = trackInfo; // check if any calls were waiting on the track info

  if (this.hasEnoughInfoToAppend_()) {
    this.processCallQueue_();
  }
};
17043
17044 _proto.handleTimingInfo_ = function handleTimingInfo_(simpleSegment, mediaType, timeType, time) {
17045 this.earlyAbortWhenNeeded_(simpleSegment.stats);
17046
17047 if (this.checkForAbort_(simpleSegment.requestId)) {
17048 return;
17049 }
17050
17051 var segmentInfo = this.pendingSegment_;
17052 var timingInfoProperty = timingInfoPropertyForMedia(mediaType);
17053 segmentInfo[timingInfoProperty] = segmentInfo[timingInfoProperty] || {};
17054 segmentInfo[timingInfoProperty][timeType] = time;
17055 this.logger_("timinginfo: " + mediaType + " - " + timeType + " - " + time); // check if any calls were waiting on the timing info
17056
17057 if (this.hasEnoughInfoToAppend_()) {
17058 this.processCallQueue_();
17059 }
17060 };
17061
  _proto.handleCaptions_ = function handleCaptions_(simpleSegment, captionData) {
    // Handles 608/708 caption data parsed from the in-flight segment: groups
    // the cues per track, clears overlapping cues, and adds the new ones to
    // in-band text tracks. Deferred until media data has been appended so the
    // timestamp offset is known.
    var _this2 = this;

    this.earlyAbortWhenNeeded_(simpleSegment.stats);

    if (this.checkForAbort_(simpleSegment.requestId)) {
      return;
    } // This could only happen with fmp4 segments, but
    // should still not happen in general


    if (captionData.length === 0) {
      this.logger_('SegmentLoader received no captions from a caption event');
      return;
    }

    var segmentInfo = this.pendingSegment_; // Wait until we have some video data so that caption timing
    // can be adjusted by the timestamp offset

    if (!segmentInfo.hasAppendedData_) {
      this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));
      return;
    }

    // prefer the video timestamp offset; fall back to audio when no video
    // offset has been set
    var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();
    var captionTracks = {}; // get total start/end and captions for each track/stream

    captionData.forEach(function (caption) {
      // caption.stream is actually a track name...
      // set to the existing values in tracks or default values
      captionTracks[caption.stream] = captionTracks[caption.stream] || {
        // Infinity, as any other value will be less than this
        startTime: Infinity,
        captions: [],
        // 0 as an other value will be more than this
        endTime: 0
      };
      var captionTrack = captionTracks[caption.stream];
      // widen the track's window to cover this cue (shifted by the offset)
      captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);
      captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);
      captionTrack.captions.push(caption);
    });
    Object.keys(captionTracks).forEach(function (trackName) {
      var _captionTracks$trackN = captionTracks[trackName],
          startTime = _captionTracks$trackN.startTime,
          endTime = _captionTracks$trackN.endTime,
          captions = _captionTracks$trackN.captions;
      var inbandTextTracks = _this2.inbandTextTracks_;

      _this2.logger_("adding cues from " + startTime + " -> " + endTime + " for " + trackName);

      createCaptionsTrackIfNotExists(inbandTextTracks, _this2.vhs_.tech_, trackName); // clear out any cues that start and end at the same time period for the same track.
      // We do this because a rendition change that also changes the timescale for captions
      // will result in captions being re-parsed for certain segments. If we add them again
      // without clearing we will have two of the same captions visible.

      removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);
      addCaptionData({
        captionArray: captions,
        inbandTextTracks: inbandTextTracks,
        timestampOffset: timestampOffset
      });
    }); // Reset stored captions since we added parsed
    // captions to a text track at this point

    if (this.transmuxer_) {
      this.transmuxer_.postMessage({
        action: 'clearParsedMp4Captions'
      });
    }
  };
17133
17134 _proto.handleId3_ = function handleId3_(simpleSegment, id3Frames, dispatchType) {
17135 this.earlyAbortWhenNeeded_(simpleSegment.stats);
17136
17137 if (this.checkForAbort_(simpleSegment.requestId)) {
17138 return;
17139 }
17140
17141 var segmentInfo = this.pendingSegment_; // we need to have appended data in order for the timestamp offset to be set
17142
17143 if (!segmentInfo.hasAppendedData_) {
17144 this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));
17145 return;
17146 }
17147
17148 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset(); // There's potentially an issue where we could double add metadata if there's a muxed
17149 // audio/video source with a metadata track, and an alt audio with a metadata track.
17150 // However, this probably won't happen, and if it does it can be handled then.
17151
17152 createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.vhs_.tech_);
17153 addMetadata({
17154 inbandTextTracks: this.inbandTextTracks_,
17155 metadataArray: id3Frames,
17156 timestampOffset: timestampOffset,
17157 videoDuration: this.duration_()
17158 });
17159 };
17160
17161 _proto.processMetadataQueue_ = function processMetadataQueue_() {
17162 this.metadataQueue_.id3.forEach(function (fn) {
17163 return fn();
17164 });
17165 this.metadataQueue_.caption.forEach(function (fn) {
17166 return fn();
17167 });
17168 this.metadataQueue_.id3 = [];
17169 this.metadataQueue_.caption = [];
17170 };
17171
17172 _proto.processCallQueue_ = function processCallQueue_() {
17173 var callQueue = this.callQueue_; // Clear out the queue before the queued functions are run, since some of the
17174 // functions may check the length of the load queue and default to pushing themselves
17175 // back onto the queue.
17176
17177 this.callQueue_ = [];
17178 callQueue.forEach(function (fun) {
17179 return fun();
17180 });
17181 };
17182
17183 _proto.processLoadQueue_ = function processLoadQueue_() {
17184 var loadQueue = this.loadQueue_; // Clear out the queue before the queued functions are run, since some of the
17185 // functions may check the length of the load queue and default to pushing themselves
17186 // back onto the queue.
17187
17188 this.loadQueue_ = [];
17189 loadQueue.forEach(function (fun) {
17190 return fun();
17191 });
17192 }
17193 /**
17194 * Determines whether the loader has enough info to load the next segment.
17195 *
17196 * @return {boolean}
17197 * Whether or not the loader has enough info to load the next segment
17198 */
17199 ;
17200
  _proto.hasEnoughInfoToLoad_ = function hasEnoughInfoToLoad_() {
    // Since primary timing goes by video, only the audio loader potentially needs to wait
    // to load.
    if (this.loaderType_ !== 'audio') {
      return true;
    }

    var segmentInfo = this.pendingSegment_; // A fill buffer must have already run to establish a pending segment before there's
    // enough info to load.

    if (!segmentInfo) {
      return false;
    } // The first segment can and should be loaded immediately so that source buffers are
    // created together (before appending). Source buffer creation uses the presence of
    // audio and video data to determine whether to create audio/video source buffers, and
    // uses processed (transmuxed or parsed) media to determine the types required.


    if (!this.getCurrentMediaInfo_()) {
      return true;
    }

    if ( // Technically, instead of waiting to load a segment on timeline changes, a segment
    // can be requested and downloaded and only wait before it is transmuxed or parsed.
    // But in practice, there are a few reasons why it is better to wait until a loader
    // is ready to append that segment before requesting and downloading:
    //
    // 1. Because audio and main loaders cross discontinuities together, if this loader
    //    is waiting for the other to catch up, then instead of requesting another
    //    segment and using up more bandwidth, by not yet loading, more bandwidth is
    //    allotted to the loader currently behind.
    // 2. media-segment-request doesn't have to have logic to consider whether a segment
    // is ready to be processed or not, isolating the queueing behavior to the loader.
    // 3. The audio loader bases some of its segment properties on timing information
    //    provided by the main loader, meaning that, if the logic for waiting on
    //    processing was in media-segment-request, then it would also need to know how
    //    to re-generate the segment information after the main loader caught up.
    shouldWaitForTimelineChange({
      timelineChangeController: this.timelineChangeController_,
      currentTimeline: this.currentTimeline_,
      segmentTimeline: segmentInfo.timeline,
      loaderType: this.loaderType_,
      audioDisabled: this.audioDisabled_
    })) {
      // a pending timeline change means this loader must not load yet
      return false;
    }

    return true;
  };
17250
17251 _proto.getCurrentMediaInfo_ = function getCurrentMediaInfo_(segmentInfo) {
17252 if (segmentInfo === void 0) {
17253 segmentInfo = this.pendingSegment_;
17254 }
17255
17256 return segmentInfo && segmentInfo.trackInfo || this.currentMediaInfo_;
17257 };
17258
17259 _proto.getMediaInfo_ = function getMediaInfo_(segmentInfo) {
17260 if (segmentInfo === void 0) {
17261 segmentInfo = this.pendingSegment_;
17262 }
17263
17264 return this.getCurrentMediaInfo_(segmentInfo) || this.startingMediaInfo_;
17265 };
17266
17267 _proto.hasEnoughInfoToAppend_ = function hasEnoughInfoToAppend_() {
17268 if (!this.sourceUpdater_.ready()) {
17269 return false;
17270 } // If content needs to be removed or the loader is waiting on an append reattempt,
17271 // then no additional content should be appended until the prior append is resolved.
17272
17273
17274 if (this.waitingOnRemove_ || this.quotaExceededErrorRetryTimeout_) {
17275 return false;
17276 }
17277
17278 var segmentInfo = this.pendingSegment_;
17279 var trackInfo = this.getCurrentMediaInfo_(); // no segment to append any data for or
17280 // we do not have information on this specific
17281 // segment yet
17282
17283 if (!segmentInfo || !trackInfo) {
17284 return false;
17285 }
17286
17287 var hasAudio = trackInfo.hasAudio,
17288 hasVideo = trackInfo.hasVideo,
17289 isMuxed = trackInfo.isMuxed;
17290
17291 if (hasVideo && !segmentInfo.videoTimingInfo) {
17292 return false;
17293 } // muxed content only relies on video timing information for now.
17294
17295
17296 if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {
17297 return false;
17298 }
17299
17300 if (shouldWaitForTimelineChange({
17301 timelineChangeController: this.timelineChangeController_,
17302 currentTimeline: this.currentTimeline_,
17303 segmentTimeline: segmentInfo.timeline,
17304 loaderType: this.loaderType_,
17305 audioDisabled: this.audioDisabled_
17306 })) {
17307 return false;
17308 }
17309
17310 return true;
17311 };
17312
  _proto.handleData_ = function handleData_(simpleSegment, result) {
    // Handles processed media data (transmuxed or parsed fmp4) for the
    // in-flight segment: caches init segments/keys, fixes up timing info,
    // updates the timestamp offset, validates sync requests, then appends.
    this.earlyAbortWhenNeeded_(simpleSegment.stats);

    if (this.checkForAbort_(simpleSegment.requestId)) {
      return;
    } // If there's anything in the call queue, then this data came later and should be
    // executed after the calls currently queued.


    if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {
      this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));
      return;
    }

    var segmentInfo = this.pendingSegment_; // update the time mapping so we can translate from display time to media time

    this.setTimeMapping_(segmentInfo.timeline); // for tracking overall stats

    this.updateMediaSecondsLoaded_(segmentInfo.part || segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort
    // logic may change behavior depending on the state, and changing state too early may
    // inflate our estimates of bandwidth. In the future this should be re-examined to
    // note more granular states.
    // don't process and append data if the mediaSource is closed

    if (this.mediaSource_.readyState === 'closed') {
      return;
    } // if this request included an initialization segment, save that data
    // to the initSegment cache


    if (simpleSegment.map) {
      simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // move over init segment properties to media request

      segmentInfo.segment.map = simpleSegment.map;
    } // if this request included a segment key, save that data in the cache


    if (simpleSegment.key) {
      this.segmentKey(simpleSegment.key, true);
    }

    segmentInfo.isFmp4 = simpleSegment.isFmp4;
    segmentInfo.timingInfo = segmentInfo.timingInfo || {};

    if (segmentInfo.isFmp4) {
      // fmp4 segments are not transmuxed; use the probed timing directly
      this.trigger('fmp4');
      segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;
    } else {
      var trackInfo = this.getCurrentMediaInfo_();
      var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
      var firstVideoFrameTimeForData;

      if (useVideoTimingInfo) {
        firstVideoFrameTimeForData = segmentInfo.videoTimingInfo.start;
      } // Segment loader knows more about segment timing than the transmuxer (in certain
      // aspects), so make any changes required for a more accurate start time.
      // Don't set the end time yet, as the segment may not be finished processing.


      segmentInfo.timingInfo.start = this.trueSegmentStart_({
        currentStart: segmentInfo.timingInfo.start,
        playlist: segmentInfo.playlist,
        mediaIndex: segmentInfo.mediaIndex,
        currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),
        useVideoTimingInfo: useVideoTimingInfo,
        firstVideoFrameTimeForData: firstVideoFrameTimeForData,
        videoTimingInfo: segmentInfo.videoTimingInfo,
        audioTimingInfo: segmentInfo.audioTimingInfo
      });
    } // Init segments for audio and video only need to be appended in certain cases. Now
    // that data is about to be appended, we can check the final cases to determine
    // whether we should append an init segment.


    this.updateAppendInitSegmentStatus(segmentInfo, result.type); // Timestamp offset should be updated once we get new data and have its timing info,
    // as we use the start of the segment to offset the best guess (playlist provided)
    // timestamp offset.

    this.updateSourceBufferTimestampOffset_(segmentInfo); // if this is a sync request we need to determine whether it should
    // be appended or not.

    if (segmentInfo.isSyncRequest) {
      // first save/update our timing info for this segment.
      // this is what allows us to choose an accurate segment
      // and the main reason we make a sync request.
      this.updateTimingInfoEnd_(segmentInfo);
      this.syncController_.saveSegmentTimingInfo({
        segmentInfo: segmentInfo,
        shouldSaveTimelineMapping: this.loaderType_ === 'main'
      });
      var next = this.chooseNextRequest_(); // If the sync request isn't the segment that would be requested next
      // after taking into account its timing info, do not append it.

      if (next.mediaIndex !== segmentInfo.mediaIndex || next.partIndex !== segmentInfo.partIndex) {
        this.logger_('sync segment was incorrect, not appending');
        return;
      } // otherwise append it like any other segment as our guess was correct.


      this.logger_('sync segment was correct, appending');
    } // Save some state so that in the future anything waiting on first append (and/or
    // timestamp offset(s)) can process immediately. While the extra state isn't optimal,
    // we need some notion of whether the timestamp offset or other relevant information
    // has had a chance to be set.


    segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.

    this.processMetadataQueue_();
    this.appendData_(segmentInfo, result);
  };
17424
17425 _proto.updateAppendInitSegmentStatus = function updateAppendInitSegmentStatus(segmentInfo, type) {
17426 // alt audio doesn't manage timestamp offset
17427 if (this.loaderType_ === 'main' && typeof segmentInfo.timestampOffset === 'number' && // in the case that we're handling partial data, we don't want to append an init
17428 // segment for each chunk
17429 !segmentInfo.changedTimestampOffset) {
17430 // if the timestamp offset changed, the timeline may have changed, so we have to re-
17431 // append init segments
17432 this.appendInitSegment_ = {
17433 audio: true,
17434 video: true
17435 };
17436 }
17437
17438 if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {
17439 // make sure we append init segment on playlist changes, in case the media config
17440 // changed
17441 this.appendInitSegment_[type] = true;
17442 }
17443 };
17444
  _proto.getInitSegmentAndUpdateState_ = function getInitSegmentAndUpdateState_(_ref4) {
    var type = _ref4.type,
        initSegment = _ref4.initSegment,
        map = _ref4.map,
        playlist = _ref4.playlist;

    // Returns the init segment bytes to prepend to the next append for
    // `type`, or null if no init segment append is needed. Also updates the
    // bookkeeping (active init segment id, playlist of last init segment,
    // per-type append flags) that decides future init segment appends.
    //
    // "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section
    // (Section 3) required to parse the applicable Media Segments.  It applies to every
    // Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag
    // or until the end of the playlist."
    // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5
    if (map) {
      var id = initSegmentId(map);

      if (this.activeInitSegmentId_ === id) {
        // don't need to re-append the init segment if the ID matches
        return null;
      } // a map-specified init segment takes priority over any transmuxed (or otherwise
      // obtained) init segment
      //
      // this also caches the init segment for later use


      initSegment = this.initSegmentForMap(map, true).bytes;
      this.activeInitSegmentId_ = id;
    } // We used to always prepend init segments for video, however, that shouldn't be
    // necessary. Instead, we should only append on changes, similar to what we've always
    // done for audio. This is more important (though may not be that important) for
    // frame-by-frame appending for LHLS, simply because of the increased quantity of
    // appends.


    if (initSegment && this.appendInitSegment_[type]) {
      // Make sure we track the playlist that we last used for the init segment, so that
      // we can re-append the init segment in the event that we get data from a new
      // playlist. Discontinuities and track changes are handled in other sections.
      this.playlistOfLastInitSegment_[type] = playlist; // Disable future init segment appends for this type. Until a change is necessary.

      this.appendInitSegment_[type] = false; // we need to clear out the fmp4 active init segment id, since
      // we are appending the muxer init segment

      this.activeInitSegmentId_ = null;
      return initSegment;
    }

    // no init segment available, or none needed for this type right now
    return null;
  };
17492
  _proto.handleQuotaExceededError_ = function handleQuotaExceededError_(_ref5, error) {
    // Recovers from a QUOTA_EXCEEDED_ERR thrown by a source buffer append:
    // either errors out the playlist (when there is no back buffer left to
    // clear) or removes back buffer, waits, and retries the append.
    var _this3 = this;

    var segmentInfo = _ref5.segmentInfo,
        type = _ref5.type,
        bytes = _ref5.bytes;
    var audioBuffered = this.sourceUpdater_.audioBuffered();
    var videoBuffered = this.sourceUpdater_.videoBuffered(); // For now we're ignoring any notion of gaps in the buffer, but they, in theory,
    // should be cleared out during the buffer removals. However, log in case it helps
    // debug.

    if (audioBuffered.length > 1) {
      this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the audio buffer: ' + timeRangesToArray(audioBuffered).join(', '));
    }

    if (videoBuffered.length > 1) {
      this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the video buffer: ' + timeRangesToArray(videoBuffered).join(', '));
    }

    // total buffered spans (first start to last end), treating empty buffers
    // as zero-length
    var audioBufferStart = audioBuffered.length ? audioBuffered.start(0) : 0;
    var audioBufferEnd = audioBuffered.length ? audioBuffered.end(audioBuffered.length - 1) : 0;
    var videoBufferStart = videoBuffered.length ? videoBuffered.start(0) : 0;
    var videoBufferEnd = videoBuffered.length ? videoBuffered.end(videoBuffered.length - 1) : 0;

    if (audioBufferEnd - audioBufferStart <= MIN_BACK_BUFFER && videoBufferEnd - videoBufferStart <= MIN_BACK_BUFFER) {
      // Can't remove enough buffer to make room for new segment (or the browser doesn't
      // allow for appends of segments this size). In the future, it may be possible to
      // split up the segment and append in pieces, but for now, error out this playlist
      // in an attempt to switch to a more manageable rendition.
      this.logger_('On QUOTA_EXCEEDED_ERR, single segment too large to append to ' + 'buffer, triggering an error. ' + ("Appended byte length: " + bytes.byteLength + ", ") + ("audio buffer: " + timeRangesToArray(audioBuffered).join(', ') + ", ") + ("video buffer: " + timeRangesToArray(videoBuffered).join(', ') + ", "));
      this.error({
        message: 'Quota exceeded error with append of a single segment of content',
        excludeUntil: Infinity
      });
      this.trigger('error');
      return;
    } // To try to resolve the quota exceeded error, clear back buffer and retry. This means
    // that the segment-loader should block on future events until this one is handled, so
    // that it doesn't keep moving onto further segments. Adding the call to the call
    // queue will prevent further appends until waitingOnRemove_ and
    // quotaExceededErrorRetryTimeout_ are cleared.
    //
    // Note that this will only block the current loader. In the case of demuxed content,
    // the other load may keep filling as fast as possible. In practice, this should be
    // OK, as it is a rare case when either audio has a high enough bitrate to fill up a
    // source buffer, or video fills without enough room for audio to append (and without
    // the availability of clearing out seconds of back buffer to make room for audio).
    // But it might still be good to handle this case in the future as a TODO.


    this.waitingOnRemove_ = true;
    // re-queue the failed append; it will run once the retry timeout clears
    this.callQueue_.push(this.appendToSourceBuffer_.bind(this, {
      segmentInfo: segmentInfo,
      type: type,
      bytes: bytes
    }));
    var currentTime = this.currentTime_(); // Try to remove as much audio and video as possible to make room for new content
    // before retrying.

    var timeToRemoveUntil = currentTime - MIN_BACK_BUFFER;
    this.logger_("On QUOTA_EXCEEDED_ERR, removing audio/video from 0 to " + timeToRemoveUntil);
    this.remove(0, timeToRemoveUntil, function () {
      _this3.logger_("On QUOTA_EXCEEDED_ERR, retrying append in " + MIN_BACK_BUFFER + "s");

      _this3.waitingOnRemove_ = false; // wait the length of time alotted in the back buffer to prevent wasted
      // attempts (since we can't clear less than the minimum)

      _this3.quotaExceededErrorRetryTimeout_ = window$1.setTimeout(function () {
        _this3.logger_('On QUOTA_EXCEEDED_ERR, re-processing call queue');

        _this3.quotaExceededErrorRetryTimeout_ = null;

        _this3.processCallQueue_();
      }, MIN_BACK_BUFFER * 1000);
    }, true);
  };
17569
17570 _proto.handleAppendError_ = function handleAppendError_(_ref6, error) {
17571 var segmentInfo = _ref6.segmentInfo,
17572 type = _ref6.type,
17573 bytes = _ref6.bytes;
17574
17575 // if there's no error, nothing to do
17576 if (!error) {
17577 return;
17578 }
17579
17580 if (error.code === QUOTA_EXCEEDED_ERR) {
17581 this.handleQuotaExceededError_({
17582 segmentInfo: segmentInfo,
17583 type: type,
17584 bytes: bytes
17585 }); // A quota exceeded error should be recoverable with a future re-append, so no need
17586 // to trigger an append error.
17587
17588 return;
17589 }
17590
17591 this.logger_('Received non QUOTA_EXCEEDED_ERR on append', error);
17592 this.error(type + " append of " + bytes.length + "b failed for segment " + ("#" + segmentInfo.mediaIndex + " in playlist " + segmentInfo.playlist.id)); // If an append errors, we often can't recover.
17593 // (see https://w3c.github.io/media-source/#sourcebuffer-append-error).
17594 //
17595 // Trigger a special error so that it can be handled separately from normal,
17596 // recoverable errors.
17597
17598 this.trigger('appenderror');
17599 };
17600
17601 _proto.appendToSourceBuffer_ = function appendToSourceBuffer_(_ref7) {
17602 var segmentInfo = _ref7.segmentInfo,
17603 type = _ref7.type,
17604 initSegment = _ref7.initSegment,
17605 data = _ref7.data,
17606 bytes = _ref7.bytes;
17607
17608 // If this is a re-append, bytes were already created and don't need to be recreated
17609 if (!bytes) {
17610 var segments = [data];
17611 var byteLength = data.byteLength;
17612
17613 if (initSegment) {
17614 // if the media initialization segment is changing, append it before the content
17615 // segment
17616 segments.unshift(initSegment);
17617 byteLength += initSegment.byteLength;
17618 } // Technically we should be OK appending the init segment separately, however, we
17619 // haven't yet tested that, and prepending is how we have always done things.
17620
17621
17622 bytes = concatSegments({
17623 bytes: byteLength,
17624 segments: segments
17625 });
17626 }
17627
17628 this.sourceUpdater_.appendBuffer({
17629 segmentInfo: segmentInfo,
17630 type: type,
17631 bytes: bytes
17632 }, this.handleAppendError_.bind(this, {
17633 segmentInfo: segmentInfo,
17634 type: type,
17635 bytes: bytes
17636 }));
17637 };
17638
17639 _proto.handleSegmentTimingInfo_ = function handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {
17640 if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
17641 return;
17642 }
17643
17644 var segment = this.pendingSegment_.segment;
17645 var timingInfoProperty = type + "TimingInfo";
17646
17647 if (!segment[timingInfoProperty]) {
17648 segment[timingInfoProperty] = {};
17649 }
17650
17651 segment[timingInfoProperty].transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;
17652 segment[timingInfoProperty].transmuxedPresentationStart = segmentTimingInfo.start.presentation;
17653 segment[timingInfoProperty].transmuxedDecodeStart = segmentTimingInfo.start.decode;
17654 segment[timingInfoProperty].transmuxedPresentationEnd = segmentTimingInfo.end.presentation;
17655 segment[timingInfoProperty].transmuxedDecodeEnd = segmentTimingInfo.end.decode; // mainly used as a reference for debugging
17656
17657 segment[timingInfoProperty].baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;
17658 };
17659
17660 _proto.appendData_ = function appendData_(segmentInfo, result) {
17661 var type = result.type,
17662 data = result.data;
17663
17664 if (!data || !data.byteLength) {
17665 return;
17666 }
17667
17668 if (type === 'audio' && this.audioDisabled_) {
17669 return;
17670 }
17671
17672 var initSegment = this.getInitSegmentAndUpdateState_({
17673 type: type,
17674 initSegment: result.initSegment,
17675 playlist: segmentInfo.playlist,
17676 map: segmentInfo.isFmp4 ? segmentInfo.segment.map : null
17677 });
17678 this.appendToSourceBuffer_({
17679 segmentInfo: segmentInfo,
17680 type: type,
17681 initSegment: initSegment,
17682 data: data
17683 });
17684 }
17685 /**
17686 * load a specific segment from a request into the buffer
17687 *
17688 * @private
17689 */
17690 ;
17691
  _proto.loadSegment_ = function loadSegment_(segmentInfo) {
    // Begins loading the given segment: sets WAITING state, trims back
    // buffer, and either requests the segment immediately or queues the
    // request until the loader has enough info to load (audio loaders may
    // need to wait on main-loader timing).
    var _this4 = this;

    this.state = 'WAITING';
    this.pendingSegment_ = segmentInfo;
    this.trimBackBuffer_(segmentInfo);

    if (typeof segmentInfo.timestampOffset === 'number') {
      // a numeric timestampOffset indicates a timeline change, so previously
      // parsed MP4 captions should be dropped
      if (this.transmuxer_) {
        this.transmuxer_.postMessage({
          action: 'clearAllMp4Captions'
        });
      }
    }

    if (!this.hasEnoughInfoToLoad_()) {
      this.loadQueue_.push(function () {
        // regenerate the audioAppendStart, timestampOffset, etc as they
        // may have changed since this function was added to the queue.
        var options = _extends({}, segmentInfo, {
          forceTimestampOffset: true
        });

        _extends(segmentInfo, _this4.generateSegmentInfo_(options));

        _this4.isPendingTimestampOffset_ = false;

        _this4.updateTransmuxerAndRequestSegment_(segmentInfo);
      });
      return;
    }

    this.updateTransmuxerAndRequestSegment_(segmentInfo);
  };
17726
  _proto.updateTransmuxerAndRequestSegment_ = function updateTransmuxerAndRequestSegment_(segmentInfo) {
    // Resets/configures the transmuxer for a timestamp offset change if
    // needed, then kicks off the actual media segment request, wiring all of
    // this loader's event handlers into the request lifecycle.
    var _this5 = this;

    // We'll update the source buffer's timestamp offset once we have transmuxed data, but
    // the transmuxer still needs to be updated before then.
    //
    // Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp
    // offset must be passed to the transmuxer for stream correcting adjustments.
    if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {
      this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was cleared

      segmentInfo.gopsToAlignWith = [];
      this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start fresh

      this.transmuxer_.postMessage({
        action: 'reset'
      });
      this.transmuxer_.postMessage({
        action: 'setTimestampOffset',
        timestampOffset: segmentInfo.timestampOffset
      });
    }

    var simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);
    var isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex);
    var isWalkingForward = this.mediaIndex !== null;
    var isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && // currentTimeline starts at -1, so we shouldn't end the timeline switching to 0,
    // the first timeline
    segmentInfo.timeline > 0;
    var isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;
    this.logger_("Requesting " + segmentInfoString(segmentInfo)); // If there's an init segment associated with this segment, but it is not cached (identified by a lack of bytes),
    // then this init segment has never been seen before and should be appended.
    //
    // At this point the content type (audio/video or both) is not yet known, but it should be safe to set
    // both to true and leave the decision of whether to append the init segment to append time.

    if (simpleSegment.map && !simpleSegment.map.bytes) {
      this.logger_('going to request init segment.');
      this.appendInitSegment_ = {
        video: true,
        audio: true
      };
    }

    // start the request; abortRequests lets callers cancel it later
    segmentInfo.abortRequests = mediaSegmentRequest({
      xhr: this.vhs_.xhr,
      xhrOptions: this.xhrOptions_,
      decryptionWorker: this.decrypter_,
      segment: simpleSegment,
      abortFn: this.handleAbort_.bind(this, segmentInfo),
      progressFn: this.handleProgress_.bind(this),
      trackInfoFn: this.handleTrackInfo_.bind(this),
      timingInfoFn: this.handleTimingInfo_.bind(this),
      videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),
      audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),
      captionsFn: this.handleCaptions_.bind(this),
      isEndOfTimeline: isEndOfTimeline,
      endedTimelineFn: function endedTimelineFn() {
        _this5.logger_('received endedtimeline callback');
      },
      id3Fn: this.handleId3_.bind(this),
      dataFn: this.handleData_.bind(this),
      doneFn: this.segmentRequestFinished_.bind(this),
      onTransmuxerLog: function onTransmuxerLog(_ref8) {
        var message = _ref8.message,
            level = _ref8.level,
            stream = _ref8.stream;

        _this5.logger_(segmentInfoString(segmentInfo) + " logged from transmuxer stream " + stream + " as a " + level + ": " + message);
      }
    });
  }
  /**
   * trim the back buffer so that we don't have too much data
   * in the source buffer
   *
   * @private
   *
   * @param {Object} segmentInfo - the current segment
   */
  ;
17808
17809 _proto.trimBackBuffer_ = function trimBackBuffer_(segmentInfo) {
17810 var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10); // Chrome has a hard limit of 150MB of
17811 // buffer and a very conservative "garbage collector"
17812 // We manually clear out the old buffer to ensure
17813 // we don't trigger the QuotaExceeded error
17814 // on the source buffer during subsequent appends
17815
17816 if (removeToTime > 0) {
17817 this.remove(0, removeToTime);
17818 }
17819 }
17820 /**
 * Creates a simplified copy of the segment object with just the
17822 * information necessary to perform the XHR and decryption
17823 *
17824 * @private
17825 *
17826 * @param {Object} segmentInfo - the current segment
17827 * @return {Object} a simplified segment object copy
17828 */
17829 ;
17830
17831 _proto.createSimplifiedSegmentObj_ = function createSimplifiedSegmentObj_(segmentInfo) {
17832 var segment = segmentInfo.segment;
17833 var part = segmentInfo.part;
17834 var simpleSegment = {
17835 resolvedUri: part ? part.resolvedUri : segment.resolvedUri,
17836 byterange: part ? part.byterange : segment.byterange,
17837 requestId: segmentInfo.requestId,
17838 transmuxer: segmentInfo.transmuxer,
17839 audioAppendStart: segmentInfo.audioAppendStart,
17840 gopsToAlignWith: segmentInfo.gopsToAlignWith,
17841 part: segmentInfo.part
17842 };
17843 var previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];
17844
17845 if (previousSegment && previousSegment.timeline === segment.timeline) {
17846 // The baseStartTime of a segment is used to handle rollover when probing the TS
17847 // segment to retrieve timing information. Since the probe only looks at the media's
17848 // times (e.g., PTS and DTS values of the segment), and doesn't consider the
17849 // player's time (e.g., player.currentTime()), baseStartTime should reflect the
17850 // media time as well. transmuxedDecodeEnd represents the end time of a segment, in
17851 // seconds of media time, so should be used here. The previous segment is used since
17852 // the end of the previous segment should represent the beginning of the current
17853 // segment, so long as they are on the same timeline.
17854 if (previousSegment.videoTimingInfo) {
17855 simpleSegment.baseStartTime = previousSegment.videoTimingInfo.transmuxedDecodeEnd;
17856 } else if (previousSegment.audioTimingInfo) {
17857 simpleSegment.baseStartTime = previousSegment.audioTimingInfo.transmuxedDecodeEnd;
17858 }
17859 }
17860
17861 if (segment.key) {
17862 // if the media sequence is greater than 2^32, the IV will be incorrect
17863 // assuming 10s segments, that would be about 1300 years
17864 var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
17865 simpleSegment.key = this.segmentKey(segment.key);
17866 simpleSegment.key.iv = iv;
17867 }
17868
17869 if (segment.map) {
17870 simpleSegment.map = this.initSegmentForMap(segment.map);
17871 }
17872
17873 return simpleSegment;
17874 };
17875
17876 _proto.saveTransferStats_ = function saveTransferStats_(stats) {
17877 // every request counts as a media request even if it has been aborted
17878 // or canceled due to a timeout
17879 this.mediaRequests += 1;
17880
17881 if (stats) {
17882 this.mediaBytesTransferred += stats.bytesReceived;
17883 this.mediaTransferDuration += stats.roundTripTime;
17884 }
17885 };
17886
17887 _proto.saveBandwidthRelatedStats_ = function saveBandwidthRelatedStats_(duration, stats) {
17888 // byteLength will be used for throughput, and should be based on bytes receieved,
17889 // which we only know at the end of the request and should reflect total bytes
17890 // downloaded rather than just bytes processed from components of the segment
17891 this.pendingSegment_.byteLength = stats.bytesReceived;
17892
17893 if (duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
17894 this.logger_("Ignoring segment's bandwidth because its duration of " + duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
17895 return;
17896 }
17897
17898 this.bandwidth = stats.bandwidth;
17899 this.roundTrip = stats.roundTripTime;
17900 };
17901
17902 _proto.handleTimeout_ = function handleTimeout_() {
17903 // although the VTT segment loader bandwidth isn't really used, it's good to
17904 // maintain functinality between segment loaders
17905 this.mediaRequestsTimedout += 1;
17906 this.bandwidth = 1;
17907 this.roundTrip = NaN;
17908 this.trigger('bandwidthupdate');
17909 this.trigger('timeout');
17910 }
17911 /**
17912 * Handle the callback from the segmentRequest function and set the
17913 * associated SegmentLoader state and errors if necessary
17914 *
17915 * @private
17916 */
17917 ;
17918
  _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
    // TODO handle special cases, e.g., muxed audio/video but only audio in the segment
    // check the call queue directly since this function doesn't need to deal with any
    // data, and can continue even if the source buffers are not set up and we didn't get
    // any data from the segment
    if (this.callQueue_.length) {
      this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
      return;
    }

    // transfer stats (request counts, bytes, round trip time) are recorded for
    // every request, even failed ones
    this.saveTransferStats_(simpleSegment.stats); // The request was aborted and the SegmentLoader has already been reset

    if (!this.pendingSegment_) {
      return;
    } // the request was aborted and the SegmentLoader has already started
    // another request. this can happen when the timeout for an aborted
    // request triggers due to a limitation in the XHR library
    // do not count this as any sort of request or we risk double-counting


    if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
      return;
    } // an error occurred from the active pendingSegment_ so reset everything


    if (error) {
      this.pendingSegment_ = null;
      this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be done

      if (error.code === REQUEST_ERRORS.ABORTED) {
        return;
      }

      this.pause(); // the error is really just that at least one of the requests timed-out
      // set the bandwidth to a very low value and trigger an ABR switch to
      // take emergency action

      if (error.code === REQUEST_ERRORS.TIMEOUT) {
        this.handleTimeout_();
        return;
      } // if control-flow has arrived here, then the error is real
      // emit an error event to blacklist the current playlist


      this.mediaRequestsErrored += 1;
      this.error(error);
      this.trigger('error');
      return;
    }

    var segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request
    // generated for ABR purposes

    this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
    segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;

    // save any GOP info produced by the transmux for later appends
    if (result.gopInfo) {
      this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);
    } // Although we may have already started appending on progress, we shouldn't switch the
    // state away from loading until we are officially done loading the segment data.


    this.state = 'APPENDING'; // used for testing

    this.trigger('appending');
    // finish only after every queued append for this segment has completed
    this.waitForAppendsToComplete_(segmentInfo);
  };
17986
17987 _proto.setTimeMapping_ = function setTimeMapping_(timeline) {
17988 var timelineMapping = this.syncController_.mappingForTimeline(timeline);
17989
17990 if (timelineMapping !== null) {
17991 this.timeMapping_ = timelineMapping;
17992 }
17993 };
17994
17995 _proto.updateMediaSecondsLoaded_ = function updateMediaSecondsLoaded_(segment) {
17996 if (typeof segment.start === 'number' && typeof segment.end === 'number') {
17997 this.mediaSecondsLoaded += segment.end - segment.start;
17998 } else {
17999 this.mediaSecondsLoaded += segment.duration;
18000 }
18001 };
18002
18003 _proto.shouldUpdateTransmuxerTimestampOffset_ = function shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {
18004 if (timestampOffset === null) {
18005 return false;
18006 } // note that we're potentially using the same timestamp offset for both video and
18007 // audio
18008
18009
18010 if (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
18011 return true;
18012 }
18013
18014 if (!this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
18015 return true;
18016 }
18017
18018 return false;
18019 };
18020
18021 _proto.trueSegmentStart_ = function trueSegmentStart_(_ref9) {
18022 var currentStart = _ref9.currentStart,
18023 playlist = _ref9.playlist,
18024 mediaIndex = _ref9.mediaIndex,
18025 firstVideoFrameTimeForData = _ref9.firstVideoFrameTimeForData,
18026 currentVideoTimestampOffset = _ref9.currentVideoTimestampOffset,
18027 useVideoTimingInfo = _ref9.useVideoTimingInfo,
18028 videoTimingInfo = _ref9.videoTimingInfo,
18029 audioTimingInfo = _ref9.audioTimingInfo;
18030
18031 if (typeof currentStart !== 'undefined') {
18032 // if start was set once, keep using it
18033 return currentStart;
18034 }
18035
18036 if (!useVideoTimingInfo) {
18037 return audioTimingInfo.start;
18038 }
18039
18040 var previousSegment = playlist.segments[mediaIndex - 1]; // The start of a segment should be the start of the first full frame contained
18041 // within that segment. Since the transmuxer maintains a cache of incomplete data
18042 // from and/or the last frame seen, the start time may reflect a frame that starts
18043 // in the previous segment. Check for that case and ensure the start time is
18044 // accurate for the segment.
18045
18046 if (mediaIndex === 0 || !previousSegment || typeof previousSegment.start === 'undefined' || previousSegment.end !== firstVideoFrameTimeForData + currentVideoTimestampOffset) {
18047 return firstVideoFrameTimeForData;
18048 }
18049
18050 return videoTimingInfo.start;
18051 };
18052
  _proto.waitForAppendsToComplete_ = function waitForAppendsToComplete_(segmentInfo) {
    var trackInfo = this.getCurrentMediaInfo_(segmentInfo);

    // no track info means the transmux never reported starting media, so the
    // content can't be appended
    if (!trackInfo) {
      this.error({
        message: 'No starting media returned, likely due to an unsupported media format.',
        blacklistDuration: Infinity
      });
      this.trigger('error');
      return;
    } // Although transmuxing is done, appends may not yet be finished. Throw a marker
    // on each queue this loader is responsible for to ensure that the appends are
    // complete.


    var hasAudio = trackInfo.hasAudio,
        hasVideo = trackInfo.hasVideo,
        isMuxed = trackInfo.isMuxed;
    // only the main loader waits on video appends; audio appends only apply
    // when audio is enabled and demuxed
    var waitForVideo = this.loaderType_ === 'main' && hasVideo;
    var waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;
    segmentInfo.waitingOnAppends = 0; // segments with no data

    if (!segmentInfo.hasAppendedData_) {
      if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {
        // When there's no audio or video data in the segment, there's no audio or video
        // timing information.
        //
        // If there's no audio or video timing information, then the timestamp offset
        // can't be adjusted to the appropriate value for the transmuxer and source
        // buffers.
        //
        // Therefore, the next segment should be used to set the timestamp offset.
        this.isPendingTimestampOffset_ = true;
      } // override settings for metadata only segments


      segmentInfo.timingInfo = {
        start: 0
      };
      segmentInfo.waitingOnAppends++;

      if (!this.isPendingTimestampOffset_) {
        // update the timestampoffset
        this.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have
        // no video/audio data.

        this.processMetadataQueue_();
      } // append is "done" instantly with no data.


      this.checkAppendsDone_(segmentInfo);
      return;
    } // Since source updater could call back synchronously, do the increments first.


    if (waitForVideo) {
      segmentInfo.waitingOnAppends++;
    }

    if (waitForAudio) {
      segmentInfo.waitingOnAppends++;
    }

    if (waitForVideo) {
      this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
    }

    if (waitForAudio) {
      this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
    }
  };
18124
18125 _proto.checkAppendsDone_ = function checkAppendsDone_(segmentInfo) {
18126 if (this.checkForAbort_(segmentInfo.requestId)) {
18127 return;
18128 }
18129
18130 segmentInfo.waitingOnAppends--;
18131
18132 if (segmentInfo.waitingOnAppends === 0) {
18133 this.handleAppendsDone_();
18134 }
18135 };
18136
18137 _proto.checkForIllegalMediaSwitch = function checkForIllegalMediaSwitch(trackInfo) {
18138 var illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.getCurrentMediaInfo_(), trackInfo);
18139
18140 if (illegalMediaSwitchError) {
18141 this.error({
18142 message: illegalMediaSwitchError,
18143 blacklistDuration: Infinity
18144 });
18145 this.trigger('error');
18146 return true;
18147 }
18148
18149 return false;
18150 };
18151
  _proto.updateSourceBufferTimestampOffset_ = function updateSourceBufferTimestampOffset_(segmentInfo) {
    if (segmentInfo.timestampOffset === null || // we don't yet have the start for whatever media type (video or audio) has
    // priority, timing-wise, so we must wait
    typeof segmentInfo.timingInfo.start !== 'number' || // already updated the timestamp offset for this segment
    segmentInfo.changedTimestampOffset || // the alt audio loader should not be responsible for setting the timestamp offset
    this.loaderType_ !== 'main') {
      return;
    }

    var didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that
    // the timing info here comes from video. In the event that the audio is longer than
    // the video, this will trim the start of the audio.
    // This also trims any offset from 0 at the beginning of the media

    segmentInfo.timestampOffset -= this.getSegmentStartTimeForTimestampOffsetCalculation_({
      videoTimingInfo: segmentInfo.segment.videoTimingInfo,
      audioTimingInfo: segmentInfo.segment.audioTimingInfo,
      timingInfo: segmentInfo.timingInfo
    }); // In the event that there are part segment downloads, each will try to update the
    // timestamp offset. Retaining this bit of state prevents us from updating in the
    // future (within the same segment), however, there may be a better way to handle it.

    segmentInfo.changedTimestampOffset = true;

    // apply the adjusted offset to each source buffer that doesn't already
    // have it
    if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
      this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);
      didChange = true;
    }

    if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
      this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);
      didChange = true;
    }

    // notify listeners at most once, even if both buffers changed
    if (didChange) {
      this.trigger('timestampoffset');
    }
  };
18190
18191 _proto.getSegmentStartTimeForTimestampOffsetCalculation_ = function getSegmentStartTimeForTimestampOffsetCalculation_(_ref10) {
18192 var videoTimingInfo = _ref10.videoTimingInfo,
18193 audioTimingInfo = _ref10.audioTimingInfo,
18194 timingInfo = _ref10.timingInfo;
18195
18196 if (!this.useDtsForTimestampOffset_) {
18197 return timingInfo.start;
18198 }
18199
18200 if (videoTimingInfo && typeof videoTimingInfo.transmuxedDecodeStart === 'number') {
18201 return videoTimingInfo.transmuxedDecodeStart;
18202 } // handle audio only
18203
18204
18205 if (audioTimingInfo && typeof audioTimingInfo.transmuxedDecodeStart === 'number') {
18206 return audioTimingInfo.transmuxedDecodeStart;
18207 } // handle content not transmuxed (e.g., MP4)
18208
18209
18210 return timingInfo.start;
18211 };
18212
18213 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_(segmentInfo) {
18214 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
18215 var trackInfo = this.getMediaInfo_();
18216 var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
18217 var prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;
18218
18219 if (!prioritizedTimingInfo) {
18220 return;
18221 }
18222
18223 segmentInfo.timingInfo.end = typeof prioritizedTimingInfo.end === 'number' ? // End time may not exist in a case where we aren't parsing the full segment (one
18224 // current example is the case of fmp4), so use the rough duration to calculate an
18225 // end time.
18226 prioritizedTimingInfo.end : prioritizedTimingInfo.start + segmentInfo.duration;
18227 }
18228 /**
18229 * callback to run when appendBuffer is finished. detects if we are
18230 * in a good state to do things with the data we got, or if we need
18231 * to wait for more
18232 *
18233 * @private
18234 */
18235 ;
18236
  _proto.handleAppendsDone_ = function handleAppendsDone_() {
    // appendsdone can cause an abort
    if (this.pendingSegment_) {
      this.trigger('appendsdone');
    }

    if (!this.pendingSegment_) {
      this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in
      // all appending cases?

      if (!this.paused()) {
        this.monitorBuffer_();
      }

      return;
    }

    var segmentInfo = this.pendingSegment_; // Now that the end of the segment has been reached, we can set the end time. It's
    // best to wait until all appends are done so we're sure that the primary media is
    // finished (and we have its end time).

    this.updateTimingInfoEnd_(segmentInfo);

    if (this.shouldSaveSegmentTimingInfo_) {
      // Timeline mappings should only be saved for the main loader. This is for multiple
      // reasons:
      //
      // 1) Only one mapping is saved per timeline, meaning that if both the audio loader
      //    and the main loader try to save the timeline mapping, whichever comes later
      //    will overwrite the first. In theory this is OK, as the mappings should be the
      //    same, however, it breaks for (2)
      // 2) In the event of a live stream, the initial live point will make for a somewhat
      //    arbitrary mapping. If audio and video streams are not perfectly in-sync, then
      //    the mapping will be off for one of the streams, dependent on which one was
      //    first saved (see (1)).
      // 3) Primary timing goes by video in VHS, so the mapping should be video.
      //
      // Since the audio loader will wait for the main loader to load the first segment,
      // the main loader will save the first timeline mapping, and ensure that there won't
      // be a case where audio loads two segments without saving a mapping (thus leading
      // to missing segment timing info).
      this.syncController_.saveSegmentTimingInfo({
        segmentInfo: segmentInfo,
        shouldSaveTimelineMapping: this.loaderType_ === 'main'
      });
    }

    // surface unusual segment durations (warn severity goes to the console,
    // everything else only to the debug logger)
    var segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);

    if (segmentDurationMessage) {
      if (segmentDurationMessage.severity === 'warn') {
        videojs.log.warn(segmentDurationMessage.message);
      } else {
        this.logger_(segmentDurationMessage.message);
      }
    }

    // this segment is finished processing; record throughput and clear the
    // pending state before any further event handling
    this.recordThroughput_(segmentInfo);
    this.pendingSegment_ = null;
    this.state = 'READY';

    if (segmentInfo.isSyncRequest) {
      this.trigger('syncinfoupdate'); // if the sync request was not appended
      // then it was not the correct segment.
      // throw it away and use the data it gave us
      // to get the correct one.

      if (!segmentInfo.hasAppendedData_) {
        this.logger_("Throwing away un-appended sync request " + segmentInfoString(segmentInfo));
        return;
      }
    }

    this.logger_("Appended " + segmentInfoString(segmentInfo));
    this.addSegmentMetadataCue_(segmentInfo);
    this.fetchAtBuffer_ = true;

    if (this.currentTimeline_ !== segmentInfo.timeline) {
      this.timelineChangeController_.lastTimelineChange({
        type: this.loaderType_,
        from: this.currentTimeline_,
        to: segmentInfo.timeline
      }); // If audio is not disabled, the main segment loader is responsible for updating
      // the audio timeline as well. If the content is video only, this won't have any
      // impact.

      if (this.loaderType_ === 'main' && !this.audioDisabled_) {
        this.timelineChangeController_.lastTimelineChange({
          type: 'audio',
          from: this.currentTimeline_,
          to: segmentInfo.timeline
        });
      }
    }

    this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before
    // the following conditional otherwise it may consider this a bad "guess"
    // and attempt to resync when the post-update seekable window and live
    // point would mean that this was the perfect segment to fetch

    this.trigger('syncinfoupdate');
    var segment = segmentInfo.segment;
    var part = segmentInfo.part;
    var badSegmentGuess = segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3;
    var badPartGuess = part && part.end && this.currentTime_() - part.end > segmentInfo.playlist.partTargetDuration * 3; // If we previously appended a segment/part that ends more than 3 part/targetDurations before
    // the currentTime_ that means that our conservative guess was too conservative.
    // In that case, reset the loader state so that we try to use any information gained
    // from the previous request to create a new, more accurate, sync-point.

    if (badSegmentGuess || badPartGuess) {
      this.logger_("bad " + (badSegmentGuess ? 'segment' : 'part') + " " + segmentInfoString(segmentInfo));
      this.resetEverything();
      return;
    }

    var isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we have enough time to get a sync segment
    // and conservatively guess

    if (isWalkingForward) {
      this.trigger('bandwidthupdate');
    }

    this.trigger('progress');
    this.mediaIndex = segmentInfo.mediaIndex;
    this.partIndex = segmentInfo.partIndex; // any time an update finishes and the last segment is in the
    // buffer, end the stream. this ensures the "ended" event will
    // fire if playback reaches that point.

    if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex)) {
      this.endOfStream();
    } // used for testing


    this.trigger('appended');

    if (segmentInfo.hasAppendedData_) {
      this.mediaAppends++;
    }

    if (!this.paused()) {
      this.monitorBuffer_();
    }
  }
18380 /**
18381 * Records the current throughput of the decrypt, transmux, and append
 * portion of the segment pipeline. `throughput.rate` is the cumulative
18383 * moving average of the throughput. `throughput.count` is the number of
18384 * data points in the average.
18385 *
18386 * @private
18387 * @param {Object} segmentInfo the object returned by loadSegment
18388 */
18389 ;
18390
18391 _proto.recordThroughput_ = function recordThroughput_(segmentInfo) {
18392 if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
18393 this.logger_("Ignoring segment's throughput because its duration of " + segmentInfo.duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
18394 return;
18395 }
18396
18397 var rate = this.throughput.rate; // Add one to the time to ensure that we don't accidentally attempt to divide
18398 // by zero in the case where the throughput is ridiculously high
18399
18400 var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1; // Multiply by 8000 to convert from bytes/millisecond to bits/second
18401
18402 var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000); // This is just a cumulative moving average calculation:
18403 // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
18404
18405 this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
18406 }
18407 /**
18408 * Adds a cue to the segment-metadata track with some metadata information about the
18409 * segment
18410 *
18411 * @private
18412 * @param {Object} segmentInfo
18413 * the object returned by loadSegment
18414 * @method addSegmentMetadataCue_
18415 */
18416 ;
18417
18418 _proto.addSegmentMetadataCue_ = function addSegmentMetadataCue_(segmentInfo) {
18419 if (!this.segmentMetadataTrack_) {
18420 return;
18421 }
18422
18423 var segment = segmentInfo.segment;
18424 var start = segment.start;
18425 var end = segment.end; // Do not try adding the cue if the start and end times are invalid.
18426
18427 if (!finite(start) || !finite(end)) {
18428 return;
18429 }
18430
18431 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
18432 var Cue = window$1.WebKitDataCue || window$1.VTTCue;
18433 var value = {
18434 custom: segment.custom,
18435 dateTimeObject: segment.dateTimeObject,
18436 dateTimeString: segment.dateTimeString,
18437 bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
18438 resolution: segmentInfo.playlist.attributes.RESOLUTION,
18439 codecs: segmentInfo.playlist.attributes.CODECS,
18440 byteLength: segmentInfo.byteLength,
18441 uri: segmentInfo.uri,
18442 timeline: segmentInfo.timeline,
18443 playlist: segmentInfo.playlist.id,
18444 start: start,
18445 end: end
18446 };
18447 var data = JSON.stringify(value);
18448 var cue = new Cue(start, end, data); // Attach the metadata to the value property of the cue to keep consistency between
18449 // the differences of WebKitDataCue in safari and VTTCue in other browsers
18450
18451 cue.value = value;
18452 this.segmentMetadataTrack_.addCue(cue);
18453 };
18454
18455 return SegmentLoader;
18456}(videojs.EventTarget);
18457
// Shared no-op function; intentionally does nothing.
function noop() {}
18459
var toTitleCase = function toTitleCase(string) {
  // Non-string inputs pass through untouched.
  if (typeof string !== 'string') {
    return string;
  }

  // `/./` only matches the first character, so only it gets upper-cased.
  return string.replace(/./, function (firstChar) {
    return firstChar.toUpperCase();
  });
};
18469
// The media source buffer types ('video' and 'audio').
var bufferTypes = ['video', 'audio'];
18471
var _updating = function updating(type, sourceUpdater) {
  // A buffer type is busy when the native source buffer reports an update in
  // flight, or when a queued async action for this type hasn't called back.
  var buffer = sourceUpdater[type + "Buffer"];
  return (buffer && buffer.updating) || sourceUpdater.queuePending[type];
};
18476
var nextQueueIndexOfType = function nextQueueIndexOfType(type, queue) {
  var index = 0;

  while (index < queue.length) {
    var entryType = queue[index].type;

    if (entryType === 'mediaSource') {
      // A media source entry uses multiple source buffers, so block processing
      // of this type to allow it to go through first.
      return null;
    }

    if (entryType === type) {
      return index;
    }

    index++;
  }

  // No entry of this type exists in the queue.
  return null;
};
18494
// Process the next eligible entry in the source updater's operation queue for
// the given type ('audio', 'video', or 'mediaSource'). May recurse when an
// entry completes synchronously.
var shiftQueue = function shiftQueue(type, sourceUpdater) {
  if (sourceUpdater.queue.length === 0) {
    return;
  }

  var queueIndex = 0;
  var queueEntry = sourceUpdater.queue[queueIndex];

  if (queueEntry.type === 'mediaSource') {
    if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
      sourceUpdater.queue.shift();
      queueEntry.action(sourceUpdater);

      if (queueEntry.doneFn) {
        queueEntry.doneFn();
      } // Only specific source buffer actions must wait for async updateend events. Media
      // Source actions process synchronously. Therefore, both audio and video source
      // buffers are now clear to process the next queue entries.


      shiftQueue('audio', sourceUpdater);
      shiftQueue('video', sourceUpdater);
    } // Media Source actions require both source buffers, so if the media source action
    // couldn't process yet (because one or both source buffers are busy), block other
    // queue actions until both are available and the media source action can process.


    return;
  }

  if (type === 'mediaSource') {
    // If the queue was shifted by a media source action (this happens when pushing a
    // media source action onto the queue), then it wasn't from an updateend event from an
    // audio or video source buffer, so there's no change from previous state, and no
    // processing should be done.
    return;
  } // Media source queue entries don't need to consider whether the source updater is
  // started (i.e., source buffers are created) as they don't need the source buffers, but
  // source buffer queue entries do.


  if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || _updating(type, sourceUpdater)) {
    return;
  }

  if (queueEntry.type !== type) {
    // find the next entry for this buffer type further down the queue
    queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);

    if (queueIndex === null) {
      // Either there's no queue entry that uses this source buffer type in the queue, or
      // there's a media source queue entry before the next entry of this type, in which
      // case wait for that action to process first.
      return;
    }

    queueEntry = sourceUpdater.queue[queueIndex];
  }

  sourceUpdater.queue.splice(queueIndex, 1); // Keep a record that this source buffer type is in use.
  //
  // The queue pending operation must be set before the action is performed in the event
  // that the action results in a synchronous event that is acted upon. For instance, if
  // an exception is thrown that can be handled, it's possible that new actions will be
  // appended to an empty queue and immediately executed, but would not have the correct
  // pending information if this property was set after the action was performed.

  sourceUpdater.queuePending[type] = queueEntry;
  queueEntry.action(type, sourceUpdater);

  if (!queueEntry.doneFn) {
    // synchronous operation, process next entry
    sourceUpdater.queuePending[type] = null;
    shiftQueue(type, sourceUpdater);
    return;
  }
};
18571
var cleanupBuffer = function cleanupBuffer(type, sourceUpdater) {
  // Detach our listeners from the source buffer of the given type and drop
  // every reference the source updater holds to it. Safe to call when the
  // buffer was never created.
  var bufferKey = type + "Buffer";
  var sourceBuffer = sourceUpdater[bufferKey];
  var titleType = toTitleCase(type);

  if (!sourceBuffer) {
    return;
  }

  sourceBuffer.removeEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
  sourceBuffer.removeEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
  sourceUpdater.codecs[type] = null;
  sourceUpdater[bufferKey] = null;
};
18585
var inSourceBuffers = function inSourceBuffers(mediaSource, sourceBuffer) {
  // Guard helper: a source buffer operation is only valid while both
  // references exist and the buffer is still attached to this media source's
  // live sourceBuffers list (it may have been removed out from under us).
  return mediaSource && sourceBuffer &&
    Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
};
18589
// Factories for the operations that can be placed on the source updater's
// queue. Each factory captures its arguments and returns the function that
// performs the work once the queue reaches it.
var actions = {
  // Append bytes to the type's source buffer. Append failures (notably
  // QUOTA_EXCEEDED_ERR) are reported through onError since no updateend
  // will fire for them.
  appendBuffer: function appendBuffer(bytes, segmentInfo, onError) {
    return function (type, sourceUpdater) {
      var buffer = sourceUpdater[type + "Buffer"];

      // bail out when the media source / source buffer is gone, or the
      // buffer is no longer attached to the media source
      if (!inSourceBuffers(sourceUpdater.mediaSource, buffer)) {
        return;
      }

      sourceUpdater.logger_("Appending segment " + segmentInfo.mediaIndex + "'s " + bytes.length + " bytes to " + type + "Buffer");

      try {
        buffer.appendBuffer(bytes);
      } catch (e) {
        sourceUpdater.logger_("Error with code " + e.code + " " + (e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') + ("when appending segment " + segmentInfo.mediaIndex + " to " + type + "Buffer"));
        sourceUpdater.queuePending[type] = null;
        onError(e);
      }
    };
  },

  // Remove the [start, end) range from the type's source buffer.
  remove: function remove(start, end) {
    return function (type, sourceUpdater) {
      var buffer = sourceUpdater[type + "Buffer"];

      // bail out when the media source / source buffer is gone, or the
      // buffer is no longer attached to the media source
      if (!inSourceBuffers(sourceUpdater.mediaSource, buffer)) {
        return;
      }

      sourceUpdater.logger_("Removing " + start + " to " + end + " from " + type + "Buffer");

      try {
        buffer.remove(start, end);
      } catch (e) {
        sourceUpdater.logger_("Remove " + start + " to " + end + " from " + type + "Buffer failed");
      }
    };
  },

  // Set the timestampOffset on the type's source buffer.
  timestampOffset: function timestampOffset(offset) {
    return function (type, sourceUpdater) {
      var buffer = sourceUpdater[type + "Buffer"];

      // bail out when the media source / source buffer is gone, or the
      // buffer is no longer attached to the media source
      if (!inSourceBuffers(sourceUpdater.mediaSource, buffer)) {
        return;
      }

      sourceUpdater.logger_("Setting " + type + "timestampOffset to " + offset);
      buffer.timestampOffset = offset;
    };
  },

  // Run an arbitrary callback when it is this entry's turn in the queue.
  callback: function callback(_callback) {
    return function (type, sourceUpdater) {
      _callback();
    };
  },

  // Signal end of stream on the media source (only valid while open).
  endOfStream: function endOfStream(error) {
    return function (sourceUpdater) {
      if (sourceUpdater.mediaSource.readyState !== 'open') {
        return;
      }

      sourceUpdater.logger_("Calling mediaSource endOfStream(" + (error || '') + ")");

      try {
        sourceUpdater.mediaSource.endOfStream(error);
      } catch (e) {
        videojs.log.warn('Failed to call media source endOfStream', e);
      }
    };
  },

  // Set the duration on the media source.
  duration: function duration(_duration) {
    return function (sourceUpdater) {
      sourceUpdater.logger_("Setting mediaSource duration to " + _duration);

      try {
        sourceUpdater.mediaSource.duration = _duration;
      } catch (e) {
        videojs.log.warn('Failed to set media source duration', e);
      }
    };
  },

  // Abort any in-flight append/remove on the type's source buffer.
  abort: function abort() {
    return function (type, sourceUpdater) {
      if (sourceUpdater.mediaSource.readyState !== 'open') {
        return;
      }

      var buffer = sourceUpdater[type + "Buffer"];

      // bail out when the media source / source buffer is gone, or the
      // buffer is no longer attached to the media source
      if (!inSourceBuffers(sourceUpdater.mediaSource, buffer)) {
        return;
      }

      sourceUpdater.logger_("calling abort on " + type + "Buffer");

      try {
        buffer.abort();
      } catch (e) {
        videojs.log.warn("Failed to abort on " + type + "Buffer", e);
      }
    };
  },

  // Create a source buffer for the codec, wire up our listeners, and record
  // it on the source updater.
  addSourceBuffer: function addSourceBuffer(type, codec) {
    return function (sourceUpdater) {
      var titleType = toTitleCase(type);
      var mime = getMimeForCodec(codec);
      sourceUpdater.logger_("Adding " + type + "Buffer with codec " + codec + " to mediaSource");
      var buffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
      buffer.addEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
      buffer.addEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
      sourceUpdater.codecs[type] = codec;
      sourceUpdater[type + "Buffer"] = buffer;
    };
  },

  // Detach and remove the type's source buffer from the media source.
  removeSourceBuffer: function removeSourceBuffer(type) {
    return function (sourceUpdater) {
      var buffer = sourceUpdater[type + "Buffer"];
      cleanupBuffer(type, sourceUpdater);

      // bail out when the media source / source buffer is gone, or the
      // buffer is no longer attached to the media source
      if (!inSourceBuffers(sourceUpdater.mediaSource, buffer)) {
        return;
      }

      sourceUpdater.logger_("Removing " + type + "Buffer with codec " + sourceUpdater.codecs[type] + " from mediaSource");

      try {
        sourceUpdater.mediaSource.removeSourceBuffer(buffer);
      } catch (e) {
        videojs.log.warn("Failed to removeSourceBuffer " + type + "Buffer", e);
      }
    };
  },

  // Switch the type's source buffer to a new codec via changeType.
  changeType: function changeType(codec) {
    return function (type, sourceUpdater) {
      var buffer = sourceUpdater[type + "Buffer"];
      var mime = getMimeForCodec(codec);

      // bail out when the media source / source buffer is gone, or the
      // buffer is no longer attached to the media source
      if (!inSourceBuffers(sourceUpdater.mediaSource, buffer)) {
        return;
      }

      // do not update codec if we don't need to.
      if (sourceUpdater.codecs[type] === codec) {
        return;
      }

      sourceUpdater.logger_("changing " + type + "Buffer codec from " + sourceUpdater.codecs[type] + " to " + codec);
      buffer.changeType(mime);
      sourceUpdater.codecs[type] = codec;
    };
  }
};
18747
var pushQueue = function pushQueue(entry) {
  // Queue a new action ({type, sourceUpdater, action, doneFn, name}) for the
  // given source buffer type, then immediately attempt to process the queue
  // in case nothing is currently updating.
  entry.sourceUpdater.queue.push({
    type: entry.type,
    action: entry.action,
    doneFn: entry.doneFn,
    name: entry.name
  });
  shiftQueue(entry.type, entry.sourceUpdater);
};
18762
var onUpdateend = function onUpdateend(type, sourceUpdater) {
  return function (e) {
    // Although there should, in theory, be a pending action for any updateend
    // received, some operations trigger updateend events that the w3c spec
    // does not define — e.g. setting the media source duration may fire
    // updateend on source buffers. When an updateend arrives with no
    // corresponding pending action for this source buffer type, simply move
    // on to the next queued action.
    var pending = sourceUpdater.queuePending[type];

    if (pending) {
      var doneFn = pending.doneFn;
      sourceUpdater.queuePending[type] = null;

      if (doneFn) {
        // if there's an error, report it
        doneFn(sourceUpdater[type + "Error_"]);
      }
    }

    shiftQueue(type, sourceUpdater);
  };
};
18784/**
18785 * A queue of callbacks to be serialized and applied when a
18786 * MediaSource and its associated SourceBuffers are not in the
18787 * updating state. It is used by the segment loader to update the
18788 * underlying SourceBuffers when new data is loaded, for instance.
18789 *
18790 * @class SourceUpdater
18791 * @param {MediaSource} mediaSource the MediaSource to create the SourceBuffer from
18792 * @param {string} mimeType the desired MIME type of the underlying SourceBuffer
18793 */
18794
18795
var SourceUpdater = /*#__PURE__*/function (_videojs$EventTarget) {
  _inheritsLoose(SourceUpdater, _videojs$EventTarget);

  /**
   * @param {MediaSource} mediaSource
   *        The MediaSource whose SourceBuffers this updater serializes
   *        operations against.
   */
  function SourceUpdater(mediaSource) {
    var _this;

    _this = _videojs$EventTarget.call(this) || this;
    _this.mediaSource = mediaSource; // media source actions can only run once the media source is open

    _this.sourceopenListener_ = function () {
      return shiftQueue('mediaSource', _assertThisInitialized(_this));
    };

    _this.mediaSource.addEventListener('sourceopen', _this.sourceopenListener_);

    _this.logger_ = logger('SourceUpdater'); // initial timestamp offset is 0

    _this.audioTimestampOffset_ = 0;
    _this.videoTimestampOffset_ = 0;
    _this.queue = [];
    _this.queuePending = {
      audio: null,
      video: null
    };
    _this.delayedAudioAppendQueue_ = [];
    _this.videoAppendQueued_ = false;
    _this.codecs = {};
    _this.onVideoUpdateEnd_ = onUpdateend('video', _assertThisInitialized(_this));
    _this.onAudioUpdateEnd_ = onUpdateend('audio', _assertThisInitialized(_this));

    _this.onVideoError_ = function (e) {
      // used for debugging
      _this.videoError_ = e;
    };

    _this.onAudioError_ = function (e) {
      // used for debugging
      _this.audioError_ = e;
    };

    _this.createdSourceBuffers_ = false;
    _this.initializedEme_ = false;
    _this.triggeredReady_ = false;
    return _this;
  }

  var _proto = SourceUpdater.prototype;

  /**
   * Mark EME initialization as complete and trigger 'ready' if the source
   * buffers have also been created.
   */
  _proto.initializedEme = function initializedEme() {
    this.initializedEme_ = true;
    this.triggerReady();
  };

  /**
   * Whether the source buffers have been created.
   *
   * @return {boolean}
   *         true once createSourceBuffers has run
   */
  _proto.hasCreatedSourceBuffers = function hasCreatedSourceBuffers() {
    // if false, likely waiting on one of the segment loaders to get enough data to create
    // source buffers
    return this.createdSourceBuffers_;
  };

  /**
   * Whether EME initialization has completed.
   *
   * @return {boolean}
   *         true once initializedEme has run
   */
  _proto.hasInitializedAnyEme = function hasInitializedAnyEme() {
    return this.initializedEme_;
  };

  /**
   * Whether the source updater is ready for appends (source buffers created
   * and EME initialized).
   *
   * @return {boolean}
   *         true when ready
   */
  _proto.ready = function ready() {
    return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
  };

  /**
   * Create source buffers for the given codecs. Only runs once; subsequent
   * calls are no-ops.
   *
   * @param {Object} codecs
   *        Object with `audio` and/or `video` codec strings.
   */
  _proto.createSourceBuffers = function createSourceBuffers(codecs) {
    if (this.hasCreatedSourceBuffers()) {
      // already created them before
      return;
    } // the intial addOrChangeSourceBuffers will always be
    // two add buffers.


    this.addOrChangeSourceBuffers(codecs);
    this.createdSourceBuffers_ = true;
    this.trigger('createdsourcebuffers');
    this.triggerReady();
  };

  /**
   * Trigger the 'ready' event, at most once, when both source buffer
   * creation and EME initialization have completed.
   */
  _proto.triggerReady = function triggerReady() {
    // only allow ready to be triggered once, this prevents the case
    // where:
    // 1. we trigger createdsourcebuffers
    // 2. ie 11 synchronously initializates eme
    // 3. the synchronous initialization causes us to trigger ready
    // 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
    if (this.ready() && !this.triggeredReady_) {
      this.triggeredReady_ = true;
      this.trigger('ready');
    }
  };

  /**
   * Add a type of source buffer to the media source.
   *
   * @param {string} type
   *        The type of source buffer to add.
   *
   * @param {string} codec
   *        The codec to add the source buffer with.
   */
  _proto.addSourceBuffer = function addSourceBuffer(type, codec) {
    pushQueue({
      type: 'mediaSource',
      sourceUpdater: this,
      action: actions.addSourceBuffer(type, codec),
      name: 'addSourceBuffer'
    });
  };

  /**
   * call abort on a source buffer.
   *
   * @param {string} type
   *        The type of source buffer to call abort on.
   */
  _proto.abort = function abort(type) {
    pushQueue({
      type: type,
      sourceUpdater: this,
      action: actions.abort(type),
      name: 'abort'
    });
  };

  /**
   * Call removeSourceBuffer and remove a specific type
   * of source buffer on the mediaSource.
   *
   * @param {string} type
   *        The type of source buffer to remove.
   */
  _proto.removeSourceBuffer = function removeSourceBuffer(type) {
    if (!this.canRemoveSourceBuffer()) {
      videojs.log.error('removeSourceBuffer is not supported!');
      return;
    }

    pushQueue({
      type: 'mediaSource',
      sourceUpdater: this,
      action: actions.removeSourceBuffer(type),
      name: 'removeSourceBuffer'
    });
  };

  /**
   * Whether or not the removeSourceBuffer function is supported
   * on the mediaSource.
   *
   * @return {boolean}
   *          if removeSourceBuffer can be called.
   */
  _proto.canRemoveSourceBuffer = function canRemoveSourceBuffer() {
    // IE reports that it supports removeSourceBuffer, but often throws
    // errors when attempting to use the function. So we report that it
    // does not support removeSourceBuffer. As of Firefox 83 removeSourceBuffer
    // throws errors, so we report that it does not support this as well.
    return !videojs.browser.IE_VERSION && !videojs.browser.IS_FIREFOX && window$1.MediaSource && window$1.MediaSource.prototype && typeof window$1.MediaSource.prototype.removeSourceBuffer === 'function';
  };

  /**
   * Whether or not the changeType function is supported
   * on our SourceBuffers.
   *
   * @return {boolean}
   *         if changeType can be called.
   */
  SourceUpdater.canChangeType = function canChangeType() {
    return window$1.SourceBuffer && window$1.SourceBuffer.prototype && typeof window$1.SourceBuffer.prototype.changeType === 'function';
  };

  /**
   * Instance-level convenience wrapper around the static canChangeType.
   *
   * @return {boolean}
   *         if changeType can be called.
   */
  _proto.canChangeType = function canChangeType() {
    return this.constructor.canChangeType();
  };

  /**
   * Call the changeType function on a source buffer, given the code and type.
   *
   * @param {string} type
   *        The type of source buffer to call changeType on.
   *
   * @param {string} codec
   *        The codec string to change type with on the source buffer.
   */
  _proto.changeType = function changeType(type, codec) {
    if (!this.canChangeType()) {
      videojs.log.error('changeType is not supported!');
      return;
    }

    pushQueue({
      type: type,
      sourceUpdater: this,
      action: actions.changeType(codec),
      name: 'changeType'
    });
  };

  /**
   * Add source buffers with a codec or, if they are already created,
   * call changeType on source buffers using changeType.
   *
   * @param {Object} codecs
   *        Codecs to switch to
   */
  _proto.addOrChangeSourceBuffers = function addOrChangeSourceBuffers(codecs) {
    var _this2 = this;

    if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
      throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
    }

    Object.keys(codecs).forEach(function (type) {
      var codec = codecs[type];

      if (!_this2.hasCreatedSourceBuffers()) {
        return _this2.addSourceBuffer(type, codec);
      }

      if (_this2.canChangeType()) {
        _this2.changeType(type, codec);
      }
    });
  };

  /**
   * Queue an update to append an ArrayBuffer.
   *
   * @param {MediaObject} object containing audioBytes and/or videoBytes
   * @param {Function} done the function to call when done
   * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
   */
  _proto.appendBuffer = function appendBuffer(options, doneFn) {
    var _this3 = this;

    var segmentInfo = options.segmentInfo,
        type = options.type,
        bytes = options.bytes;
    this.processedAppend_ = true;

    // audio appends are delayed until the first video append so that the
    // video timestamp offset (set on the first video append) applies first
    if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
      this.delayedAudioAppendQueue_.push([options, doneFn]);
      this.logger_("delayed audio append of " + bytes.length + " until video append");
      return;
    } // In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
    // not be fired. This means that the queue will be blocked until the next action
    // taken by the segment-loader. Provide a mechanism for segment-loader to handle
    // these errors by calling the doneFn with the specific error.


    var onError = doneFn;
    pushQueue({
      type: type,
      sourceUpdater: this,
      action: actions.appendBuffer(bytes, segmentInfo || {
        mediaIndex: -1
      }, onError),
      doneFn: doneFn,
      name: 'appendBuffer'
    });

    if (type === 'video') {
      this.videoAppendQueued_ = true;

      if (!this.delayedAudioAppendQueue_.length) {
        return;
      }

      var queue = this.delayedAudioAppendQueue_.slice();
      this.logger_("queuing delayed audio " + queue.length + " appendBuffers");
      this.delayedAudioAppendQueue_.length = 0;
      queue.forEach(function (que) {
        _this3.appendBuffer.apply(_this3, que);
      });
    }
  };

  /**
   * Get the audio buffer's buffered timerange.
   *
   * @return {TimeRange}
   *         The audio buffer's buffered time range
   */
  _proto.audioBuffered = function audioBuffered() {
    // no media source/source buffer or it isn't in the media sources
    // source buffer list
    if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
      return videojs.createTimeRange();
    }

    return this.audioBuffer.buffered ? this.audioBuffer.buffered : videojs.createTimeRange();
  };

  /**
   * Get the video buffer's buffered timerange.
   *
   * @return {TimeRange}
   *         The video buffer's buffered time range
   */
  _proto.videoBuffered = function videoBuffered() {
    // no media source/source buffer or it isn't in the media sources
    // source buffer list
    if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
      return videojs.createTimeRange();
    }

    return this.videoBuffer.buffered ? this.videoBuffer.buffered : videojs.createTimeRange();
  };

  /**
   * Get a combined video/audio buffer's buffered timerange.
   *
   * @return {TimeRange}
   *         the combined time range
   */
  _proto.buffered = function buffered() {
    var video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
    var audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;

    if (audio && !video) {
      return this.audioBuffered();
    }

    if (video && !audio) {
      return this.videoBuffered();
    }

    return bufferIntersection(this.audioBuffered(), this.videoBuffered());
  };

  /**
   * Add a callback to the queue that will set duration on the mediaSource.
   *
   * @param {number} duration
   *        The duration to set
   *
   * @param {Function} [doneFn]
   *        function to run after duration has been set.
   */
  _proto.setDuration = function setDuration(duration, doneFn) {
    if (doneFn === void 0) {
      doneFn = noop;
    }

    // In order to set the duration on the media source, it's necessary to wait for all
    // source buffers to no longer be updating. "If the updating attribute equals true on
    // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
    // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
    pushQueue({
      type: 'mediaSource',
      sourceUpdater: this,
      action: actions.duration(duration),
      name: 'duration',
      doneFn: doneFn
    });
  };

  /**
   * Add a mediaSource endOfStream call to the queue
   *
   * @param {Error} [error]
   *        Call endOfStream with an error
   *
   * @param {Function} [doneFn]
   *        A function that should be called when the
   *        endOfStream call has finished.
   */
  _proto.endOfStream = function endOfStream(error, doneFn) {
    if (error === void 0) {
      error = null;
    }

    if (doneFn === void 0) {
      doneFn = noop;
    }

    if (typeof error !== 'string') {
      error = undefined;
    } // In order to set the duration on the media source, it's necessary to wait for all
    // source buffers to no longer be updating. "If the updating attribute equals true on
    // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
    // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).


    pushQueue({
      type: 'mediaSource',
      sourceUpdater: this,
      action: actions.endOfStream(error),
      name: 'endOfStream',
      doneFn: doneFn
    });
  };

  /**
   * Queue an update to remove a time range from the buffer.
   *
   * @param {number} start where to start the removal
   * @param {number} end where to end the removal
   * @param {Function} [done=noop] optional callback to be executed when the remove
   * operation is complete
   * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
   */
  _proto.removeAudio = function removeAudio(start, end, done) {
    if (done === void 0) {
      done = noop;
    }

    if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
      done();
      return;
    }

    pushQueue({
      type: 'audio',
      sourceUpdater: this,
      action: actions.remove(start, end),
      doneFn: done,
      name: 'remove'
    });
  };

  /**
   * Queue an update to remove a time range from the buffer.
   *
   * @param {number} start where to start the removal
   * @param {number} end where to end the removal
   * @param {Function} [done=noop] optional callback to be executed when the remove
   * operation is complete
   * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
   */
  _proto.removeVideo = function removeVideo(start, end, done) {
    if (done === void 0) {
      done = noop;
    }

    if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
      done();
      return;
    }

    pushQueue({
      type: 'video',
      sourceUpdater: this,
      action: actions.remove(start, end),
      doneFn: done,
      name: 'remove'
    });
  };

  /**
   * Whether the underlying sourceBuffer is updating or not
   *
   * @return {boolean} the updating status of the SourceBuffer
   */
  _proto.updating = function updating() {
    // the audio/video source buffer is updating
    if (_updating('audio', this) || _updating('video', this)) {
      return true;
    }

    return false;
  };

  /**
   * Set/get the timestampoffset on the audio SourceBuffer
   *
   * @return {number} the timestamp offset
   */
  _proto.audioTimestampOffset = function audioTimestampOffset(offset) {
    if (typeof offset !== 'undefined' && this.audioBuffer && // no point in updating if it's the same
    this.audioTimestampOffset_ !== offset) {
      pushQueue({
        type: 'audio',
        sourceUpdater: this,
        action: actions.timestampOffset(offset),
        name: 'timestampOffset'
      });
      this.audioTimestampOffset_ = offset;
    }

    return this.audioTimestampOffset_;
  };

  /**
   * Set/get the timestampoffset on the video SourceBuffer
   *
   * @return {number} the timestamp offset
   */
  _proto.videoTimestampOffset = function videoTimestampOffset(offset) {
    // NOTE: previously this compared `this.videoTimestampOffset` (the method
    // itself) against `offset`, which is always true, so the same-value guard
    // never suppressed redundant timestampOffset actions for video. Compare
    // the stored value instead, mirroring audioTimestampOffset above.
    if (typeof offset !== 'undefined' && this.videoBuffer && // no point in updating if it's the same
    this.videoTimestampOffset_ !== offset) {
      pushQueue({
        type: 'video',
        sourceUpdater: this,
        action: actions.timestampOffset(offset),
        name: 'timestampOffset'
      });
      this.videoTimestampOffset_ = offset;
    }

    return this.videoTimestampOffset_;
  };

  /**
   * Add a function to the queue that will be called
   * when it is its turn to run in the audio queue.
   *
   * @param {Function} callback
   *        The callback to queue.
   */
  _proto.audioQueueCallback = function audioQueueCallback(callback) {
    if (!this.audioBuffer) {
      return;
    }

    pushQueue({
      type: 'audio',
      sourceUpdater: this,
      action: actions.callback(callback),
      name: 'callback'
    });
  };

  /**
   * Add a function to the queue that will be called
   * when it is its turn to run in the video queue.
   *
   * @param {Function} callback
   *        The callback to queue.
   */
  _proto.videoQueueCallback = function videoQueueCallback(callback) {
    if (!this.videoBuffer) {
      return;
    }

    pushQueue({
      type: 'video',
      sourceUpdater: this,
      action: actions.callback(callback),
      name: 'callback'
    });
  };

  /**
   * dispose of the source updater and the underlying sourceBuffer
   */
  _proto.dispose = function dispose() {
    var _this4 = this;

    this.trigger('dispose');
    bufferTypes.forEach(function (type) {
      _this4.abort(type);

      if (_this4.canRemoveSourceBuffer()) {
        _this4.removeSourceBuffer(type);
      } else {
        _this4[type + "QueueCallback"](function () {
          return cleanupBuffer(type, _this4);
        });
      }
    });
    this.videoAppendQueued_ = false;
    this.delayedAudioAppendQueue_.length = 0;

    if (this.sourceopenListener_) {
      this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
    }

    this.off();
  };

  return SourceUpdater;
}(videojs.EventTarget);
19401
var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
  // Decode a Uint8Array of UTF-8 bytes into a string: `escape` percent-encodes
  // each byte of the binary string so decodeURIComponent can reassemble
  // multi-byte UTF-8 sequences into the correct characters.
  var binary = String.fromCharCode.apply(null, uintArray);
  return decodeURIComponent(escape(binary));
};
19405
// Two WebVTT line terminators (LF LF) — the separator required between the
// WebVTT header and the rest of the file per the WebVTT file structure.
var VTT_LINE_TERMINATORS = new Uint8Array(Array.prototype.map.call('\n\n', function (char) {
  return char.charCodeAt(0);
}));
19409
// Error thrown when VTT cue parsing is attempted before vtt.js (window.WebVTT)
// has been loaded. Extends the native Error via Babel's _wrapNativeSuper so
// `instanceof NoVttJsError` works across the transpiled class boundary.
var NoVttJsError = /*#__PURE__*/function (_Error) {
  _inheritsLoose(NoVttJsError, _Error);

  function NoVttJsError() {
    return _Error.call(this, 'Trying to parse received VTT cues, but there is no WebVTT. Make sure vtt.js is loaded.') || this;
  }

  return NoVttJsError;
}( /*#__PURE__*/_wrapNativeSuper(Error));
19419/**
19420 * An object that manages segment loading and appending.
19421 *
19422 * @class VTTSegmentLoader
19423 * @param {Object} options required and optional options
19424 * @extends videojs.EventTarget
19425 */
19426
19427
19428var VTTSegmentLoader = /*#__PURE__*/function (_SegmentLoader) {
19429 _inheritsLoose(VTTSegmentLoader, _SegmentLoader);
19430
19431 function VTTSegmentLoader(settings, options) {
19432 var _this;
19433
19434 if (options === void 0) {
19435 options = {};
19436 }
19437
19438 _this = _SegmentLoader.call(this, settings, options) || this; // SegmentLoader requires a MediaSource be specified or it will throw an error;
19439 // however, VTTSegmentLoader has no need of a media source, so delete the reference
19440
19441 _this.mediaSource_ = null;
19442 _this.subtitlesTrack_ = null;
19443 _this.loaderType_ = 'subtitle';
19444 _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks;
19445 _this.loadVttJs = settings.loadVttJs; // The VTT segment will have its own time mappings. Saving VTT segment timing info in
19446 // the sync controller leads to improper behavior.
19447
19448 _this.shouldSaveSegmentTimingInfo_ = false;
19449 return _this;
19450 }
19451
19452 var _proto = VTTSegmentLoader.prototype;
19453
19454 _proto.createTransmuxer_ = function createTransmuxer_() {
19455 // don't need to transmux any subtitles
19456 return null;
19457 }
19458 /**
19459 * Indicates which time ranges are buffered
19460 *
19461 * @return {TimeRange}
19462 * TimeRange object representing the current buffered ranges
19463 */
19464 ;
19465
19466 _proto.buffered_ = function buffered_() {
19467 if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
19468 return videojs.createTimeRanges();
19469 }
19470
19471 var cues = this.subtitlesTrack_.cues;
19472 var start = cues[0].startTime;
19473 var end = cues[cues.length - 1].startTime;
19474 return videojs.createTimeRanges([[start, end]]);
19475 }
19476 /**
19477 * Gets and sets init segment for the provided map
19478 *
19479 * @param {Object} map
19480 * The map object representing the init segment to get or set
19481 * @param {boolean=} set
19482 * If true, the init segment for the provided map should be saved
19483 * @return {Object}
19484 * map object for desired init segment
19485 */
19486 ;
19487
19488 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
19489 if (set === void 0) {
19490 set = false;
19491 }
19492
19493 if (!map) {
19494 return null;
19495 }
19496
19497 var id = initSegmentId(map);
19498 var storedMap = this.initSegments_[id];
19499
19500 if (set && !storedMap && map.bytes) {
19501 // append WebVTT line terminators to the media initialization segment if it exists
19502 // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
19503 // requires two or more WebVTT line terminators between the WebVTT header and the
19504 // rest of the file
19505 var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
19506 var combinedSegment = new Uint8Array(combinedByteLength);
19507 combinedSegment.set(map.bytes);
19508 combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
19509 this.initSegments_[id] = storedMap = {
19510 resolvedUri: map.resolvedUri,
19511 byterange: map.byterange,
19512 bytes: combinedSegment
19513 };
19514 }
19515
19516 return storedMap || map;
19517 }
19518 /**
19519 * Returns true if all configuration required for loading is present, otherwise false.
19520 *
19521 * @return {boolean} True if the all configuration is ready for loading
19522 * @private
19523 */
19524 ;
19525
19526 _proto.couldBeginLoading_ = function couldBeginLoading_() {
19527 return this.playlist_ && this.subtitlesTrack_ && !this.paused();
19528 }
19529 /**
19530 * Once all the starting parameters have been specified, begin
19531 * operation. This method should only be invoked from the INIT
19532 * state.
19533 *
19534 * @private
19535 */
19536 ;
19537
  _proto.init_ = function init_() {
    // Transition out of INIT: mark the loader ready, clear any previously
    // buffered/parsed state, and start the buffer monitoring loop.
    this.state = 'READY';
    this.resetEverything();
    return this.monitorBuffer_();
  }
19543 /**
19544 * Set a subtitle track on the segment loader to add subtitles to
19545 *
19546 * @param {TextTrack=} track
19547 * The text track to add loaded subtitles to
19548 * @return {TextTrack}
19549 * Returns the subtitles track
19550 */
19551 ;
19552
19553 _proto.track = function track(_track) {
19554 if (typeof _track === 'undefined') {
19555 return this.subtitlesTrack_;
19556 }
19557
19558 this.subtitlesTrack_ = _track; // if we were unpaused but waiting for a sourceUpdater, start
19559 // buffering now
19560
19561 if (this.state === 'INIT' && this.couldBeginLoading_()) {
19562 this.init_();
19563 }
19564
19565 return this.subtitlesTrack_;
19566 }
19567 /**
19568 * Remove any data in the source buffer between start and end times
19569 *
19570 * @param {number} start - the start time of the region to remove from the buffer
19571 * @param {number} end - the end time of the region to remove from the buffer
19572 */
19573 ;
19574
  _proto.remove = function remove(start, end) {
    // Subtitles live in the text track rather than a SourceBuffer, so
    // "removing buffered data" means clearing cues in [start, end].
    removeCuesFromTrack(start, end, this.subtitlesTrack_);
  }
19578 /**
19579 * fill the buffer with segements unless the sourceBuffers are
19580 * currently updating
19581 *
19582 * Note: this function should only ever be called by monitorBuffer_
19583 * and never directly
19584 *
19585 * @private
19586 */
19587 ;
19588
19589 _proto.fillBuffer_ = function fillBuffer_() {
19590 var _this2 = this;
19591
19592 // see if we need to begin loading immediately
19593 var segmentInfo = this.chooseNextRequest_();
19594
19595 if (!segmentInfo) {
19596 return;
19597 }
19598
19599 if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
19600 // We don't have the timestamp offset that we need to sync subtitles.
19601 // Rerun on a timestamp offset or user interaction.
19602 var checkTimestampOffset = function checkTimestampOffset() {
19603 _this2.state = 'READY';
19604
19605 if (!_this2.paused()) {
19606 // if not paused, queue a buffer check as soon as possible
19607 _this2.monitorBuffer_();
19608 }
19609 };
19610
19611 this.syncController_.one('timestampoffset', checkTimestampOffset);
19612 this.state = 'WAITING_ON_TIMELINE';
19613 return;
19614 }
19615
19616 this.loadSegment_(segmentInfo);
19617 } // never set a timestamp offset for vtt segments.
19618 ;
19619
  _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_() {
    // Cue times are remapped in updateTimeMapping_ instead of through a source
    // buffer timestamp offset, so no offset is ever used for vtt segments.
    return null;
  };
19623
19624 _proto.chooseNextRequest_ = function chooseNextRequest_() {
19625 return this.skipEmptySegments_(_SegmentLoader.prototype.chooseNextRequest_.call(this));
19626 }
19627 /**
19628 * Prevents the segment loader from requesting segments we know contain no subtitles
19629 * by walking forward until we find the next segment that we don't know whether it is
19630 * empty or not.
19631 *
19632 * @param {Object} segmentInfo
19633 * a segment info object that describes the current segment
19634 * @return {Object}
19635 * a segment info object that describes the current segment
19636 */
19637 ;
19638
19639 _proto.skipEmptySegments_ = function skipEmptySegments_(segmentInfo) {
19640 while (segmentInfo && segmentInfo.segment.empty) {
19641 // stop at the last possible segmentInfo
19642 if (segmentInfo.mediaIndex + 1 >= segmentInfo.playlist.segments.length) {
19643 segmentInfo = null;
19644 break;
19645 }
19646
19647 segmentInfo = this.generateSegmentInfo_({
19648 playlist: segmentInfo.playlist,
19649 mediaIndex: segmentInfo.mediaIndex + 1,
19650 startOfSegment: segmentInfo.startOfSegment + segmentInfo.duration,
19651 isSyncRequest: segmentInfo.isSyncRequest
19652 });
19653 }
19654
19655 return segmentInfo;
19656 };
19657
  _proto.stopForError = function stopForError(error) {
    // Record the error, return to a stable READY state, stop requesting
    // segments, and notify listeners.
    this.error(error);
    this.state = 'READY';
    this.pause();
    this.trigger('error');
  }
19664 /**
19665 * append a decrypted segement to the SourceBuffer through a SourceUpdater
19666 *
19667 * @private
19668 */
19669 ;
19670
  _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
    var _this3 = this;

    // the track was unset while the request was in flight; nothing to append to
    if (!this.subtitlesTrack_) {
      this.state = 'READY';
      return;
    }

    this.saveTransferStats_(simpleSegment.stats); // the request was aborted

    if (!this.pendingSegment_) {
      this.state = 'READY';
      this.mediaRequestsAborted += 1;
      return;
    }

    if (error) {
      if (error.code === REQUEST_ERRORS.TIMEOUT) {
        this.handleTimeout_();
      }

      if (error.code === REQUEST_ERRORS.ABORTED) {
        this.mediaRequestsAborted += 1;
      } else {
        this.mediaRequestsErrored += 1;
      }

      this.stopForError(error);
      return;
    }

    var segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to
    // maintain functionality between segment loaders

    this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats); // if this request included a segment key, save that data in the cache

    if (simpleSegment.key) {
      this.segmentKey(simpleSegment.key, true);
    }

    this.state = 'APPENDING'; // used for tests

    this.trigger('appending');
    var segment = segmentInfo.segment;

    // if this request included an init segment, save its bytes on the map
    if (segment.map) {
      segment.map.bytes = simpleSegment.map.bytes;
    }

    segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded, otherwise, load it and wait till it finished loading

    if (typeof window$1.WebVTT !== 'function' && typeof this.loadVttJs === 'function') {
      this.state = 'WAITING_ON_VTTJS'; // should be fine to call multiple times
      // script will be loaded once but multiple listeners will be added to the queue, which is expected.

      // re-enter this function once vtt.js is available; stop with an error if
      // the script fails to load
      this.loadVttJs().then(function () {
        return _this3.segmentRequestFinished_(error, simpleSegment, result);
      }, function () {
        return _this3.stopForError({
          message: 'Error loading vtt.js'
        });
      });
      return;
    }

    segment.requested = true;

    try {
      this.parseVTTCues_(segmentInfo);
    } catch (e) {
      // parse failures (e.g. malformed WebVTT) surface as loader errors
      this.stopForError({
        message: e.message
      });
      return;
    }

    this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);

    // derive segment timing from the parsed cues when available, otherwise
    // fall back to the playlist's estimated timing
    if (segmentInfo.cues.length) {
      segmentInfo.timingInfo = {
        start: segmentInfo.cues[0].startTime,
        end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
      };
    } else {
      segmentInfo.timingInfo = {
        start: segmentInfo.startOfSegment,
        end: segmentInfo.startOfSegment + segmentInfo.duration
      };
    }

    if (segmentInfo.isSyncRequest) {
      this.trigger('syncinfoupdate');
      this.pendingSegment_ = null;
      this.state = 'READY';
      return;
    }

    segmentInfo.byteLength = segmentInfo.bytes.byteLength;
    this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to
    // the subtitle track

    segmentInfo.cues.forEach(function (cue) {
      _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window$1.VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
    }); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows
    // cues to have identical time-intervals, but if the text is also identical
    // we can safely assume it is a duplicate that can be removed (ex. when a cue
    // "overlaps" VTT segments)

    removeDuplicateCuesFromTrack(this.subtitlesTrack_);
    this.handleAppendsDone_();
  };
19782
  _proto.handleData_ = function handleData_() {// noop: the VTT loader appends cues directly to the text track, so there is
    // no video/audio caption data to handle here.
  };
19786
  _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_() {// noop: timingInfo is assigned directly from parsed cues in
    // segmentRequestFinished_, so there is no end time to update here.
  }
19789 /**
19790 * Uses the WebVTT parser to parse the segment response
19791 *
19792 * @throws NoVttJsError
19793 *
19794 * @param {Object} segmentInfo
19795 * a segment info object that describes the current segment
19796 * @private
19797 */
19798 ;
19799
19800 _proto.parseVTTCues_ = function parseVTTCues_(segmentInfo) {
19801 var decoder;
19802 var decodeBytesToString = false;
19803
19804 if (typeof window$1.WebVTT !== 'function') {
19805 // caller is responsible for exception handling.
19806 throw new NoVttJsError();
19807 }
19808
19809 if (typeof window$1.TextDecoder === 'function') {
19810 decoder = new window$1.TextDecoder('utf8');
19811 } else {
19812 decoder = window$1.WebVTT.StringDecoder();
19813 decodeBytesToString = true;
19814 }
19815
19816 var parser = new window$1.WebVTT.Parser(window$1, window$1.vttjs, decoder);
19817 segmentInfo.cues = [];
19818 segmentInfo.timestampmap = {
19819 MPEGTS: 0,
19820 LOCAL: 0
19821 };
19822 parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
19823
19824 parser.ontimestampmap = function (map) {
19825 segmentInfo.timestampmap = map;
19826 };
19827
19828 parser.onparsingerror = function (error) {
19829 videojs.log.warn('Error encountered when parsing cues: ' + error.message);
19830 };
19831
19832 if (segmentInfo.segment.map) {
19833 var mapData = segmentInfo.segment.map.bytes;
19834
19835 if (decodeBytesToString) {
19836 mapData = uint8ToUtf8(mapData);
19837 }
19838
19839 parser.parse(mapData);
19840 }
19841
19842 var segmentData = segmentInfo.bytes;
19843
19844 if (decodeBytesToString) {
19845 segmentData = uint8ToUtf8(segmentData);
19846 }
19847
19848 parser.parse(segmentData);
19849 parser.flush();
19850 }
19851 /**
19852 * Updates the start and end times of any cues parsed by the WebVTT parser using
19853 * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
19854 * from the SyncController
19855 *
19856 * @param {Object} segmentInfo
19857 * a segment info object that describes the current segment
19858 * @param {Object} mappingObj
19859 * object containing a mapping from TS to media time
19860 * @param {Object} playlist
19861 * the playlist object containing the segment
19862 * @private
19863 */
19864 ;
19865
19866 _proto.updateTimeMapping_ = function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
19867 var segment = segmentInfo.segment;
19868
19869 if (!mappingObj) {
19870 // If the sync controller does not have a mapping of TS to Media Time for the
19871 // timeline, then we don't have enough information to update the cue
19872 // start/end times
19873 return;
19874 }
19875
19876 if (!segmentInfo.cues.length) {
19877 // If there are no cues, we also do not have enough information to figure out
19878 // segment timing. Mark that the segment contains no cues so we don't re-request
19879 // an empty segment.
19880 segment.empty = true;
19881 return;
19882 }
19883
19884 var timestampmap = segmentInfo.timestampmap;
19885 var diff = timestampmap.MPEGTS / ONE_SECOND_IN_TS - timestampmap.LOCAL + mappingObj.mapping;
19886 segmentInfo.cues.forEach(function (cue) {
19887 // First convert cue time to TS time using the timestamp-map provided within the vtt
19888 cue.startTime += diff;
19889 cue.endTime += diff;
19890 });
19891
19892 if (!playlist.syncInfo) {
19893 var firstStart = segmentInfo.cues[0].startTime;
19894 var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
19895 playlist.syncInfo = {
19896 mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
19897 time: Math.min(firstStart, lastStart - segment.duration)
19898 };
19899 }
19900 };
19901
19902 return VTTSegmentLoader;
19903}(SegmentLoader);
19904
19905/**
19906 * @file ad-cue-tags.js
19907 */
19908/**
19909 * Searches for an ad cue that overlaps with the given mediaTime
19910 *
19911 * @param {Object} track
19912 * the track to find the cue for
19913 *
19914 * @param {number} mediaTime
19915 * the time to find the cue at
19916 *
19917 * @return {Object|null}
19918 * the found cue or null
19919 */
19920
/**
 * Searches for an ad cue that overlaps with the given mediaTime.
 *
 * @param {Object} track
 *        the track to find the cue for
 * @param {number} mediaTime
 *        the time to find the cue at
 * @return {Object|null}
 *         the found cue or null
 */
var findAdCue = function findAdCue(track, mediaTime) {
  var cues = track.cues;

  // linear scan for the first cue whose inclusive [adStartTime, adEndTime]
  // interval contains mediaTime
  for (var i = 0; i < cues.length; i++) {
    var cue = cues[i];

    if (cue.adStartTime <= mediaTime && mediaTime <= cue.adEndTime) {
      return cue;
    }
  }

  return null;
};
/**
 * Creates and maintains ad cues on the given track from a media playlist's
 * EXT-X-CUE-OUT / EXT-X-CUE-OUT-CONT / EXT-X-CUE-IN tags.
 *
 * @param {Object} media - the media playlist whose segments are scanned
 * @param {Object} track - the text track ad cues are added to
 * @param {number=} offset - media time at which the playlist starts
 */
var updateAdCues = function updateAdCues(media, track, offset) {
  if (offset === void 0) {
    offset = 0;
  }

  if (!media.segments) {
    return;
  }

  var mediaTime = offset;
  var cue;

  for (var i = 0; i < media.segments.length; i++) {
    var segment = media.segments[i];

    if (!cue) {
      // Since the cues will span for at least the segment duration, adding a fudge
      // factor of half segment duration will prevent duplicate cues from being
      // created when timing info is not exact (e.g. cue start time initialized
      // at 10.006677, but next call mediaTime is 10.003332 )
      cue = findAdCue(track, mediaTime + segment.duration / 2);
    }

    if (!cue) {
      // Not inside an ad break: open a new cue if this segment begins one.
      if ('cueOut' in segment) {
        // Assumes tag format to be
        // #EXT-X-CUE-OUT:30
        cue = new window$1.VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
        cue.adStartTime = mediaTime;
        cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
        track.addCue(cue);
      }

      if ('cueOutCont' in segment) {
        // Entered into the middle of an ad cue.
        // Assumes tag format to be
        // #EXT-X-CUE-OUT-CONT:10/30
        var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat),
            adOffset = _segment$cueOutCont$s[0],
            adTotal = _segment$cueOutCont$s[1];

        cue = new window$1.VTTCue(mediaTime, mediaTime + segment.duration, '');
        cue.adStartTime = mediaTime - adOffset;
        cue.adEndTime = cue.adStartTime + adTotal;
        track.addCue(cue);
      }

      mediaTime += segment.duration;
      continue;
    }

    if ('cueIn' in segment) {
      // Found a CUE-IN so end the cue
      cue.endTime = mediaTime;
      cue.adEndTime = mediaTime;
      mediaTime += segment.duration;
      cue = null;
      continue;
    }

    if (mediaTime >= cue.endTime) {
      // extend the cue until a CUE-IN is found; if mediaTime is still inside
      // the cue this mediaTime was already processed for it
      cue.endTime += segment.duration;
    }

    mediaTime += segment.duration;
  }
};
20003
// Sync-point strategies used to synchronize expired playlist segments.
// The max media sequence diff is 48 hours of live stream
// content with two second segments. Anything larger than that
// will likely be invalid.
20008
// 48 hours of live content with two-second segments; media sequence gaps
// larger than this are likely invalid and are not used for sync.
var MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC = 86400;

// Each strategy attempts to produce a sync-point: a known mapping from
// display-time to a segment index (and optional part index) in the playlist.
var syncPointStrategies = [// Strategy "VOD": Handle the VOD-case where the sync-point is *always*
// the equivalence display-time 0 === segment-index 0
{
  name: 'VOD',
  run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
    if (duration !== Infinity) {
      var syncPoint = {
        time: 0,
        segmentIndex: 0,
        partIndex: null
      };
      return syncPoint;
    }

    return null;
  }
}, // Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
{
  name: 'ProgramDateTime',
  run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
    // no datetime mappings have been saved yet, nothing to match against
    if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
      return null;
    }

    var syncPoint = null;
    var lastDistance = null;
    var partsAndSegments = getPartsAndSegments(playlist);
    currentTime = currentTime || 0;

    for (var i = 0; i < partsAndSegments.length; i++) {
      // start from the end and loop backwards for live
      // or start from the front and loop forwards for non-live
      var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
      var partAndSegment = partsAndSegments[index];
      var segment = partAndSegment.segment;
      var datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];

      // skip segments lacking a datetime or belonging to an unmapped timeline
      if (!datetimeMapping || !segment.dateTimeObject) {
        continue;
      }

      var segmentTime = segment.dateTimeObject.getTime() / 1000;
      var start = segmentTime + datetimeMapping; // take part duration into account.

      if (segment.parts && typeof partAndSegment.partIndex === 'number') {
        for (var z = 0; z < partAndSegment.partIndex; z++) {
          start += segment.parts[z].duration;
        }
      }

      var distance = Math.abs(currentTime - start); // Once the distance begins to increase, or if distance is 0, we have passed
      // currentTime and can stop looking for better candidates

      if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
        break;
      }

      lastDistance = distance;
      syncPoint = {
        time: start,
        segmentIndex: partAndSegment.segmentIndex,
        partIndex: partAndSegment.partIndex
      };
    }

    return syncPoint;
  }
}, // Strategy "Segment": We have a known time mapping for a timeline and a
// segment in the current timeline with timing data
{
  name: 'Segment',
  run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
    var syncPoint = null;
    var lastDistance = null;
    currentTime = currentTime || 0;
    var partsAndSegments = getPartsAndSegments(playlist);

    for (var i = 0; i < partsAndSegments.length; i++) {
      // start from the end and loop backwards for live
      // or start from the front and loop forwards for non-live
      var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
      var partAndSegment = partsAndSegments[index];
      var segment = partAndSegment.segment;
      // prefer part-level timing when available, fall back to segment timing
      var start = partAndSegment.part && partAndSegment.part.start || segment && segment.start;

      if (segment.timeline === currentTimeline && typeof start !== 'undefined') {
        var distance = Math.abs(currentTime - start); // Once the distance begins to increase, we have passed
        // currentTime and can stop looking for better candidates

        if (lastDistance !== null && lastDistance < distance) {
          break;
        }

        if (!syncPoint || lastDistance === null || lastDistance >= distance) {
          lastDistance = distance;
          syncPoint = {
            time: start,
            segmentIndex: partAndSegment.segmentIndex,
            partIndex: partAndSegment.partIndex
          };
        }
      }
    }

    return syncPoint;
  }
}, // Strategy "Discontinuity": We have a discontinuity with a known
// display-time
{
  name: 'Discontinuity',
  run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
    var syncPoint = null;
    currentTime = currentTime || 0;

    if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
      var lastDistance = null;

      for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
        var segmentIndex = playlist.discontinuityStarts[i];
        var discontinuity = playlist.discontinuitySequence + i + 1;
        var discontinuitySync = syncController.discontinuities[discontinuity];

        if (discontinuitySync) {
          var distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed
          // currentTime and can stop looking for better candidates

          if (lastDistance !== null && lastDistance < distance) {
            break;
          }

          if (!syncPoint || lastDistance === null || lastDistance >= distance) {
            lastDistance = distance;
            syncPoint = {
              time: discontinuitySync.time,
              segmentIndex: segmentIndex,
              partIndex: null
            };
          }
        }
      }
    }

    return syncPoint;
  }
}, // Strategy "Playlist": We have a playlist with a known mapping of
// segment index to display time
{
  name: 'Playlist',
  run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
    if (playlist.syncInfo) {
      var syncPoint = {
        time: playlist.syncInfo.time,
        segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence,
        partIndex: null
      };
      return syncPoint;
    }

    return null;
  }
}];
20171
20172var SyncController = /*#__PURE__*/function (_videojs$EventTarget) {
20173 _inheritsLoose(SyncController, _videojs$EventTarget);
20174
  function SyncController(options) {
    var _this;

    _this = _videojs$EventTarget.call(this) || this; // ...for synching across variants

    // per-timeline mappings from media time to display time
    _this.timelines = [];
    // per-discontinuity sync info, indexed by discontinuity sequence number
    _this.discontinuities = [];
    // per-timeline mappings derived from EXT-X-PROGRAM-DATE-TIME tags
    _this.timelineToDatetimeMappings = {};
    _this.logger_ = logger('SyncController');
    return _this;
  }
20186 /**
20187 * Find a sync-point for the playlist specified
20188 *
20189 * A sync-point is defined as a known mapping from display-time to
20190 * a segment-index in the current playlist.
20191 *
20192 * @param {Playlist} playlist
20193 * The playlist that needs a sync-point
20194 * @param {number} duration
20195 * Duration of the MediaSource (Infinite if playing a live source)
20196 * @param {number} currentTimeline
20197 * The last timeline from which a segment was loaded
20198 * @return {Object}
20199 * A sync-point object
20200 */
20201
20202
20203 var _proto = SyncController.prototype;
20204
20205 _proto.getSyncPoint = function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
20206 var syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime);
20207
20208 if (!syncPoints.length) {
20209 // Signal that we need to attempt to get a sync-point manually
20210 // by fetching a segment in the playlist and constructing
20211 // a sync-point from that information
20212 return null;
20213 } // Now find the sync-point that is closest to the currentTime because
20214 // that should result in the most accurate guess about which segment
20215 // to fetch
20216
20217
20218 return this.selectSyncPoint_(syncPoints, {
20219 key: 'time',
20220 value: currentTime
20221 });
20222 }
20223 /**
20224 * Calculate the amount of time that has expired off the playlist during playback
20225 *
20226 * @param {Playlist} playlist
20227 * Playlist object to calculate expired from
20228 * @param {number} duration
20229 * Duration of the MediaSource (Infinity if playling a live source)
20230 * @return {number|null}
20231 * The amount of time that has expired off the playlist during playback. Null
20232 * if no sync-points for the playlist can be found.
20233 */
20234 ;
20235
20236 _proto.getExpiredTime = function getExpiredTime(playlist, duration) {
20237 if (!playlist || !playlist.segments) {
20238 return null;
20239 }
20240
20241 var syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0); // Without sync-points, there is not enough information to determine the expired time
20242
20243 if (!syncPoints.length) {
20244 return null;
20245 }
20246
20247 var syncPoint = this.selectSyncPoint_(syncPoints, {
20248 key: 'segmentIndex',
20249 value: 0
20250 }); // If the sync-point is beyond the start of the playlist, we want to subtract the
20251 // duration from index 0 to syncPoint.segmentIndex instead of adding.
20252
20253 if (syncPoint.segmentIndex > 0) {
20254 syncPoint.time *= -1;
20255 }
20256
20257 return Math.abs(syncPoint.time + sumDurations({
20258 defaultDuration: playlist.targetDuration,
20259 durationList: playlist.segments,
20260 startIndex: syncPoint.segmentIndex,
20261 endIndex: 0
20262 }));
20263 }
20264 /**
20265 * Runs each sync-point strategy and returns a list of sync-points returned by the
20266 * strategies
20267 *
20268 * @private
20269 * @param {Playlist} playlist
20270 * The playlist that needs a sync-point
20271 * @param {number} duration
20272 * Duration of the MediaSource (Infinity if playing a live source)
20273 * @param {number} currentTimeline
20274 * The last timeline from which a segment was loaded
20275 * @return {Array}
20276 * A list of sync-point objects
20277 */
20278 ;
20279
20280 _proto.runStrategies_ = function runStrategies_(playlist, duration, currentTimeline, currentTime) {
20281 var syncPoints = []; // Try to find a sync-point in by utilizing various strategies...
20282
20283 for (var i = 0; i < syncPointStrategies.length; i++) {
20284 var strategy = syncPointStrategies[i];
20285 var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);
20286
20287 if (syncPoint) {
20288 syncPoint.strategy = strategy.name;
20289 syncPoints.push({
20290 strategy: strategy.name,
20291 syncPoint: syncPoint
20292 });
20293 }
20294 }
20295
20296 return syncPoints;
20297 }
20298 /**
20299 * Selects the sync-point nearest the specified target
20300 *
20301 * @private
20302 * @param {Array} syncPoints
20303 * List of sync-points to select from
20304 * @param {Object} target
20305 * Object specifying the property and value we are targeting
20306 * @param {string} target.key
20307 * Specifies the property to target. Must be either 'time' or 'segmentIndex'
20308 * @param {number} target.value
20309 * The value to target for the specified key.
20310 * @return {Object}
20311 * The sync-point nearest the target
20312 */
20313 ;
20314
20315 _proto.selectSyncPoint_ = function selectSyncPoint_(syncPoints, target) {
20316 var bestSyncPoint = syncPoints[0].syncPoint;
20317 var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
20318 var bestStrategy = syncPoints[0].strategy;
20319
20320 for (var i = 1; i < syncPoints.length; i++) {
20321 var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
20322
20323 if (newDistance < bestDistance) {
20324 bestDistance = newDistance;
20325 bestSyncPoint = syncPoints[i].syncPoint;
20326 bestStrategy = syncPoints[i].strategy;
20327 }
20328 }
20329
20330 this.logger_("syncPoint for [" + target.key + ": " + target.value + "] chosen with strategy" + (" [" + bestStrategy + "]: [time:" + bestSyncPoint.time + ",") + (" segmentIndex:" + bestSyncPoint.segmentIndex) + (typeof bestSyncPoint.partIndex === 'number' ? ",partIndex:" + bestSyncPoint.partIndex : '') + ']');
20331 return bestSyncPoint;
20332 }
20333 /**
20334 * Save any meta-data present on the segments when segments leave
20335 * the live window to the playlist to allow for synchronization at the
20336 * playlist level later.
20337 *
20338 * @param {Playlist} oldPlaylist - The previous active playlist
20339 * @param {Playlist} newPlaylist - The updated and most current playlist
20340 */
20341 ;
20342
20343 _proto.saveExpiredSegmentInfo = function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
20344 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence; // Ignore large media sequence gaps
20345
20346 if (mediaSequenceDiff > MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC) {
20347 videojs.log.warn("Not saving expired segment info. Media sequence gap " + mediaSequenceDiff + " is too large.");
20348 return;
20349 } // When a segment expires from the playlist and it has a start time
20350 // save that information as a possible sync-point reference in future
20351
20352
20353 for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
20354 var lastRemovedSegment = oldPlaylist.segments[i];
20355
20356 if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
20357 newPlaylist.syncInfo = {
20358 mediaSequence: oldPlaylist.mediaSequence + i,
20359 time: lastRemovedSegment.start
20360 };
20361 this.logger_("playlist refresh sync: [time:" + newPlaylist.syncInfo.time + "," + (" mediaSequence: " + newPlaylist.syncInfo.mediaSequence + "]"));
20362 this.trigger('syncinfoupdate');
20363 break;
20364 }
20365 }
20366 }
20367 /**
20368 * Save the mapping from playlist's ProgramDateTime to display. This should only happen
20369 * before segments start to load.
20370 *
20371 * @param {Playlist} playlist - The currently active playlist
20372 */
20373 ;
20374
20375 _proto.setDateTimeMappingForStart = function setDateTimeMappingForStart(playlist) {
20376 // It's possible for the playlist to be updated before playback starts, meaning time
20377 // zero is not yet set. If, during these playlist refreshes, a discontinuity is
20378 // crossed, then the old time zero mapping (for the prior timeline) would be retained
20379 // unless the mappings are cleared.
20380 this.timelineToDatetimeMappings = {};
20381
20382 if (playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
20383 var firstSegment = playlist.segments[0];
20384 var playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
20385 this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
20386 }
20387 }
20388 /**
20389 * Calculates and saves timeline mappings, playlist sync info, and segment timing values
20390 * based on the latest timing information.
20391 *
20392 * @param {Object} options
20393 * Options object
20394 * @param {SegmentInfo} options.segmentInfo
20395 * The current active request information
20396 * @param {boolean} options.shouldSaveTimelineMapping
20397 * If there's a timeline change, determines if the timeline mapping should be
20398 * saved for timeline mapping and program date time mappings.
20399 */
20400 ;
20401
20402 _proto.saveSegmentTimingInfo = function saveSegmentTimingInfo(_ref) {
20403 var segmentInfo = _ref.segmentInfo,
20404 shouldSaveTimelineMapping = _ref.shouldSaveTimelineMapping;
20405 var didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);
20406 var segment = segmentInfo.segment;
20407
20408 if (didCalculateSegmentTimeMapping) {
20409 this.saveDiscontinuitySyncInfo_(segmentInfo); // If the playlist does not have sync information yet, record that information
20410 // now with segment timing information
20411
20412 if (!segmentInfo.playlist.syncInfo) {
20413 segmentInfo.playlist.syncInfo = {
20414 mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
20415 time: segment.start
20416 };
20417 }
20418 }
20419
20420 var dateTime = segment.dateTimeObject;
20421
20422 if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
20423 this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
20424 }
20425 };
20426
20427 _proto.timestampOffsetForTimeline = function timestampOffsetForTimeline(timeline) {
20428 if (typeof this.timelines[timeline] === 'undefined') {
20429 return null;
20430 }
20431
20432 return this.timelines[timeline].time;
20433 };
20434
20435 _proto.mappingForTimeline = function mappingForTimeline(timeline) {
20436 if (typeof this.timelines[timeline] === 'undefined') {
20437 return null;
20438 }
20439
20440 return this.timelines[timeline].mapping;
20441 }
20442 /**
20443 * Use the "media time" for a segment to generate a mapping to "display time" and
20444 * save that display time to the segment.
20445 *
20446 * @private
20447 * @param {SegmentInfo} segmentInfo
20448 * The current active request information
20449 * @param {Object} timingInfo
20450 * The start and end time of the current segment in "media time"
20451 * @param {boolean} shouldSaveTimelineMapping
20452 * If there's a timeline change, determines if the timeline mapping should be
20453 * saved in timelines.
20454 * @return {boolean}
20455 * Returns false if segment time mapping could not be calculated
20456 */
20457 ;
20458
  _proto.calculateSegmentTimeMapping_ = function calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
    // TODO: remove side effects
    var segment = segmentInfo.segment;
    var part = segmentInfo.part;
    var mappingObj = this.timelines[segmentInfo.timeline];
    var start;
    var end;

    if (typeof segmentInfo.timestampOffset === 'number') {
      // A timestamp offset was set for this segment, so derive a fresh
      // media-time -> display-time mapping from the expected start of segment.
      mappingObj = {
        time: segmentInfo.startOfSegment,
        mapping: segmentInfo.startOfSegment - timingInfo.start
      };

      if (shouldSaveTimelineMapping) {
        this.timelines[segmentInfo.timeline] = mappingObj;
        this.trigger('timestampoffset');
        this.logger_("time mapping for timeline " + segmentInfo.timeline + ": " + ("[time: " + mappingObj.time + "] [mapping: " + mappingObj.mapping + "]"));
      }

      start = segmentInfo.startOfSegment;
      end = timingInfo.end + mappingObj.mapping;
    } else if (mappingObj) {
      // reuse the mapping already recorded for this timeline
      start = timingInfo.start + mappingObj.mapping;
      end = timingInfo.end + mappingObj.mapping;
    } else {
      // no usable mapping; segment time mapping could not be calculated
      return false;
    }

    if (part) {
      part.start = start;
      part.end = end;
    } // If we don't have a segment start yet or the start value we got
    // is less than our current segment.start value, save a new start value.
    // We have to do this because parts will have segment timing info saved
    // multiple times and we want segment start to be the earliest part start
    // value for that segment.


    if (!segment.start || start < segment.start) {
      segment.start = start;
    }

    segment.end = end;
    return true;
  }
20505 /**
20506 * Each time we have discontinuity in the playlist, attempt to calculate the location
20507 * in display of the start of the discontinuity and save that. We also save an accuracy
20508 * value so that we save values with the most accuracy (closest to 0.)
20509 *
20510 * @private
20511 * @param {SegmentInfo} segmentInfo - The current active request information
20512 */
20513 ;
20514
20515 _proto.saveDiscontinuitySyncInfo_ = function saveDiscontinuitySyncInfo_(segmentInfo) {
20516 var playlist = segmentInfo.playlist;
20517 var segment = segmentInfo.segment; // If the current segment is a discontinuity then we know exactly where
20518 // the start of the range and it's accuracy is 0 (greater accuracy values
20519 // mean more approximation)
20520
20521 if (segment.discontinuity) {
20522 this.discontinuities[segment.timeline] = {
20523 time: segment.start,
20524 accuracy: 0
20525 };
20526 } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
20527 // Search for future discontinuities that we can provide better timing
20528 // information for and save that information for sync purposes
20529 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
20530 var segmentIndex = playlist.discontinuityStarts[i];
20531 var discontinuity = playlist.discontinuitySequence + i + 1;
20532 var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
20533 var accuracy = Math.abs(mediaIndexDiff);
20534
20535 if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
20536 var time = void 0;
20537
20538 if (mediaIndexDiff < 0) {
20539 time = segment.start - sumDurations({
20540 defaultDuration: playlist.targetDuration,
20541 durationList: playlist.segments,
20542 startIndex: segmentInfo.mediaIndex,
20543 endIndex: segmentIndex
20544 });
20545 } else {
20546 time = segment.end + sumDurations({
20547 defaultDuration: playlist.targetDuration,
20548 durationList: playlist.segments,
20549 startIndex: segmentInfo.mediaIndex + 1,
20550 endIndex: segmentIndex
20551 });
20552 }
20553
20554 this.discontinuities[discontinuity] = {
20555 time: time,
20556 accuracy: accuracy
20557 };
20558 }
20559 }
20560 }
20561 };
20562
  // Tear down the sync controller: notify listeners it is going away, then
  // remove every event listener registered on it.
  _proto.dispose = function dispose() {
    this.trigger('dispose');
    this.off();
  };
20567
20568 return SyncController;
20569}(videojs.EventTarget);
20570
20571/**
20572 * The TimelineChangeController acts as a source for segment loaders to listen for and
20573 * keep track of latest and pending timeline changes. This is useful to ensure proper
20574 * sync, as each loader may need to make a consideration for what timeline the other
20575 * loader is on before making changes which could impact the other loader's media.
20576 *
20577 * @class TimelineChangeController
20578 * @extends videojs.EventTarget
20579 */
20580
var TimelineChangeController = /*#__PURE__*/function (_videojs$EventTarget) {
  _inheritsLoose(TimelineChangeController, _videojs$EventTarget);

  function TimelineChangeController() {
    var _this = _videojs$EventTarget.call(this) || this;

    _this.pendingTimelineChanges_ = {};
    _this.lastTimelineChanges_ = {};
    return _this;
  }

  var _proto = TimelineChangeController.prototype;

  /**
   * Clear any pending timeline change for the given loader type and notify
   * listeners.
   *
   * @param {string} type the loader type
   */
  _proto.clearPendingTimelineChange = function clearPendingTimelineChange(type) {
    this.pendingTimelineChanges_[type] = null;
    this.trigger('pendingtimelinechange');
  };

  /**
   * Record (or query) a pending timeline change for a loader type. When both
   * `from` and `to` are numbers the change is saved and listeners are
   * notified; the pending change currently stored for the type is returned.
   */
  _proto.pendingTimelineChange = function pendingTimelineChange(_ref) {
    var type = _ref.type;
    var from = _ref.from;
    var to = _ref.to;
    var isCompleteChange = typeof from === 'number' && typeof to === 'number';

    if (isCompleteChange) {
      this.pendingTimelineChanges_[type] = {
        type: type,
        from: from,
        to: to
      };
      this.trigger('pendingtimelinechange');
    }

    return this.pendingTimelineChanges_[type];
  };

  /**
   * Record (or query) the last committed timeline change for a loader type.
   * Saving a change clears the corresponding pending change and notifies
   * listeners; the last change stored for the type is returned.
   */
  _proto.lastTimelineChange = function lastTimelineChange(_ref2) {
    var type = _ref2.type;
    var from = _ref2.from;
    var to = _ref2.to;
    var isCompleteChange = typeof from === 'number' && typeof to === 'number';

    if (isCompleteChange) {
      this.lastTimelineChanges_[type] = {
        type: type,
        from: from,
        to: to
      };
      delete this.pendingTimelineChanges_[type];
      this.trigger('timelinechange');
    }

    return this.lastTimelineChanges_[type];
  };

  /**
   * Tear down the controller, dropping all recorded changes and listeners.
   */
  _proto.dispose = function dispose() {
    this.trigger('dispose');
    this.pendingTimelineChanges_ = {};
    this.lastTimelineChanges_ = {};
    this.off();
  };

  return TimelineChangeController;
}(videojs.EventTarget);
20644
20645/* rollup-plugin-worker-factory start for worker!/Users/ddashkevich/projects/vhs-release/src/decrypter-worker.js */
20646var workerCode = transform(getWorkerString(function () {
20647
20648 var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
20649
  // Minimal CommonJS module shim emitted by @rollup/plugin-commonjs: runs
  // `fn` against a fresh module object and returns that module's exports.
  function createCommonjsModule(fn, basedir, module) {
    return module = {
      path: basedir,
      exports: {},
      require: function require(path, base) {
        // Delegate to the stub below, defaulting the base to this module's path.
        return commonjsRequire(path, base === undefined || base === null ? module.path : base);
      }
    }, fn(module, module.exports), module.exports;
  }
20659
  // Stub for dynamic `require` calls, which cannot be resolved once bundled.
  function commonjsRequire() {
    throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
  }
20663
  // CommonJS-wrapped @babel/runtime "createClass" helper: installs prototype
  // and static property descriptors on a constructor.
  var createClass = createCommonjsModule(function (module) {
    // Define each descriptor on `target`, non-enumerable by default but
    // configurable, and writable when it is a data property.
    function _defineProperties(target, props) {
      for (var i = 0; i < props.length; i++) {
        var descriptor = props[i];
        descriptor.enumerable = descriptor.enumerable || false;
        descriptor.configurable = true;
        if ("value" in descriptor) descriptor.writable = true;
        Object.defineProperty(target, descriptor.key, descriptor);
      }
    }

    function _createClass(Constructor, protoProps, staticProps) {
      if (protoProps) _defineProperties(Constructor.prototype, protoProps);
      if (staticProps) _defineProperties(Constructor, staticProps);
      return Constructor;
    }

    module.exports = _createClass;
    // Mirror the export onto `default`/`__esModule` for ESM interop.
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });
  // CommonJS-wrapped @babel/runtime "setPrototypeOf" helper.
  var setPrototypeOf = createCommonjsModule(function (module) {
    function _setPrototypeOf(o, p) {
      // On first call, pick the native Object.setPrototypeOf (or a __proto__
      // assignment fallback) and replace the module export with it so later
      // calls skip this selection.
      module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
        o.__proto__ = p;
        return o;
      };

      module.exports["default"] = module.exports, module.exports.__esModule = true;
      return _setPrototypeOf(o, p);
    }

    module.exports = _setPrototypeOf;
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });
  // CommonJS-wrapped @babel/runtime "inheritsLoose" helper: wires prototype
  // inheritance and copies statics by re-parenting the constructor.
  var inheritsLoose = createCommonjsModule(function (module) {
    function _inheritsLoose(subClass, superClass) {
      subClass.prototype = Object.create(superClass.prototype);
      subClass.prototype.constructor = subClass;
      setPrototypeOf(subClass, superClass);
    }

    module.exports = _inheritsLoose;
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });
20708 /**
20709 * @file stream.js
20710 */
20711
20712 /**
20713 * A lightweight readable stream implemention that handles event dispatching.
20714 *
20715 * @class Stream
20716 */
20717
20718 var Stream = /*#__PURE__*/function () {
20719 function Stream() {
20720 this.listeners = {};
20721 }
20722 /**
20723 * Add a listener for a specified event type.
20724 *
20725 * @param {string} type the event name
20726 * @param {Function} listener the callback to be invoked when an event of
20727 * the specified type occurs
20728 */
20729
20730
20731 var _proto = Stream.prototype;
20732
20733 _proto.on = function on(type, listener) {
20734 if (!this.listeners[type]) {
20735 this.listeners[type] = [];
20736 }
20737
20738 this.listeners[type].push(listener);
20739 }
20740 /**
20741 * Remove a listener for a specified event type.
20742 *
20743 * @param {string} type the event name
20744 * @param {Function} listener a function previously registered for this
20745 * type of event through `on`
20746 * @return {boolean} if we could turn it off or not
20747 */
20748 ;
20749
20750 _proto.off = function off(type, listener) {
20751 if (!this.listeners[type]) {
20752 return false;
20753 }
20754
20755 var index = this.listeners[type].indexOf(listener); // TODO: which is better?
20756 // In Video.js we slice listener functions
20757 // on trigger so that it does not mess up the order
20758 // while we loop through.
20759 //
20760 // Here we slice on off so that the loop in trigger
20761 // can continue using it's old reference to loop without
20762 // messing up the order.
20763
20764 this.listeners[type] = this.listeners[type].slice(0);
20765 this.listeners[type].splice(index, 1);
20766 return index > -1;
20767 }
20768 /**
20769 * Trigger an event of the specified type on this stream. Any additional
20770 * arguments to this function are passed as parameters to event listeners.
20771 *
20772 * @param {string} type the event name
20773 */
20774 ;
20775
20776 _proto.trigger = function trigger(type) {
20777 var callbacks = this.listeners[type];
20778
20779 if (!callbacks) {
20780 return;
20781 } // Slicing the arguments on every invocation of this method
20782 // can add a significant amount of overhead. Avoid the
20783 // intermediate object creation for the common case of a
20784 // single callback argument
20785
20786
20787 if (arguments.length === 2) {
20788 var length = callbacks.length;
20789
20790 for (var i = 0; i < length; ++i) {
20791 callbacks[i].call(this, arguments[1]);
20792 }
20793 } else {
20794 var args = Array.prototype.slice.call(arguments, 1);
20795 var _length = callbacks.length;
20796
20797 for (var _i = 0; _i < _length; ++_i) {
20798 callbacks[_i].apply(this, args);
20799 }
20800 }
20801 }
20802 /**
20803 * Destroys the stream and cleans up.
20804 */
20805 ;
20806
20807 _proto.dispose = function dispose() {
20808 this.listeners = {};
20809 }
20810 /**
20811 * Forwards all `data` events on this stream to the destination stream. The
20812 * destination stream should provide a method `push` to receive the data
20813 * events as they arrive.
20814 *
20815 * @param {Stream} destination the stream that will receive all `data` events
20816 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
20817 */
20818 ;
20819
20820 _proto.pipe = function pipe(destination) {
20821 this.on('data', function (data) {
20822 destination.push(data);
20823 });
20824 };
20825
20826 return Stream;
20827 }();
20828 /*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
20829
20830 /**
20831 * Returns the subarray of a Uint8Array without PKCS#7 padding.
20832 *
20833 * @param padded {Uint8Array} unencrypted bytes that have been padded
20834 * @return {Uint8Array} the unpadded bytes
20835 * @see http://tools.ietf.org/html/rfc5652
20836 */
20837
20838
20839 function unpad(padded) {
20840 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
20841 }
20842 /*! @name aes-decrypter @version 3.1.3 @license Apache-2.0 */
20843
20844 /**
20845 * @file aes.js
20846 *
20847 * This file contains an adaptation of the AES decryption algorithm
20848 * from the Standford Javascript Cryptography Library. That work is
20849 * covered by the following copyright and permissions notice:
20850 *
20851 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
20852 * All rights reserved.
20853 *
20854 * Redistribution and use in source and binary forms, with or without
20855 * modification, are permitted provided that the following conditions are
20856 * met:
20857 *
20858 * 1. Redistributions of source code must retain the above copyright
20859 * notice, this list of conditions and the following disclaimer.
20860 *
20861 * 2. Redistributions in binary form must reproduce the above
20862 * copyright notice, this list of conditions and the following
20863 * disclaimer in the documentation and/or other materials provided
20864 * with the distribution.
20865 *
20866 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
20867 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
20868 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20869 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
20870 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
20871 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
20872 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
20873 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
20874 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
20875 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
20876 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
20877 *
20878 * The views and conclusions contained in the software and documentation
20879 * are those of the authors and should not be interpreted as representing
20880 * official policies, either expressed or implied, of the authors.
20881 */
20882
20883 /**
20884 * Expand the S-box tables.
20885 *
20886 * @private
20887 */
20888
20889
  // Build the AES lookup tables: encryption/decryption MixColumns tables
  // (indices 0-3) plus the S-box and inverse S-box (index 4 of each side).
  var precompute = function precompute() {
    var tables = [[[], [], [], [], []], [[], [], [], [], []]];
    var encTable = tables[0];
    var decTable = tables[1];
    var sbox = encTable[4];
    var sboxInv = decTable[4];
    var i;
    var x;
    var xInv;
    var d = [];
    var th = [];
    var x2;
    var x4;
    var x8;
    var s;
    var tEnc;
    var tDec; // Compute double and third tables

    for (i = 0; i < 256; i++) {
      th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
    }

    // Loop until every S-box entry has been filled in.
    for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
      // Compute sbox
      s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
      s = s >> 8 ^ s & 255 ^ 99;
      sbox[x] = s;
      sboxInv[s] = x; // Compute MixColumns

      x8 = d[x4 = d[x2 = d[x]]];
      tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
      tEnc = d[s] * 0x101 ^ s * 0x1010100;

      // Each of the four tables is the previous one rotated by one byte.
      for (i = 0; i < 4; i++) {
        encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
        decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
      }
    } // Compactify. Considerable speedup on Firefox.


    for (i = 0; i < 5; i++) {
      encTable[i] = encTable[i].slice(0);
      decTable[i] = decTable[i].slice(0);
    }

    return tables;
  };

  // Shared tables, computed lazily on first AES construction; each instance
  // copies from these rather than recomputing.
  var aesTables = null;
20939 /**
20940 * Schedule out an AES key for both encryption and decryption. This
20941 * is a low-level class. Use a cipher mode to do bulk encryption.
20942 *
20943 * @class AES
20944 * @param key {Array} The key as an array of 4, 6 or 8 words.
20945 */
20946
  var AES = /*#__PURE__*/function () {
    function AES(key) {
      /**
       * The expanded S-box and inverse S-box tables. These will be computed
       * on the client so that we don't have to send them down the wire.
       *
       * There are two tables, _tables[0] is for encryption and
       * _tables[1] is for decryption.
       *
       * The first 4 sub-tables are the expanded S-box with MixColumns. The
       * last (_tables[01][4]) is the S-box itself.
       *
       * @private
       */
      // if we have yet to precompute the S-box tables
      // do so now
      if (!aesTables) {
        aesTables = precompute();
      } // then make a copy of that object for use


      this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
      var i;
      var j;
      var tmp;
      var sbox = this._tables[0][4];
      var decTable = this._tables[1];
      var keyLen = key.length;
      var rcon = 1;

      // Keys must be 4, 6 or 8 32-bit words (AES-128/192/256).
      if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
        throw new Error('Invalid aes key size');
      }

      var encKey = key.slice(0);
      var decKey = [];
      this._key = [encKey, decKey]; // schedule encryption keys

      for (i = keyLen; i < 4 * keyLen + 28; i++) {
        tmp = encKey[i - 1]; // apply sbox

        if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
          tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon

          if (i % keyLen === 0) {
            tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
            rcon = rcon << 1 ^ (rcon >> 7) * 283;
          }
        }

        encKey[i] = encKey[i - keyLen] ^ tmp;
      } // schedule decryption keys


      for (j = 0; i; j++, i--) {
        tmp = encKey[j & 3 ? i : i - 4];

        if (i <= 4 || j < 4) {
          decKey[j] = tmp;
        } else {
          decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
        }
      }
    }
    /**
     * Decrypt 16 bytes, specified as four 32-bit words.
     *
     * @param {number} encrypted0 the first word to decrypt
     * @param {number} encrypted1 the second word to decrypt
     * @param {number} encrypted2 the third word to decrypt
     * @param {number} encrypted3 the fourth word to decrypt
     * @param {Int32Array} out the array to write the decrypted words
     * into
     * @param {number} offset the offset into the output array to start
     * writing results
     * @return {Array} The plaintext.
     */


    var _proto = AES.prototype;

    _proto.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
      var key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data

      var a = encrypted0 ^ key[0];
      var b = encrypted3 ^ key[1];
      var c = encrypted2 ^ key[2];
      var d = encrypted1 ^ key[3];
      var a2;
      var b2;
      var c2; // key.length === 2 ?

      var nInnerRounds = key.length / 4 - 2;
      var i;
      var kIndex = 4;
      var table = this._tables[1]; // load up the tables

      var table0 = table[0];
      var table1 = table[1];
      var table2 = table[2];
      var table3 = table[3];
      var sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.

      for (i = 0; i < nInnerRounds; i++) {
        a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
        b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
        c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
        d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
        kIndex += 4;
        a = a2;
        b = b2;
        c = c2;
      } // Last round.


      // The last round substitutes via the plain S-box (no MixColumns) and
      // writes the words out in swapped order via (3 & -i).
      for (i = 0; i < 4; i++) {
        out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
        a2 = a;
        a = b;
        b = c;
        c = d;
        d = a2;
      }
    };

    return AES;
  }();
21074 /**
21075 * A wrapper around the Stream class to use setTimeout
21076 * and run stream "jobs" Asynchronously
21077 *
21078 * @class AsyncStream
21079 * @extends Stream
21080 */
21081
21082
21083 var AsyncStream = /*#__PURE__*/function (_Stream) {
21084 inheritsLoose(AsyncStream, _Stream);
21085
21086 function AsyncStream() {
21087 var _this;
21088
21089 _this = _Stream.call(this, Stream) || this;
21090 _this.jobs = [];
21091 _this.delay = 1;
21092 _this.timeout_ = null;
21093 return _this;
21094 }
21095 /**
21096 * process an async job
21097 *
21098 * @private
21099 */
21100
21101
21102 var _proto = AsyncStream.prototype;
21103
21104 _proto.processJob_ = function processJob_() {
21105 this.jobs.shift()();
21106
21107 if (this.jobs.length) {
21108 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
21109 } else {
21110 this.timeout_ = null;
21111 }
21112 }
21113 /**
21114 * push a job into the stream
21115 *
21116 * @param {Function} job the job to push into the stream
21117 */
21118 ;
21119
21120 _proto.push = function push(job) {
21121 this.jobs.push(job);
21122
21123 if (!this.timeout_) {
21124 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
21125 }
21126 };
21127
21128 return AsyncStream;
21129 }(Stream);
21130 /**
21131 * Convert network-order (big-endian) bytes into their little-endian
21132 * representation.
21133 */
21134
21135
21136 var ntoh = function ntoh(word) {
21137 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
21138 };
21139 /**
21140 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
21141 *
21142 * @param {Uint8Array} encrypted the encrypted bytes
21143 * @param {Uint32Array} key the bytes of the decryption key
21144 * @param {Uint32Array} initVector the initialization vector (IV) to
21145 * use for the first round of CBC.
21146 * @return {Uint8Array} the decrypted bytes
21147 *
21148 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
21149 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
21150 * @see https://tools.ietf.org/html/rfc2315
21151 */
21152
21153
  var decrypt = function decrypt(encrypted, key, initVector) {
    // word-level access to the encrypted bytes
    var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
    var decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output

    var decrypted = new Uint8Array(encrypted.byteLength);
    var decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
    // decrypted data

    var init0;
    var init1;
    var init2;
    var init3;
    var encrypted0;
    var encrypted1;
    var encrypted2;
    var encrypted3; // iteration variable

    var wordIx; // pull out the words of the IV to ensure we don't modify the
    // passed-in reference and easier access

    init0 = initVector[0];
    init1 = initVector[1];
    init2 = initVector[2];
    init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
    // to each decrypted block

    for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
      // convert big-endian (network order) words into little-endian
      // (javascript order)
      encrypted0 = ntoh(encrypted32[wordIx]);
      encrypted1 = ntoh(encrypted32[wordIx + 1]);
      encrypted2 = ntoh(encrypted32[wordIx + 2]);
      encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block

      decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
      // plaintext

      decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
      decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
      decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
      decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round:
      // in CBC, the ciphertext of block N is the IV for block N+1

      init0 = encrypted0;
      init1 = encrypted1;
      init2 = encrypted2;
      init3 = encrypted3;
    }

    return decrypted;
  };
21205 /**
21206 * The `Decrypter` class that manages decryption of AES
21207 * data through `AsyncStream` objects and the `decrypt`
21208 * function
21209 *
21210 * @param {Uint8Array} encrypted the encrypted bytes
21211 * @param {Uint32Array} key the bytes of the decryption key
21212 * @param {Uint32Array} initVector the initialization vector (IV) to
21213 * @param {Function} done the function to run when done
21214 * @class Decrypter
21215 */
21216
21217
  var Decrypter = /*#__PURE__*/function () {
    function Decrypter(encrypted, key, initVector, done) {
      // STEP is the chunk size in 32-bit words (see the accessor below),
      // since it is used to stride through an Int32Array view.
      var step = Decrypter.STEP;
      var encrypted32 = new Int32Array(encrypted.buffer);
      var decrypted = new Uint8Array(encrypted.byteLength);
      var i = 0;
      this.asyncStream_ = new AsyncStream(); // split up the encryption job and do the individual chunks asynchronously

      this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));

      // Each subsequent chunk takes the last four ciphertext words of the
      // previous chunk as its IV, preserving CBC chaining across chunks.
      for (i = step; i < encrypted32.length; i += step) {
        initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
        this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
      } // invoke the done() callback when everything is finished


      this.asyncStream_.push(function () {
        // remove pkcs#7 padding from the decrypted bytes
        done(null, unpad(decrypted));
      });
    }
    /**
     * a getter for step the maximum number of bytes to process at one time
     *
     * @return {number} the value of step 32000
     */


    var _proto = Decrypter.prototype;
    /**
     * Build a job that decrypts one chunk into the shared output buffer.
     *
     * @private
     */

    _proto.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
      return function () {
        var bytes = decrypt(encrypted, key, initVector);
        // encrypted.byteOffset converts the chunk's word position back into a
        // byte offset within the shared Uint8Array output.
        decrypted.set(bytes, encrypted.byteOffset);
      };
    };

    // Define the static STEP accessor used by the constructor above.
    createClass(Decrypter, null, [{
      key: "STEP",
      get: function get() {
        // 4 * 8000;
        return 32000;
      }
    }]);
    return Decrypter;
  }();
21267
  // Resolve a reference to the global object ("window-or-global" shim):
  // prefer the browser `window`, then the CommonJS global, then worker
  // `self`, falling back to an empty object.
  var win;

  if (typeof window !== "undefined") {
    win = window;
  } else if (typeof commonjsGlobal !== "undefined") {
    win = commonjsGlobal;
  } else if (typeof self !== "undefined") {
    win = self;
  } else {
    win = {};
  }

  var window_1 = win;
21281
21282 var isArrayBufferView = function isArrayBufferView(obj) {
21283 if (ArrayBuffer.isView === 'function') {
21284 return ArrayBuffer.isView(obj);
21285 }
21286
21287 return obj && obj.buffer instanceof ArrayBuffer;
21288 };
21289
  // NOTE(review): the expressions below appear to be side-effect-free residue
  // of bundling @videojs/vhs-utils byte-helpers — a BigInt powers-of-256
  // table and an endianness probe whose results are discarded. Confirm before
  // removing.
  var BigInt = window_1.BigInt || Number;
  [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];

  (function () {
    var a = new Uint16Array([0xFFCC]);
    var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);

    if (b[0] === 0xFF) {
      return 'big';
    }

    if (b[0] === 0xCC) {
      return 'little';
    }

    return 'unknown';
  })();
21307 /**
21308 * Creates an object for sending to a web worker modifying properties that are TypedArrays
21309 * into a new object with seperated properties for the buffer, byteOffset, and byteLength.
21310 *
21311 * @param {Object} message
21312 * Object of properties and values to send to the web worker
21313 * @return {Object}
21314 * Modified message with TypedArray values expanded
21315 * @function createTransferableMessage
21316 */
21317
21318
21319 var createTransferableMessage = function createTransferableMessage(message) {
21320 var transferable = {};
21321 Object.keys(message).forEach(function (key) {
21322 var value = message[key];
21323
21324 if (isArrayBufferView(value)) {
21325 transferable[key] = {
21326 bytes: value.buffer,
21327 byteOffset: value.byteOffset,
21328 byteLength: value.byteLength
21329 };
21330 } else {
21331 transferable[key] = value;
21332 }
21333 });
21334 return transferable;
21335 };
21336 /* global self */
21337
21338 /**
21339 * Our web worker interface so that things can talk to aes-decrypter
21340 * that will be running in a web worker. the scope is passed to this by
21341 * webworkify.
21342 */
21343
21344
  // Worker message handler: rebuild typed-array views over the transferred
  // buffers, decrypt asynchronously, and post the plaintext back —
  // transferring the result buffer to avoid a copy.
  self.onmessage = function (event) {
    var data = event.data;
    var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
    var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
    var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
    /* eslint-disable no-new, handle-callback-err */

    new Decrypter(encrypted, key, iv, function (err, bytes) {
      // `source` is echoed back so the main thread can correlate responses.
      self.postMessage(createTransferableMessage({
        source: data.source,
        decrypted: bytes
      }), [bytes.buffer]);
    });
    /* eslint-enable */
  };
21360}));
21361var Decrypter = factory(workerCode);
21362/* rollup-plugin-worker-factory end for worker!/Users/ddashkevich/projects/vhs-release/src/decrypter-worker.js */
21363
21364/**
21365 * Convert the properties of an HLS track into an audioTrackKind.
21366 *
21367 * @private
21368 */
21369
var audioTrackKind_ = function audioTrackKind_(properties) {
  // A describes-video characteristic always wins, regardless of `default`.
  var describesVideo = properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0;

  if (describesVideo) {
    return 'main-desc';
  }

  return properties.default ? 'main' : 'alternative';
};
21379/**
21380 * Pause provided segment loader and playlist loader if active
21381 *
21382 * @param {SegmentLoader} segmentLoader
21383 * SegmentLoader to pause
21384 * @param {Object} mediaType
21385 * Active media type
21386 * @function stopLoaders
21387 */
21388
21389
var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
  // The segment loader is always halted, even when no media type is active.
  segmentLoader.abort();
  segmentLoader.pause();

  var playlistLoader = mediaType && mediaType.activePlaylistLoader;

  if (playlistLoader) {
    playlistLoader.pause();
    mediaType.activePlaylistLoader = null;
  }
};
21399/**
21400 * Start loading provided segment loader and playlist loader
21401 *
21402 * @param {PlaylistLoader} playlistLoader
21403 * PlaylistLoader to start loading
21404 * @param {Object} mediaType
21405 * Active media type
21406 * @function startLoaders
21407 */
21408
var startLoaders = function startLoaders(playlistLoader, mediaType) {
  // Mark this playlist loader active before kicking off its load. The segment
  // loader is started later, by the `loadedmetadata`/`loadedplaylist`
  // handlers registered in setupListeners.
  mediaType.activePlaylistLoader = playlistLoader;
  playlistLoader.load();
};
21415/**
21416 * Returns a function to be called when the media group changes. It performs a
21417 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
21418 * change of group is merely a rendition switch of the same content at another encoding,
21419 * rather than a change of content, such as switching audio from English to Spanish.
21420 *
21421 * @param {string} type
21422 * MediaGroup type
21423 * @param {Object} settings
21424 * Object containing required information for media groups
21425 * @return {Function}
21426 * Handler for a non-destructive resync of SegmentLoader when the active media
21427 * group changes.
21428 * @function onGroupChanged
21429 */
21430
var onGroupChanged = function onGroupChanged(type, settings) {
  return function () {
    var _settings$segmentLoad = settings.segmentLoaders,
        segmentLoader = _settings$segmentLoad[type],
        mainSegmentLoader = _settings$segmentLoad.main,
        mediaType = settings.mediaTypes[type];
    var activeTrack = mediaType.activeTrack();
    var activeGroup = mediaType.getActiveGroup();
    var previousActiveLoader = mediaType.activePlaylistLoader;
    var lastGroup = mediaType.lastGroup_; // the group did not change do nothing

    if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
      return;
    }

    // remember the group/track we are switching to so repeated change events
    // for the same group become no-ops (checked above)
    mediaType.lastGroup_ = activeGroup;
    mediaType.lastTrack_ = activeTrack;
    stopLoaders(segmentLoader, mediaType);

    if (!activeGroup || activeGroup.isMasterPlaylist) {
      // there is no group active or active group is a main playlist and won't change
      return;
    }

    if (!activeGroup.playlistLoader) {
      if (previousActiveLoader) {
        // The previous group had a playlist loader but the new active group does not
        // this means we are switching from demuxed to muxed audio. In this case we want to
        // do a destructive reset of the main segment loader and not restart the audio
        // loaders.
        mainSegmentLoader.resetEverything();
      }

      return;
    } // Non-destructive resync


    // same content at a different encoding: keep the buffer, just resync
    segmentLoader.resyncLoader();
    startLoaders(activeGroup.playlistLoader, mediaType);
  };
};
/**
 * Returns a handler invoked while the active media group is in the middle of
 * changing: it forgets the cached group and halts the type's segment loader.
 *
 * @param {string} type
 *        MediaGroup type
 * @param {Object} settings
 *        Object containing required information for media groups
 * @return {Function}
 *         Handler that pauses segment loading during a group change.
 * @function onGroupChanging
 */
var onGroupChanging = function onGroupChanging(type, settings) {
  return function () {
    var mediaType = settings.mediaTypes[type];
    var segmentLoader = settings.segmentLoaders[type];
    // clear the cached group so onGroupChanged treats the next group as new
    mediaType.lastGroup_ = null;
    segmentLoader.abort();
    segmentLoader.pause();
  };
};
21481/**
21482 * Returns a function to be called when the media track changes. It performs a
21483 * destructive reset of the SegmentLoader to ensure we start loading as close to
21484 * currentTime as possible.
21485 *
21486 * @param {string} type
21487 * MediaGroup type
21488 * @param {Object} settings
21489 * Object containing required information for media groups
21490 * @return {Function}
21491 * Handler for a destructive reset of SegmentLoader when the active media
21492 * track changes.
21493 * @function onTrackChanged
21494 */
21495
var onTrackChanged = function onTrackChanged(type, settings) {
  return function () {
    var masterPlaylistLoader = settings.masterPlaylistLoader,
        _settings$segmentLoad2 = settings.segmentLoaders,
        segmentLoader = _settings$segmentLoad2[type],
        mainSegmentLoader = _settings$segmentLoad2.main,
        mediaType = settings.mediaTypes[type];
    var activeTrack = mediaType.activeTrack();
    var activeGroup = mediaType.getActiveGroup();
    var previousActiveLoader = mediaType.activePlaylistLoader;
    var lastTrack = mediaType.lastTrack_; // track did not change, do nothing

    if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
      return;
    }

    // remember the new group/track so later change events can detect no-ops
    mediaType.lastGroup_ = activeGroup;
    mediaType.lastTrack_ = activeTrack;
    stopLoaders(segmentLoader, mediaType);

    if (!activeGroup) {
      // there is no group active so we do not want to restart loaders
      return;
    }

    if (activeGroup.isMasterPlaylist) {
      // track did not change, do nothing
      if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
        return;
      }

      // audio-only source: an audio "track" change is really a rendition
      // switch driven by the master playlist controller
      var mpc = settings.vhs.masterPlaylistController_;
      var newPlaylist = mpc.selectPlaylist(); // media will not change do nothing

      if (mpc.media() === newPlaylist) {
        return;
      }

      mediaType.logger_("track change. Switching master audio from " + lastTrack.id + " to " + activeTrack.id);
      masterPlaylistLoader.pause();
      mainSegmentLoader.resetEverything();
      mpc.fastQualityChange_(newPlaylist);
      return;
    }

    if (type === 'AUDIO') {
      if (!activeGroup.playlistLoader) {
        // when switching from demuxed audio/video to muxed audio/video (noted by no
        // playlist loader for the audio group), we want to do a destructive reset of the
        // main segment loader and not restart the audio loaders
        mainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since
        // it should be stopped

        mainSegmentLoader.resetEverything();
        return;
      } // although the segment loader is an audio segment loader, call the setAudio
      // function to ensure it is prepared to re-append the init segment (or handle other
      // config changes)


      segmentLoader.setAudio(true);
      mainSegmentLoader.setAudio(false);
    }

    if (previousActiveLoader === activeGroup.playlistLoader) {
      // Nothing has actually changed. This can happen because track change events can fire
      // multiple times for a "single" change. One for enabling the new active track, and
      // one for disabling the track that was active
      startLoaders(activeGroup.playlistLoader, mediaType);
      return;
    }

    if (segmentLoader.track) {
      // For WebVTT, set the new text track in the segmentloader
      segmentLoader.track(activeTrack);
    } // destructive reset


    // different content (e.g. another language): drop the buffer and reload
    // as close to currentTime as possible
    segmentLoader.resetEverything();
    startLoaders(activeGroup.playlistLoader, mediaType);
  };
};
var onError = {
  /**
   * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
   * an error.
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Error handler. Logs warning (or error if the playlist is blacklisted) to
   *         console and switches back to default audio track.
   * @function onError.AUDIO
   */
  AUDIO: function AUDIO(type, settings) {
    return function () {
      var segmentLoader = settings.segmentLoaders[type],
          mediaType = settings.mediaTypes[type],
          blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;
      stopLoaders(segmentLoader, mediaType); // switch back to default audio track

      var activeTrack = mediaType.activeTrack();
      var activeGroup = mediaType.activeGroup();
      // prefer the variant flagged default; otherwise fall back to the first one
      var id = (activeGroup.filter(function (group) {
        return group.default;
      })[0] || activeGroup[0]).id;
      var defaultTrack = mediaType.tracks[id];

      if (activeTrack === defaultTrack) {
        // Default track encountered an error. All we can do now is blacklist the current
        // rendition and hope another will switch audio groups
        blacklistCurrentPlaylist({
          message: 'Problem encountered loading the default audio track.'
        });
        return;
      }

      // Fix: the concatenated sentences previously lacked a separating space,
      // logging "...track.Switching back to default."
      videojs.log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');

      // enable only the default track; disabling the rest triggers the
      // track-change handling below
      for (var trackId in mediaType.tracks) {
        mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
      }

      mediaType.onTrackChanged();
    };
  },

  /**
   * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
   * an error.
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Error handler. Logs warning to console and disables the active subtitle track
   * @function onError.SUBTITLES
   */
  SUBTITLES: function SUBTITLES(type, settings) {
    return function () {
      var segmentLoader = settings.segmentLoaders[type],
          mediaType = settings.mediaTypes[type];
      // Fix: the concatenated sentences previously lacked a separating space,
      // logging "...track.Disabling subtitle track."
      videojs.log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');
      stopLoaders(segmentLoader, mediaType);
      var track = mediaType.activeTrack();

      if (track) {
        track.mode = 'disabled';
      }

      mediaType.onTrackChanged();
    };
  }
};
var setupListeners = {
  /**
   * Setup event listeners for audio playlist loader
   *
   * @param {string} type
   *        MediaGroup type
   * @param {PlaylistLoader|null} playlistLoader
   *        PlaylistLoader to register listeners on
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function setupListeners.AUDIO
   */
  AUDIO: function AUDIO(type, playlistLoader, settings) {
    if (!playlistLoader) {
      // no playlist loader means audio will be muxed with the video
      return;
    }

    var tech = settings.tech;
    var requestOptions = settings.requestOptions;
    var segmentLoader = settings.segmentLoaders[type];
    playlistLoader.on('loadedmetadata', function () {
      var media = playlistLoader.media();
      segmentLoader.playlist(media, requestOptions);

      // begin downloading segments when already playing, or when a non-live
      // source's preload setting permits it
      if (!tech.paused() || media.endList && tech.preload() !== 'none') {
        segmentLoader.load();
      }
    });
    playlistLoader.on('loadedplaylist', function () {
      segmentLoader.playlist(playlistLoader.media(), requestOptions);

      // keep the segment loader running whenever the player is not paused
      if (!tech.paused()) {
        segmentLoader.load();
      }
    });
    playlistLoader.on('error', onError[type](type, settings));
  },

  /**
   * Setup event listeners for subtitle playlist loader
   *
   * @param {string} type
   *        MediaGroup type
   * @param {PlaylistLoader|null} playlistLoader
   *        PlaylistLoader to register listeners on
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function setupListeners.SUBTITLES
   */
  SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
    var tech = settings.tech;
    var requestOptions = settings.requestOptions;
    var segmentLoader = settings.segmentLoaders[type];
    var mediaType = settings.mediaTypes[type];
    playlistLoader.on('loadedmetadata', function () {
      var media = playlistLoader.media();
      segmentLoader.playlist(media, requestOptions);
      // point the segment loader at the currently active subtitle track
      segmentLoader.track(mediaType.activeTrack());

      // begin downloading segments when already playing, or when a non-live
      // source's preload setting permits it
      if (!tech.paused() || media.endList && tech.preload() !== 'none') {
        segmentLoader.load();
      }
    });
    playlistLoader.on('loadedplaylist', function () {
      segmentLoader.playlist(playlistLoader.media(), requestOptions);

      // keep the segment loader running whenever the player is not paused
      if (!tech.paused()) {
        segmentLoader.load();
      }
    });
    playlistLoader.on('error', onError[type](type, settings));
  }
};
var initialize = {
  /**
   * Setup PlaylistLoaders and AudioTracks for the audio groups
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function initialize.AUDIO
   */
  'AUDIO': function AUDIO(type, settings) {
    var vhs = settings.vhs,
        sourceType = settings.sourceType,
        segmentLoader = settings.segmentLoaders[type],
        requestOptions = settings.requestOptions,
        mediaGroups = settings.master.mediaGroups,
        _settings$mediaTypes$ = settings.mediaTypes[type],
        groups = _settings$mediaTypes$.groups,
        tracks = _settings$mediaTypes$.tracks,
        logger_ = _settings$mediaTypes$.logger_,
        masterPlaylistLoader = settings.masterPlaylistLoader;
    var audioOnlyMaster = isAudioOnly(masterPlaylistLoader.master); // force a default if we have none

    if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
      // synthesize a single "main" group with one default variant
      mediaGroups[type] = {
        main: {
          default: {
            default: true
          }
        }
      };

      if (audioOnlyMaster) {
        // for audio-only sources all master playlists belong to the synthetic
        // default audio group
        mediaGroups[type].main.default.playlists = masterPlaylistLoader.master.playlists;
      }
    }

    for (var groupId in mediaGroups[type]) {
      if (!groups[groupId]) {
        groups[groupId] = [];
      }

      for (var variantLabel in mediaGroups[type][groupId]) {
        var properties = mediaGroups[type][groupId][variantLabel];
        var playlistLoader = void 0;

        if (audioOnlyMaster) {
          logger_("AUDIO group '" + groupId + "' label '" + variantLabel + "' is a master playlist");
          properties.isMasterPlaylist = true;
          playlistLoader = null; // if vhs-json was provided as the source, and the media playlist was resolved,
          // use the resolved media playlist object
        } else if (sourceType === 'vhs-json' && properties.playlists) {
          playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);
        } else if (properties.resolvedUri) {
          playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions); // TODO: dash isn't the only type with properties.playlists
          // should we even have properties.playlists in this check.
        } else if (properties.playlists && sourceType === 'dash') {
          playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
        } else {
          // no resolvedUri means the audio is muxed with the video when using this
          // audio track
          playlistLoader = null;
        }

        properties = videojs.mergeOptions({
          id: variantLabel,
          playlistLoader: playlistLoader
        }, properties);
        setupListeners[type](type, properties.playlistLoader, settings);
        groups[groupId].push(properties);

        // create the corresponding AudioTrack once per variant label; tracks
        // start disabled and a default is enabled later in setupMediaGroups
        if (typeof tracks[variantLabel] === 'undefined') {
          var track = new videojs.AudioTrack({
            id: variantLabel,
            kind: audioTrackKind_(properties),
            enabled: false,
            language: properties.language,
            default: properties.default,
            label: variantLabel
          });
          tracks[variantLabel] = track;
        }
      }
    } // setup single error event handler for the segment loader


    segmentLoader.on('error', onError[type](type, settings));
  },

  /**
   * Setup PlaylistLoaders and TextTracks for the subtitle groups
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function initialize.SUBTITLES
   */
  'SUBTITLES': function SUBTITLES(type, settings) {
    var tech = settings.tech,
        vhs = settings.vhs,
        sourceType = settings.sourceType,
        segmentLoader = settings.segmentLoaders[type],
        requestOptions = settings.requestOptions,
        mediaGroups = settings.master.mediaGroups,
        _settings$mediaTypes$2 = settings.mediaTypes[type],
        groups = _settings$mediaTypes$2.groups,
        tracks = _settings$mediaTypes$2.tracks,
        masterPlaylistLoader = settings.masterPlaylistLoader;

    for (var groupId in mediaGroups[type]) {
      if (!groups[groupId]) {
        groups[groupId] = [];
      }

      for (var variantLabel in mediaGroups[type][groupId]) {
        if (mediaGroups[type][groupId][variantLabel].forced) {
          // Subtitle playlists with the forced attribute are not selectable in Safari.
          // According to Apple's HLS Authoring Specification:
          //   If content has forced subtitles and regular subtitles in a given language,
          //   the regular subtitles track in that language MUST contain both the forced
          //   subtitles and the regular subtitles for that language.
          // Because of this requirement and that Safari does not add forced subtitles,
          // forced subtitles are skipped here to maintain consistent experience across
          // all platforms
          continue;
        }

        var properties = mediaGroups[type][groupId][variantLabel];
        var playlistLoader = void 0;

        if (sourceType === 'hls') {
          playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
        } else if (sourceType === 'dash') {
          var playlists = properties.playlists.filter(function (p) {
            return p.excludeUntil !== Infinity;
          });

          if (!playlists.length) {
            // NOTE(review): this `return` exits the whole initializer, skipping
            // any remaining subtitle groups and the segment-loader error
            // handler below — confirm this is intended rather than `continue`.
            return;
          }

          // NOTE(review): constructs the loader from the unfiltered
          // properties.playlists[0] even though the filtered `playlists` was
          // just computed above — verify against upstream intent.
          playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
        } else if (sourceType === 'vhs-json') {
          playlistLoader = new PlaylistLoader( // if the vhs-json object included the media playlist, use the media playlist
          // as provided, otherwise use the resolved URI to load the playlist
          properties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);
        }

        properties = videojs.mergeOptions({
          id: variantLabel,
          playlistLoader: playlistLoader
        }, properties);
        setupListeners[type](type, properties.playlistLoader, settings);
        groups[groupId].push(properties);

        // create the corresponding remote TextTrack once per variant label
        if (typeof tracks[variantLabel] === 'undefined') {
          var track = tech.addRemoteTextTrack({
            id: variantLabel,
            kind: 'subtitles',
            default: properties.default && properties.autoselect,
            language: properties.language,
            label: variantLabel
          }, false).track;
          tracks[variantLabel] = track;
        }
      }
    } // setup single error event handler for the segment loader


    segmentLoader.on('error', onError[type](type, settings));
  },

  /**
   * Setup TextTracks for the closed-caption groups
   *
   * @param {String} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function initialize['CLOSED-CAPTIONS']
   */
  'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
    var tech = settings.tech,
        mediaGroups = settings.master.mediaGroups,
        _settings$mediaTypes$3 = settings.mediaTypes[type],
        groups = _settings$mediaTypes$3.groups,
        tracks = _settings$mediaTypes$3.tracks;

    for (var groupId in mediaGroups[type]) {
      if (!groups[groupId]) {
        groups[groupId] = [];
      }

      for (var variantLabel in mediaGroups[type][groupId]) {
        var properties = mediaGroups[type][groupId][variantLabel]; // Look for either 608 (CCn) or 708 (SERVICEn) caption services

        if (!/^(?:CC|SERVICE)/.test(properties.instreamId)) {
          continue;
        }

        // allow user-supplied captionServices options to override the
        // manifest-declared label/language/default per instream ID
        var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
        var newProps = {
          label: variantLabel,
          language: properties.language,
          instreamId: properties.instreamId,
          default: properties.default && properties.autoselect
        };

        if (captionServices[newProps.instreamId]) {
          newProps = videojs.mergeOptions(newProps, captionServices[newProps.instreamId]);
        }

        if (newProps.default === undefined) {
          delete newProps.default;
        } // No PlaylistLoader is required for Closed-Captions because the captions are
        // embedded within the video stream


        groups[groupId].push(videojs.mergeOptions({
          id: variantLabel
        }, properties));

        if (typeof tracks[variantLabel] === 'undefined') {
          var track = tech.addRemoteTextTrack({
            id: newProps.instreamId,
            kind: 'captions',
            default: newProps.default,
            language: newProps.language,
            label: newProps.label
          }, false).track;
          tracks[variantLabel] = track;
        }
      }
    }
  }
};
21966
/**
 * Recursively determine whether `media` matches any entry in `list`, either
 * directly or inside an entry's nested `playlists` array.
 *
 * @param {Object[]} list - group property objects, possibly with nested playlists
 * @param {Object} media - the playlist being searched for
 * @return {boolean} true when a match is found at any depth
 * @function groupMatch
 */
var groupMatch = function groupMatch(list, media) {
  for (var i = 0; i < list.length; i++) {
    var entry = list[i];

    if (playlistMatch(media, entry)) {
      return true;
    }

    var nested = entry.playlists;

    if (nested && groupMatch(nested, media)) {
      return true;
    }
  }

  return false;
};
21980/**
21981 * Returns a function used to get the active group of the provided type
21982 *
21983 * @param {string} type
21984 * MediaGroup type
21985 * @param {Object} settings
21986 * Object containing required information for media groups
21987 * @return {Function}
21988 * Function that returns the active media group for the provided type. Takes an
21989 * optional parameter {TextTrack} track. If no track is provided, a list of all
21990 * variants in the group, otherwise the variant corresponding to the provided
21991 * track is returned.
21992 * @function activeGroup
21993 */
21994
21995
var activeGroup = function activeGroup(type, settings) {
  return function (track) {
    var groups = settings.mediaTypes[type].groups;
    var media = settings.masterPlaylistLoader.media();

    if (!media) {
      return null;
    }

    // prefer the group explicitly referenced by the active media's attributes
    var variants = media.attributes[type] ? groups[media.attributes[type]] : null;
    var groupKeys = Object.keys(groups);

    if (!variants) {
      if (type === 'AUDIO' && groupKeys.length > 1 && isAudioOnly(settings.master)) {
        // audio only with several groups: find whichever group actually
        // contains the active media
        for (var i = 0; i < groupKeys.length; i++) {
          var candidates = groups[groupKeys[i]];

          if (groupMatch(candidates, media)) {
            variants = candidates;
            break;
          }
        } // use the main group if it exists

      } else if (groups.main) {
        variants = groups.main; // only one group, use that one
      } else if (groupKeys.length === 1) {
        variants = groups[groupKeys[0]];
      }
    }

    if (typeof track === 'undefined') {
      return variants;
    }

    if (track === null || !variants) {
      // An active track was specified so a corresponding group is expected. track === null
      // means no track is currently active so there is no corresponding group
      return null;
    }

    var matches = variants.filter(function (props) {
      return props.id === track.id;
    });
    return matches[0] || null;
  };
};
var activeTrack = {
  /**
   * Returns a function used to get the active track of type provided
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Function that returns the active media track for the provided type. Returns
   *         null if no track is active
   * @function activeTrack.AUDIO
   */
  AUDIO: function AUDIO(type, settings) {
    return function () {
      var tracks = settings.mediaTypes[type].tracks;
      var ids = Object.keys(tracks);

      // an audio track is active when it is enabled
      for (var i = 0; i < ids.length; i++) {
        var track = tracks[ids[i]];

        if (track.enabled) {
          return track;
        }
      }

      return null;
    };
  },

  /**
   * Returns a function used to get the active track of type provided
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Function that returns the active media track for the provided type. Returns
   *         null if no track is active
   * @function activeTrack.SUBTITLES
   */
  SUBTITLES: function SUBTITLES(type, settings) {
    return function () {
      var tracks = settings.mediaTypes[type].tracks;
      var ids = Object.keys(tracks);

      // a subtitle track is active when shown or hidden (loaded but not rendered)
      for (var i = 0; i < ids.length; i++) {
        var track = tracks[ids[i]];

        if (track.mode === 'showing' || track.mode === 'hidden') {
          return track;
        }
      }

      return null;
    };
  }
};
/**
 * Returns a function that resolves the group corresponding to the currently
 * active track of the given type, or null when no track is active.
 *
 * @param {string} type
 *        MediaGroup type
 * @param {Object} _ref
 *        Settings object; only `mediaTypes` is used
 * @return {Function}
 *         Function returning the active group for the active track
 * @function getActiveGroup
 */
var getActiveGroup = function getActiveGroup(type, _ref) {
  var mediaTypes = _ref.mediaTypes;
  return function () {
    var mediaType = mediaTypes[type];
    var track = mediaType.activeTrack();
    return track ? mediaType.activeGroup(track) : null;
  };
};
22115/**
22116 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
22117 * Closed-Captions) specified in the master manifest.
22118 *
22119 * @param {Object} settings
22120 * Object containing required information for setting up the media groups
22121 * @param {Tech} settings.tech
22122 * The tech of the player
22123 * @param {Object} settings.requestOptions
22124 * XHR request options used by the segment loaders
22125 * @param {PlaylistLoader} settings.masterPlaylistLoader
22126 * PlaylistLoader for the master source
22127 * @param {VhsHandler} settings.vhs
22128 * VHS SourceHandler
22129 * @param {Object} settings.master
22130 * The parsed master manifest
22131 * @param {Object} settings.mediaTypes
22132 * Object to store the loaders, tracks, and utility methods for each media type
22133 * @param {Function} settings.blacklistCurrentPlaylist
22134 * Blacklists the current rendition and forces a rendition switch.
22135 * @function setupMediaGroups
22136 */
22137
var setupMediaGroups = function setupMediaGroups(settings) {
  ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
    initialize[type](type, settings);
  });
  var mediaTypes = settings.mediaTypes,
      masterPlaylistLoader = settings.masterPlaylistLoader,
      tech = settings.tech,
      vhs = settings.vhs,
      _settings$segmentLoad3 = settings.segmentLoaders,
      audioSegmentLoader = _settings$segmentLoad3['AUDIO'],
      mainSegmentLoader = _settings$segmentLoad3.main; // setup active group and track getters and change event handlers

  ['AUDIO', 'SUBTITLES'].forEach(function (type) {
    mediaTypes[type].activeGroup = activeGroup(type, settings);
    mediaTypes[type].activeTrack = activeTrack[type](type, settings);
    mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
    mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
    mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
    mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
  }); // DO NOT enable the default subtitle or caption track.
  // DO enable the default audio track

  var audioGroup = mediaTypes.AUDIO.activeGroup();

  if (audioGroup) {
    // prefer the variant flagged default, falling back to the first variant
    var groupId = (audioGroup.filter(function (group) {
      return group.default;
    })[0] || audioGroup[0]).id;
    mediaTypes.AUDIO.tracks[groupId].enabled = true;
    mediaTypes.AUDIO.onGroupChanged();
    mediaTypes.AUDIO.onTrackChanged();
    var activeAudioGroup = mediaTypes.AUDIO.getActiveGroup(); // a similar check for handling setAudio on each loader is run again each time the
    // track is changed, but needs to be handled here since the track may not be considered
    // changed on the first call to onTrackChanged

    if (!activeAudioGroup.playlistLoader) {
      // either audio is muxed with video or the stream is audio only
      mainSegmentLoader.setAudio(true);
    } else {
      // audio is demuxed
      mainSegmentLoader.setAudio(false);
      audioSegmentLoader.setAudio(true);
    }
  }

  // re-evaluate groups whenever the master playlist's active media changes
  masterPlaylistLoader.on('mediachange', function () {
    ['AUDIO', 'SUBTITLES'].forEach(function (type) {
      return mediaTypes[type].onGroupChanged();
    });
  });
  masterPlaylistLoader.on('mediachanging', function () {
    ['AUDIO', 'SUBTITLES'].forEach(function (type) {
      return mediaTypes[type].onGroupChanging();
    });
  }); // custom audio track change event handler for usage event

  var onAudioTrackChanged = function onAudioTrackChanged() {
    mediaTypes.AUDIO.onTrackChanged();
    // both vhs- and hls-prefixed usage events are emitted for compatibility
    tech.trigger({
      type: 'usage',
      name: 'vhs-audio-change'
    });
    tech.trigger({
      type: 'usage',
      name: 'hls-audio-change'
    });
  };

  tech.audioTracks().addEventListener('change', onAudioTrackChanged);
  tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
  // remove listeners on teardown to avoid leaking handlers across sources
  vhs.on('dispose', function () {
    tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
    tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
  }); // clear existing audio tracks and add the ones we just created

  tech.clearTracks('audio');

  for (var id in mediaTypes.AUDIO.tracks) {
    tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
  }
};
22219/**
22220 * Creates skeleton object used to store the loaders, tracks, and utility methods for each
22221 * media type
22222 *
22223 * @return {Object}
22224 * Object to store the loaders, tracks, and utility methods for each media type
22225 * @function createMediaTypes
22226 */
22227
/**
 * Build the skeleton state object for each media type. All handlers start as
 * no-ops; setupMediaGroups installs the real implementations later.
 *
 * @return {Object}
 *         Object to store the loaders, tracks, and utility methods for each media type
 * @function createMediaTypes
 */
var createMediaTypes = function createMediaTypes() {
  return ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].reduce(function (mediaTypes, type) {
    mediaTypes[type] = {
      groups: {},
      tracks: {},
      activePlaylistLoader: null,
      activeGroup: noop,
      activeTrack: noop,
      getActiveGroup: noop,
      onGroupChanged: noop,
      onTrackChanged: noop,
      lastTrack_: null,
      logger_: logger("MediaGroups[" + type + "]")
    };
    return mediaTypes;
  }, {});
};
22246
// Two minutes, in seconds; presumably the blacklist duration applied after an
// early request abort — TODO confirm at the usage site (not visible here).
var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;
var Vhs$1; // assigned later in the file — holds the Vhs handler reference (verify at assignment site)
// SegmentLoader stats that need to have each loader's
// values summed to calculate the final value

var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred', 'mediaAppends'];

// Sums one stat across the audio and main segment loaders. Must be invoked
// with `this` bound to an object exposing audioSegmentLoader_ and
// mainSegmentLoader_ (presumably the MasterPlaylistController — verify at the
// bind/call site).
var sumLoaderStat = function sumLoaderStat(stat) {
  return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
};
22256
/**
 * Decide whether ABR should switch from `currentPlaylist` to `nextPlaylist`,
 * logging the reasoning either way.
 *
 * @param {Object} _ref destructured settings
 * @param {Object|null} _ref.currentPlaylist the currently active media playlist
 * @param {TimeRanges} _ref.buffered the tech's buffered ranges
 * @param {number} _ref.currentTime current playback position in seconds
 * @param {Object} _ref.nextPlaylist the candidate playlist to switch to
 * @param {number} _ref.bufferLowWaterLine minimum forward buffer needed to switch up
 * @param {number} _ref.bufferHighWaterLine forward-buffer ceiling applied when switching down
 * @param {number} _ref.duration total duration of the source in seconds
 * @param {boolean} _ref.experimentalBufferBasedABR whether buffer-based ABR is enabled
 * @param {Function} _ref.log logger used for the decision trail
 * @return {boolean} true when the switch should be made
 */
var shouldSwitchToMedia = function shouldSwitchToMedia(_ref) {
  var currentPlaylist = _ref.currentPlaylist,
      buffered = _ref.buffered,
      currentTime = _ref.currentTime,
      nextPlaylist = _ref.nextPlaylist,
      bufferLowWaterLine = _ref.bufferLowWaterLine,
      bufferHighWaterLine = _ref.bufferHighWaterLine,
      duration = _ref.duration,
      experimentalBufferBasedABR = _ref.experimentalBufferBasedABR,
      log = _ref.log;

  // we have no other playlist to switch to
  if (!nextPlaylist) {
    videojs.log.warn('We received no playlist to switch to. Please check your stream.');
    return false;
  }

  var sharedLogLine = "allowing switch " + (currentPlaylist && currentPlaylist.id || 'null') + " -> " + nextPlaylist.id;

  if (!currentPlaylist) {
    log(sharedLogLine + " as current playlist is not set");
    return true;
  } // no need to switch if playlist is the same


  if (nextPlaylist.id === currentPlaylist.id) {
    return false;
  } // determine if current time is in a buffered range.


  var isBuffered = Boolean(findRange(buffered, currentTime).length); // If the playlist is live, then we want to not take low water line into account.
  // This is because in LIVE, the player plays 3 segments from the end of the
  // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration availble
  // in those segments, a viewer will never experience a rendition upswitch.

  if (!currentPlaylist.endList) {
    // For LLHLS live streams, don't switch renditions before playback has started, as it almost
    // doubles the time to first playback.
    if (!isBuffered && typeof currentPlaylist.partTargetDuration === 'number') {
      log("not " + sharedLogLine + " as current playlist is live llhls, but currentTime isn't in buffered.");
      return false;
    }

    log(sharedLogLine + " as current playlist is live");
    return true;
  }

  // amount of content buffered ahead of the playhead, in seconds
  var forwardBuffer = timeAheadOf(buffered, currentTime);
  var maxBufferLowWaterLine = experimentalBufferBasedABR ? Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE; // For the same reason as LIVE, we ignore the low water line when the VOD
  // duration is below the max potential low water line

  if (duration < maxBufferLowWaterLine) {
    log(sharedLogLine + " as duration < max low water line (" + duration + " < " + maxBufferLowWaterLine + ")");
    return true;
  }

  var nextBandwidth = nextPlaylist.attributes.BANDWIDTH;
  var currBandwidth = currentPlaylist.attributes.BANDWIDTH; // when switching down, if our buffer is lower than the high water line,
  // we can switch down

  if (nextBandwidth < currBandwidth && (!experimentalBufferBasedABR || forwardBuffer < bufferHighWaterLine)) {
    var logLine = sharedLogLine + " as next bandwidth < current bandwidth (" + nextBandwidth + " < " + currBandwidth + ")";

    if (experimentalBufferBasedABR) {
      logLine += " and forwardBuffer < bufferHighWaterLine (" + forwardBuffer + " < " + bufferHighWaterLine + ")";
    }

    log(logLine);
    return true;
  } // and if our buffer is higher than the low water line,
  // we can switch up


  if ((!experimentalBufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {
    var _logLine = sharedLogLine + " as forwardBuffer >= bufferLowWaterLine (" + forwardBuffer + " >= " + bufferLowWaterLine + ")";

    if (experimentalBufferBasedABR) {
      _logLine += " and next bandwidth > current bandwidth (" + nextBandwidth + " > " + currBandwidth + ")";
    }

    log(_logLine);
    return true;
  }

  log("not " + sharedLogLine + " as no switching criteria met");
  return false;
};
22344/**
22345 * the master playlist controller controller all interactons
22346 * between playlists and segmentloaders. At this time this mainly
22347 * involves a master playlist and a series of audio playlists
22348 * if they are available
22349 *
22350 * @class MasterPlaylistController
22351 * @extends videojs.EventTarget
22352 */
22353
22354
22355var MasterPlaylistController = /*#__PURE__*/function (_videojs$EventTarget) {
22356 _inheritsLoose(MasterPlaylistController, _videojs$EventTarget);
22357
  /**
   * Construct the controller: wires together the master playlist loader,
   * the MediaSource, the three segment loaders (main/audio/vtt), ABR timers
   * and the QoS stat getters.
   *
   * @param {Object} options controller options; `src` is required, and
   *        `tech`/`externVhs` are used unconditionally below
   */
  function MasterPlaylistController(options) {
    var _this;

    _this = _videojs$EventTarget.call(this) || this;
    var src = options.src,
        handleManifestRedirects = options.handleManifestRedirects,
        withCredentials = options.withCredentials,
        tech = options.tech,
        bandwidth = options.bandwidth,
        externVhs = options.externVhs,
        useCueTags = options.useCueTags,
        blacklistDuration = options.blacklistDuration,
        enableLowInitialPlaylist = options.enableLowInitialPlaylist,
        sourceType = options.sourceType,
        cacheEncryptionKeys = options.cacheEncryptionKeys,
        experimentalBufferBasedABR = options.experimentalBufferBasedABR,
        experimentalLeastPixelDiffSelector = options.experimentalLeastPixelDiffSelector,
        captionServices = options.captionServices;

    if (!src) {
      throw new Error('A non-empty playlist URL or JSON manifest string is required');
    }

    var maxPlaylistRetries = options.maxPlaylistRetries;

    // null/undefined means "retry forever"
    if (maxPlaylistRetries === null || typeof maxPlaylistRetries === 'undefined') {
      maxPlaylistRetries = Infinity;
    }

    // stash the external Vhs reference at module level for use by helpers
    Vhs$1 = externVhs;
    _this.experimentalBufferBasedABR = Boolean(experimentalBufferBasedABR);
    _this.experimentalLeastPixelDiffSelector = Boolean(experimentalLeastPixelDiffSelector);
    _this.withCredentials = withCredentials;
    _this.tech_ = tech;
    _this.vhs_ = tech.vhs;
    _this.sourceType_ = sourceType;
    _this.useCueTags_ = useCueTags;
    _this.blacklistDuration = blacklistDuration;
    _this.maxPlaylistRetries = maxPlaylistRetries;
    _this.enableLowInitialPlaylist = enableLowInitialPlaylist;

    // cue tags are surfaced through a dedicated metadata text track
    if (_this.useCueTags_) {
      _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
      _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
    }

    _this.requestOptions_ = {
      withCredentials: withCredentials,
      handleManifestRedirects: handleManifestRedirects,
      maxPlaylistRetries: maxPlaylistRetries,
      timeout: null
    };

    _this.on('error', _this.pauseLoading);

    _this.mediaTypes_ = createMediaTypes();
    _this.mediaSource = new window$1.MediaSource();
    _this.handleDurationChange_ = _this.handleDurationChange_.bind(_assertThisInitialized(_this));
    _this.handleSourceOpen_ = _this.handleSourceOpen_.bind(_assertThisInitialized(_this));
    _this.handleSourceEnded_ = _this.handleSourceEnded_.bind(_assertThisInitialized(_this));

    _this.mediaSource.addEventListener('durationchange', _this.handleDurationChange_); // load the media source into the player


    _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_);

    _this.mediaSource.addEventListener('sourceended', _this.handleSourceEnded_); // we don't have to handle sourceclose since dispose will handle termination of
    // everything, and the MediaSource should not be detached without a proper disposal


    _this.seekable_ = videojs.createTimeRanges();
    _this.hasPlayed_ = false;
    _this.syncController_ = new SyncController(options);
    _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
      kind: 'metadata',
      label: 'segment-metadata'
    }, false).track;
    _this.decrypter_ = new Decrypter();
    _this.sourceUpdater_ = new SourceUpdater(_this.mediaSource);
    _this.inbandTextTracks_ = {};
    _this.timelineChangeController_ = new TimelineChangeController();
    // settings shared by all three segment loaders created below
    var segmentLoaderSettings = {
      vhs: _this.vhs_,
      parse708captions: options.parse708captions,
      useDtsForTimestampOffset: options.useDtsForTimestampOffset,
      captionServices: captionServices,
      mediaSource: _this.mediaSource,
      currentTime: _this.tech_.currentTime.bind(_this.tech_),
      seekable: function seekable() {
        return _this.seekable();
      },
      seeking: function seeking() {
        return _this.tech_.seeking();
      },
      duration: function duration() {
        return _this.duration();
      },
      hasPlayed: function hasPlayed() {
        return _this.hasPlayed_;
      },
      goalBufferLength: function goalBufferLength() {
        return _this.goalBufferLength();
      },
      bandwidth: bandwidth,
      syncController: _this.syncController_,
      decrypter: _this.decrypter_,
      sourceType: _this.sourceType_,
      inbandTextTracks: _this.inbandTextTracks_,
      cacheEncryptionKeys: cacheEncryptionKeys,
      sourceUpdater: _this.sourceUpdater_,
      timelineChangeController: _this.timelineChangeController_,
      experimentalExactManifestTimings: options.experimentalExactManifestTimings
    }; // The source type check not only determines whether a special DASH playlist loader
    // should be used, but also covers the case where the provided src is a vhs-json
    // manifest object (instead of a URL). In the case of vhs-json, the default
    // PlaylistLoader should be used.

    _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, _this.vhs_, _this.requestOptions_) : new PlaylistLoader(src, _this.vhs_, _this.requestOptions_);

    _this.setupMasterPlaylistLoaderListeners_(); // setup segment loaders
    // combined audio/video or just video when alternate audio track is selected


    _this.mainSegmentLoader_ = new SegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
      segmentMetadataTrack: _this.segmentMetadataTrack_,
      loaderType: 'main'
    }), options); // alternate audio track

    _this.audioSegmentLoader_ = new SegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
      loaderType: 'audio'
    }), options);
    _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
      loaderType: 'vtt',
      featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks,
      // lazily load vtt.js via the tech; resolves/rejects on the tech's
      // vttjsloaded/vttjserror events
      loadVttJs: function loadVttJs() {
        return new Promise(function (resolve, reject) {
          function onLoad() {
            tech.off('vttjserror', onError);
            resolve();
          }

          function onError() {
            tech.off('vttjsloaded', onLoad);
            reject();
          }

          tech.one('vttjsloaded', onLoad);
          tech.one('vttjserror', onError); // safe to call multiple times, script will be loaded only once:

          tech.addWebVttScript_();
        });
      }
    }), options);

    _this.setupSegmentLoaderListeners_();

    // buffer-based ABR polls on a timer instead of reacting to loader events
    if (_this.experimentalBufferBasedABR) {
      _this.masterPlaylistLoader_.one('loadedplaylist', function () {
        return _this.startABRTimer_();
      });

      _this.tech_.on('pause', function () {
        return _this.stopABRTimer_();
      });

      _this.tech_.on('play', function () {
        return _this.startABRTimer_();
      });
    } // Create SegmentLoader stat-getters
    // mediaRequests_
    // mediaRequestsAborted_
    // mediaRequestsTimedout_
    // mediaRequestsErrored_
    // mediaTransferDuration_
    // mediaBytesTransferred_
    // mediaAppends_


    loaderStats.forEach(function (stat) {
      _this[stat + '_'] = sumLoaderStat.bind(_assertThisInitialized(_this), stat);
    });
    _this.logger_ = logger('MPC');
    _this.triggeredFmp4Usage = false;

    // with preload="none", defer the first manifest request until play
    if (_this.tech_.preload() === 'none') {
      _this.loadOnPlay_ = function () {
        _this.loadOnPlay_ = null;

        _this.masterPlaylistLoader_.load();
      };

      _this.tech_.one('play', _this.loadOnPlay_);
    } else {
      _this.masterPlaylistLoader_.load();
    }

    // time-to-first-frame stats; -1 means "not measured yet"
    _this.timeToLoadedData__ = -1;
    _this.mainAppendsToLoadedData__ = -1;
    _this.audioAppendsToLoadedData__ = -1;
    var event = _this.tech_.preload() === 'none' ? 'play' : 'loadstart'; // start the first frame timer on loadstart or play (for preload none)

    _this.tech_.one(event, function () {
      var timeToLoadedDataStart = Date.now();

      _this.tech_.one('loadeddata', function () {
        _this.timeToLoadedData__ = Date.now() - timeToLoadedDataStart;
        _this.mainAppendsToLoadedData__ = _this.mainSegmentLoader_.mediaAppends;
        _this.audioAppendsToLoadedData__ = _this.audioSegmentLoader_.mediaAppends;
      });
    });

    return _this;
  }
22571
  var _proto = MasterPlaylistController.prototype;

  /**
   * The number of main-loader appends that had occurred when the tech's
   * `loadeddata` event fired; -1 until measured (see constructor).
   *
   * @return {number}
   */
  _proto.mainAppendsToLoadedData_ = function mainAppendsToLoadedData_() {
    return this.mainAppendsToLoadedData__;
  };

  /**
   * The number of audio-loader appends that had occurred when the tech's
   * `loadeddata` event fired; -1 until measured (see constructor).
   *
   * @return {number}
   */
  _proto.audioAppendsToLoadedData_ = function audioAppendsToLoadedData_() {
    return this.audioAppendsToLoadedData__;
  };

  /**
   * Combined main + audio appends at `loadeddata`, or -1 if either side
   * has not been measured yet.
   *
   * @return {number}
   */
  _proto.appendsToLoadedData_ = function appendsToLoadedData_() {
    var main = this.mainAppendsToLoadedData_();
    var audio = this.audioAppendsToLoadedData_();

    if (main === -1 || audio === -1) {
      return -1;
    }

    return main + audio;
  };

  /**
   * Milliseconds between loadstart/play and `loadeddata`; -1 until measured.
   *
   * @return {number}
   */
  _proto.timeToLoadedData_ = function timeToLoadedData_() {
    return this.timeToLoadedData__;
  }
  /**
   * Run selectPlaylist and switch to the new playlist if we should
   *
   * @param {string} [reason=abr] a reason for why the ABR check is made
   * @private
   */
  ;
22603
22604 _proto.checkABR_ = function checkABR_(reason) {
22605 if (reason === void 0) {
22606 reason = 'abr';
22607 }
22608
22609 var nextPlaylist = this.selectPlaylist();
22610
22611 if (nextPlaylist && this.shouldSwitchToMedia_(nextPlaylist)) {
22612 this.switchMedia_(nextPlaylist, reason);
22613 }
22614 };
22615
22616 _proto.switchMedia_ = function switchMedia_(playlist, cause, delay) {
22617 var oldMedia = this.media();
22618 var oldId = oldMedia && (oldMedia.id || oldMedia.uri);
22619 var newId = playlist.id || playlist.uri;
22620
22621 if (oldId && oldId !== newId) {
22622 this.logger_("switch media " + oldId + " -> " + newId + " from " + cause);
22623 this.tech_.trigger({
22624 type: 'usage',
22625 name: "vhs-rendition-change-" + cause
22626 });
22627 }
22628
22629 this.masterPlaylistLoader_.media(playlist, delay);
22630 }
22631 /**
22632 * Start a timer that periodically calls checkABR_
22633 *
22634 * @private
22635 */
22636 ;
22637
22638 _proto.startABRTimer_ = function startABRTimer_() {
22639 var _this2 = this;
22640
22641 this.stopABRTimer_();
22642 this.abrTimer_ = window$1.setInterval(function () {
22643 return _this2.checkABR_();
22644 }, 250);
22645 }
22646 /**
22647 * Stop the timer that periodically calls checkABR_
22648 *
22649 * @private
22650 */
22651 ;
22652
22653 _proto.stopABRTimer_ = function stopABRTimer_() {
22654 // if we're scrubbing, we don't need to pause.
22655 // This getter will be added to Video.js in version 7.11.
22656 if (this.tech_.scrubbing && this.tech_.scrubbing()) {
22657 return;
22658 }
22659
22660 window$1.clearInterval(this.abrTimer_);
22661 this.abrTimer_ = null;
22662 }
22663 /**
22664 * Get a list of playlists for the currently selected audio playlist
22665 *
22666 * @return {Array} the array of audio playlists
22667 */
22668 ;
22669
22670 _proto.getAudioTrackPlaylists_ = function getAudioTrackPlaylists_() {
22671 var master = this.master();
22672 var defaultPlaylists = master && master.playlists || []; // if we don't have any audio groups then we can only
22673 // assume that the audio tracks are contained in masters
22674 // playlist array, use that or an empty array.
22675
22676 if (!master || !master.mediaGroups || !master.mediaGroups.AUDIO) {
22677 return defaultPlaylists;
22678 }
22679
22680 var AUDIO = master.mediaGroups.AUDIO;
22681 var groupKeys = Object.keys(AUDIO);
22682 var track; // get the current active track
22683
22684 if (Object.keys(this.mediaTypes_.AUDIO.groups).length) {
22685 track = this.mediaTypes_.AUDIO.activeTrack(); // or get the default track from master if mediaTypes_ isn't setup yet
22686 } else {
22687 // default group is `main` or just the first group.
22688 var defaultGroup = AUDIO.main || groupKeys.length && AUDIO[groupKeys[0]];
22689
22690 for (var label in defaultGroup) {
22691 if (defaultGroup[label].default) {
22692 track = {
22693 label: label
22694 };
22695 break;
22696 }
22697 }
22698 } // no active track no playlists.
22699
22700
22701 if (!track) {
22702 return defaultPlaylists;
22703 }
22704
22705 var playlists = []; // get all of the playlists that are possible for the
22706 // active track.
22707
22708 for (var group in AUDIO) {
22709 if (AUDIO[group][track.label]) {
22710 var properties = AUDIO[group][track.label];
22711
22712 if (properties.playlists && properties.playlists.length) {
22713 playlists.push.apply(playlists, properties.playlists);
22714 } else if (properties.uri) {
22715 playlists.push(properties);
22716 } else if (master.playlists.length) {
22717 // if an audio group does not have a uri
22718 // see if we have main playlists that use it as a group.
22719 // if we do then add those to the playlists list.
22720 for (var i = 0; i < master.playlists.length; i++) {
22721 var playlist = master.playlists[i];
22722
22723 if (playlist.attributes && playlist.attributes.AUDIO && playlist.attributes.AUDIO === group) {
22724 playlists.push(playlist);
22725 }
22726 }
22727 }
22728 }
22729 }
22730
22731 if (!playlists.length) {
22732 return defaultPlaylists;
22733 }
22734
22735 return playlists;
22736 }
22737 /**
22738 * Register event handlers on the master playlist loader. A helper
22739 * function for construction time.
22740 *
22741 * @private
22742 */
22743 ;
22744
  _proto.setupMasterPlaylistLoaderListeners_ = function setupMasterPlaylistLoaderListeners_() {
    var _this3 = this;

    this.masterPlaylistLoader_.on('loadedmetadata', function () {
      var media = _this3.masterPlaylistLoader_.media();

      // request timeout is 1.5x a segment's target duration (in ms)
      var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
      // timeout the request.

      if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
        _this3.requestOptions_.timeout = 0;
      } else {
        _this3.requestOptions_.timeout = requestTimeout;
      } // if this isn't a live video and preload permits, start
      // downloading segments


      if (media.endList && _this3.tech_.preload() !== 'none') {
        _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);

        _this3.mainSegmentLoader_.load();
      }

      setupMediaGroups({
        sourceType: _this3.sourceType_,
        segmentLoaders: {
          AUDIO: _this3.audioSegmentLoader_,
          SUBTITLES: _this3.subtitleSegmentLoader_,
          main: _this3.mainSegmentLoader_
        },
        tech: _this3.tech_,
        requestOptions: _this3.requestOptions_,
        masterPlaylistLoader: _this3.masterPlaylistLoader_,
        vhs: _this3.vhs_,
        master: _this3.master(),
        mediaTypes: _this3.mediaTypes_,
        blacklistCurrentPlaylist: _this3.blacklistCurrentPlaylist.bind(_this3)
      });

      _this3.triggerPresenceUsage_(_this3.master(), media);

      _this3.setupFirstPlay();

      if (!_this3.mediaTypes_.AUDIO.activePlaylistLoader || _this3.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
        _this3.trigger('selectedinitialmedia');
      } else {
        // We must wait for the active audio playlist loader to
        // finish setting up before triggering this event so the
        // representations API and EME setup is correct
        _this3.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
          _this3.trigger('selectedinitialmedia');
        });
      }
    });
    this.masterPlaylistLoader_.on('loadedplaylist', function () {
      // a playlist arrived, so the deferred load-on-play is no longer needed
      if (_this3.loadOnPlay_) {
        _this3.tech_.off('play', _this3.loadOnPlay_);
      }

      var updatedPlaylist = _this3.masterPlaylistLoader_.media();

      // no media selected yet: choose and switch to an initial playlist
      if (!updatedPlaylist) {
        // exclude any variants that are not supported by the browser before selecting
        // an initial media as the playlist selectors do not consider browser support
        _this3.excludeUnsupportedVariants_();

        var selectedMedia;

        if (_this3.enableLowInitialPlaylist) {
          selectedMedia = _this3.selectInitialPlaylist();
        }

        if (!selectedMedia) {
          selectedMedia = _this3.selectPlaylist();
        }

        if (!selectedMedia || !_this3.shouldSwitchToMedia_(selectedMedia)) {
          return;
        }

        _this3.initialMedia_ = selectedMedia;

        _this3.switchMedia_(_this3.initialMedia_, 'initial'); // Under the standard case where a source URL is provided, loadedplaylist will
        // fire again since the playlist will be requested. In the case of vhs-json
        // (where the manifest object is provided as the source), when the media
        // playlist's `segments` list is already available, a media playlist won't be
        // requested, and loadedplaylist won't fire again, so the playlist handler must be
        // called on its own here.


        var haveJsonSource = _this3.sourceType_ === 'vhs-json' && _this3.initialMedia_.segments;

        if (!haveJsonSource) {
          return;
        }

        updatedPlaylist = _this3.initialMedia_;
      }

      _this3.handleUpdatedMediaPlaylist(updatedPlaylist);
    });
    this.masterPlaylistLoader_.on('error', function () {
      _this3.blacklistCurrentPlaylist(_this3.masterPlaylistLoader_.error);
    });
    this.masterPlaylistLoader_.on('mediachanging', function () {
      // stop in-flight segment work while a rendition change is underway
      _this3.mainSegmentLoader_.abort();

      _this3.mainSegmentLoader_.pause();
    });
    this.masterPlaylistLoader_.on('mediachange', function () {
      var media = _this3.masterPlaylistLoader_.media();

      var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
      // timeout the request.

      if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
        _this3.requestOptions_.timeout = 0;
      } else {
        _this3.requestOptions_.timeout = requestTimeout;
      }

      _this3.masterPlaylistLoader_.load(); // TODO: Create a new event on the PlaylistLoader that signals
      // that the segments have changed in some way and use that to
      // update the SegmentLoader instead of doing it twice here and
      // on `loadedplaylist`


      _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);

      _this3.mainSegmentLoader_.load();

      _this3.tech_.trigger({
        type: 'mediachange',
        bubbles: true
      });
    });
    this.masterPlaylistLoader_.on('playlistunchanged', function () {
      var updatedPlaylist = _this3.masterPlaylistLoader_.media(); // ignore unchanged playlists that have already been
      // excluded for not-changing. We likely just have a really slowly updating
      // playlist.


      if (updatedPlaylist.lastExcludeReason_ === 'playlist-unchanged') {
        return;
      }

      var playlistOutdated = _this3.stuckAtPlaylistEnd_(updatedPlaylist);

      if (playlistOutdated) {
        // Playlist has stopped updating and we're stuck at its end. Try to
        // blacklist it and switch to another playlist in the hope that that
        // one is updating (and give the player a chance to re-adjust to the
        // safe live point).
        _this3.blacklistCurrentPlaylist({
          message: 'Playlist no longer updating.',
          reason: 'playlist-unchanged'
        }); // useful for monitoring QoS


        _this3.tech_.trigger('playliststuck');
      }
    });
    this.masterPlaylistLoader_.on('renditiondisabled', function () {
      _this3.tech_.trigger({
        type: 'usage',
        name: 'vhs-rendition-disabled'
      });

      // legacy hls-prefixed usage event kept for backwards compatibility
      _this3.tech_.trigger({
        type: 'usage',
        name: 'hls-rendition-disabled'
      });
    });
    this.masterPlaylistLoader_.on('renditionenabled', function () {
      _this3.tech_.trigger({
        type: 'usage',
        name: 'vhs-rendition-enabled'
      });

      _this3.tech_.trigger({
        type: 'usage',
        name: 'hls-rendition-enabled'
      });
    });
  }
22930 /**
22931 * Given an updated media playlist (whether it was loaded for the first time, or
22932 * refreshed for live playlists), update any relevant properties and state to reflect
22933 * changes in the media that should be accounted for (e.g., cues and duration).
22934 *
22935 * @param {Object} updatedPlaylist the updated media playlist object
22936 *
22937 * @private
22938 */
22939 ;
22940
22941 _proto.handleUpdatedMediaPlaylist = function handleUpdatedMediaPlaylist(updatedPlaylist) {
22942 if (this.useCueTags_) {
22943 this.updateAdCues_(updatedPlaylist);
22944 } // TODO: Create a new event on the PlaylistLoader that signals
22945 // that the segments have changed in some way and use that to
22946 // update the SegmentLoader instead of doing it twice here and
22947 // on `mediachange`
22948
22949
22950 this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);
22951 this.updateDuration(!updatedPlaylist.endList); // If the player isn't paused, ensure that the segment loader is running,
22952 // as it is possible that it was temporarily stopped while waiting for
22953 // a playlist (e.g., in case the playlist errored and we re-requested it).
22954
22955 if (!this.tech_.paused()) {
22956 this.mainSegmentLoader_.load();
22957
22958 if (this.audioSegmentLoader_) {
22959 this.audioSegmentLoader_.load();
22960 }
22961 }
22962 }
22963 /**
22964 * A helper function for triggerring presence usage events once per source
22965 *
22966 * @private
22967 */
22968 ;
22969
22970 _proto.triggerPresenceUsage_ = function triggerPresenceUsage_(master, media) {
22971 var mediaGroups = master.mediaGroups || {};
22972 var defaultDemuxed = true;
22973 var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
22974
22975 for (var mediaGroup in mediaGroups.AUDIO) {
22976 for (var label in mediaGroups.AUDIO[mediaGroup]) {
22977 var properties = mediaGroups.AUDIO[mediaGroup][label];
22978
22979 if (!properties.uri) {
22980 defaultDemuxed = false;
22981 }
22982 }
22983 }
22984
22985 if (defaultDemuxed) {
22986 this.tech_.trigger({
22987 type: 'usage',
22988 name: 'vhs-demuxed'
22989 });
22990 this.tech_.trigger({
22991 type: 'usage',
22992 name: 'hls-demuxed'
22993 });
22994 }
22995
22996 if (Object.keys(mediaGroups.SUBTITLES).length) {
22997 this.tech_.trigger({
22998 type: 'usage',
22999 name: 'vhs-webvtt'
23000 });
23001 this.tech_.trigger({
23002 type: 'usage',
23003 name: 'hls-webvtt'
23004 });
23005 }
23006
23007 if (Vhs$1.Playlist.isAes(media)) {
23008 this.tech_.trigger({
23009 type: 'usage',
23010 name: 'vhs-aes'
23011 });
23012 this.tech_.trigger({
23013 type: 'usage',
23014 name: 'hls-aes'
23015 });
23016 }
23017
23018 if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
23019 this.tech_.trigger({
23020 type: 'usage',
23021 name: 'vhs-alternate-audio'
23022 });
23023 this.tech_.trigger({
23024 type: 'usage',
23025 name: 'hls-alternate-audio'
23026 });
23027 }
23028
23029 if (this.useCueTags_) {
23030 this.tech_.trigger({
23031 type: 'usage',
23032 name: 'vhs-playlist-cue-tags'
23033 });
23034 this.tech_.trigger({
23035 type: 'usage',
23036 name: 'hls-playlist-cue-tags'
23037 });
23038 }
23039 };
23040
23041 _proto.shouldSwitchToMedia_ = function shouldSwitchToMedia_(nextPlaylist) {
23042 var currentPlaylist = this.masterPlaylistLoader_.media() || this.masterPlaylistLoader_.pendingMedia_;
23043 var currentTime = this.tech_.currentTime();
23044 var bufferLowWaterLine = this.bufferLowWaterLine();
23045 var bufferHighWaterLine = this.bufferHighWaterLine();
23046 var buffered = this.tech_.buffered();
23047 return shouldSwitchToMedia({
23048 buffered: buffered,
23049 currentTime: currentTime,
23050 currentPlaylist: currentPlaylist,
23051 nextPlaylist: nextPlaylist,
23052 bufferLowWaterLine: bufferLowWaterLine,
23053 bufferHighWaterLine: bufferHighWaterLine,
23054 duration: this.duration(),
23055 experimentalBufferBasedABR: this.experimentalBufferBasedABR,
23056 log: this.logger_
23057 });
23058 }
23059 /**
23060 * Register event handlers on the segment loaders. A helper function
23061 * for construction time.
23062 *
23063 * @private
23064 */
23065 ;
23066
  _proto.setupSegmentLoaderListeners_ = function setupSegmentLoaderListeners_() {
    var _this4 = this;

    this.mainSegmentLoader_.on('bandwidthupdate', function () {
      // Whether or not buffer based ABR or another ABR is used, on a bandwidth change it's
      // useful to check to see if a rendition switch should be made.
      _this4.checkABR_('bandwidthupdate');

      _this4.tech_.trigger('bandwidthupdate');
    });
    this.mainSegmentLoader_.on('timeout', function () {
      if (_this4.experimentalBufferBasedABR) {
        // If a rendition change is needed, then it would've be done on `bandwidthupdate`.
        // Here the only consideration is that for buffer based ABR there's no guarantee
        // of an immediate switch (since the bandwidth is averaged with a timeout
        // bandwidth value of 1), so force a load on the segment loader to keep it going.
        _this4.mainSegmentLoader_.load();
      }
    }); // `progress` events are not reliable enough of a bandwidth measure to trigger buffer
    // based ABR.

    if (!this.experimentalBufferBasedABR) {
      this.mainSegmentLoader_.on('progress', function () {
        _this4.trigger('progress');
      });
    }

    // segment loader errors blacklist the active playlist
    this.mainSegmentLoader_.on('error', function () {
      _this4.blacklistCurrentPlaylist(_this4.mainSegmentLoader_.error());
    });
    this.mainSegmentLoader_.on('appenderror', function () {
      _this4.error = _this4.mainSegmentLoader_.error_;

      _this4.trigger('error');
    });
    this.mainSegmentLoader_.on('syncinfoupdate', function () {
      _this4.onSyncInfoUpdate_();
    });
    this.mainSegmentLoader_.on('timestampoffset', function () {
      _this4.tech_.trigger({
        type: 'usage',
        name: 'vhs-timestamp-offset'
      });

      _this4.tech_.trigger({
        type: 'usage',
        name: 'hls-timestamp-offset'
      });
    });
    this.audioSegmentLoader_.on('syncinfoupdate', function () {
      _this4.onSyncInfoUpdate_();
    });
    this.audioSegmentLoader_.on('appenderror', function () {
      _this4.error = _this4.audioSegmentLoader_.error_;

      _this4.trigger('error');
    });
    this.mainSegmentLoader_.on('ended', function () {
      _this4.logger_('main segment loader ended');

      _this4.onEndOfStream();
    });
    this.mainSegmentLoader_.on('earlyabort', function (event) {
      // never try to early abort with the new ABR algorithm
      if (_this4.experimentalBufferBasedABR) {
        return;
      }

      _this4.delegateLoaders_('all', ['abort']);

      _this4.blacklistCurrentPlaylist({
        message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
      }, ABORT_EARLY_BLACKLIST_SECONDS);
    });

    // (re)configure source buffers whenever a loader reports new track info
    var updateCodecs = function updateCodecs() {
      if (!_this4.sourceUpdater_.hasCreatedSourceBuffers()) {
        return _this4.tryToCreateSourceBuffers_();
      }

      var codecs = _this4.getCodecsOrExclude_(); // no codecs means that the playlist was excluded


      if (!codecs) {
        return;
      }

      _this4.sourceUpdater_.addOrChangeSourceBuffers(codecs);
    };

    this.mainSegmentLoader_.on('trackinfo', updateCodecs);
    this.audioSegmentLoader_.on('trackinfo', updateCodecs);

    // fire the fmp4 usage events at most once per source
    this.mainSegmentLoader_.on('fmp4', function () {
      if (!_this4.triggeredFmp4Usage) {
        _this4.tech_.trigger({
          type: 'usage',
          name: 'vhs-fmp4'
        });

        _this4.tech_.trigger({
          type: 'usage',
          name: 'hls-fmp4'
        });

        _this4.triggeredFmp4Usage = true;
      }
    });
    this.audioSegmentLoader_.on('fmp4', function () {
      if (!_this4.triggeredFmp4Usage) {
        _this4.tech_.trigger({
          type: 'usage',
          name: 'vhs-fmp4'
        });

        _this4.tech_.trigger({
          type: 'usage',
          name: 'hls-fmp4'
        });

        _this4.triggeredFmp4Usage = true;
      }
    });
    this.audioSegmentLoader_.on('ended', function () {
      _this4.logger_('audioSegmentLoader ended');

      _this4.onEndOfStream();
    });
  };
23195
/**
 * Total media seconds loaded across the audio and main segment loaders.
 *
 * NOTE(review): the original wrapped the sum in a single-argument
 * `Math.max(...)` call, which is a no-op (`Math.max(x) === x`). The
 * wrapper is removed here; the observable result — the sum of both
 * counters — is unchanged. Upstream may have intended
 * `Math.max(a, b)`; confirm intent before changing the semantics.
 *
 * @return {number} combined media seconds loaded by both loaders
 * @private
 */
_proto.mediaSecondsLoaded_ = function mediaSecondsLoaded_() {
  return this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded;
}
23199 /**
23200 * Call load on our SegmentLoaders
23201 */
23202 ;
23203
_proto.load = function load() {
  var _this = this;

  // the main loader is always active
  this.mainSegmentLoader_.load();

  // alternate audio/subtitle loaders only load when their media type
  // currently has an active playlist loader
  [['AUDIO', 'audioSegmentLoader_'], ['SUBTITLES', 'subtitleSegmentLoader_']].forEach(function (pair) {
    var mediaType = pair[0];
    var loaderName = pair[1];

    if (_this.mediaTypes_[mediaType].activePlaylistLoader) {
      _this[loaderName].load();
    }
  });
}
23215 /**
23216 * Re-tune playback quality level for the current player
23217 * conditions without performing destructive actions, like
23218 * removing already buffered content
23219 *
23220 * @private
23221 * @deprecated
23222 */
23223 ;
23224
_proto.smoothQualityChange_ = function smoothQualityChange_(media) {
  // deprecated alias: smooth quality changes now simply delegate to
  // fastQualityChange_, defaulting to the ABR-selected playlist when no
  // media argument is supplied
  var playlist = media === undefined ? this.selectPlaylist() : media;

  this.fastQualityChange_(playlist);
}
23232 /**
23233 * Re-tune playback quality level for the current player
23234 * conditions. This method will perform destructive actions like removing
23235 * already buffered content in order to readjust the currently active
23236 * playlist quickly. This is good for manual quality changes
23237 *
23238 * @private
23239 */
23240 ;
23241
_proto.fastQualityChange_ = function fastQualityChange_(media) {
  var _this5 = this;

  // default to the playlist chosen by the ABR algorithm
  if (media === void 0) {
    media = this.selectPlaylist();
  }

  // switching to the already-active playlist would only churn the buffer
  if (media === this.masterPlaylistLoader_.media()) {
    this.logger_('skipping fastQualityChange because new media is same as old');
    return;
  }

  this.switchMedia_(media, 'fast-quality'); // Delete all buffered data to allow an immediate quality switch, then seek to give
  // the browser a kick to remove any cached frames from the previous rendtion (.04 seconds
  // ahead is roughly the minimum that will accomplish this across a variety of content
  // in IE and Edge, but seeking in place is sufficient on all other browsers)
  // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
  // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904

  this.mainSegmentLoader_.resetEverything(function () {
    // Since this is not a typical seek, we avoid the seekTo method which can cause segments
    // from the previously enabled rendition to load before the new playlist has finished loading
    if (videojs.browser.IE_VERSION || videojs.browser.IS_EDGE) {
      _this5.tech_.setCurrentTime(_this5.tech_.currentTime() + 0.04);
    } else {
      _this5.tech_.setCurrentTime(_this5.tech_.currentTime());
    }
  }); // don't need to reset audio as it is reset when media changes
}
23271 /**
23272 * Begin playback.
23273 */
23274 ;
23275
_proto.play = function play() {
  // a successful first-play setup already seeks to the live point (when
  // applicable) and starts the loaders, so nothing more is needed here
  if (this.setupFirstPlay()) {
    return;
  }

  // restart ended content from the beginning
  if (this.tech_.ended()) {
    this.tech_.setCurrentTime(0);
  }

  // resume loading (loaders may have been paused while playback was stopped)
  if (this.hasPlayed_) {
    this.load();
  }

  var seekable = this.tech_.seekable(); // if the viewer has paused and we fell out of the live window,
  // seek forward to the live point

  if (this.tech_.duration() === Infinity) {
    if (this.tech_.currentTime() < seekable.start(0)) {
      return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
    }
  }
}
23298 /**
23299 * Seek to the latest media position if this is a live video and the
23300 * player and video are loaded and initialized.
23301 */
23302 ;
23303
_proto.setupFirstPlay = function setupFirstPlay() {
  var _this6 = this;

  var media = this.masterPlaylistLoader_.media(); // Check that everything is ready to begin buffering for the first call to play
  // If 1) there is no active media
  // 2) the player is paused
  // 3) the first play has already been setup
  // then exit early

  if (!media || this.tech_.paused() || this.hasPlayed_) {
    return false;
  } // when the video is a live stream


  if (!media.endList) {
    var seekable = this.seekable();

    if (!seekable.length) {
      // without a seekable range, the player cannot seek to begin buffering at the live
      // point
      return false;
    }

    if (videojs.browser.IE_VERSION && this.tech_.readyState() === 0) {
      // IE11 throws an InvalidStateError if you try to set currentTime while the
      // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
      this.tech_.one('loadedmetadata', function () {
        _this6.trigger('firstplay');

        _this6.tech_.setCurrentTime(seekable.end(0));

        _this6.hasPlayed_ = true;
      });
      // first play is not complete yet; the deferred handler above will
      // finish it, so report false to the caller
      return false;
    } // trigger firstplay to inform the source handler to ignore the next seek event


    this.trigger('firstplay'); // seek to the live point

    this.tech_.setCurrentTime(seekable.end(0));
  }

  this.hasPlayed_ = true; // we can begin loading now that everything is ready

  this.load();
  return true;
}
23351 /**
23352 * handle the sourceopen event on the MediaSource
23353 *
23354 * @private
23355 */
23356 ;
23357
_proto.handleSourceOpen_ = function handleSourceOpen_() {
  // Only attempt to create the source buffer if none already exist.
  // handleSourceOpen is also called when we are "re-opening" a source buffer
  // after `endOfStream` has been called (in response to a seek for instance)
  this.tryToCreateSourceBuffers_(); // if autoplay is enabled, begin playback. This is duplicative of
  // code in video.js but is required because play() must be invoked
  // *after* the media source has opened.

  if (this.tech_.autoplay()) {
    var playPromise = this.tech_.play(); // Catch/silence error when a pause interrupts a play request
    // on browsers which return a promise

    if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
      // intentionally swallow the rejection: a pause interrupting play() is benign
      playPromise.then(null, function (e) {});
    }
  }

  this.trigger('sourceopen');
}
23377 /**
23378 * handle the sourceended event on the MediaSource
23379 *
23380 * @private
23381 */
23382 ;
23383
_proto.handleSourceEnded_ = function handleSourceEnded_() {
  // nothing to do unless an in-band metadata track with cues exists
  var metadataTrack = this.inbandTextTracks_.metadataTrack_;

  if (!metadataTrack) {
    return;
  }

  var cues = metadataTrack.cues;

  if (!cues || !cues.length) {
    return;
  }

  // extend the final cue to the stream duration; when the duration is not
  // a finite number, fall back to the largest representable value
  var duration = this.duration();
  var lastCue = cues[cues.length - 1];

  if (isNaN(duration) || Math.abs(duration) === Infinity) {
    lastCue.endTime = Number.MAX_VALUE;
  } else {
    lastCue.endTime = duration;
  }
}
23398 /**
23399 * handle the durationchange event on the MediaSource
23400 *
23401 * @private
23402 */
23403 ;
23404
_proto.handleDurationChange_ = function handleDurationChange_() {
  // re-emit the MediaSource's durationchange on the tech so the player UI updates
  this.tech_.trigger('durationchange');
}
23408 /**
23409 * Calls endOfStream on the media source when all active stream types have called
23410 * endOfStream
23411 *
23412 * @param {string} streamType
23413 * Stream type of the segment loader that called endOfStream
23414 * @private
23415 */
23416 ;
23417
_proto.onEndOfStream = function onEndOfStream() {
  var isEndOfStream = this.mainSegmentLoader_.ended_;

  if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
    var mainMediaInfo = this.mainSegmentLoader_.getCurrentMediaInfo_(); // if the audio playlist loader exists, then alternate audio is active

    if (!mainMediaInfo || mainMediaInfo.hasVideo) {
      // if we do not know if the main segment loader contains video yet or if we
      // definitively know the main segment loader contains video, then we need to wait
      // for both main and audio segment loaders to call endOfStream
      isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
    } else {
      // otherwise just rely on the audio loader
      isEndOfStream = this.audioSegmentLoader_.ended_;
    }
  }

  // not every active loader has ended yet; wait for the next call
  if (!isEndOfStream) {
    return;
  }

  // no further rendition switching is needed once the stream has ended
  this.stopABRTimer_();
  this.sourceUpdater_.endOfStream();
}
23442 /**
23443 * Check if a playlist has stopped being updated
23444 *
23445 * @param {Object} playlist the media playlist object
23446 * @return {boolean} whether the playlist has stopped being updated or not
23447 */
23448 ;
23449
_proto.stuckAtPlaylistEnd_ = function stuckAtPlaylistEnd_(playlist) {
  var seekable = this.seekable();

  if (!seekable.length) {
    // playlist doesn't have enough information to determine whether we are stuck
    return false;
  }

  var expired = this.syncController_.getExpiredTime(playlist, this.duration());

  if (expired === null) {
    // without an expired-time estimate the playlist end cannot be computed
    return false;
  } // does not use the safe live end to calculate playlist end, since we
  // don't want to say we are stuck while there is still content


  var absolutePlaylistEnd = Vhs$1.Playlist.playlistEnd(playlist, expired);
  var currentTime = this.tech_.currentTime();
  var buffered = this.tech_.buffered();

  if (!buffered.length) {
    // return true if the playhead reached the absolute end of the playlist
    return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
  }

  var bufferedEnd = buffered.end(buffered.length - 1); // return true if there is too little buffer left and buffer has reached absolute
  // end of playlist

  return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
}
23480 /**
23481 * Blacklists a playlist when an error occurs for a set amount of time
23482 * making it unavailable for selection by the rendition selection algorithm
23483 * and then forces a new playlist (rendition) selection.
23484 *
23485 * @param {Object=} error an optional error that may include the playlist
23486 * to blacklist
23487 * @param {number=} blacklistDuration an optional number of seconds to blacklist the
23488 * playlist
23489 */
23490 ;
23491
_proto.blacklistCurrentPlaylist = function blacklistCurrentPlaylist(error, blacklistDuration) {
  if (error === void 0) {
    error = {};
  }

  // If the `error` was generated by the playlist loader, it will contain
  // the playlist we were trying to load (but failed) and that should be
  // blacklisted instead of the currently selected playlist which is likely
  // out-of-date in this scenario
  var currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();
  // precedence: explicit argument > error-supplied duration > controller default
  blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration; // If there is no current playlist, then an error occurred while we were
  // trying to load the master OR while we were disposing of the tech

  if (!currentPlaylist) {
    this.error = error;

    if (this.mediaSource.readyState !== 'open') {
      this.trigger('error');
    } else {
      // the media source is open, so end the stream with a network error
      this.sourceUpdater_.endOfStream('network');
    }

    return;
  }

  currentPlaylist.playlistErrors_++;
  var playlists = this.masterPlaylistLoader_.master.playlists;
  var enabledPlaylists = playlists.filter(isEnabled);
  var isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === currentPlaylist; // Don't blacklist the only playlist unless it was blacklisted
  // forever

  if (playlists.length === 1 && blacklistDuration !== Infinity) {
    videojs.log.warn("Problem encountered with playlist " + currentPlaylist.id + ". " + 'Trying again since it is the only playlist.');
    this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delay

    return this.masterPlaylistLoader_.load(isFinalRendition);
  }

  if (isFinalRendition) {
    // Since we're on the final non-blacklisted playlist, and we're about to blacklist
    // it, instead of erring the player or retrying this playlist, clear out the current
    // blacklist. This allows other playlists to be attempted in case any have been
    // fixed.
    var reincluded = false;
    playlists.forEach(function (playlist) {
      // skip current playlist which is about to be blacklisted
      if (playlist === currentPlaylist) {
        return;
      }

      var excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.

      if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {
        reincluded = true;
        delete playlist.excludeUntil;
      }
    });

    if (reincluded) {
      videojs.log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are simply retrying a previous
      // playlist. This is needed for users relying on the retryplaylist event to catch a
      // case where the player might be stuck and looping through "dead" playlists.

      this.tech_.trigger('retryplaylist');
    }
  } // Blacklist this playlist


  var excludeUntil;

  if (currentPlaylist.playlistErrors_ > this.maxPlaylistRetries) {
    // too many failures for this playlist; exclude it permanently
    excludeUntil = Infinity;
  } else {
    excludeUntil = Date.now() + blacklistDuration * 1000;
  }

  currentPlaylist.excludeUntil = excludeUntil;

  if (error.reason) {
    currentPlaylist.lastExcludeReason_ = error.reason;
  }

  this.tech_.trigger('blacklistplaylist');
  this.tech_.trigger({
    type: 'usage',
    name: 'vhs-rendition-blacklisted'
  });
  this.tech_.trigger({
    type: 'usage',
    name: 'hls-rendition-blacklisted'
  }); // TODO: should we select a new playlist if this blacklist wasn't for the currentPlaylist?
  // Would be something like media().id !=== currentPlaylist.id and we would need something
  // like `pendingMedia` in playlist loaders to check against that too. This will prevent us
  // from loading a new playlist on any blacklist.
  // Select a new playlist

  var nextPlaylist = this.selectPlaylist();

  if (!nextPlaylist) {
    this.error = 'Playback cannot continue. No available working or supported playlists.';
    this.trigger('error');
    return;
  }

  // internal errors are logged quietly; external problems get a console warning
  var logFn = error.internal ? this.logger_ : videojs.log.warn;
  var errorMessage = error.message ? ' ' + error.message : '';
  logFn((error.internal ? 'Internal problem' : 'Problem') + " encountered with playlist " + currentPlaylist.id + "." + (errorMessage + " Switching to playlist " + nextPlaylist.id + ".")); // if audio group changed reset audio loaders

  if (nextPlaylist.attributes.AUDIO !== currentPlaylist.attributes.AUDIO) {
    this.delegateLoaders_('audio', ['abort', 'pause']);
  } // if subtitle group changed reset subtitle loaders


  if (nextPlaylist.attributes.SUBTITLES !== currentPlaylist.attributes.SUBTITLES) {
    this.delegateLoaders_('subtitle', ['abort', 'pause']);
  }

  this.delegateLoaders_('main', ['abort', 'pause']);
  var delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;
  var shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay if it's a final rendition or if the last refresh is sooner than half targetDuration

  return this.switchMedia_(nextPlaylist, 'exclude', isFinalRendition || shouldDelay);
}
23615 /**
23616 * Pause all segment/playlist loaders
23617 */
23618 ;
23619
_proto.pauseLoading = function pauseLoading() {
  // abort in-flight requests and pause every playlist/segment loader
  this.delegateLoaders_('all', ['abort', 'pause']);
  // stop re-running rendition selection while loading is paused
  this.stopABRTimer_();
}
23624 /**
23625 * Call a set of functions in order on playlist loaders, segment loaders,
23626 * or both types of loaders.
23627 *
23628 * @param {string} filter
23629 * Filter loaders that should call fnNames using a string. Can be:
23630 * * all - run on all loaders
23631 * * audio - run on all audio loaders
23632 * * subtitle - run on all subtitle loaders
23633 * * main - run on the main/master loaders
23634 *
23635 * @param {Array|string} fnNames
23636 * A string or array of function names to call.
23637 */
23638 ;
23639
_proto.delegateLoaders_ = function delegateLoaders_(filter, fnNames) {
  var _this7 = this;

  var loaders = [];
  var dontFilterPlaylist = filter === 'all';

  // the master playlist loader is considered a "main" loader
  if (dontFilterPlaylist || filter === 'main') {
    loaders.push(this.masterPlaylistLoader_);
  }

  var mediaTypes = [];

  if (dontFilterPlaylist || filter === 'audio') {
    mediaTypes.push('AUDIO');
  }

  if (dontFilterPlaylist || filter === 'subtitle') {
    mediaTypes.push('CLOSED-CAPTIONS');
    mediaTypes.push('SUBTITLES');
  }

  // collect the active playlist loader (if any) for each selected media type
  mediaTypes.forEach(function (mediaType) {
    var loader = _this7.mediaTypes_[mediaType] && _this7.mediaTypes_[mediaType].activePlaylistLoader;

    if (loader) {
      loaders.push(loader);
    }
  });
  // collect the matching segment loaders (mainSegmentLoader_, etc.)
  ['main', 'audio', 'subtitle'].forEach(function (name) {
    var loader = _this7[name + "SegmentLoader_"];

    if (loader && (filter === name || filter === 'all')) {
      loaders.push(loader);
    }
  });
  // invoke each requested function name on every collected loader,
  // skipping loaders that do not implement it
  loaders.forEach(function (loader) {
    return fnNames.forEach(function (fnName) {
      if (typeof loader[fnName] === 'function') {
        loader[fnName]();
      }
    });
  });
}
23683 /**
23684 * set the current time on all segment loaders
23685 *
23686 * @param {TimeRange} currentTime the current time to set
23687 * @return {TimeRange} the current time
23688 */
23689 ;
23690
_proto.setCurrentTime = function setCurrentTime(currentTime) {
  var buffered = findRange(this.tech_.buffered(), currentTime);

  if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
    // return immediately if the metadata is not ready yet
    return 0;
  } // it's clearly an edge-case but don't thrown an error if asked to
  // seek within an empty playlist


  if (!this.masterPlaylistLoader_.media().segments) {
    return 0;
  } // if the seek location is already buffered, continue buffering as usual


  if (buffered && buffered.length) {
    return currentTime;
  } // cancel outstanding requests so we begin buffering at the new
  // location


  this.mainSegmentLoader_.resetEverything();
  this.mainSegmentLoader_.abort();

  if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
    this.audioSegmentLoader_.resetEverything();
    this.audioSegmentLoader_.abort();
  }

  if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
    this.subtitleSegmentLoader_.resetEverything();
    this.subtitleSegmentLoader_.abort();
  } // start segment loader loading in case they are paused


  this.load();
}
23728 /**
23729 * get the current duration
23730 *
23731 * @return {TimeRange} the duration
23732 */
23733 ;
23734
_proto.duration = function duration() {
  if (!this.masterPlaylistLoader_) {
    return 0;
  }

  var media = this.masterPlaylistLoader_.media();

  if (!media) {
    // no playlists loaded yet, so there is no duration to report
    return 0;
  }

  if (!media.endList) {
    // Live playlist: report Infinity rather than relying on the native
    // MediaSource duration, since setting the MediaSource duration to
    // infinity has consequences for seekable behavior
    // (https://github.com/w3c/media-source/issues/5). The spec fix,
    // setLiveSeekableRange() (https://github.com/w3c/media-source/pull/92),
    // has little browser support:
    // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
    return Infinity;
  }

  // VOD: prefer the media source's duration when available; otherwise fall
  // back to a playlist-calculated estimate
  return this.mediaSource ? this.mediaSource.duration : Vhs$1.Playlist.duration(media);
}
23769 /**
23770 * check the seekable range
23771 *
23772 * @return {TimeRange} the seekable range
23773 */
23774 ;
23775
_proto.seekable = function seekable() {
  // seekable_ is recomputed by onSyncInfoUpdate_ as sync info arrives
  return this.seekable_;
};
23779
_proto.onSyncInfoUpdate_ = function onSyncInfoUpdate_() {
  var audioSeekable; // TODO check for creation of both source buffers before updating seekable
  //
  // A fix was made to this function where a check for
  // this.sourceUpdater_.hasCreatedSourceBuffers
  // was added to ensure that both source buffers were created before seekable was
  // updated. However, it originally had a bug where it was checking for a true and
  // returning early instead of checking for false. Setting it to check for false to
  // return early though created other issues. A call to play() would check for seekable
  // end without verifying that a seekable range was present. In addition, even checking
  // for that didn't solve some issues, as handleFirstPlay is sometimes worked around
  // due to a media update calling load on the segment loaders, skipping a seek to live,
  // thereby starting live streams at the beginning of the stream rather than at the end.
  //
  // This conditional should be fixed to wait for the creation of two source buffers at
  // the same time as the other sections of code are fixed to properly seek to live and
  // not throw an error due to checking for a seekable end when no seekable range exists.
  //
  // For now, fall back to the older behavior, with the understanding that the seekable
  // range may not be completely correct, leading to a suboptimal initial live point.

  if (!this.masterPlaylistLoader_) {
    return;
  }

  var media = this.masterPlaylistLoader_.media();

  if (!media) {
    return;
  }

  var expired = this.syncController_.getExpiredTime(media, this.duration());

  if (expired === null) {
    // not enough information to update seekable
    return;
  }

  var master = this.masterPlaylistLoader_.master;
  var mainSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));

  if (mainSeekable.length === 0) {
    return;
  }

  // when alternate audio is active, compute its seekable range too so the
  // combined range only covers time both loaders can play
  if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
    media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
    expired = this.syncController_.getExpiredTime(media, this.duration());

    if (expired === null) {
      return;
    }

    audioSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));

    if (audioSeekable.length === 0) {
      return;
    }
  }

  // remember the previous range so we can avoid redundant events below
  var oldEnd;
  var oldStart;

  if (this.seekable_ && this.seekable_.length) {
    oldEnd = this.seekable_.end(0);
    oldStart = this.seekable_.start(0);
  }

  if (!audioSeekable) {
    // seekable has been calculated based on buffering video data so it
    // can be returned directly
    this.seekable_ = mainSeekable;
  } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
    // seekables are pretty far off, rely on main
    this.seekable_ = mainSeekable;
  } else {
    // intersect the two ranges: later start, earlier end
    this.seekable_ = videojs.createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
  } // seekable is the same as last time


  if (this.seekable_ && this.seekable_.length) {
    if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
      return;
    }
  }

  this.logger_("seekable updated [" + printableRange(this.seekable_) + "]");
  this.tech_.trigger('seekablechanged');
}
23869 /**
23870 * Update the player duration
23871 */
23872 ;
23873
_proto.updateDuration = function updateDuration(isLive) {
  // drop any previously deferred update waiting on sourceopen
  if (this.updateDuration_) {
    this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
    this.updateDuration_ = null;
  }

  // the MediaSource duration can only be set while the source is open;
  // otherwise defer this call until sourceopen fires
  if (this.mediaSource.readyState !== 'open') {
    this.updateDuration_ = this.updateDuration.bind(this, isLive);
    this.mediaSource.addEventListener('sourceopen', this.updateDuration_);
    return;
  }

  if (isLive) {
    var seekable = this.seekable();

    if (!seekable.length) {
      return;
    } // Even in the case of a live playlist, the native MediaSource's duration should not
    // be set to Infinity (even though this would be expected for a live playlist), since
    // setting the native MediaSource's duration to infinity ends up with consequences to
    // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
    //
    // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
    // however, few browsers have support for setLiveSeekableRange()
    // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
    //
    // Until a time when the duration of the media source can be set to infinity, and a
    // seekable range specified across browsers, the duration should be greater than or
    // equal to the last possible seekable value.
    // MediaSource duration starts as NaN
    // It is possible (and probable) that this case will never be reached for many
    // sources, since the MediaSource reports duration as the highest value without
    // accounting for timestamp offset. For example, if the timestamp offset is -100 and
    // we buffered times 0 to 100 with real times of 100 to 200, even though current
    // time will be between 0 and 100, the native media source may report the duration
    // as 200. However, since we report duration separate from the media source (as
    // Infinity), and as long as the native media source duration value is greater than
    // our reported seekable range, seeks will work as expected. The large number as
    // duration for live is actually a strategy used by some players to work around the
    // issue of live seekable ranges cited above.


    if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {
      this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));
    }

    return;
  }

  // VOD: use the playlist duration, but never report less than what has
  // actually been buffered
  var buffered = this.tech_.buffered();
  var duration = Vhs$1.Playlist.duration(this.masterPlaylistLoader_.media());

  if (buffered.length > 0) {
    duration = Math.max(duration, buffered.end(buffered.length - 1));
  }

  if (this.mediaSource.duration !== duration) {
    this.sourceUpdater_.setDuration(duration);
  }
}
23934 /**
23935 * dispose of the MasterPlaylistController and everything
23936 * that it controls
23937 */
23938 ;
23939
_proto.dispose = function dispose() {
  var _this8 = this;

  this.trigger('dispose');
  this.decrypter_.terminate();
  this.masterPlaylistLoader_.dispose();
  this.mainSegmentLoader_.dispose();

  // remove the deferred load-on-play handler if it was installed
  if (this.loadOnPlay_) {
    this.tech_.off('play', this.loadOnPlay_);
  }

  // dispose every alternate audio/subtitle playlist loader in every group
  ['AUDIO', 'SUBTITLES'].forEach(function (type) {
    var groups = _this8.mediaTypes_[type].groups;

    for (var id in groups) {
      groups[id].forEach(function (group) {
        if (group.playlistLoader) {
          group.playlistLoader.dispose();
        }
      });
    }
  });
  this.audioSegmentLoader_.dispose();
  this.subtitleSegmentLoader_.dispose();
  this.sourceUpdater_.dispose();
  this.timelineChangeController_.dispose();
  this.stopABRTimer_();

  if (this.updateDuration_) {
    this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
  }

  // detach the MediaSource listeners registered during setup
  this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_);

  this.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);
  this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);
  this.off();
}
23979 /**
23980 * return the master playlist object if we have one
23981 *
23982 * @return {Object} the master playlist object that we parsed
23983 */
23984 ;
23985
_proto.master = function master() {
  // the parsed master playlist object maintained by the playlist loader
  return this.masterPlaylistLoader_.master;
}
23989 /**
23990 * return the currently selected playlist
23991 *
23992 * @return {Object} the currently selected playlist object that we parsed
23993 */
23994 ;
23995
_proto.media = function media() {
  // playlist loader will not return media if it has not been fully loaded,
  // so fall back to the initial media selection
  return this.masterPlaylistLoader_.media() || this.initialMedia_;
};
24000
_proto.areMediaTypesKnown_ = function areMediaTypesKnown_() {
  // the main loader must have reported media info before codecs can be known
  if (!this.mainSegmentLoader_.getCurrentMediaInfo_()) {
    return false;
  }

  // when an alternate audio playlist loader is active, the audio segment
  // loader must have reported media info too; otherwise main covers audio
  if (this.mediaTypes_.AUDIO.activePlaylistLoader && !this.audioSegmentLoader_.getCurrentMediaInfo_()) {
    return false;
  }

  return true;
};
24014
/**
 * Determine the audio/video codec strings required for playback of the
 * current media, excluding (blacklisting forever) the current playlist when
 * codecs cannot be determined, are unsupported, or would require a codec
 * switch the source buffers cannot perform.
 *
 * @return {Object|undefined} an object with `video` and/or `audio` codec
 *         strings, or `undefined` when the current playlist was excluded
 * @private
 */
_proto.getCodecsOrExclude_ = function getCodecsOrExclude_() {
  var _this9 = this;

  // Probed media info from each segment loader; falls back to empty objects
  // when a loader has not reported yet (callers gate on areMediaTypesKnown_).
  var media = {
    main: this.mainSegmentLoader_.getCurrentMediaInfo_() || {},
    audio: this.audioSegmentLoader_.getCurrentMediaInfo_() || {}
  }; // set "main" media equal to video

  media.video = media.main;
  // Codecs declared in the master playlist take priority over probed codecs.
  var playlistCodecs = codecsForPlaylist(this.master(), this.media());
  var codecs = {};
  var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;

  if (media.main.hasVideo) {
    codecs.video = playlistCodecs.video || media.main.videoCodec || DEFAULT_VIDEO_CODEC;
  }

  if (media.main.isMuxed) {
    // Muxed content carries audio in the video buffer, so the audio codec
    // is appended to the video codec string.
    codecs.video += "," + (playlistCodecs.audio || media.main.audioCodec || DEFAULT_AUDIO_CODEC);
  }

  if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {
    codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function below

    media.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;
  } // no codecs, no playback.


  if (!codecs.audio && !codecs.video) {
    this.blacklistCurrentPlaylist({
      playlist: this.media(),
      message: 'Could not determine codecs for playlist.',
      blacklistDuration: Infinity
    });
    return;
  } // fmp4 relies on browser support, while ts relies on muxer support


  var supportFunction = function supportFunction(isFmp4, codec) {
    return isFmp4 ? browserSupportsCodec(codec) : muxerSupportsCodec(codec);
  };

  // Collect unsupported codecs keyed by which component ('browser'/'muxer')
  // rejected them, and remember whether audio specifically was rejected.
  var unsupportedCodecs = {};
  var unsupportedAudio;
  ['video', 'audio'].forEach(function (type) {
    if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {
      var supporter = media[type].isFmp4 ? 'browser' : 'muxer';
      unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];
      unsupportedCodecs[supporter].push(codecs[type]);

      if (type === 'audio') {
        unsupportedAudio = supporter;
      }
    }
  });

  // When demuxed audio is unsupported, exclude every other variant in the
  // same audio group so ABR cannot switch into the same failure.
  if (usingAudioLoader && unsupportedAudio && this.media().attributes.AUDIO) {
    var audioGroup = this.media().attributes.AUDIO;
    this.master().playlists.forEach(function (variant) {
      var variantAudioGroup = variant.attributes && variant.attributes.AUDIO;

      if (variantAudioGroup === audioGroup && variant !== _this9.media()) {
        variant.excludeUntil = Infinity;
      }
    });
    this.logger_("excluding audio group " + audioGroup + " as " + unsupportedAudio + " does not support codec(s): \"" + codecs.audio + "\"");
  } // if we have any unsupported codecs blacklist this playlist.


  if (Object.keys(unsupportedCodecs).length) {
    // Build a message like: browser does not support codec(s): "...", muxer...
    var message = Object.keys(unsupportedCodecs).reduce(function (acc, supporter) {
      if (acc) {
        acc += ', ';
      }

      acc += supporter + " does not support codec(s): \"" + unsupportedCodecs[supporter].join(',') + "\"";
      return acc;
    }, '') + '.';
    this.blacklistCurrentPlaylist({
      playlist: this.media(),
      internal: true,
      message: message,
      blacklistDuration: Infinity
    });
    return;
  } // check if codec switching is happening


  // If source buffers exist and changeType() is unavailable, a change in the
  // base codec type (e.g. avc1 -> hvc1) cannot be performed; exclude.
  if (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {
    var switchMessages = [];
    ['video', 'audio'].forEach(function (type) {
      var newCodec = (parseCodecs(_this9.sourceUpdater_.codecs[type] || '')[0] || {}).type;
      var oldCodec = (parseCodecs(codecs[type] || '')[0] || {}).type;

      if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {
        switchMessages.push("\"" + _this9.sourceUpdater_.codecs[type] + "\" -> \"" + codecs[type] + "\"");
      }
    });

    if (switchMessages.length) {
      this.blacklistCurrentPlaylist({
        playlist: this.media(),
        message: "Codec switching not supported: " + switchMessages.join(', ') + ".",
        blacklistDuration: Infinity,
        internal: true
      });
      return;
    }
  } // TODO: when using the muxer shouldn't we just return
  // the codecs that the muxer outputs?


  return codecs;
}
24129 /**
 * Create source buffers and exclude any incompatible renditions.
24131 *
24132 * @private
24133 */
24134 ;
24135
_proto.tryToCreateSourceBuffers_ = function tryToCreateSourceBuffers_() {
  // Nothing to do when the media source isn't ready yet, when source
  // buffers were already created, or when the media types (audio/video
  // presence) cannot be determined yet.
  var cannotCreate =
    this.mediaSource.readyState !== 'open' ||
    this.sourceUpdater_.hasCreatedSourceBuffers() ||
    !this.areMediaTypesKnown_();

  if (cannotCreate) {
    return;
  }

  var codecs = this.getCodecsOrExclude_();

  // no codecs means that the playlist was excluded
  if (!codecs) {
    return;
  }

  this.sourceUpdater_.createSourceBuffers(codecs);
  var codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
  this.excludeIncompatibleVariants_(codecString);
}
24157 /**
24158 * Excludes playlists with codecs that are unsupported by the muxer and browser.
24159 */
24160 ;
24161
/**
 * Excludes playlists with codecs that are unsupported by both the muxer and
 * the browser, and text codecs that cannot be rendered.
 *
 * @private
 */
_proto.excludeUnsupportedVariants_ = function excludeUnsupportedVariants_() {
  var _this10 = this;

  var playlists = this.master().playlists;
  var ids = []; // TODO: why don't we have a property to loop through all
  // playlist? Why did we ever mix indexes and keys?

  Object.keys(playlists).forEach(function (key) {
    var variant = playlists[key]; // check if we already processed this playlist.

    if (ids.indexOf(variant.id) !== -1) {
      return;
    }

    ids.push(variant.id);
    // `master` is a method on the controller and must be invoked:
    // codecsForPlaylist expects the master playlist *object* (it reads
    // `master.mediaGroups` to resolve default alternate-audio codecs).
    // Previously the unbound function itself was passed, breaking codec
    // resolution for variants that rely on an AUDIO media group.
    var codecs = codecsForPlaylist(_this10.master(), variant);
    var unsupported = [];

    // A codec is only unsupported when neither the transmuxer nor the
    // browser can handle it.
    if (codecs.audio && !muxerSupportsCodec(codecs.audio) && !browserSupportsCodec(codecs.audio)) {
      unsupported.push("audio codec " + codecs.audio);
    }

    if (codecs.video && !muxerSupportsCodec(codecs.video) && !browserSupportsCodec(codecs.video)) {
      unsupported.push("video codec " + codecs.video);
    }

    // IMSC1 TTML subtitles are not supported.
    if (codecs.text && codecs.text === 'stpp.ttml.im1t') {
      unsupported.push("text codec " + codecs.text);
    }

    if (unsupported.length) {
      variant.excludeUntil = Infinity;

      _this10.logger_("excluding " + variant.id + " for unsupported: " + unsupported.join(', '));
    }
  });
}
24199 /**
24200 * Blacklist playlists that are known to be codec or
24201 * stream-incompatible with the SourceBuffer configuration. For
24202 * instance, Media Source Extensions would cause the video element to
24203 * stall waiting for video data if you switched from a variant with
24204 * video and audio to an audio-only one.
24205 *
24206 * @param {Object} media a media playlist compatible with the current
24207 * set of SourceBuffers. Variants in the current master playlist that
24208 * do not appear to have compatible codec or stream configurations
24209 * will be excluded from the default playlist selection algorithm
24210 * indefinitely.
24211 * @private
24212 */
24213 ;
24214
_proto.excludeIncompatibleVariants_ = function excludeIncompatibleVariants_(codecString) {
  var _this11 = this;

  var ids = [];
  var playlists = this.master().playlists;
  // Reference codecs parsed from the currently-playing variant; every other
  // variant must match its stream count (and, when changeType() is not
  // available, its base codec types) to remain selectable.
  var codecs = unwrapCodecList(parseCodecs(codecString));
  var codecCount_ = codecCount(codecs);
  var videoDetails = codecs.video && parseCodecs(codecs.video)[0] || null;
  var audioDetails = codecs.audio && parseCodecs(codecs.audio)[0] || null;
  Object.keys(playlists).forEach(function (key) {
    var variant = playlists[key]; // check if we already processed this playlist.
    // or it if it is already excluded forever.

    if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {
      return;
    }

    ids.push(variant.id);
    var blacklistReasons = []; // get codecs from the playlist for this variant

    // NOTE: here `masterPlaylistLoader_.master` is a data property on the
    // playlist loader (the parsed master object), not a method.
    var variantCodecs = codecsForPlaylist(_this11.masterPlaylistLoader_.master, variant);
    var variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this
    // variant is incompatible. Wait for mux.js to probe

    if (!variantCodecs.audio && !variantCodecs.video) {
      return;
    } // TODO: we can support this by removing the
    // old media source and creating a new one, but it will take some work.
    // The number of streams cannot change


    if (variantCodecCount !== codecCount_) {
      blacklistReasons.push("codec count \"" + variantCodecCount + "\" !== \"" + codecCount_ + "\"");
    } // only exclude playlists by codec change, if codecs cannot switch
    // during playback.


    if (!_this11.sourceUpdater_.canChangeType()) {
      var variantVideoDetails = variantCodecs.video && parseCodecs(variantCodecs.video)[0] || null;
      var variantAudioDetails = variantCodecs.audio && parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot change

      if (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {
        blacklistReasons.push("video codec \"" + variantVideoDetails.type + "\" !== \"" + videoDetails.type + "\"");
      } // the audio codec cannot change


      if (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {
        blacklistReasons.push("audio codec \"" + variantAudioDetails.type + "\" !== \"" + audioDetails.type + "\"");
      }
    }

    if (blacklistReasons.length) {
      variant.excludeUntil = Infinity;

      _this11.logger_("blacklisting " + variant.id + ": " + blacklistReasons.join(' && '));
    }
  });
};
24273
_proto.updateAdCues_ = function updateAdCues_(media) {
  // Offset cue times by the seekable start so ad cues line up with the
  // player's timeline (live streams may not begin at zero).
  var seekable = this.seekable();
  var offset = seekable.length ? seekable.start(0) : 0;

  updateAdCues(media, this.cueTagsTrack_, offset);
}
24284 /**
24285 * Calculates the desired forward buffer length based on current time
24286 *
24287 * @return {number} Desired forward buffer length in seconds
24288 */
24289 ;
24290
_proto.goalBufferLength = function goalBufferLength() {
  // The desired forward buffer grows linearly with the playback position,
  // starting at GOAL_BUFFER_LENGTH and capped at MAX_GOAL_BUFFER_LENGTH.
  var base = Config.GOAL_BUFFER_LENGTH;
  var grown = base + this.tech_.currentTime() * Config.GOAL_BUFFER_LENGTH_RATE;
  var ceiling = Math.max(base, Config.MAX_GOAL_BUFFER_LENGTH);

  return Math.min(grown, ceiling);
}
24298 /**
24299 * Calculates the desired buffer low water line based on current time
24300 *
24301 * @return {number} Desired buffer low water line in seconds
24302 */
24303 ;
24304
_proto.bufferLowWaterLine = function bufferLowWaterLine() {
  // Like goalBufferLength, the low water line grows with playback position;
  // buffer-based ABR uses a larger experimental cap.
  var base = Config.BUFFER_LOW_WATER_LINE;
  var grown = base + this.tech_.currentTime() * Config.BUFFER_LOW_WATER_LINE_RATE;
  var cap = this.experimentalBufferBasedABR ?
    Math.max(base, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE) :
    Math.max(base, Config.MAX_BUFFER_LOW_WATER_LINE);

  return Math.min(grown, cap);
};
24313
_proto.bufferHighWaterLine = function bufferHighWaterLine() {
  // The high water line is a fixed configuration value; it does not grow
  // with playback position like the low water line does.
  return Config.BUFFER_HIGH_WATER_LINE;
};
24317
24318 return MasterPlaylistController;
24319}(videojs.EventTarget);
24320
24321/**
24322 * Returns a function that acts as the Enable/disable playlist function.
24323 *
24324 * @param {PlaylistLoader} loader - The master playlist loader
24325 * @param {string} playlistID - id of the playlist
24326 * @param {Function} changePlaylistFn - A function to be called after a
24327 * playlist's enabled-state has been changed. Will NOT be called if a
24328 * playlist's enabled-state is unchanged
24329 * @param {boolean=} enable - Value to set the playlist enabled-state to
24330 * or if undefined returns the current enabled-state for the playlist
24331 * @return {Function} Function for setting/getting enabled
24332 */
24333
var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
  return function (enable) {
    var playlist = loader.master.playlists[playlistID];
    var incompatible = isIncompatible(playlist);
    var currentlyEnabled = isEnabled(playlist);

    // Called with no argument, this acts as a getter for the enabled state.
    if (typeof enable === 'undefined') {
      return currentlyEnabled;
    }

    if (enable) {
      delete playlist.disabled;
    } else {
      playlist.disabled = true;
    }

    // Only notify the outside world when the state actually changed on a
    // compatible playlist.
    if (enable !== currentlyEnabled && !incompatible) {
      changePlaylistFn();
      loader.trigger(enable ? 'renditionenabled' : 'renditiondisabled');
    }

    return enable;
  };
};
24364/**
24365 * The representation object encapsulates the publicly visible information
24366 * in a media playlist along with a setter/getter-type function (enabled)
24367 * for changing the enabled-state of a particular playlist entry
24368 *
24369 * @class Representation
24370 */
24371
24372
var Representation = function Representation(vhsHandler, playlist, id) {
  var mpc = vhsHandler.masterPlaylistController_;
  var smoothQualityChange = vhsHandler.options_.smoothQualityChange;

  // Get a reference to a bound version of the quality change function.
  var changeType = smoothQualityChange ? 'smooth' : 'fast';
  var qualityChangeFunction = mpc[changeType + "QualityChange_"].bind(mpc);

  // Some playlist attributes are optional.
  if (playlist.attributes) {
    var resolution = playlist.attributes.RESOLUTION;
    this.width = resolution && resolution.width;
    this.height = resolution && resolution.height;
    this.bandwidth = playlist.attributes.BANDWIDTH;
    this.frameRate = playlist.attributes['FRAME-RATE'];
  }

  this.codecs = codecsForPlaylist(mpc.master(), playlist);
  this.playlist = playlist;

  // The id is simply the ordinality of the media playlist within the master
  // playlist.
  this.id = id;

  // Partially-apply enableFunction to create a playlist-specific variant.
  this.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);
};
24397/**
24398 * A mixin function that adds the `representations` api to an instance
24399 * of the VhsHandler class
24400 *
24401 * @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
24402 * representation API into
24403 */
24404
24405
var renditionSelectionMixin = function renditionSelectionMixin(vhsHandler) {
  // Add a single API-specific function to the VhsHandler instance.
  vhsHandler.representations = function () {
    var master = vhsHandler.masterPlaylistController_.master();
    // Audio-only sources expose their audio-track playlists as renditions.
    var playlists = isAudioOnly(master) ?
      vhsHandler.masterPlaylistController_.getAudioTrackPlaylists_() :
      master.playlists;

    if (!playlists) {
      return [];
    }

    return playlists
      .filter(function (media) {
        return !isIncompatible(media);
      })
      .map(function (media) {
        return new Representation(vhsHandler, media, media.id);
      });
  };
};
24423
24424/**
24425 * @file playback-watcher.js
24426 *
24427 * Playback starts, and now my watch begins. It shall not end until my death. I shall
24428 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
24429 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
24430 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
24431 * my life and honor to the Playback Watch, for this Player and all the Players to come.
24432 */
24433
24434var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
24435/**
24436 * @class PlaybackWatcher
24437 */
24438
24439var PlaybackWatcher = /*#__PURE__*/function () {
24440 /**
   * Represents a PlaybackWatcher object.
24442 *
24443 * @class
24444 * @param {Object} options an object that includes the tech and settings
24445 */
function PlaybackWatcher(options) {
  var _this = this;

  this.masterPlaylistController_ = options.masterPlaylistController;
  this.tech_ = options.tech;
  this.seekable = options.seekable;
  this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
  this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
  this.media = options.media;
  // Stall-detection state consumed by checkCurrentTime_.
  this.consecutiveUpdates = 0;
  this.lastRecordedTime = null;
  this.timer_ = null;
  this.checkCurrentTimeTimeout_ = null;
  this.logger_ = logger('PlaybackWatcher');
  this.logger_('initialize');

  var playHandler = function playHandler() {
    return _this.monitorCurrentTime_();
  };

  var canPlayHandler = function canPlayHandler() {
    return _this.monitorCurrentTime_();
  };

  var waitingHandler = function waitingHandler() {
    return _this.techWaiting_();
  };

  var cancelTimerHandler = function cancelTimerHandler() {
    return _this.cancelTimer_();
  };

  var mpc = this.masterPlaylistController_;
  var loaderTypes = ['main', 'subtitle', 'audio'];
  var loaderChecks = {};
  // Watch each segment loader for appends that fail to grow its buffer
  // (possible stalled downloads); see checkSegmentDownloads_/resetSegmentDownloads_.
  loaderTypes.forEach(function (type) {
    loaderChecks[type] = {
      reset: function reset() {
        return _this.resetSegmentDownloads_(type);
      },
      updateend: function updateend() {
        return _this.checkSegmentDownloads_(type);
      }
    };
    mpc[type + "SegmentLoader_"].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer
    // isn't changing we want to reset. We cannot assume that the new rendition
    // will also be stalled, until after new appends.

    mpc[type + "SegmentLoader_"].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.
    // This prevents one segment playlists (single vtt or single segment content)
    // from being detected as stalling. As the buffer will not change in those cases, since
    // the buffer is the entire video duration.

    _this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
  });
  /**
   * We check if a seek was into a gap through the following steps:
   * 1. We get a seeking event and we do not get a seeked event. This means that
   *    a seek was attempted but not completed.
   * 2. We run `fixesBadSeeks_` on segment loader appends. This means that we already
   *    removed everything from our buffer and appended a segment, and should be ready
   *    to check for gaps.
   */

  // Attach or detach the bad-seek check on both loaders' 'appended' events;
  // `fn` is the event method name, 'on' or 'off'.
  var setSeekingHandlers = function setSeekingHandlers(fn) {
    ['main', 'audio'].forEach(function (type) {
      mpc[type + "SegmentLoader_"][fn]('appended', _this.seekingAppendCheck_);
    });
  };

  this.seekingAppendCheck_ = function () {
    if (_this.fixesBadSeeks_()) {
      _this.consecutiveUpdates = 0;
      _this.lastRecordedTime = _this.tech_.currentTime();
      setSeekingHandlers('off');
    }
  };

  this.clearSeekingAppendCheck_ = function () {
    return setSeekingHandlers('off');
  };

  this.watchForBadSeeking_ = function () {
    _this.clearSeekingAppendCheck_();

    setSeekingHandlers('on');
  };

  this.tech_.on('seeked', this.clearSeekingAppendCheck_);
  this.tech_.on('seeking', this.watchForBadSeeking_);
  this.tech_.on('waiting', waitingHandler);
  this.tech_.on(timerCancelEvents, cancelTimerHandler);
  this.tech_.on('canplay', canPlayHandler);
  /*
    An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
    is surfaced in one of two ways:
    1) The `waiting` event is fired before the player has buffered content, making it impossible
       to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
       we can check if playback is stalled due to a gap, and skip the gap if necessary.
    2) A source with a gap at the beginning of the stream is loaded programmatically while the player
       is in a playing state. To catch this case, it's important that our one-time play listener is setup
       even if the player is in a playing state
  */

  this.tech_.one('play', playHandler); // Define the dispose function to clean up our events

  // Removes every listener registered above and cancels pending timers.
  this.dispose = function () {
    _this.clearSeekingAppendCheck_();

    _this.logger_('dispose');

    _this.tech_.off('waiting', waitingHandler);

    _this.tech_.off(timerCancelEvents, cancelTimerHandler);

    _this.tech_.off('canplay', canPlayHandler);

    _this.tech_.off('play', playHandler);

    _this.tech_.off('seeking', _this.watchForBadSeeking_);

    _this.tech_.off('seeked', _this.clearSeekingAppendCheck_);

    loaderTypes.forEach(function (type) {
      mpc[type + "SegmentLoader_"].off('appendsdone', loaderChecks[type].updateend);
      mpc[type + "SegmentLoader_"].off('playlistupdate', loaderChecks[type].reset);

      _this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
    });

    if (_this.checkCurrentTimeTimeout_) {
      window$1.clearTimeout(_this.checkCurrentTimeTimeout_);
    }

    _this.cancelTimer_();
  };
}
24583 /**
24584 * Periodically check current time to see if playback stopped
24585 *
24586 * @private
24587 */
24588
24589
var _proto = PlaybackWatcher.prototype;

_proto.monitorCurrentTime_ = function monitorCurrentTime_() {
  this.checkCurrentTime_();

  // Clear any previously scheduled check so only one timer is ever pending.
  if (this.checkCurrentTimeTimeout_) {
    window$1.clearTimeout(this.checkCurrentTimeTimeout_);
  }

  // Reschedule every 250ms (what WebKit uses; 42 would be 24fps, FF uses 15).
  var nextCheck = this.monitorCurrentTime_.bind(this);
  this.checkCurrentTimeTimeout_ = window$1.setTimeout(nextCheck, 250);
}
24602 /**
24603 * Reset stalled download stats for a specific type of loader
24604 *
24605 * @param {string} type
24606 * The segment loader type to check.
24607 *
24608 * @listens SegmentLoader#playlistupdate
24609 * @listens Tech#seeking
24610 * @listens Tech#seeked
24611 */
24612 ;
24613
_proto.resetSegmentDownloads_ = function resetSegmentDownloads_(type) {
  var stalledKey = type + "StalledDownloads_";
  var loader = this.masterPlaylistController_[type + "SegmentLoader_"];

  if (this[stalledKey] > 0) {
    this.logger_("resetting possible stalled download count for " + type + " loader");
  }

  // Restart stall tracking from the loader's current buffered ranges.
  this[stalledKey] = 0;
  this[type + "Buffered_"] = loader.buffered_();
}
24624 /**
24625 * Checks on every segment `appendsdone` to see
24626 * if segment appends are making progress. If they are not
24627 * and we are still downloading bytes. We blacklist the playlist.
24628 *
24629 * @param {string} type
24630 * The segment loader type to check.
24631 *
24632 * @listens SegmentLoader#appendsdone
24633 */
24634 ;
24635
_proto.checkSegmentDownloads_ = function checkSegmentDownloads_(type) {
  var mpc = this.masterPlaylistController_;
  var loader = mpc[type + "SegmentLoader_"];
  var buffered = loader.buffered_();
  var bufferedChanged = isRangeDifferent(this[type + "Buffered_"], buffered);
  this[type + "Buffered_"] = buffered;

  // If the buffered value for this loader changed, appends are working.
  if (bufferedChanged) {
    this.resetSegmentDownloads_(type);
    return;
  }

  this[type + "StalledDownloads_"]++;
  this.logger_("found #" + this[type + "StalledDownloads_"] + " " + type + " appends that did not increase buffer (possible stalled download)", {
    playlistId: loader.playlist_ && loader.playlist_.id,
    buffered: timeRangesToArray(buffered)
  });

  // Tolerate up to 10 possibly-stalled appends without a reset before
  // excluding the playlist.
  if (this[type + "StalledDownloads_"] < 10) {
    return;
  }

  this.logger_(type + " loader stalled download exclusion");
  this.resetSegmentDownloads_(type);
  this.tech_.trigger({
    type: 'usage',
    name: "vhs-" + type + "-download-exclusion"
  });

  // Subtitle playlists are never excluded for stalled downloads.
  if (type === 'subtitle') {
    return;
  }

  // TODO: should we exclude audio tracks rather than main tracks
  // when type is audio?
  mpc.blacklistCurrentPlaylist({
    message: "Excessive " + type + " segment downloading detected."
  }, Infinity);
}
24677 /**
24678 * The purpose of this function is to emulate the "waiting" event on
24679 * browsers that do not emit it when they are waiting for more
24680 * data to continue playback
24681 *
24682 * @private
24683 */
24684 ;
24685
_proto.checkCurrentTime_ = function checkCurrentTime_() {
  if (this.tech_.paused() || this.tech_.seeking()) {
    return;
  }

  var currentTime = this.tech_.currentTime();
  var buffered = this.tech_.buffered();
  var stalled = this.lastRecordedTime === currentTime;
  var atBufferedEnd = !buffered.length ||
    currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1);

  if (stalled && atBufferedEnd) {
    // If current time is at the end of the final buffered region, then any
    // playback stall is most likely caused by buffering in a low bandwidth
    // environment. The tech should fire a `waiting` event in this scenario,
    // but due to browser and tech inconsistencies, calling `techWaiting_`
    // here simulates responding to a native `waiting` event when the tech
    // fails to emit one.
    return this.techWaiting_();
  }

  if (!stalled) {
    // Playback is progressing; reset stall tracking.
    this.consecutiveUpdates = 0;
    this.lastRecordedTime = currentTime;
    return;
  }

  this.consecutiveUpdates++;

  // After more than five consecutive checks without progress, treat the
  // player as waiting. (Equivalent to the pre-increment >= 5 check.)
  if (this.consecutiveUpdates > 5) {
    this.waiting_();
  }
}
24713 /**
24714 * Cancels any pending timers and resets the 'timeupdate' mechanism
24715 * designed to detect that we are stalled
24716 *
24717 * @private
24718 */
24719 ;
24720
_proto.cancelTimer_ = function cancelTimer_() {
  // Reset the stall counter used by checkCurrentTime_.
  this.consecutiveUpdates = 0;

  var pending = this.timer_;

  if (pending) {
    this.logger_('cancelTimer_');
    clearTimeout(pending);
  }

  this.timer_ = null;
}
24731 /**
24732 * Fixes situations where there's a bad seek
24733 *
24734 * @return {boolean} whether an action was taken to fix the seek
24735 * @private
24736 */
24737 ;
24738
_proto.fixesBadSeeks_ = function fixesBadSeeks_() {
  var seeking = this.tech_.seeking();

  // Only an in-flight seek can be a "bad" seek.
  if (!seeking) {
    return false;
  } // TODO: It's possible that these seekable checks should be moved out of this function
  // and into a function that runs on seekablechange. It's also possible that we only need
  // afterSeekableWindow as the buffered check at the bottom is good enough to handle before
  // seekable range.


  var seekable = this.seekable();
  var currentTime = this.tech_.currentTime();
  var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
  var seekTo;

  if (isAfterSeekableRange) {
    var seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)

    seekTo = seekableEnd;
  }

  if (this.beforeSeekableWindow_(seekable, currentTime)) {
    var seekableStart = seekable.start(0); // sync to the beginning of the live window
    // provide a buffer of .1 seconds to handle rounding/imprecise numbers

    seekTo = seekableStart + ( // if the playlist is too short and the seekable range is an exact time (can
    // happen in live with a 3 segment playlist), then don't use a time delta
    seekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);
  }

  // Seek outside the seekable window: correct by seeking back inside it.
  if (typeof seekTo !== 'undefined') {
    this.logger_("Trying to seek outside of seekable at time " + currentTime + " with " + ("seekable range " + printableRange(seekable) + ". Seeking to ") + (seekTo + "."));
    this.tech_.setCurrentTime(seekTo);
    return true;
  }

  // Otherwise, check whether the seek landed in a gap that can be skipped.
  var sourceUpdater = this.masterPlaylistController_.sourceUpdater_;
  var buffered = this.tech_.buffered();
  var audioBuffered = sourceUpdater.audioBuffer ? sourceUpdater.audioBuffered() : null;
  var videoBuffered = sourceUpdater.videoBuffer ? sourceUpdater.videoBuffered() : null;
  var media = this.media(); // verify that at least two segment durations or one part duration have been
  // appended before checking for a gap.

  var minAppendedDuration = media.partTargetDuration ? media.partTargetDuration : (media.targetDuration - TIME_FUDGE_FACTOR) * 2; // verify that at least two segment durations have been
  // appended before checking for a gap.

  var bufferedToCheck = [audioBuffered, videoBuffered];

  for (var i = 0; i < bufferedToCheck.length; i++) {
    // skip null buffered
    if (!bufferedToCheck[i]) {
      continue;
    }

    var timeAhead = timeAheadOf(bufferedToCheck[i], currentTime); // if we are less than two video/audio segment durations or one part
    // duration behind we haven't appended enough to call this a bad seek.

    if (timeAhead < minAppendedDuration) {
      return false;
    }
  }

  var nextRange = findNextRange(buffered, currentTime); // we have appended enough content, but we don't have anything buffered
  // to seek over the gap

  if (nextRange.length === 0) {
    return false;
  }

  // Enough was appended and a buffered region exists just ahead: skip the
  // gap by seeking slightly into the next buffered range.
  seekTo = nextRange.start(0) + SAFE_TIME_DELTA;
  this.logger_("Buffered region starts (" + nextRange.start(0) + ") " + (" just beyond seek point (" + currentTime + "). Seeking to " + seekTo + "."));
  this.tech_.setCurrentTime(seekTo);
  return true;
}
24814 /**
24815 * Handler for situations when we determine the player is waiting.
24816 *
24817 * @private
24818 */
24819 ;
24820
_proto.waiting_ = function waiting_() {
  if (this.techWaiting_()) {
    return;
  }

  // All tech waiting checks failed. Use last resort correction.
  var currentTime = this.tech_.currentTime();
  var currentRange = findRange(this.tech_.buffered(), currentTime);

  // Sometimes the player can stall for unknown reasons within a contiguous
  // buffered region with no indication that anything is amiss (seen in
  // Firefox). Seeking to currentTime is usually enough to kickstart the
  // player. This checks that the player is currently within a buffered
  // region before attempting a corrective seek. Chrome does not appear to
  // continue `timeupdate` events after a `waiting` event until there is
  // ~3 seconds of forward buffer available, so also require ~3 seconds of
  // forward buffer before taking any corrective action to avoid triggering
  // an `unknownwaiting` event when the network is slow.
  var insideBufferWithHeadroom = currentRange.length &&
    currentTime + 3 <= currentRange.end(0);

  if (!insideBufferWithHeadroom) {
    return;
  }

  this.cancelTimer_();
  this.tech_.setCurrentTime(currentTime);
  this.logger_("Stopped at " + currentTime + " while inside a buffered region " + ("[" + currentRange.start(0) + " -> " + currentRange.end(0) + "]. Attempting to resume ") + 'playback by seeking to the current time.');

  // unknown waiting corrections may be useful for monitoring QoS
  this.tech_.trigger({
    type: 'usage',
    name: 'vhs-unknown-waiting'
  });
  this.tech_.trigger({
    type: 'usage',
    name: 'hls-unknown-waiting'
  });
}
24854 /**
24855 * Handler for situations when the tech fires a `waiting` event
24856 *
24857 * @return {boolean}
24858 * True if an action (or none) was needed to correct the waiting. False if no
24859 * checks passed
24860 * @private
24861 */
24862 ;
24863
_proto.techWaiting_ = function techWaiting_() {
  var seekable = this.seekable();
  var currentTime = this.tech_.currentTime();

  if (this.tech_.seeking() || this.timer_ !== null) {
    // Tech is seeking or already waiting on another action, no action needed
    return true;
  }

  // Fallen behind the live window: jump forward to the live point.
  if (this.beforeSeekableWindow_(seekable, currentTime)) {
    var livePoint = seekable.end(seekable.length - 1);
    this.logger_("Fell out of live window at time " + currentTime + ". Seeking to " + ("live point (seekable end) " + livePoint));
    this.cancelTimer_();
    this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoS

    this.tech_.trigger({
      type: 'usage',
      name: 'vhs-live-resync'
    });
    this.tech_.trigger({
      type: 'usage',
      name: 'hls-live-resync'
    });
    return true;
  }

  var sourceUpdater = this.tech_.vhs.masterPlaylistController_.sourceUpdater_;
  var buffered = this.tech_.buffered();
  var videoUnderflow = this.videoUnderflow_({
    audioBuffered: sourceUpdater.audioBuffered(),
    videoBuffered: sourceUpdater.videoBuffered(),
    currentTime: currentTime
  });

  if (videoUnderflow) {
    // Even though the video underflowed and was stuck in a gap, the audio overplayed
    // the gap, leading currentTime into a buffered range. Seeking to currentTime
    // allows the video to catch up to the audio position without losing any audio
    // (only suffering ~3 seconds of frozen video and a pause in audio playback).
    this.cancelTimer_();
    this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoS

    this.tech_.trigger({
      type: 'usage',
      name: 'vhs-video-underflow'
    });
    this.tech_.trigger({
      type: 'usage',
      name: 'hls-video-underflow'
    });
    return true;
  }

  var nextRange = findNextRange(buffered, currentTime); // check for gap

  if (nextRange.length > 0) {
    // Playback stopped just before a gap: schedule skipTheGap_ to fire when
    // playback would reach the gap's start.
    var difference = nextRange.start(0) - currentTime;
    this.logger_("Stopped at " + currentTime + ", setting timer for " + difference + ", seeking " + ("to " + nextRange.start(0)));
    this.cancelTimer_();
    this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
    return true;
  } // All checks failed. Returning false to indicate failure to correct waiting


  return false;
};
24930
24931 _proto.afterSeekableWindow_ = function afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow) {
24932 if (allowSeeksWithinUnsafeLiveWindow === void 0) {
24933 allowSeeksWithinUnsafeLiveWindow = false;
24934 }
24935
24936 if (!seekable.length) {
24937 // we can't make a solid case if there's no seekable, default to false
24938 return false;
24939 }
24940
24941 var allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
24942 var isLive = !playlist.endList;
24943
24944 if (isLive && allowSeeksWithinUnsafeLiveWindow) {
24945 allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
24946 }
24947
24948 if (currentTime > allowedEnd) {
24949 return true;
24950 }
24951
24952 return false;
24953 };
24954
24955 _proto.beforeSeekableWindow_ = function beforeSeekableWindow_(seekable, currentTime) {
24956 if (seekable.length && // can't fall before 0 and 0 seekable start identifies VOD stream
24957 seekable.start(0) > 0 && currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
24958 return true;
24959 }
24960
24961 return false;
24962 };
24963
24964 _proto.videoUnderflow_ = function videoUnderflow_(_ref) {
24965 var videoBuffered = _ref.videoBuffered,
24966 audioBuffered = _ref.audioBuffered,
24967 currentTime = _ref.currentTime;
24968
24969 // audio only content will not have video underflow :)
24970 if (!videoBuffered) {
24971 return;
24972 }
24973
24974 var gap; // find a gap in demuxed content.
24975
24976 if (videoBuffered.length && audioBuffered.length) {
24977 // in Chrome audio will continue to play for ~3s when we run out of video
24978 // so we have to check that the video buffer did have some buffer in the
24979 // past.
24980 var lastVideoRange = findRange(videoBuffered, currentTime - 3);
24981 var videoRange = findRange(videoBuffered, currentTime);
24982 var audioRange = findRange(audioBuffered, currentTime);
24983
24984 if (audioRange.length && !videoRange.length && lastVideoRange.length) {
24985 gap = {
24986 start: lastVideoRange.end(0),
24987 end: audioRange.end(0)
24988 };
24989 } // find a gap in muxed content.
24990
24991 } else {
24992 var nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are
24993 // stuck in a gap due to video underflow.
24994
24995 if (!nextRange.length) {
24996 gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
24997 }
24998 }
24999
25000 if (gap) {
25001 this.logger_("Encountered a gap in video from " + gap.start + " to " + gap.end + ". " + ("Seeking to current time " + currentTime));
25002 return true;
25003 }
25004
25005 return false;
25006 }
25007 /**
25008 * Timer callback. If playback still has not proceeded, then we seek
25009 * to the start of the next buffered region.
25010 *
25011 * @private
25012 */
25013 ;
25014
25015 _proto.skipTheGap_ = function skipTheGap_(scheduledCurrentTime) {
25016 var buffered = this.tech_.buffered();
25017 var currentTime = this.tech_.currentTime();
25018 var nextRange = findNextRange(buffered, currentTime);
25019 this.cancelTimer_();
25020
25021 if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
25022 return;
25023 }
25024
25025 this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0)); // only seek if we still have not played
25026
25027 this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);
25028 this.tech_.trigger({
25029 type: 'usage',
25030 name: 'vhs-gap-skip'
25031 });
25032 this.tech_.trigger({
25033 type: 'usage',
25034 name: 'hls-gap-skip'
25035 });
25036 };
25037
25038 _proto.gapFromVideoUnderflow_ = function gapFromVideoUnderflow_(buffered, currentTime) {
25039 // At least in Chrome, if there is a gap in the video buffer, the audio will continue
25040 // playing for ~3 seconds after the video gap starts. This is done to account for
25041 // video buffer underflow/underrun (note that this is not done when there is audio
25042 // buffer underflow/underrun -- in that case the video will stop as soon as it
25043 // encounters the gap, as audio stalls are more noticeable/jarring to a user than
25044 // video stalls). The player's time will reflect the playthrough of audio, so the
25045 // time will appear as if we are in a buffered region, even if we are stuck in a
25046 // "gap."
25047 //
25048 // Example:
25049 // video buffer: 0 => 10.1, 10.2 => 20
25050 // audio buffer: 0 => 20
25051 // overall buffer: 0 => 10.1, 10.2 => 20
25052 // current time: 13
25053 //
25054 // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
25055 // however, the audio continued playing until it reached ~3 seconds past the gap
25056 // (13 seconds), at which point it stops as well. Since current time is past the
25057 // gap, findNextRange will return no ranges.
25058 //
25059 // To check for this issue, we see if there is a gap that starts somewhere within
25060 // a 3 second range (3 seconds +/- 1 second) back from our current time.
25061 var gaps = findGaps(buffered);
25062
25063 for (var i = 0; i < gaps.length; i++) {
25064 var start = gaps.start(i);
25065 var end = gaps.end(i); // gap is starts no more than 4 seconds back
25066
25067 if (currentTime - start < 4 && currentTime - start > 2) {
25068 return {
25069 start: start,
25070 end: end
25071 };
25072 }
25073 }
25074
25075 return null;
25076 };
25077
25078 return PlaybackWatcher;
25079}();
25080
var defaultOptions = {
  // Minimum number of seconds that must elapse between error-triggered reloads.
  errorInterval: 30,

  // Default source getter: resolve the player's current source and hand it to
  // `next` so it can be re-applied. Called with the player as `this`.
  getSource: function getSource(next) {
    var currentTech = this.tech({
      IWillNotUseThisInPlugins: true
    });

    return next(currentTech.currentSource_ || this.currentSource());
  }
};
25091/**
25092 * Main entry point for the plugin
25093 *
25094 * @param {Player} player a reference to a videojs Player instance
25095 * @param {Object} [options] an object with plugin options
25096 * @private
25097 */
25098
var initPlugin = function initPlugin(player, options) {
  var lastReloadTime = 0;
  var resumeTime = 0;
  var settings = videojs.mergeOptions(defaultOptions, options);

  // Announce (once the player is ready) that the plugin is active, so QoS
  // tooling can tell it was initialized.
  player.ready(function () {
    player.trigger({
      type: 'usage',
      name: 'vhs-error-reload-initialized'
    });
    player.trigger({
      type: 'usage',
      name: 'hls-error-reload-initialized'
    });
  });
  /**
   * Restore the saved playback position. Must wait until `loadedmetadata`
   * has been triggered on the freshly-set source.
   *
   * @private
   */

  var restorePosition = function restorePosition() {
    if (resumeTime) {
      player.currentTime(resumeTime);
    }
  };
  /**
   * Set the source on the player element, play, and seek if necessary
   *
   * @param {Object} sourceObj An object specifying the source url and mime-type to play
   * @private
   */

  var applySource = function applySource(sourceObj) {
    if (sourceObj === null || sourceObj === undefined) {
      return;
    }

    // Live streams (Infinity duration) restart at the live point rather than
    // restoring a position.
    resumeTime = player.duration() === Infinity ? 0 : player.currentTime() || 0;
    player.one('loadedmetadata', restorePosition);
    player.src(sourceObj);
    player.trigger({
      type: 'usage',
      name: 'vhs-error-reload'
    });
    player.trigger({
      type: 'usage',
      name: 'hls-error-reload'
    });
    player.play();
  };
  /**
   * Attempt to get a source from either the built-in getSource function
   * or a custom function provided via the options
   *
   * @private
   */

  var onPlayerError = function onPlayerError() {
    // Do not attempt to reload the source if a source-reload occurred before
    // 'errorInterval' time has elapsed since the last source-reload
    if (Date.now() - lastReloadTime < settings.errorInterval * 1000) {
      player.trigger({
        type: 'usage',
        name: 'vhs-error-reload-canceled'
      });
      player.trigger({
        type: 'usage',
        name: 'hls-error-reload-canceled'
      });
      return;
    }

    if (!settings.getSource || typeof settings.getSource !== 'function') {
      videojs.log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
      return;
    }

    lastReloadTime = Date.now();
    return settings.getSource.call(player, applySource);
  };
  /**
   * Unbind any event handlers that were bound by the plugin
   *
   * @private
   */

  var teardown = function teardown() {
    player.off('loadedmetadata', restorePosition);
    player.off('error', onPlayerError);
    player.off('dispose', teardown);
  };
  /**
   * Cleanup before re-initializing the plugin
   *
   * @param {Object} [newOptions] an object with plugin options
   * @private
   */

  var reinit = function reinit(newOptions) {
    teardown();
    initPlugin(player, newOptions);
  };

  player.on('error', onPlayerError);
  player.on('dispose', teardown);

  // Overwrite the plugin function so that we can correctly cleanup before
  // initializing the plugin
  player.reloadSourceOnError = reinit;
};
25213/**
25214 * Reload the source when an error is detected as long as there
25215 * wasn't an error previously within the last 30 seconds
25216 *
25217 * @param {Object} [options] an object with plugin options
25218 */
25219
25220
var reloadSourceOnError = function reloadSourceOnError(options) {
  // `this` is the player the plugin was registered on; initPlugin does all the
  // event wiring and replaces this function with a re-initializing variant.
  var player = this;

  initPlugin(player, options);
};
25224
// Bundled library versions, inlined at build time from each package.json.
var version$4 = "2.16.0"; // @videojs/http-streaming (matches the banner at the top of this bundle)

var version$3 = "6.0.1"; // bundled dependency version — presumably mux.js; confirm against the build

var version$2 = "0.22.1"; // bundled dependency version — presumably mpd-parser; confirm against the build

var version$1 = "4.8.0"; // bundled dependency version — presumably m3u8-parser; confirm against the build

var version = "3.1.3"; // bundled dependency version — presumably aes-decrypter; confirm against the build
25234
// Public Vhs namespace: exposes the playlist loader/utilities, the default and
// alternative rendition-selection policies, and a shared xhr factory.
var Vhs = {
  PlaylistLoader: PlaylistLoader,
  Playlist: Playlist,
  utils: utils,
  // Default selectors used when the user does not supply their own.
  STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
  INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
  // Individual selectors/comparators, exposed so users can compose custom policies.
  lastBandwidthSelector: lastBandwidthSelector,
  movingAverageBandwidthSelector: movingAverageBandwidthSelector,
  comparePlaylistBandwidth: comparePlaylistBandwidth,
  comparePlaylistResolution: comparePlaylistResolution,
  xhr: xhrFactory()
}; // Define getter/setters for config properties
25247
// Mirror every Config property onto Vhs behind warning getter/setters so
// advanced users can tune global defaults at their own risk.
Object.keys(Config).forEach(function (prop) {
  var unsafeWarning = "using Vhs." + prop + " is UNSAFE be sure you know what you are doing";

  Object.defineProperty(Vhs, prop, {
    get: function get() {
      videojs.log.warn(unsafeWarning);
      return Config[prop];
    },
    set: function set(value) {
      videojs.log.warn(unsafeWarning);

      // Only non-negative numbers are accepted.
      if (typeof value !== 'number' || value < 0) {
        videojs.log.warn("value of Vhs." + prop + " must be greater than or equal to 0");
        return;
      }

      Config[prop] = value;
    }
  });
});
var LOCAL_STORAGE_KEY = 'videojs-vhs';
25267/**
25268 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.
25269 *
25270 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
25271 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
25272 * @function handleVhsMediaChange
25273 */
25274
var handleVhsMediaChange = function handleVhsMediaChange(qualityLevels, playlistLoader) {
  var activeId = playlistLoader.media().id;
  var selectedIndex = -1;

  // The quality level list is array-like, so scan it for the level whose id
  // matches the newly-selected playlist; -1 means no match.
  for (var i = 0; i < qualityLevels.length; i++) {
    if (qualityLevels[i].id === activeId) {
      selectedIndex = i;
      break;
    }
  }

  qualityLevels.selectedIndex_ = selectedIndex;
  qualityLevels.trigger({
    selectedIndex: selectedIndex,
    type: 'change'
  });
};
25292/**
25293 * Adds quality levels to list once playlist metadata is available
25294 *
25295 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
25296 * @param {Object} vhs Vhs object to listen to for media events.
25297 * @function handleVhsLoadedMetadata
25298 */
25299
25300
var handleVhsLoadedMetadata = function handleVhsLoadedMetadata(qualityLevels, vhs) {
  // Register every available rendition as a quality level, then sync the
  // selected index with the currently-active media playlist.
  var reps = vhs.representations();

  for (var i = 0; i < reps.length; i++) {
    qualityLevels.addQualityLevel(reps[i]);
  }

  handleVhsMediaChange(qualityLevels, vhs.playlists);
}; // HLS is a source handler, not a tech. Make sure attempts to use it
// as one do not cause exceptions.
25307// as one do not cause exceptions.
25308
25309
Vhs.canPlaySource = function () {
  // Legacy tech-API shim: warn instead of throwing when old configurations
  // probe HLS as a tech.
  return videojs.log.warn('HLS is no longer a tech. Please remove it from your player\'s techOrder.');
};
25313
/**
 * Build the keySystems configuration for videojs-contrib-eme from the
 * source-level key systems options and the selected playlists.
 *
 * @param {Object} keySystemOptions
 *        Key systems options from the source; returned untouched when falsy
 * @param {Object} [mainPlaylist]
 *        The selected (video) playlist; supplies CODECS and per-key-system pssh
 * @param {Object} [audioPlaylist]
 *        The selected alternate audio playlist; overrides the audio codec
 * @return {Object}
 *         Merged key systems configuration with content types, pssh and url
 */
var emeKeySystems = function emeKeySystems(keySystemOptions, mainPlaylist, audioPlaylist) {
  if (!keySystemOptions) {
    return keySystemOptions;
  }

  var codecs = {};

  if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {
    codecs = unwrapCodecList(parseCodecs(mainPlaylist.attributes.CODECS));
  }

  if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {
    codecs.audio = audioPlaylist.attributes.CODECS;
  }

  var videoContentType = getMimeForCodec(codecs.video);
  var audioContentType = getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlist

  var keySystemContentTypes = {};

  for (var keySystem in keySystemOptions) {
    keySystemContentTypes[keySystem] = {};

    if (audioContentType) {
      keySystemContentTypes[keySystem].audioContentType = audioContentType;
    }

    if (videoContentType) {
      keySystemContentTypes[keySystem].videoContentType = videoContentType;
    } // Default to using the video playlist's PSSH even though they may be different, as
    // videojs-contrib-eme will only accept one in the options.
    //
    // This shouldn't be an issue for most cases as early initialization will handle all
    // unique PSSH values, and if they aren't, then encrypted events should have the
    // specific information needed for the unique license.
    //
    // Guard on mainPlaylist itself too: every other access above tolerates a
    // missing mainPlaylist, so the pssh lookup must not be the one that throws.


    if (mainPlaylist && mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {
      keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;
    } // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
    // so we need to prevent overwriting the URL entirely


    if (typeof keySystemOptions[keySystem] === 'string') {
      keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
    }
  }

  return videojs.mergeOptions(keySystemOptions, keySystemContentTypes);
};
25364/**
25365 * @typedef {Object} KeySystems
25366 *
25367 * keySystems configuration for https://github.com/videojs/videojs-contrib-eme
25368 * Note: not all options are listed here.
25369 *
25370 * @property {Uint8Array} [pssh]
25371 * Protection System Specific Header
25372 */
25373
25374/**
25375 * Goes through all the playlists and collects an array of KeySystems options objects
25376 * containing each playlist's keySystems and their pssh values, if available.
25377 *
25378 * @param {Object[]} playlists
25379 * The playlists to look through
25380 * @param {string[]} keySystems
25381 * The keySystems to collect pssh values for
25382 *
25383 * @return {KeySystems[]}
25384 * An array of KeySystems objects containing available key systems and their
25385 * pssh values
25386 */
25387
25388
var getAllPsshKeySystemsOptions = function getAllPsshKeySystemsOptions(playlists, keySystems) {
  var keySystemsArr = [];

  playlists.forEach(function (playlist) {
    // Playlists without content protection contribute nothing.
    if (!playlist.contentProtection) {
      return;
    }

    // Collect each requested key system that carries a pssh on this playlist.
    var keySystemsOptions = {};

    keySystems.forEach(function (keySystem) {
      var keySystemOptions = playlist.contentProtection[keySystem];

      if (keySystemOptions && keySystemOptions.pssh) {
        keySystemsOptions[keySystem] = {
          pssh: keySystemOptions.pssh
        };
      }
    });

    // Only keep playlists that produced at least one pssh entry.
    if (Object.keys(keySystemsOptions).length) {
      keySystemsArr.push(keySystemsOptions);
    }
  });

  return keySystemsArr;
};
25414/**
25415 * Returns a promise that waits for the
25416 * [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
25417 *
25418 * Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
25419 * browsers.
25420 *
25421 * As per the above ticket, this is particularly important for Chrome, where, if
25422 * unencrypted content is appended before encrypted content and the key session has not
25423 * been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
25424 * during playback.
25425 *
25426 * @param {Object} player
25427 * The player instance
25428 * @param {Object[]} sourceKeySystems
25429 * The key systems options from the player source
25430 * @param {Object} [audioMedia]
25431 * The active audio media playlist (optional)
25432 * @param {Object[]} mainPlaylists
25433 * The playlists found on the master playlist object
25434 *
25435 * @return {Object}
25436 * Promise that resolves when the key session has been created
25437 */
25438
25439
var waitForKeySessionCreation = function waitForKeySessionCreation(_ref) {
  var player = _ref.player,
      sourceKeySystems = _ref.sourceKeySystems,
      audioMedia = _ref.audioMedia,
      mainPlaylists = _ref.mainPlaylists;

  // Nothing to wait on when the eme plugin can't pre-initialize media keys.
  if (!player.eme.initializeMediaKeys) {
    return Promise.resolve();
  }

  // TODO should all audio PSSH values be initialized for DRM?
  //
  // Every unique video rendition pssh is initialized, but only the initial
  // audio playlist's license is. Switching to another encrypted alternate
  // audio playlist should fire an encrypted event, though that path is
  // untested — and there may be many alternate audio playlists (e.g. multiple
  // languages) that are unlikely to ever be used.
  var playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;
  var keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
  var initializationFinishedPromises = [];
  var keySessionCreatedPromises = [];

  // PSSH values are interpreted as initData, so EME dedupes duplicates for us.
  // The only APIs that wouldn't dedupe are the ms-prefixed ones, which are
  // covered by the early return above; initializeMediaKeys also avoids the
  // webkit-prefixed APIs.
  keySystemsOptionsArr.forEach(function (keySystemsOptions) {
    keySessionCreatedPromises.push(new Promise(function (resolve) {
      player.tech_.one('keysessioncreated', resolve);
    }));
    initializationFinishedPromises.push(new Promise(function (resolve, reject) {
      player.eme.initializeMediaKeys({
        keySystems: keySystemsOptions
      }, function (err) {
        if (err) {
          reject(err);
          return;
        }

        resolve();
      });
    }));
  });

  // Promise.race rather than Promise.any: any() needs Safari 14+, and none of
  // these promises are expected to reject — if one does, surfacing the
  // rejection beats masking it.
  return Promise.race([
    // If a session was previously created these all resolve without creating a
    // new one; otherwise they wait for every license request — which is why the
    // key-session signal below is raced in (it makes setup much faster).
    Promise.all(initializationFinishedPromises),
    // Once a single session is created, the browser knows DRM will be used.
    Promise.race(keySessionCreatedPromises)
  ]);
};
25496/**
25497 * If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
25498 * there are keySystems on the source, sets up source options to prepare the source for
25499 * eme.
25500 *
25501 * @param {Object} player
25502 * The player instance
25503 * @param {Object[]} sourceKeySystems
25504 * The key systems options from the player source
25505 * @param {Object} media
25506 * The active media playlist
25507 * @param {Object} [audioMedia]
25508 * The active audio media playlist (optional)
25509 *
25510 * @return {boolean}
25511 * Whether or not options were configured and EME is available
25512 */
25513
var setupEmeOptions = function setupEmeOptions(_ref2) {
  var player = _ref2.player,
      sourceKeySystems = _ref2.sourceKeySystems,
      media = _ref2.media,
      audioMedia = _ref2.audioMedia;

  var sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);

  if (!sourceOptions) {
    return false;
  }

  player.currentSource().keySystems = sourceOptions;

  // eme handles the rest of the setup; without the plugin we can only warn.
  if (!player.eme) {
    videojs.log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
    return false;
  }

  return true;
};
25535
var getVhsLocalStorage = function getVhsLocalStorage() {
  // localStorage may be unavailable (disabled, sandboxed, non-browser env).
  if (!window$1.localStorage) {
    return null;
  }

  var raw = window$1.localStorage.getItem(LOCAL_STORAGE_KEY);

  if (!raw) {
    return null;
  }

  try {
    return JSON.parse(raw);
  } catch (e) {
    // someone may have tampered with the value
    return null;
  }
};
25554
var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
  if (!window$1.localStorage) {
    return false;
  }

  // Merge onto whatever was stored previously, if anything survived parsing.
  var existing = getVhsLocalStorage();
  var objectToStore = existing ? videojs.mergeOptions(existing, options) : options;

  try {
    window$1.localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
  } catch (e) {
    // setItem throws if storage is full (e.g., always on iOS 5+ Safari private
    // mode, where the quota is 0):
    // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
    // Storage is best-effort, so just report the failure.
    return false;
  }

  return objectToStore;
};
25575/**
25576 * Parses VHS-supported media types from data URIs. See
25577 * https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
25578 * for information on data URIs.
25579 *
25580 * @param {string} dataUri
25581 * The data URI
25582 *
25583 * @return {string|Object}
25584 * The parsed object/string, or the original string if no supported media type
25585 * was found
25586 */
25587
25588
var expandDataUri = function expandDataUri(dataUri) {
  // VHS JSON manifests can be inlined as data URIs; parse the payload (the
  // part after the first comma) for those.
  var isVhsJson = dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0;

  if (isVhsJson) {
    return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
  }

  // no known case for this data URI, return the string as-is
  return dataUri;
};
25597/**
25598 * Whether the browser has built-in HLS support.
25599 */
25600
25601
Vhs.supportsNativeHls = function () {
  // No DOM means no native playback at all.
  if (!document || !document.createElement) {
    return false;
  }

  var video = document.createElement('video');

  // native HLS is definitely not supported if HTML5 video isn't
  if (!videojs.getTech('Html5').isSupported()) {
    return false;
  }

  // HLS manifests can go by many mime-types
  var canPlay = [
    // Apple sanctioned
    'application/vnd.apple.mpegurl',
    // Apple sanctioned for backwards compatibility
    'audio/mpegurl',
    // Very common
    'audio/x-mpegurl',
    // Very common
    'application/x-mpegurl',
    // Included for completeness
    'video/x-mpegurl',
    'video/mpegurl',
    'application/mpegurl'
  ];

  return canPlay.some(function (canItPlay) {
    return /maybe|probably/i.test(video.canPlayType(canItPlay));
  });
}();
25624
Vhs.supportsNativeDash = function () {
  // Requires DOM access and a working HTML5 tech before probing canPlayType.
  if (!document || !document.createElement || !videojs.getTech('Html5').isSupported()) {
    return false;
  }

  var video = document.createElement('video');
  return /maybe|probably/i.test(video.canPlayType('application/dash+xml'));
}();
25632
Vhs.supportsTypeNatively = function (type) {
  // Map a simple source type onto the feature probes computed above.
  if (type === 'hls') {
    return Vhs.supportsNativeHls;
  }

  return type === 'dash' ? Vhs.supportsNativeDash : false;
};
25644/**
25645 * HLS is a source handler, not a tech. Make sure attempts to use it
25646 * as one do not cause exceptions.
25647 */
25648
25649
Vhs.isSupported = function () {
  // Legacy tech-API shim: warn rather than throw when HLS is probed as a tech.
  return videojs.log.warn('HLS is no longer a tech. Please remove it from your player\'s techOrder.');
};
25653
// Base Component class registered with video.js; VhsHandler extends it below.
var Component = videojs.getComponent('Component');
25655/**
25656 * The Vhs Handler object, where we orchestrate all of the parts
25657 * of HLS to interact with video.js
25658 *
25659 * @class VhsHandler
25660 * @extends videojs.Component
25661 * @param {Object} source the soruce object
25662 * @param {Tech} tech the parent tech object
25663 * @param {Object} options optional and required options
25664 */
25665
25666var VhsHandler = /*#__PURE__*/function (_Component) {
25667 _inheritsLoose(VhsHandler, _Component);
25668
  // Constructor: merges deprecated `hls` options under `vhs`, installs
  // deprecated player.hls/vhs/dash accessors, validates the overrideNative
  // configuration, and wires fullscreen/seeking/error/play handlers.
  function VhsHandler(source, tech, options) {
    var _this;

    _this = _Component.call(this, tech, videojs.mergeOptions(options.hls, options.vhs)) || this;

    if (options.hls && Object.keys(options.hls).length) {
      videojs.log.warn('Using hls options is deprecated. Please rename `hls` to `vhs` in your options object.');
    } // if a tech level `initialBandwidth` option was passed
    // use that over the VHS level `bandwidth` option


    if (typeof options.initialBandwidth === 'number') {
      _this.options_.bandwidth = options.initialBandwidth;
    }

    _this.logger_ = logger('VhsHandler'); // tech.player() is deprecated but setup a reference to HLS for
    // backwards-compatibility

    if (tech.options_ && tech.options_.playerId) {
      var _player = videojs(tech.options_.playerId);

      // Deprecated accessor: player.hls — warns and reports usage on each read.
      if (!_player.hasOwnProperty('hls')) {
        Object.defineProperty(_player, 'hls', {
          get: function get() {
            videojs.log.warn('player.hls is deprecated. Use player.tech().vhs instead.');
            tech.trigger({
              type: 'usage',
              name: 'hls-player-access'
            });
            return _assertThisInitialized(_this);
          },
          configurable: true
        });
      }

      // Deprecated accessor: player.vhs — warns and reports usage on each read.
      if (!_player.hasOwnProperty('vhs')) {
        Object.defineProperty(_player, 'vhs', {
          get: function get() {
            videojs.log.warn('player.vhs is deprecated. Use player.tech().vhs instead.');
            tech.trigger({
              type: 'usage',
              name: 'vhs-player-access'
            });
            return _assertThisInitialized(_this);
          },
          configurable: true
        });
      }

      // Deprecated accessor: player.dash — warns only, no usage event.
      if (!_player.hasOwnProperty('dash')) {
        Object.defineProperty(_player, 'dash', {
          get: function get() {
            videojs.log.warn('player.dash is deprecated. Use player.tech().vhs instead.');
            return _assertThisInitialized(_this);
          },
          configurable: true
        });
      }

      _this.player_ = _player;
    }

    _this.tech_ = tech;
    _this.source_ = source;
    _this.stats = {};
    // Consumed by the 'seeking' handler below to skip one seeking event —
    // presumably set when VHS itself initiates a seek; confirm where it is
    // assigned elsewhere in this file.
    _this.ignoreNextSeekingEvent_ = false;

    _this.setOptions_();

    if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
      tech.overrideNativeAudioTracks(true);
      tech.overrideNativeVideoTracks(true);
    } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
      // overriding native HLS only works if audio tracks have been emulated
      // error early if we're misconfigured
      throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
    } // listen for fullscreenchange events for this player so that we
    // can adjust our quality selection quickly


    _this.on(document, ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
      var fullscreenElement = document.fullscreenElement || document.webkitFullscreenElement || document.mozFullScreenElement || document.msFullscreenElement;

      if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
        _this.masterPlaylistController_.fastQualityChange_();
      } else {
        // When leaving fullscreen, since the in page pixel dimensions should be smaller
        // than full screen, see if there should be a rendition switch down to preserve
        // bandwidth.
        _this.masterPlaylistController_.checkABR_();
      }
    });

    // NOTE(review): the handlers below use `this` (not `_this`) — this relies
    // on video.js binding listener context to the component; confirm against
    // the video.js Component#on contract.
    _this.on(_this.tech_, 'seeking', function () {
      if (this.ignoreNextSeekingEvent_) {
        this.ignoreNextSeekingEvent_ = false;
        return;
      }

      this.setCurrentTime(this.tech_.currentTime());
    });

    _this.on(_this.tech_, 'error', function () {
      // verify that the error was real and we are loaded
      // enough to have mpc loaded.
      if (this.tech_.error() && this.masterPlaylistController_) {
        this.masterPlaylistController_.pauseLoading();
      }
    });

    _this.on(_this.tech_, 'play', _this.play);

    return _this;
  }
25783
  // Shorthand for attaching the instance methods that follow.
  var _proto = VhsHandler.prototype;
25785
25786 _proto.setOptions_ = function setOptions_() {
25787 var _this2 = this;
25788
25789 // defaults
25790 this.options_.withCredentials = this.options_.withCredentials || false;
25791 this.options_.handleManifestRedirects = this.options_.handleManifestRedirects === false ? false : true;
25792 this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
25793 this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
25794 this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
25795 this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
25796 this.options_.useNetworkInformationApi = this.options_.useNetworkInformationApi || false;
25797 this.options_.useDtsForTimestampOffset = this.options_.useDtsForTimestampOffset || false;
25798 this.options_.customTagParsers = this.options_.customTagParsers || [];
25799 this.options_.customTagMappers = this.options_.customTagMappers || [];
25800 this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;
25801
25802 if (typeof this.options_.blacklistDuration !== 'number') {
25803 this.options_.blacklistDuration = 5 * 60;
25804 }
25805
25806 if (typeof this.options_.bandwidth !== 'number') {
25807 if (this.options_.useBandwidthFromLocalStorage) {
25808 var storedObject = getVhsLocalStorage();
25809
25810 if (storedObject && storedObject.bandwidth) {
25811 this.options_.bandwidth = storedObject.bandwidth;
25812 this.tech_.trigger({
25813 type: 'usage',
25814 name: 'vhs-bandwidth-from-local-storage'
25815 });
25816 this.tech_.trigger({
25817 type: 'usage',
25818 name: 'hls-bandwidth-from-local-storage'
25819 });
25820 }
25821
25822 if (storedObject && storedObject.throughput) {
25823 this.options_.throughput = storedObject.throughput;
25824 this.tech_.trigger({
25825 type: 'usage',
25826 name: 'vhs-throughput-from-local-storage'
25827 });
25828 this.tech_.trigger({
25829 type: 'usage',
25830 name: 'hls-throughput-from-local-storage'
25831 });
25832 }
25833 }
25834 } // if bandwidth was not set by options or pulled from local storage, start playlist
25835 // selection at a reasonable bandwidth
25836
25837
25838 if (typeof this.options_.bandwidth !== 'number') {
25839 this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
25840 } // If the bandwidth number is unchanged from the initial setting
25841 // then this takes precedence over the enableLowInitialPlaylist option
25842
25843
25844 this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src
25845
25846 ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys', 'playlistSelector', 'initialPlaylistSelector', 'experimentalBufferBasedABR', 'liveRangeSafeTimeDelta', 'experimentalLLHLS', 'useNetworkInformationApi', 'useDtsForTimestampOffset', 'experimentalExactManifestTimings', 'experimentalLeastPixelDiffSelector'].forEach(function (option) {
25847 if (typeof _this2.source_[option] !== 'undefined') {
25848 _this2.options_[option] = _this2.source_[option];
25849 }
25850 });
25851 this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
25852 this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
25853 }
25854 /**
25855 * called when player.src gets called, handle a new source
25856 *
25857 * @param {Object} src the source object to handle
25858 */
25859 ;
25860
// Main entry point for a new source: builds the MasterPlaylistController and
// PlaybackWatcher, re-exposes legacy (< v2) properties, wires stats and event
// handlers, and finally points the tech at a MediaSource object URL.
_proto.src = function src(_src, type) {
  var _this3 = this;

  // do nothing if the src is falsey
  if (!_src) {
    return;
  }

  this.setOptions_(); // add master playlist controller options

  this.options_.src = expandDataUri(this.source_.src);
  this.options_.tech = this.tech_;
  this.options_.externVhs = Vhs;
  this.options_.sourceType = simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech

  this.options_.seekTo = function (time) {
    _this3.tech_.setCurrentTime(time);
  };

  if (this.options_.smoothQualityChange) {
    videojs.log.warn('smoothQualityChange is deprecated and will be removed in the next major version');
  }

  this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
  var playbackWatcherOptions = videojs.mergeOptions({
    liveRangeSafeTimeDelta: SAFE_TIME_DELTA
  }, this.options_, {
    seekable: function seekable() {
      return _this3.seekable();
    },
    media: function media() {
      return _this3.masterPlaylistController_.media();
    },
    masterPlaylistController: this.masterPlaylistController_
  });
  this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
  // Normalize MPC errors into an object with a `code` (3 when none is set)
  // before handing them to the player.
  this.masterPlaylistController_.on('error', function () {
    var player = videojs.players[_this3.tech_.options_.playerId];
    var error = _this3.masterPlaylistController_.error;

    if (typeof error === 'object' && !error.code) {
      error.code = 3;
    } else if (typeof error === 'string') {
      error = {
        message: error,
        code: 3
      };
    }

    player.error(error);
  });
  var defaultSelector = this.options_.experimentalBufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
  // compatibility with < v2

  this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
  this.masterPlaylistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2

  this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
  this.mediaSource = this.masterPlaylistController_.mediaSource; // Proxy assignment of some properties to the master playlist
  // controller. Using a custom property for backwards compatibility
  // with < v2

  Object.defineProperties(this, {
    selectPlaylist: {
      get: function get() {
        return this.masterPlaylistController_.selectPlaylist;
      },
      set: function set(selectPlaylist) {
        this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
      }
    },
    throughput: {
      get: function get() {
        return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
      },
      set: function set(throughput) {
        this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
        // for the cumulative average

        this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
      }
    },
    bandwidth: {
      get: function get() {
        var playerBandwidthEst = this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
        var networkInformation = window$1.navigator.connection || window$1.navigator.mozConnection || window$1.navigator.webkitConnection;
        var tenMbpsAsBitsPerSecond = 10e6;

        if (this.options_.useNetworkInformationApi && networkInformation) {
          // downlink returns Mbps
          // https://developer.mozilla.org/en-US/docs/Web/API/NetworkInformation/downlink
          var networkInfoBandwidthEstBitsPerSec = networkInformation.downlink * 1000 * 1000; // downlink maxes out at 10 Mbps. In the event that both networkInformationApi and the player
          // estimate a bandwidth greater than 10 Mbps, use the larger of the two estimates to ensure that
          // high quality streams are not filtered out.

          if (networkInfoBandwidthEstBitsPerSec >= tenMbpsAsBitsPerSecond && playerBandwidthEst >= tenMbpsAsBitsPerSecond) {
            playerBandwidthEst = Math.max(playerBandwidthEst, networkInfoBandwidthEstBitsPerSec);
          } else {
            playerBandwidthEst = networkInfoBandwidthEstBitsPerSec;
          }
        }

        return playerBandwidthEst;
      },
      set: function set(bandwidth) {
        this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
        // `count` is set to zero that current value of `rate` isn't included
        // in the cumulative average

        this.masterPlaylistController_.mainSegmentLoader_.throughput = {
          rate: 0,
          count: 0
        };
      }
    },

    /**
     * `systemBandwidth` is a combination of two serial processes bit-rates. The first
     * is the network bitrate provided by `bandwidth` and the second is the bitrate of
     * the entire process after that - decryption, transmuxing, and appending - provided
     * by `throughput`.
     *
     * Since the two process are serial, the overall system bandwidth is given by:
     * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
     */
    systemBandwidth: {
      get: function get() {
        var invBandwidth = 1 / (this.bandwidth || 1);
        var invThroughput;

        if (this.throughput > 0) {
          invThroughput = 1 / this.throughput;
        } else {
          invThroughput = 0;
        }

        var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
        return systemBitrate;
      },
      set: function set() {
        videojs.log.error('The "systemBandwidth" property is read-only');
      }
    }
  });

  if (this.options_.bandwidth) {
    this.bandwidth = this.options_.bandwidth;
  }

  if (this.options_.throughput) {
    this.throughput = this.options_.throughput;
  }

  // Read-only stats surface; every entry is a live getter so callers always
  // see current values.
  Object.defineProperties(this.stats, {
    bandwidth: {
      get: function get() {
        return _this3.bandwidth || 0;
      },
      enumerable: true
    },
    mediaRequests: {
      get: function get() {
        return _this3.masterPlaylistController_.mediaRequests_() || 0;
      },
      enumerable: true
    },
    mediaRequestsAborted: {
      get: function get() {
        return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
      },
      enumerable: true
    },
    mediaRequestsTimedout: {
      get: function get() {
        return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
      },
      enumerable: true
    },
    mediaRequestsErrored: {
      get: function get() {
        return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
      },
      enumerable: true
    },
    mediaTransferDuration: {
      get: function get() {
        return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
      },
      enumerable: true
    },
    mediaBytesTransferred: {
      get: function get() {
        return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
      },
      enumerable: true
    },
    mediaSecondsLoaded: {
      get: function get() {
        return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
      },
      enumerable: true
    },
    mediaAppends: {
      get: function get() {
        return _this3.masterPlaylistController_.mediaAppends_() || 0;
      },
      enumerable: true
    },
    mainAppendsToLoadedData: {
      get: function get() {
        return _this3.masterPlaylistController_.mainAppendsToLoadedData_() || 0;
      },
      enumerable: true
    },
    audioAppendsToLoadedData: {
      get: function get() {
        return _this3.masterPlaylistController_.audioAppendsToLoadedData_() || 0;
      },
      enumerable: true
    },
    appendsToLoadedData: {
      get: function get() {
        return _this3.masterPlaylistController_.appendsToLoadedData_() || 0;
      },
      enumerable: true
    },
    timeToLoadedData: {
      get: function get() {
        return _this3.masterPlaylistController_.timeToLoadedData_() || 0;
      },
      enumerable: true
    },
    buffered: {
      get: function get() {
        return timeRangesToArray(_this3.tech_.buffered());
      },
      enumerable: true
    },
    currentTime: {
      get: function get() {
        return _this3.tech_.currentTime();
      },
      enumerable: true
    },
    currentSource: {
      get: function get() {
        return _this3.tech_.currentSource_;
      },
      enumerable: true
    },
    currentTech: {
      get: function get() {
        return _this3.tech_.name_;
      },
      enumerable: true
    },
    duration: {
      get: function get() {
        return _this3.tech_.duration();
      },
      enumerable: true
    },
    master: {
      get: function get() {
        return _this3.playlists.master;
      },
      enumerable: true
    },
    playerDimensions: {
      get: function get() {
        return _this3.tech_.currentDimensions();
      },
      enumerable: true
    },
    seekable: {
      get: function get() {
        return timeRangesToArray(_this3.tech_.seekable());
      },
      enumerable: true
    },
    timestamp: {
      get: function get() {
        return Date.now();
      },
      enumerable: true
    },
    videoPlaybackQuality: {
      get: function get() {
        return _this3.tech_.getVideoPlaybackQuality();
      },
      enumerable: true
    }
  });
  this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
  // Persist the latest estimates so the next session can start from them.
  this.tech_.on('bandwidthupdate', function () {
    if (_this3.options_.useBandwidthFromLocalStorage) {
      updateVhsLocalStorage({
        bandwidth: _this3.bandwidth,
        throughput: Math.round(_this3.throughput)
      });
    }
  });
  this.masterPlaylistController_.on('selectedinitialmedia', function () {
    // Add the manual rendition mix-in to VhsHandler
    renditionSelectionMixin(_this3);
  });
  // EME must be configured once the source buffers exist.
  this.masterPlaylistController_.sourceUpdater_.on('createdsourcebuffers', function () {
    _this3.setupEme_();
  }); // the bandwidth of the primary segment loader is our best
  // estimate of overall bandwidth

  this.on(this.masterPlaylistController_, 'progress', function () {
    this.tech_.trigger('progress');
  }); // In the live case, we need to ignore the very first `seeking` event since
  // that will be the result of the seek-to-live behavior

  this.on(this.masterPlaylistController_, 'firstplay', function () {
    this.ignoreNextSeekingEvent_ = true;
  });
  this.setupQualityLevels_(); // do nothing if the tech has been disposed already
  // this can occur if someone sets the src in player.ready(), for instance

  if (!this.tech_.el()) {
    return;
  }

  this.mediaSourceUrl_ = window$1.URL.createObjectURL(this.masterPlaylistController_.mediaSource);
  this.tech_.src(this.mediaSourceUrl_);
};
26190
/**
 * Kick off EME key session creation for the current main (and, when present,
 * alternate audio) playlists, then mark the source updater's EME state as
 * initialized. Failures surface as a code-3 player error.
 */
_proto.createKeySessions_ = function createKeySessions_() {
  var handler = this;
  var audioLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;

  this.logger_('waiting for EME key session creation');
  waitForKeySessionCreation({
    player: this.player_,
    sourceKeySystems: this.source_.keySystems,
    audioMedia: audioLoader && audioLoader.media(),
    mainPlaylists: this.playlists.master.playlists
  }).then(function () {
    handler.logger_('created EME key session');
    handler.masterPlaylistController_.sourceUpdater_.initializedEme();
  }).catch(function (err) {
    handler.logger_('error while creating EME key session', err);
    handler.player_.error({
      message: 'Failed to initialize media keys for EME',
      code: 3
    });
  });
};
26214
/**
 * Handler for the tech's `waitingforkey` event. The key data needed may have
 * arrived in a manifest update (e.g. live streams rotating keys), so retry
 * key session creation. videojs-contrib-eme deduplicates by PSSH data, so
 * re-requesting sessions does not generate extraneous license requests.
 */
_proto.handleWaitingForKey_ = function handleWaitingForKey_() {
  this.logger_('waitingforkey fired, attempting to create any new key sessions');
  this.createKeySessions_();
}
/**
 * If necessary and EME is available, sets up EME options and waits for key session
 * creation.
 *
 * This function also updates the source updater so that it can be used, as for some
 * browsers, EME must be configured before content is appended (if appending unencrypted
 * content before encrypted content).
 */
;
26236
// Configure EME for the current media, wire DRM-related event handlers, and
// (except on IE11 or when EME options could not be set up) start key session
// creation.
_proto.setupEme_ = function setupEme_() {
  var _this5 = this;

  var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
  var didSetupEmeOptions = setupEmeOptions({
    player: this.player_,
    sourceKeySystems: this.source_.keySystems,
    media: this.playlists.media(),
    audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
  });
  // On an "output-restricted" key status, exclude playlists with a vertical
  // resolution of 720 or more from ABR selection.
  this.player_.tech_.on('keystatuschange', function (e) {
    if (e.status !== 'output-restricted') {
      return;
    }

    var masterPlaylist = _this5.masterPlaylistController_.master();

    if (!masterPlaylist || !masterPlaylist.playlists) {
      return;
    }

    var excludedHDPlaylists = []; // Assume all HD streams are unplayable and exclude them from ABR selection

    masterPlaylist.playlists.forEach(function (playlist) {
      if (playlist && playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height >= 720) {
        // Only exclude playlists that are not already excluded forever.
        if (!playlist.excludeUntil || playlist.excludeUntil < Infinity) {
          playlist.excludeUntil = Infinity;
          excludedHDPlaylists.push(playlist);
        }
      }
    });

    if (excludedHDPlaylists.length) {
      var _videojs$log;

      (_videojs$log = videojs.log).warn.apply(_videojs$log, ['DRM keystatus changed to "output-restricted." Removing the following HD playlists ' + 'that will most likely fail to play and clearing the buffer. ' + 'This may be due to HDCP restrictions on the stream and the capabilities of the current device.'].concat(excludedHDPlaylists)); // Clear the buffer before switching playlists, since it may already contain unplayable segments


      _this5.masterPlaylistController_.fastQualityChange_();
    }
  });
  // Bind once so the same reference can be removed in dispose().
  this.handleWaitingForKey_ = this.handleWaitingForKey_.bind(this);
  this.player_.tech_.on('waitingforkey', this.handleWaitingForKey_); // In IE11 this is too early to initialize media keys, and IE11 does not support
  // promises.

  if (videojs.browser.IE_VERSION === 11 || !didSetupEmeOptions) {
    // If EME options were not set up, we've done all we could to initialize EME.
    this.masterPlaylistController_.sourceUpdater_.initializedEme();
    return;
  }

  this.createKeySessions_();
}
/**
 * Initializes the quality levels and sets listeners to update them.
 *
 * @method setupQualityLevels_
 * @private
 */
;
26297
_proto.setupQualityLevels_ = function setupQualityLevels_() {
  var handler = this;
  var player = videojs.players[this.tech_.options_.playerId];

  // Bail out when there is no player, the qualityLevels plugin is absent,
  // or the listeners have already been wired up.
  if (!player || !player.qualityLevels || this.qualityLevels_) {
    return;
  }

  this.qualityLevels_ = player.qualityLevels();

  // Populate levels once initial media is chosen, then keep the selected
  // level in sync on every media change.
  this.masterPlaylistController_.on('selectedinitialmedia', function () {
    handleVhsLoadedMetadata(handler.qualityLevels_, handler);
  });
  this.playlists.on('mediachange', function () {
    handleVhsMediaChange(handler.qualityLevels_, handler.playlists);
  });
}
/**
 * return the version
 */
;
26320
VhsHandler.version = function version$5() {
  // Versions of http-streaming and each bundled dependency.
  var versions = {
    '@videojs/http-streaming': version$4,
    'mux.js': version$3,
    'mpd-parser': version$2,
    'm3u8-parser': version$1,
    'aes-decrypter': version
  };
  return versions;
}
/**
 * return the version
 */
;
26334
_proto.version = function version() {
  // Delegate to the static version map on the constructor.
  var ctor = this.constructor;
  return ctor.version();
};
26338
_proto.canChangeType = function canChangeType() {
  // Defer to the SourceUpdater's capability check.
  var supported = SourceUpdater.canChangeType();
  return supported;
}
/**
 * Begin playing the video.
 */
;
26346
_proto.play = function play() {
  // Playback is driven by the master playlist controller.
  var mpc = this.masterPlaylistController_;
  mpc.play();
}
/**
 * a wrapper around the function in MasterPlaylistController
 */
;
26354
_proto.setCurrentTime = function setCurrentTime(currentTime) {
  // Forward seeks to the master playlist controller.
  var mpc = this.masterPlaylistController_;
  mpc.setCurrentTime(currentTime);
}
/**
 * a wrapper around the function in MasterPlaylistController
 */
;
26362
_proto.duration = function duration() {
  // The master playlist controller owns the duration calculation.
  var mpc = this.masterPlaylistController_;
  return mpc.duration();
}
/**
 * a wrapper around the function in MasterPlaylistController
 */
;
26370
_proto.seekable = function seekable() {
  // The master playlist controller owns the seekable range.
  var mpc = this.masterPlaylistController_;
  return mpc.seekable();
}
/**
 * Abort all outstanding work and cleanup.
 */
;
26378
_proto.dispose = function dispose() {
  // Tear down owned sub-components first.
  if (this.playbackWatcher_) {
    this.playbackWatcher_.dispose();
  }

  if (this.masterPlaylistController_) {
    this.masterPlaylistController_.dispose();
  }

  if (this.qualityLevels_) {
    this.qualityLevels_.dispose();
  }

  // Remove the convenience references hung off the player.
  if (this.player_) {
    delete this.player_.vhs;
    delete this.player_.dash;
    delete this.player_.hls;
  }

  // And those hung off the tech.
  if (this.tech_) {
    if (this.tech_.vhs) {
      delete this.tech_.vhs;
    }

    // don't check this.tech_.hls as it will log a deprecated warning
    delete this.tech_.hls;
  }

  // Release the MediaSource object URL created in src().
  if (this.mediaSourceUrl_ && window$1.URL.revokeObjectURL) {
    window$1.URL.revokeObjectURL(this.mediaSourceUrl_);
    this.mediaSourceUrl_ = null;
  }

  if (this.tech_) {
    this.tech_.off('waitingforkey', this.handleWaitingForKey_);
  }

  _Component.prototype.dispose.call(this);
};
26418
_proto.convertToProgramTime = function convertToProgramTime(time, callback) {
  // Map a player time to program time using the currently active playlist.
  var request = {
    playlist: this.masterPlaylistController_.media(),
    time: time,
    callback: callback
  };
  return getProgramTime(request);
} // the player must be playing before calling this
;
26427
_proto.seekToProgramTime = function seekToProgramTime$1(programTime, callback, pauseAfterSeek, retryCount) {
  // Apply defaults for the optional arguments: pause after the seek, and
  // retry the seek up to twice.
  if (typeof pauseAfterSeek === 'undefined') {
    pauseAfterSeek = true;
  }

  if (typeof retryCount === 'undefined') {
    retryCount = 2;
  }

  // Delegate to the module-level seekToProgramTime helper.
  return seekToProgramTime({
    programTime: programTime,
    playlist: this.masterPlaylistController_.media(),
    retryCount: retryCount,
    pauseAfterSeek: pauseAfterSeek,
    seekTo: this.options_.seekTo,
    tech: this.options_.tech,
    callback: callback
  });
};
26447
26448 return VhsHandler;
26449}(Component);
26450/**
26451 * The Source Handler object, which informs video.js what additional
26452 * MIME types are supported and sets up playback. It is registered
26453 * automatically to the appropriate tech based on the capabilities of
26454 * the browser it is running in. It is not necessary to use or modify
26455 * this object in normal usage.
26456 */
26457
26458
// The video.js source handler: decides whether VHS can handle a given
// source/type and, when chosen, creates a VhsHandler and attaches it to the
// tech.
var VhsSourceHandler = {
  name: 'videojs-http-streaming',
  VERSION: version$4,
  // Returns VhsSourceHandler.canPlayType for the source's type, with
  // per-call options merged over the global videojs.options.
  canHandleSource: function canHandleSource(srcObj, options) {
    if (options === void 0) {
      options = {};
    }

    var localOptions = videojs.mergeOptions(videojs.options, options);
    return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
  },
  // Creates the VhsHandler, exposes it as tech.vhs (plus a deprecated
  // tech.hls alias), gives it an xhr factory, and starts loading the source.
  handleSource: function handleSource(source, tech, options) {
    if (options === void 0) {
      options = {};
    }

    var localOptions = videojs.mergeOptions(videojs.options, options);
    tech.vhs = new VhsHandler(source, tech, localOptions);

    // NOTE(review): the guard tests videojs.hasOwnProperty('hls') but the
    // property is defined on `tech` — this looks like it may have been meant
    // as tech.hasOwnProperty('hls'); confirm against upstream before changing.
    if (!videojs.hasOwnProperty('hls')) {
      Object.defineProperty(tech, 'hls', {
        get: function get() {
          videojs.log.warn('player.tech().hls is deprecated. Use player.tech().vhs instead.');
          return tech.vhs;
        },
        configurable: true
      });
    }

    tech.vhs.xhr = xhrFactory();
    tech.vhs.src(source.src, source.type);
    return tech.vhs;
  },
  // Returns 'maybe' for recognized streaming types when MSE playback should
  // be used (no native support, or native support is being overridden);
  // otherwise returns ''.
  canPlayType: function canPlayType(type, options) {
    var simpleType = simpleTypeFromSourceType(type);

    if (!simpleType) {
      return '';
    }

    var overrideNative = VhsSourceHandler.getOverrideNative(options);
    var supportsTypeNatively = Vhs.supportsTypeNatively(simpleType);
    var canUseMsePlayback = !supportsTypeNatively || overrideNative;
    return canUseMsePlayback ? 'maybe' : '';
  },
  // Resolves the effective overrideNative flag from options.vhs / options.hls.
  // The default is true except on Safari and iOS; the legacy hls option only
  // overrides when explicitly set truthy.
  getOverrideNative: function getOverrideNative(options) {
    if (options === void 0) {
      options = {};
    }

    var _options = options,
        _options$vhs = _options.vhs,
        vhs = _options$vhs === void 0 ? {} : _options$vhs,
        _options$hls = _options.hls,
        hls = _options$hls === void 0 ? {} : _options$hls;
    var defaultOverrideNative = !(videojs.browser.IS_ANY_SAFARI || videojs.browser.IS_IOS);
    var _vhs$overrideNative = vhs.overrideNative,
        overrideNative = _vhs$overrideNative === void 0 ? defaultOverrideNative : _vhs$overrideNative;
    var _hls$overrideNative = hls.overrideNative,
        legacyOverrideNative = _hls$overrideNative === void 0 ? false : _hls$overrideNative;
    return legacyOverrideNative || overrideNative;
  }
};
26522/**
26523 * Check to see if the native MediaSource object exists and supports
26524 * an MP4 container with both H.264 video and AAC-LC audio.
26525 *
26526 * @return {boolean} if native media sources are supported
26527 */
26528
/**
 * Check to see if the browser's MediaSource implementation supports an MP4
 * container with both H.264 video and AAC-LC audio.
 *
 * @return {boolean} if native media sources are supported
 */
var supportsNativeMediaSources = function supportsNativeMediaSources() {
  var h264PlusAac = 'avc1.4d400d,mp4a.40.2';
  return browserSupportsCodec(h264PlusAac);
}; // register source handlers with the appropriate techs
26532
26533
// Register VHS with the Html5 tech (at priority 0) only when MSE reports
// support for the H.264/AAC codec pair tested above.
if (supportsNativeMediaSources()) {
  videojs.getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
}

// Expose the handler/source-handler/Vhs objects on videojs, with deprecated
// Hls-named aliases that warn on access.
videojs.VhsHandler = VhsHandler;
Object.defineProperty(videojs, 'HlsHandler', {
  get: function get() {
    videojs.log.warn('videojs.HlsHandler is deprecated. Use videojs.VhsHandler instead.');
    return VhsHandler;
  },
  configurable: true
});
videojs.VhsSourceHandler = VhsSourceHandler;
Object.defineProperty(videojs, 'HlsSourceHandler', {
  get: function get() {
    videojs.log.warn('videojs.HlsSourceHandler is deprecated. ' + 'Use videojs.VhsSourceHandler instead.');
    return VhsSourceHandler;
  },
  configurable: true
});
videojs.Vhs = Vhs;
Object.defineProperty(videojs, 'Hls', {
  get: function get() {
    videojs.log.warn('videojs.Hls is deprecated. Use videojs.Vhs instead.');
    return Vhs;
  },
  configurable: true
});

// presumably a fallback for video.js builds lacking videojs.use (the
// middleware API): register Vhs as components directly — verify against
// supported video.js versions.
if (!videojs.use) {
  videojs.registerComponent('Hls', Vhs);
  videojs.registerComponent('Vhs', Vhs);
}

// Ensure the option namespaces exist so later reads don't need guards.
videojs.options.vhs = videojs.options.vhs || {};
videojs.options.hls = videojs.options.hls || {};

// Register the reloadSourceOnError plugin once, supporting both the modern
// registerPlugin API and the legacy plugin API.
if (!videojs.getPlugin || !videojs.getPlugin('reloadSourceOnError')) {
  var registerPlugin = videojs.registerPlugin || videojs.plugin;
  registerPlugin('reloadSourceOnError', reloadSourceOnError);
}

export { LOCAL_STORAGE_KEY, Vhs, VhsHandler, VhsSourceHandler, emeKeySystems, expandDataUri, getAllPsshKeySystemsOptions, setupEmeOptions, waitForKeySessionCreation };