/*! @name @videojs/http-streaming @version 2.14.2 @license Apache-2.0 */
import _assertThisInitialized from '@babel/runtime/helpers/assertThisInitialized';
import _inheritsLoose from '@babel/runtime/helpers/inheritsLoose';
import document from 'global/document';
import window$1 from 'global/window';
import _resolveUrl from '@videojs/vhs-utils/es/resolve-url.js';
import videojs from 'video.js';
import _extends from '@babel/runtime/helpers/extends';
import { Parser } from 'm3u8-parser';
import { isAudioCodec, translateLegacyCodec, codecsFromDefault, parseCodecs, getMimeForCodec, DEFAULT_VIDEO_CODEC, DEFAULT_AUDIO_CODEC, browserSupportsCodec, muxerSupportsCodec } from '@videojs/vhs-utils/es/codecs.js';
import { simpleTypeFromSourceType } from '@videojs/vhs-utils/es/media-types.js';
export { simpleTypeFromSourceType } from '@videojs/vhs-utils/es/media-types.js';
import { isArrayBufferView, concatTypedArrays, stringToBytes, toUint8 } from '@videojs/vhs-utils/es/byte-helpers';
import { generateSidxKey, parseUTCTiming, parse, addSidxSegmentsToPlaylist } from 'mpd-parser';
import parseSidx from 'mux.js/lib/tools/parse-sidx';
import { getId3Offset } from '@videojs/vhs-utils/es/id3-helpers';
import { detectContainerForBytes, isLikelyFmp4MediaSegment } from '@videojs/vhs-utils/es/containers';
import { ONE_SECOND_IN_TS } from 'mux.js/lib/utils/clock';

/**
 * @file resolve-url.js - Handling how URLs are resolved and manipulated
 */
var resolveUrl = _resolveUrl;
/**
 * Checks whether an xhr request was redirected and returns the correct url
 * depending on the `handleManifestRedirects` option
 *
 * @api private
 *
 * @param {string} url - the url being requested
 * @param {XMLHttpRequest} req - the xhr request result
 *
 * @return {string} the resolved url
 */

var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
  // To understand how the responseURL below is set and generated:
  // - https://fetch.spec.whatwg.org/#concept-response-url
  // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
  if (handleManifestRedirect && req && req.responseURL && url !== req.responseURL) {
    return req.responseURL;
  }

  return url;
};
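
// Illustrative usage (annotation added for readability; not part of the
// original source). With redirect handling enabled, the final responseURL
// wins; otherwise the requested url is kept:
//
//   resolveManifestRedirect(true, 'https://a.example/master.m3u8', {
//     responseURL: 'https://b.example/master.m3u8'
//   }); // => 'https://b.example/master.m3u8'
//
//   resolveManifestRedirect(false, 'https://a.example/master.m3u8', {
//     responseURL: 'https://b.example/master.m3u8'
//   }); // => 'https://a.example/master.m3u8'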

var logger = function logger(source) {
  if (videojs.log.debug) {
    return videojs.log.debug.bind(videojs, 'VHS:', source + " >");
  }

  return function () {};
};

/**
 * ranges
 *
 * Utilities for working with TimeRanges.
 *
 */

var TIME_FUDGE_FACTOR = 1 / 30; // Comparisons between time values such as current time and the end of the buffered range
// can be misleading because of precision differences or when the current media has poorly
// aligned audio and video, which can cause values to be slightly off from what you would
// expect. This value is what we consider to be safe to use in such comparisons to account
// for these scenarios.

var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;

var filterRanges = function filterRanges(timeRanges, predicate) {
  var results = [];
  var i;

  if (timeRanges && timeRanges.length) {
    // Search for ranges that match the predicate
    for (i = 0; i < timeRanges.length; i++) {
      if (predicate(timeRanges.start(i), timeRanges.end(i))) {
        results.push([timeRanges.start(i), timeRanges.end(i)]);
      }
    }
  }

  return videojs.createTimeRanges(results);
};
/**
 * Attempts to find the buffered TimeRange that contains the specified
 * time.
 *
 * @param {TimeRanges} buffered - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @return {TimeRanges} a new TimeRanges object
 */


var findRange = function findRange(buffered, time) {
  return filterRanges(buffered, function (start, end) {
    return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;
  });
};
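
// Illustrative usage (annotation, hypothetical values): with
// SAFE_TIME_DELTA = 0.1, a time just past a range's end still matches:
//
//   findRange(videojs.createTimeRanges([[0, 10], [15, 20]]), 10.05);
//   // => TimeRanges equivalent to [[0, 10]]
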
/**
 * Returns the TimeRanges that begin later than the specified time.
 *
 * @param {TimeRanges} timeRanges - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @return {TimeRanges} a new TimeRanges object.
 */

var findNextRange = function findNextRange(timeRanges, time) {
  return filterRanges(timeRanges, function (start) {
    return start - TIME_FUDGE_FACTOR >= time;
  });
};
/**
 * Returns gaps within a list of TimeRanges
 *
 * @param {TimeRanges} buffered - the TimeRanges object
 * @return {TimeRanges} a TimeRanges object of gaps
 */

var findGaps = function findGaps(buffered) {
  if (buffered.length < 2) {
    return videojs.createTimeRanges();
  }

  var ranges = [];

  for (var i = 1; i < buffered.length; i++) {
    var start = buffered.end(i - 1);
    var end = buffered.start(i);
    ranges.push([start, end]);
  }

  return videojs.createTimeRanges(ranges);
};
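
// Illustrative usage (annotation): two buffered ranges produce the single
// gap between them:
//
//   findGaps(videojs.createTimeRanges([[0, 10], [15, 20]]));
//   // => TimeRanges equivalent to [[10, 15]]
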
/**
 * Calculate the intersection of two TimeRanges
 *
 * @param {TimeRanges} bufferA
 * @param {TimeRanges} bufferB
 * @return {TimeRanges} The intersection of `bufferA` with `bufferB`
 */

var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
  var start = null;
  var end = null;
  var arity = 0;
  var extents = [];
  var ranges = [];

  if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
    return videojs.createTimeRange();
  } // Handle the case where we have both buffers and create an
  // intersection of the two


  var count = bufferA.length; // A) Gather up all start and end times

  while (count--) {
    extents.push({
      time: bufferA.start(count),
      type: 'start'
    });
    extents.push({
      time: bufferA.end(count),
      type: 'end'
    });
  }

  count = bufferB.length;

  while (count--) {
    extents.push({
      time: bufferB.start(count),
      type: 'start'
    });
    extents.push({
      time: bufferB.end(count),
      type: 'end'
    });
  } // B) Sort them by time


  extents.sort(function (a, b) {
    return a.time - b.time;
  }); // C) Go along one by one incrementing arity for start and decrementing
  // arity for ends

  for (count = 0; count < extents.length; count++) {
    if (extents[count].type === 'start') {
      arity++; // D) If arity is ever incremented to 2 we are entering an
      // overlapping range

      if (arity === 2) {
        start = extents[count].time;
      }
    } else if (extents[count].type === 'end') {
      arity--; // E) If arity is ever decremented to 1 we are leaving an
      // overlapping range

      if (arity === 1) {
        end = extents[count].time;
      }
    } // F) Record overlapping ranges


    if (start !== null && end !== null) {
      ranges.push([start, end]);
      start = null;
      end = null;
    }
  }

  return videojs.createTimeRanges(ranges);
};
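
// Illustrative walk-through of the sweep above (annotation): for
// bufferA = [[0, 10]] and bufferB = [[5, 15]] the sorted extents are
// start(0), start(5), end(10), end(15); arity reaches 2 at 5 and drops
// back to 1 at 10, so:
//
//   bufferIntersection(
//     videojs.createTimeRanges([[0, 10]]),
//     videojs.createTimeRanges([[5, 15]])
//   ); // => TimeRanges equivalent to [[5, 10]]
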
/**
 * Gets a human readable string for a TimeRange
 *
 * @param {TimeRange} range
 * @return {string} a human readable string
 */

var printableRange = function printableRange(range) {
  var strArr = [];

  if (!range || !range.length) {
    return '';
  }

  for (var i = 0; i < range.length; i++) {
    strArr.push(range.start(i) + ' => ' + range.end(i));
  }

  return strArr.join(', ');
};
/**
 * Calculates the amount of time left in seconds until the player hits the end of the
 * buffer and causes a rebuffer
 *
 * @param {TimeRange} buffered
 *        The state of the buffer
 * @param {number} currentTime
 *        The current time of the player
 * @param {number} playbackRate
 *        The current playback rate of the player. Defaults to 1.
 * @return {number}
 *         Time until the player has to start rebuffering in seconds.
 * @function timeUntilRebuffer
 */

var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime, playbackRate) {
  if (playbackRate === void 0) {
    playbackRate = 1;
  }

  var bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
  return (bufferedEnd - currentTime) / playbackRate;
};
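
// Illustrative arithmetic (annotation): with 30s buffered, a current time
// of 22s and a 2x playback rate, (30 - 22) / 2 leaves 4 seconds of
// playback before a rebuffer:
//
//   timeUntilRebuffer(videojs.createTimeRanges([[0, 30]]), 22, 2); // => 4
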
/**
 * Converts a TimeRanges object into an array representation
 *
 * @param {TimeRanges} timeRanges
 * @return {Array}
 */

var timeRangesToArray = function timeRangesToArray(timeRanges) {
  var timeRangesList = [];

  for (var i = 0; i < timeRanges.length; i++) {
    timeRangesList.push({
      start: timeRanges.start(i),
      end: timeRanges.end(i)
    });
  }

  return timeRangesList;
};
/**
 * Determines if two time range objects are different.
 *
 * @param {TimeRange} a
 *        the first time range object to check
 *
 * @param {TimeRange} b
 *        the second time range object to check
 *
 * @return {Boolean}
 *         Whether the time range objects differ
 */

var isRangeDifferent = function isRangeDifferent(a, b) {
  // same object
  if (a === b) {
    return false;
  } // one or the other is undefined


  if (!a && b || !b && a) {
    return true;
  } // length is different


  if (a.length !== b.length) {
    return true;
  } // see if any start/end pair is different


  for (var i = 0; i < a.length; i++) {
    if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
      return true;
    }
  } // if the length and every pair is the same
  // this is the same time range


  return false;
};
var lastBufferedEnd = function lastBufferedEnd(a) {
  if (!a || !a.length || !a.end) {
    return;
  }

  return a.end(a.length - 1);
};
/**
 * A utility function to add up the amount of time in a timeRange
 * after a specified startTime.
 * e.g. [[0, 10], [20, 40], [50, 60]] with a startTime of 0
 * would return 40, as there are 40 seconds after 0 in the timeRange
 *
 * @param {TimeRange} range
 *        The range to check against
 * @param {number} startTime
 *        The time in the time range that you should start counting from
 *
 * @return {number}
 *         The number of seconds in the buffer past the specified time.
 */

var timeAheadOf = function timeAheadOf(range, startTime) {
  var time = 0;

  if (!range || !range.length) {
    return time;
  }

  for (var i = 0; i < range.length; i++) {
    var start = range.start(i);
    var end = range.end(i); // startTime is after this range entirely

    if (startTime > end) {
      continue;
    } // startTime is within this range


    if (startTime > start && startTime <= end) {
      time += end - startTime;
      continue;
    } // startTime is before this range.


    time += end - start;
  }

  return time;
};
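
// Illustrative usage (annotation), matching the example in the doc comment
// above:
//
//   var range = videojs.createTimeRanges([[0, 10], [20, 40], [50, 60]]);
//   timeAheadOf(range, 0);  // => 40 (10 + 20 + 10)
//   timeAheadOf(range, 25); // => 25 (15 left in [20, 40], plus 10)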

/**
 * @file playlist.js
 *
 * Playlist related utilities.
 */
var createTimeRange = videojs.createTimeRange;
/**
 * Get the duration of a segment, with special cases for
 * llhls segments that do not have a duration yet.
 *
 * @param {Object} playlist
 *        the playlist that the segment belongs to.
 * @param {Object} segment
 *        the segment to get a duration for.
 *
 * @return {number}
 *         the segment duration
 */

var segmentDurationWithParts = function segmentDurationWithParts(playlist, segment) {
  // if this isn't a preload segment
  // then we will have a segment duration that is accurate.
  if (!segment.preload) {
    return segment.duration;
  } // otherwise we have to add up parts and preload hints
  // to get an up to date duration.


  var result = 0;
  (segment.parts || []).forEach(function (p) {
    result += p.duration;
  }); // for preload hints we have to use partTargetDuration
  // as they won't even have a duration yet.

  (segment.preloadHints || []).forEach(function (p) {
    if (p.type === 'PART') {
      result += playlist.partTargetDuration;
    }
  });
  return result;
};
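
// Illustrative usage (annotation, hypothetical values): a preload segment
// with two 1s parts and one PART preload hint, in a playlist with a
// partTargetDuration of 1, yields 1 + 1 + 1 = 3:
//
//   segmentDurationWithParts({partTargetDuration: 1}, {
//     preload: true,
//     parts: [{duration: 1}, {duration: 1}],
//     preloadHints: [{type: 'PART'}]
//   }); // => 3
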
/**
 * A function to get a combined list of parts and segments with durations
 * and indexes.
 *
 * @param {Playlist} playlist the playlist to get the list for.
 *
 * @return {Array} The part/segment list.
 */

var getPartsAndSegments = function getPartsAndSegments(playlist) {
  return (playlist.segments || []).reduce(function (acc, segment, si) {
    if (segment.parts) {
      segment.parts.forEach(function (part, pi) {
        acc.push({
          duration: part.duration,
          segmentIndex: si,
          partIndex: pi,
          part: part,
          segment: segment
        });
      });
    } else {
      acc.push({
        duration: segment.duration,
        segmentIndex: si,
        partIndex: null,
        segment: segment,
        part: null
      });
    }

    return acc;
  }, []);
};
var getLastParts = function getLastParts(media) {
  var lastSegment = media.segments && media.segments.length && media.segments[media.segments.length - 1];
  return lastSegment && lastSegment.parts || [];
};
var getKnownPartCount = function getKnownPartCount(_ref) {
  var preloadSegment = _ref.preloadSegment;

  if (!preloadSegment) {
    return;
  }

  var parts = preloadSegment.parts,
      preloadHints = preloadSegment.preloadHints;
  var partCount = (preloadHints || []).reduce(function (count, hint) {
    return count + (hint.type === 'PART' ? 1 : 0);
  }, 0);
  partCount += parts && parts.length ? parts.length : 0;
  return partCount;
};
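
// Illustrative usage (annotation): parts plus PART-type preload hints are
// counted; other hint types (e.g. MAP) are not:
//
//   getKnownPartCount({preloadSegment: {
//     parts: [{}, {}],
//     preloadHints: [{type: 'PART'}, {type: 'MAP'}]
//   }}); // => 3
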
/**
 * Get the number of seconds to delay from the end of a
 * live playlist.
 *
 * @param {Playlist} master the master playlist
 * @param {Playlist} media the media playlist
 * @return {number} the hold back in seconds.
 */

var liveEdgeDelay = function liveEdgeDelay(master, media) {
  if (media.endList) {
    return 0;
  } // dash suggestedPresentationDelay trumps everything


  if (master && master.suggestedPresentationDelay) {
    return master.suggestedPresentationDelay;
  }

  var hasParts = getLastParts(media).length > 0; // look for "part" delays from ll-hls first

  if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
    return media.serverControl.partHoldBack;
  } else if (hasParts && media.partTargetDuration) {
    return media.partTargetDuration * 3; // finally look for full segment delays
  } else if (media.serverControl && media.serverControl.holdBack) {
    return media.serverControl.holdBack;
  } else if (media.targetDuration) {
    return media.targetDuration * 3;
  }

  return 0;
};
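
// Illustrative precedence (annotation): suggestedPresentationDelay wins
// over everything, then part-level values, then segment-level values:
//
//   liveEdgeDelay({suggestedPresentationDelay: 12}, {endList: false}); // => 12
//
//   liveEdgeDelay(null, {
//     endList: false,
//     segments: [{parts: [{duration: 1}]}],
//     partTargetDuration: 1
//   }); // => 3 (partTargetDuration * 3)
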
/**
 * walk backward until we find a duration we can use
 * or return a failure
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {number} endSequence the mediaSequence to stop walking on
 */

var backwardDuration = function backwardDuration(playlist, endSequence) {
  var result = 0;
  var i = endSequence - playlist.mediaSequence; // if a start time is available for segment immediately following
  // the interval, use it

  var segment = playlist.segments[i]; // Walk backward until we find the latest segment with timeline
  // information that is earlier than endSequence

  if (segment) {
    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start,
        precise: true
      };
    }

    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - segment.duration,
        precise: true
      };
    }
  }

  while (i--) {
    segment = playlist.segments[i];

    if (typeof segment.end !== 'undefined') {
      return {
        result: result + segment.end,
        precise: true
      };
    }

    result += segmentDurationWithParts(playlist, segment);

    if (typeof segment.start !== 'undefined') {
      return {
        result: result + segment.start,
        precise: true
      };
    }
  }

  return {
    result: result,
    precise: false
  };
};
/**
 * walk forward until we find a duration we can use
 * or return a failure
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {number} endSequence the mediaSequence to stop walking on
 */


var forwardDuration = function forwardDuration(playlist, endSequence) {
  var result = 0;
  var segment;
  var i = endSequence - playlist.mediaSequence; // Walk forward until we find the earliest segment with timeline
  // information

  for (; i < playlist.segments.length; i++) {
    segment = playlist.segments[i];

    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start - result,
        precise: true
      };
    }

    result += segmentDurationWithParts(playlist, segment);

    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - result,
        precise: true
      };
    }
  } // indicate we didn't find a useful duration estimate


  return {
    result: -1,
    precise: false
  };
};
/**
 * Calculate the media duration from the segments associated with a
 * playlist. The duration of a subinterval of the available segments
 * may be calculated by specifying an end index.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper boundary
 * for the playlist. Defaults to playlist length.
 * @param {number} expired the amount of time that has dropped
 * off the front of the playlist in a live scenario
 * @return {number} the duration between the first available segment
 * and end index.
 */


var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
  if (typeof endSequence === 'undefined') {
    endSequence = playlist.mediaSequence + playlist.segments.length;
  }

  if (endSequence < playlist.mediaSequence) {
    return 0;
  } // do a backward walk to estimate the duration


  var backward = backwardDuration(playlist, endSequence);

  if (backward.precise) {
    // if we were able to base our duration estimate on timing
    // information provided directly from the Media Source, return
    // it
    return backward.result;
  } // walk forward to see if a precise duration estimate can be made
  // that way


  var forward = forwardDuration(playlist, endSequence);

  if (forward.precise) {
    // we found a segment that has been buffered and so its
    // position is known precisely
    return forward.result;
  } // return the less-precise, playlist-based duration estimate


  return backward.result + expired;
};
/**
 * Calculates the duration of a playlist. If a start and end index
 * are specified, the duration will be for the subset of the media
 * timeline between those two indices. The total duration for live
 * playlists is always Infinity.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper
 * boundary for the playlist. Defaults to the playlist media
 * sequence number plus its length.
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @return {number} the duration between the start index and end
 * index.
 */


var duration = function duration(playlist, endSequence, expired) {
  if (!playlist) {
    return 0;
  }

  if (typeof expired !== 'number') {
    expired = 0;
  } // if a slice of the total duration is not requested, use
  // playlist-level duration indicators when they're present


  if (typeof endSequence === 'undefined') {
    // if present, use the duration specified in the playlist
    if (playlist.totalDuration) {
      return playlist.totalDuration;
    } // duration should be Infinity for live playlists


    if (!playlist.endList) {
      return window$1.Infinity;
    }
  } // calculate the total duration based on the segment durations


  return intervalDuration(playlist, endSequence, expired);
};
/**
 * Calculate the time between two indexes in the current playlist.
 * Neither the start nor the end index needs to be within the current
 * playlist; in that case, the default duration is used to approximate
 * the durations of the segments.
 *
 * @param {Array} options.durationList list to iterate over for durations.
 * @param {number} options.defaultDuration duration to use for elements before or after the durationList
 * @param {number} options.startIndex partsAndSegments index to start
 * @param {number} options.endIndex partsAndSegments index to end.
 * @return {number} the number of seconds between startIndex and endIndex
 */

var sumDurations = function sumDurations(_ref2) {
  var defaultDuration = _ref2.defaultDuration,
      durationList = _ref2.durationList,
      startIndex = _ref2.startIndex,
      endIndex = _ref2.endIndex;
  var durations = 0;

  if (startIndex > endIndex) {
    var _ref3 = [endIndex, startIndex];
    startIndex = _ref3[0];
    endIndex = _ref3[1];
  }

  if (startIndex < 0) {
    for (var i = startIndex; i < Math.min(0, endIndex); i++) {
      durations += defaultDuration;
    }

    startIndex = 0;
  }

  for (var _i = startIndex; _i < endIndex; _i++) {
    durations += durationList[_i].duration;
  }

  return durations;
};
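
// Illustrative usage (annotation): indexes below zero fall back to
// defaultDuration; the rest are summed from the list:
//
//   sumDurations({
//     defaultDuration: 10,
//     durationList: [{duration: 5}, {duration: 6}],
//     startIndex: -2,
//     endIndex: 2
//   }); // => 31 (10 + 10 + 5 + 6)
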
/**
 * Calculates the playlist end time
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @param {boolean} useSafeLiveEnd a boolean value indicating whether or not the
 * playlist end calculation should consider the safe live end
 * (truncate the playlist end by three segments). This is normally
 * used for calculating the end of the playlist's seekable range.
 * This takes into account the value of liveEdgePadding.
 * Setting liveEdgePadding to 0 is equivalent to setting this to false.
 * @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
 * If this is provided, it is used in the safe live end calculation.
 * Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
 * Corresponds to suggestedPresentationDelay in DASH manifests.
 * @return {number} the end time of playlist
 * @function playlistEnd
 */

var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
  if (!playlist || !playlist.segments) {
    return null;
  }

  if (playlist.endList) {
    return duration(playlist);
  }

  if (expired === null) {
    return null;
  }

  expired = expired || 0;
  var lastSegmentEndTime = intervalDuration(playlist, playlist.mediaSequence + playlist.segments.length, expired);

  if (useSafeLiveEnd) {
    liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
    lastSegmentEndTime -= liveEdgePadding;
  } // don't return a time less than zero


  return Math.max(0, lastSegmentEndTime);
};
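
// Illustrative usage (annotation, hypothetical live playlist): three 10s
// segments with no precise timing information sum to 30, and the safe live
// end subtracts the padding:
//
//   playlistEnd({
//     endList: false,
//     mediaSequence: 0,
//     segments: [{duration: 10}, {duration: 10}, {duration: 10}]
//   }, 0, true, 5); // => 25
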
/**
 * Calculates the interval of time that is currently seekable in a
 * playlist. The returned time ranges are relative to the earliest
 * moment in the specified playlist that is still available. A full
 * seekable implementation for live streams would need to offset
 * these values by the duration of content that has expired from the
 * stream.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
 * Corresponds to suggestedPresentationDelay in DASH manifests.
 * @return {TimeRanges} the periods of time that are valid targets
 * for seeking
 */

var seekable = function seekable(playlist, expired, liveEdgePadding) {
  var useSafeLiveEnd = true;
  var seekableStart = expired || 0;
  var seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);

  if (seekableEnd === null) {
    return createTimeRange();
  }

  return createTimeRange(seekableStart, seekableEnd);
};
/**
 * Determine the index and estimated starting time of the segment that
 * contains a specified playback position in a media playlist.
 *
 * @param {Object} options.playlist the media playlist to query
 * @param {number} options.currentTime The number of seconds since the earliest
 * possible position to determine the containing segment for
 * @param {number} options.startTime the time when the segment/part starts
 * @param {number} options.startingSegmentIndex the segment index to start looking at.
 * @param {number?} [options.startingPartIndex] the part index to look at within the segment.
 *
 * @return {Object} an object with partIndex, segmentIndex, and startTime.
 */

var getMediaInfoForTime = function getMediaInfoForTime(_ref4) {
  var playlist = _ref4.playlist,
      currentTime = _ref4.currentTime,
      startingSegmentIndex = _ref4.startingSegmentIndex,
      startingPartIndex = _ref4.startingPartIndex,
      startTime = _ref4.startTime,
      experimentalExactManifestTimings = _ref4.experimentalExactManifestTimings;
  var time = currentTime - startTime;
  var partsAndSegments = getPartsAndSegments(playlist);
  var startIndex = 0;

  for (var i = 0; i < partsAndSegments.length; i++) {
    var partAndSegment = partsAndSegments[i];

    if (startingSegmentIndex !== partAndSegment.segmentIndex) {
      continue;
    } // skip this if part index does not match.


    if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {
      continue;
    }

    startIndex = i;
    break;
  }

  if (time < 0) {
    // Walk backward from startIndex in the playlist, adding durations
    // until we find a segment that contains `time` and return it
    if (startIndex > 0) {
      for (var _i2 = startIndex - 1; _i2 >= 0; _i2--) {
        var _partAndSegment = partsAndSegments[_i2];
        time += _partAndSegment.duration;

        if (experimentalExactManifestTimings) {
          if (time < 0) {
            continue;
          }
        } else if (time + TIME_FUDGE_FACTOR <= 0) {
          continue;
        }

        return {
          partIndex: _partAndSegment.partIndex,
          segmentIndex: _partAndSegment.segmentIndex,
          startTime: startTime - sumDurations({
            defaultDuration: playlist.targetDuration,
            durationList: partsAndSegments,
            startIndex: startIndex,
            endIndex: _i2
          })
        };
      }
    } // We were unable to find a good segment within the playlist
    // so select the first segment


    return {
      partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
      segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
      startTime: currentTime
    };
  } // When startIndex is negative, we first walk forward to first segment
  // adding target durations. If we "run out of time" before getting to
  // the first segment, return the first segment


  if (startIndex < 0) {
    for (var _i3 = startIndex; _i3 < 0; _i3++) {
      time -= playlist.targetDuration;

      if (time < 0) {
        return {
          partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
          segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
          startTime: currentTime
        };
      }
    }

    startIndex = 0;
  } // Walk forward from startIndex in the playlist, subtracting durations
  // until we find a segment that contains `time` and return it


  for (var _i4 = startIndex; _i4 < partsAndSegments.length; _i4++) {
    var _partAndSegment2 = partsAndSegments[_i4];
    time -= _partAndSegment2.duration;

    if (experimentalExactManifestTimings) {
      if (time > 0) {
        continue;
      }
    } else if (time - TIME_FUDGE_FACTOR >= 0) {
      continue;
    }

    return {
      partIndex: _partAndSegment2.partIndex,
      segmentIndex: _partAndSegment2.segmentIndex,
      startTime: startTime + sumDurations({
        defaultDuration: playlist.targetDuration,
        durationList: partsAndSegments,
        startIndex: startIndex,
        endIndex: _i4
      })
    };
  } // We are out of possible candidates so load the last one...


  return {
    segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
    partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
    startTime: currentTime
  };
};
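
// Illustrative usage (annotation): in a playlist of 10s segments with no
// parts, a currentTime of 25 lands in the third segment, which starts at 20:
//
//   getMediaInfoForTime({
//     playlist: {targetDuration: 10, segments: [
//       {duration: 10}, {duration: 10}, {duration: 10}
//     ]},
//     currentTime: 25,
//     startingSegmentIndex: 0,
//     startingPartIndex: null,
//     startTime: 0
//   }); // => {partIndex: null, segmentIndex: 2, startTime: 20}
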
/**
 * Check whether the playlist is blacklisted or not.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is blacklisted or not
 * @function isBlacklisted
 */

var isBlacklisted = function isBlacklisted(playlist) {
  return playlist.excludeUntil && playlist.excludeUntil > Date.now();
};
/**
 * Check whether the playlist is compatible with current playback configuration or has
 * been blacklisted permanently for being incompatible.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is incompatible or not
 * @function isIncompatible
 */

var isIncompatible = function isIncompatible(playlist) {
  return playlist.excludeUntil && playlist.excludeUntil === Infinity;
};
/**
 * Check whether the playlist is enabled or not.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is enabled or not
 * @function isEnabled
 */

var isEnabled = function isEnabled(playlist) {
  var blacklisted = isBlacklisted(playlist);
  return !playlist.disabled && !blacklisted;
};
/**
 * Check whether the playlist has been manually disabled through the representations api.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is disabled manually or not
 * @function isDisabled
 */

var isDisabled = function isDisabled(playlist) {
  return playlist.disabled;
};
/**
 * Returns whether the current playlist is an AES encrypted HLS stream
 *
 * @return {boolean} true if it's an AES encrypted HLS stream
 */

var isAes = function isAes(media) {
  for (var i = 0; i < media.segments.length; i++) {
    if (media.segments[i].key) {
      return true;
    }
  }

  return false;
};
/**
 * Checks if the playlist has a value for the specified attribute
 *
 * @param {string} attr
 *        Attribute to check for
 * @param {Object} playlist
 *        The media playlist object
 * @return {boolean}
 *         Whether the playlist contains a value for the attribute or not
 * @function hasAttribute
 */

var hasAttribute = function hasAttribute(attr, playlist) {
  return playlist.attributes && playlist.attributes[attr];
};
/**
 * Estimates the time required to complete a segment download from the specified playlist
 *
 * @param {number} segmentDuration
 *        Duration of requested segment
 * @param {number} bandwidth
 *        Current measured bandwidth of the player
 * @param {Object} playlist
 *        The media playlist object
 * @param {number=} bytesReceived
 *        Number of bytes already received for the request. Defaults to 0
 * @return {number|NaN}
 *         The estimated time to request the segment. NaN if bandwidth information for
 *         the given playlist is unavailable
 * @function estimateSegmentRequestTime
 */

var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist, bytesReceived) {
  if (bytesReceived === void 0) {
    bytesReceived = 0;
  }

  if (!hasAttribute('BANDWIDTH', playlist)) {
    return NaN;
  }

  var size = segmentDuration * playlist.attributes.BANDWIDTH;
  return (size - bytesReceived * 8) / bandwidth;
};
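
// Illustrative arithmetic (annotation): a 4s segment in a 1,000,000 bps
// rendition is about 4,000,000 bits; having already received 250,000 bytes
// (2,000,000 bits), the remainder at 2,000,000 bps takes 1 second:
//
//   estimateSegmentRequestTime(4, 2e6, {
//     attributes: {BANDWIDTH: 1e6}
//   }, 250000); // => 1
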
/*
 * Returns whether the current playlist is the lowest rendition
 *
 * @return {Boolean} true if on lowest rendition
 */

var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
  if (master.playlists.length === 1) {
    return true;
  }

  var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
  return master.playlists.filter(function (playlist) {
    if (!isEnabled(playlist)) {
      return false;
    }

    return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
  }).length === 0;
};
var playlistMatch = function playlistMatch(a, b) {
  // both playlists are null
  // or only one playlist is non-null
  // no match
  if (!a && !b || !a && b || a && !b) {
    return false;
  } // playlist objects are the same, match


  if (a === b) {
    return true;
  } // first try to use id as it should be the most
  // accurate


  if (a.id && b.id && a.id === b.id) {
    return true;
  } // next try to use resolvedUri as it should be the
  // second most accurate.


  if (a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri) {
    return true;
  } // finally try to use uri as it should be accurate
  // but might miss a few cases for relative uris


  if (a.uri && b.uri && a.uri === b.uri) {
    return true;
  }

  return false;
};

var someAudioVariant = function someAudioVariant(master, callback) {
  var AUDIO = master && master.mediaGroups && master.mediaGroups.AUDIO || {};
  var found = false;

  for (var groupName in AUDIO) {
    for (var label in AUDIO[groupName]) {
      found = callback(AUDIO[groupName][label]);

      if (found) {
        break;
      }
    }

    if (found) {
      break;
    }
  }

  return !!found;
};

var isAudioOnly = function isAudioOnly(master) {
  // we are audio only if we have no main playlists but do
  // have media group playlists.
  if (!master || !master.playlists || !master.playlists.length) {
    // without audio variants or playlists this
    // is not an audio only master.
    var found = someAudioVariant(master, function (variant) {
      return variant.playlists && variant.playlists.length || variant.uri;
    });
    return found;
  } // if every playlist has only an audio codec it is audio only


  var _loop = function _loop(i) {
    var playlist = master.playlists[i];
    var CODECS = playlist.attributes && playlist.attributes.CODECS; // all codecs are audio, this is an audio playlist.

    if (CODECS && CODECS.split(',').every(function (c) {
      return isAudioCodec(c);
    })) {
      return "continue";
    } // if the playlist is in an audio group, it is audio only


    var found = someAudioVariant(master, function (variant) {
      return playlistMatch(playlist, variant);
    });

    if (found) {
      return "continue";
    } // if we make it here this playlist isn't audio and we
    // are not audio only


    return {
      v: false
    };
  };

  for (var i = 0; i < master.playlists.length; i++) {
    var _ret = _loop(i);

    if (_ret === "continue") continue;
    if (typeof _ret === "object") return _ret.v;
  } // if we make it past every playlist without returning, then
  // this is an audio only playlist.


  return true;
}; // exports

var Playlist = {
  liveEdgeDelay: liveEdgeDelay,
  duration: duration,
  seekable: seekable,
  getMediaInfoForTime: getMediaInfoForTime,
  isEnabled: isEnabled,
  isDisabled: isDisabled,
  isBlacklisted: isBlacklisted,
  isIncompatible: isIncompatible,
  playlistEnd: playlistEnd,
  isAes: isAes,
  hasAttribute: hasAttribute,
  estimateSegmentRequestTime: estimateSegmentRequestTime,
  isLowestEnabledRendition: isLowestEnabledRendition,
  isAudioOnly: isAudioOnly,
  playlistMatch: playlistMatch,
  segmentDurationWithParts: segmentDurationWithParts
};

var log = videojs.log;
var createPlaylistID = function createPlaylistID(index, uri) {
  return index + "-" + uri;
};
/**
 * Parses a given m3u8 playlist
 *
 * @param {Function} [onwarn]
 *        a function to call when the parser triggers a warning event.
 * @param {Function} [oninfo]
 *        a function to call when the parser triggers an info event.
 * @param {string} manifestString
 *        The downloaded manifest string
 * @param {Object[]} [customTagParsers]
 *        An array of custom tag parsers for the m3u8-parser instance
 * @param {Object[]} [customTagMappers]
 *        An array of custom tag mappers for the m3u8-parser instance
 * @param {boolean} [experimentalLLHLS=false]
 *        Whether to keep ll-hls features in the manifest after parsing.
 * @return {Object}
 *         The manifest object
 */

var parseManifest = function parseManifest(_ref) {
  var onwarn = _ref.onwarn,
      oninfo = _ref.oninfo,
      manifestString = _ref.manifestString,
      _ref$customTagParsers = _ref.customTagParsers,
      customTagParsers = _ref$customTagParsers === void 0 ? [] : _ref$customTagParsers,
      _ref$customTagMappers = _ref.customTagMappers,
      customTagMappers = _ref$customTagMappers === void 0 ? [] : _ref$customTagMappers,
      experimentalLLHLS = _ref.experimentalLLHLS;
  var parser = new Parser();

  if (onwarn) {
    parser.on('warn', onwarn);
  }

  if (oninfo) {
    parser.on('info', oninfo);
  }

  customTagParsers.forEach(function (customParser) {
    return parser.addParser(customParser);
  });
  customTagMappers.forEach(function (mapper) {
    return parser.addTagMapper(mapper);
  });
  parser.push(manifestString);
  parser.end();
  var manifest = parser.manifest; // remove llhls features from the parsed manifest
  // if we don't want llhls support.

  if (!experimentalLLHLS) {
    ['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration'].forEach(function (k) {
      if (manifest.hasOwnProperty(k)) {
        delete manifest[k];
      }
    });

    if (manifest.segments) {
      manifest.segments.forEach(function (segment) {
        ['parts', 'preloadHints'].forEach(function (k) {
          if (segment.hasOwnProperty(k)) {
            delete segment[k];
          }
        });
      });
    }
  }

  if (!manifest.targetDuration) {
    var targetDuration = 10;

    if (manifest.segments && manifest.segments.length) {
      targetDuration = manifest.segments.reduce(function (acc, s) {
        return Math.max(acc, s.duration);
      }, 0);
    }

    if (onwarn) {
      onwarn("manifest has no targetDuration defaulting to " + targetDuration);
    }

    manifest.targetDuration = targetDuration;
  }

  var parts = getLastParts(manifest);

  if (parts.length && !manifest.partTargetDuration) {
    var partTargetDuration = parts.reduce(function (acc, p) {
      return Math.max(acc, p.duration);
    }, 0);

    if (onwarn) {
      onwarn("manifest has no partTargetDuration defaulting to " + partTargetDuration);
      log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. Playback is not guaranteed.');
    }

    manifest.partTargetDuration = partTargetDuration;
  }

  return manifest;
};
/**
 * Loops through all supported media groups in master and calls the provided
 * callback for each group
 *
 * @param {Object} master
 *        The parsed master manifest object
 * @param {Function} callback
 *        Callback to call for each media group
 */

var forEachMediaGroup = function forEachMediaGroup(master, callback) {
  if (!master.mediaGroups) {
    return;
  }

  ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
    if (!master.mediaGroups[mediaType]) {
      return;
    }

    for (var groupKey in master.mediaGroups[mediaType]) {
      for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
        var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
        callback(mediaProperties, mediaType, groupKey, labelKey);
      }
    }
  });
};
/**
 * Adds properties and attributes to the playlist to keep consistent functionality for
 * playlists throughout VHS.
 *
 * @param {Object} config
 *        Arguments object
 * @param {Object} config.playlist
 *        The media playlist
 * @param {string} [config.uri]
 *        The uri to the media playlist (if media playlist is not from within a master
 *        playlist)
 * @param {string} id
 *        ID to use for the playlist
 */

var setupMediaPlaylist = function setupMediaPlaylist(_ref2) {
  var playlist = _ref2.playlist,
      uri = _ref2.uri,
      id = _ref2.id;
  playlist.id = id;
  playlist.playlistErrors_ = 0;

  if (uri) {
    // For media playlists, m3u8-parser does not have access to a URI, as HLS media
    // playlists do not contain their own source URI, but one is needed for consistency in
    // VHS.
    playlist.uri = uri;
  } // For HLS master playlists, even though certain attributes MUST be defined, the
  // stream may still be played without them.
  // For HLS media playlists, m3u8-parser does not attach an attributes object to the
  // manifest.
  //
  // To avoid undefined reference errors through the project, and make the code easier
  // to write/read, add an empty attributes object for these cases.


  playlist.attributes = playlist.attributes || {};
};
/**
 * Adds ID, resolvedUri, and attributes properties to each playlist of the master, where
 * necessary. In addition, creates playlist IDs for each playlist and adds playlist ID to
 * playlist references to the playlists array.
 *
 * @param {Object} master
 *        The master playlist
 */

var setupMediaPlaylists = function setupMediaPlaylists(master) {
  var i = master.playlists.length;

  while (i--) {
    var playlist = master.playlists[i];
    setupMediaPlaylist({
      playlist: playlist,
      id: createPlaylistID(i, playlist.uri)
    });
    playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
    master.playlists[playlist.id] = playlist; // URI reference added for backwards compatibility

    master.playlists[playlist.uri] = playlist; // Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,
    // the stream can be played without it. Although an attributes property may have been
    // added to the playlist to prevent undefined references, issue a warning to fix the
    // manifest.

    if (!playlist.attributes.BANDWIDTH) {
      log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
    }
  }
};
/**
 * Adds resolvedUri properties to each media group.
 *
 * @param {Object} master
 *        The master playlist
 */

var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
  forEachMediaGroup(master, function (properties) {
    if (properties.uri) {
      properties.resolvedUri = resolveUrl(master.uri, properties.uri);
    }
  });
};
/**
 * Creates a master playlist wrapper to insert a sole media playlist into.
 *
 * @param {Object} media
 *        Media playlist
 * @param {string} uri
 *        The media URI
 *
 * @return {Object}
 *         Master playlist
 */

var masterForMedia = function masterForMedia(media, uri) {
  var id = createPlaylistID(0, uri);
  var master = {
    mediaGroups: {
      'AUDIO': {},
      'VIDEO': {},
      'CLOSED-CAPTIONS': {},
      'SUBTITLES': {}
    },
    uri: window$1.location.href,
    resolvedUri: window$1.location.href,
    playlists: [{
      uri: uri,
      id: id,
      resolvedUri: uri,
      // m3u8-parser does not attach an attributes property to media playlists so make
      // sure that the property is attached to avoid undefined reference errors
      attributes: {}
    }]
  }; // set up ID reference

  master.playlists[id] = master.playlists[0]; // URI reference added for backwards compatibility

  master.playlists[uri] = master.playlists[0];
  return master;
};
/**
 * Does an in-place update of the master manifest to add updated playlist URI references
 * as well as other properties needed by VHS that aren't included by the parser.
 *
 * @param {Object} master
 *        Master manifest object
 * @param {string} uri
 *        The source URI
 */

var addPropertiesToMaster = function addPropertiesToMaster(master, uri) {
  master.uri = uri;

  for (var i = 0; i < master.playlists.length; i++) {
    if (!master.playlists[i].uri) {
      // Set up phony URIs for the playlists since playlists are referenced by their URIs
      // throughout VHS, but some formats (e.g., DASH) don't have external URIs
      // TODO: consider adding dummy URIs in mpd-parser
      var phonyUri = "placeholder-uri-" + i;
      master.playlists[i].uri = phonyUri;
    }
  }

  var audioOnlyMaster = isAudioOnly(master);
  forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
    var groupId = "placeholder-uri-" + mediaType + "-" + groupKey + "-" + labelKey; // add a playlist array under properties

    if (!properties.playlists || !properties.playlists.length) {
      // If the manifest is audio only and this media group does not have a uri, check
      // if the media group is located in the main list of playlists. If it is, don't add
      // placeholder properties as it shouldn't be considered an alternate audio track.
      if (audioOnlyMaster && mediaType === 'AUDIO' && !properties.uri) {
        for (var _i = 0; _i < master.playlists.length; _i++) {
          var p = master.playlists[_i];

          if (p.attributes && p.attributes.AUDIO && p.attributes.AUDIO === groupKey) {
            return;
          }
        }
      }

      properties.playlists = [_extends({}, properties)];
    }

    properties.playlists.forEach(function (p, i) {
      var id = createPlaylistID(i, groupId);

      if (p.uri) {
        p.resolvedUri = p.resolvedUri || resolveUrl(master.uri, p.uri);
      } else {
        // DEPRECATED, this has been added to prevent a breaking change.
        // previously we only ever had a single media group playlist, so
        // we mark the first playlist uri without prepending the index as we used to
        // ideally we would do all of the playlists the same way.
        p.uri = i === 0 ? groupId : id; // don't resolve a placeholder uri to an absolute url, just use
        // the placeholder again

        p.resolvedUri = p.uri;
      }

      p.id = p.id || id; // add an empty attributes object, all playlists are
      // expected to have this.

      p.attributes = p.attributes || {}; // setup ID and URI references (URI for backwards compatibility)

      master.playlists[p.id] = p;
      master.playlists[p.uri] = p;
    });
  });
  setupMediaPlaylists(master);
  resolveMediaGroupUris(master);
};

var mergeOptions$2 = videojs.mergeOptions,
    EventTarget$1 = videojs.EventTarget;

var addLLHLSQueryDirectives = function addLLHLSQueryDirectives(uri, media) {
  if (media.endList || !media.serverControl) {
    return uri;
  }

  var parameters = {};

  if (media.serverControl.canBlockReload) {
    var preloadSegment = media.preloadSegment; // next msn is a zero based value, length is not.

    var nextMSN = media.mediaSequence + media.segments.length; // If preload segment has parts then it is likely
    // that we are going to request a part of that preload segment.
    // the logic below is used to determine that.

    if (preloadSegment) {
      var parts = preloadSegment.parts || []; // _HLS_part is a zero based index

      var nextPart = getKnownPartCount(media) - 1; // if nextPart is > -1 and not equal to just the
      // length of parts, then we know we had part preload hints
      // and we need to add the _HLS_part= query

      if (nextPart > -1 && nextPart !== parts.length - 1) {
        // add existing parts to our preload hints
        // eslint-disable-next-line
        parameters._HLS_part = nextPart;
      } // this if statement makes sure that we request the msn
      // of the preload segment if:
      // 1. the preload segment had parts (and was not yet a full segment)
      //    but was added to our segments array
      // 2. the preload segment had preload hints for parts that are not in
      //    the manifest yet.
      // in all other cases we want the segment after the preload segment
      // which will be given by using media.segments.length because it is 1 based
      // rather than 0 based.


      if (nextPart > -1 || parts.length) {
        nextMSN--;
      }
    } // add _HLS_msn= in front of any _HLS_part query
    // eslint-disable-next-line


    parameters._HLS_msn = nextMSN;
  }

  if (media.serverControl && media.serverControl.canSkipUntil) {
    // add _HLS_skip= in front of all other queries.
    // eslint-disable-next-line
    parameters._HLS_skip = media.serverControl.canSkipDateranges ? 'v2' : 'YES';
  }

  if (Object.keys(parameters).length) {
    var parsedUri = new window$1.URL(uri);
    ['_HLS_skip', '_HLS_msn', '_HLS_part'].forEach(function (name) {
      if (!parameters.hasOwnProperty(name)) {
        return;
      }

      parsedUri.searchParams.set(name, parameters[name]);
    });
    uri = parsedUri.toString();
  }

  return uri;
};
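
// Illustrative usage (annotation, hypothetical playlist): with blocking
// reload enabled and no preload segment, the next media sequence number is
// appended as _HLS_msn:
//
//   addLLHLSQueryDirectives('https://a.example/media.m3u8', {
//     mediaSequence: 100,
//     segments: [{}, {}, {}, {}, {}],
//     serverControl: {canBlockReload: true}
//   }); // => 'https://a.example/media.m3u8?_HLS_msn=105'
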
/**
 * Returns a new segment object with properties and
 * the parts array merged.
 *
 * @param {Object} a the old segment
 * @param {Object} b the new segment
 *
 * @return {Object} the merged segment
 */


var updateSegment = function updateSegment(a, b) {
  if (!a) {
    return b;
  }

  var result = mergeOptions$2(a, b); // if only the old segment has preload hints
  // and the new one does not, remove preload hints.

  if (a.preloadHints && !b.preloadHints) {
    delete result.preloadHints;
  } // if only the old segment has parts
  // then the parts are no longer valid


  if (a.parts && !b.parts) {
    delete result.parts; // if both segments have parts
    // copy part properties from the old segment
    // to the new one.
  } else if (a.parts && b.parts) {
    for (var i = 0; i < b.parts.length; i++) {
      if (a.parts && a.parts[i]) {
        result.parts[i] = mergeOptions$2(a.parts[i], b.parts[i]);
      }
    }
  } // set skipped to false for segments that have had
  // information merged from the old segment.


  if (!a.skipped && b.skipped) {
    result.skipped = false;
  } // set preload to false for segments that have
  // had information added in the new segment.


  if (a.preload && !b.preload) {
    result.preload = false;
  }

  return result;
};
/**
 * Returns a new array of segments that is the result of merging
 * properties from an older list of segments onto an updated
 * list. No properties on the updated playlist will be overwritten.
 *
 * @param {Array} original the outdated list of segments
 * @param {Array} update the updated list of segments
 * @param {number=} offset the index of the first update
 * segment in the original segment list. For non-live playlists,
 * this should always be zero and does not need to be
 * specified. For live playlists, it should be the difference
 * between the media sequence numbers in the original and updated
 * playlists.
 * @return {Array} a list of merged segment objects
 */
1637
1638var updateSegments = function updateSegments(original, update, offset) {
1639 var oldSegments = original.slice();
1640 var newSegments = update.slice();
1641 offset = offset || 0;
1642 var result = [];
1643 var currentMap;
1644
1645 for (var newIndex = 0; newIndex < newSegments.length; newIndex++) {
1646 var oldSegment = oldSegments[newIndex + offset];
1647 var newSegment = newSegments[newIndex];
1648
1649 if (oldSegment) {
1650 currentMap = oldSegment.map || currentMap;
1651 result.push(updateSegment(oldSegment, newSegment));
1652 } else {
1653 // carry over map to new segment if it is missing
1654 if (currentMap && !newSegment.map) {
1655 newSegment.map = currentMap;
1656 }
1657
1658 result.push(newSegment);
1659 }
1660 }
1661
1662 return result;
1663};
1664var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
1665 // preloadSegment will not have a uri at all
1666 // as the segment isn't actually in the manifest yet, only parts
1667 if (!segment.resolvedUri && segment.uri) {
1668 segment.resolvedUri = resolveUrl(baseUri, segment.uri);
1669 }
1670
1671 if (segment.key && !segment.key.resolvedUri) {
1672 segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);
1673 }
1674
1675 if (segment.map && !segment.map.resolvedUri) {
1676 segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);
1677 }
1678
1679 if (segment.map && segment.map.key && !segment.map.key.resolvedUri) {
1680 segment.map.key.resolvedUri = resolveUrl(baseUri, segment.map.key.uri);
1681 }
1682
1683 if (segment.parts && segment.parts.length) {
1684 segment.parts.forEach(function (p) {
1685 if (p.resolvedUri) {
1686 return;
1687 }
1688
1689 p.resolvedUri = resolveUrl(baseUri, p.uri);
1690 });
1691 }
1692
1693 if (segment.preloadHints && segment.preloadHints.length) {
1694 segment.preloadHints.forEach(function (p) {
1695 if (p.resolvedUri) {
1696 return;
1697 }
1698
1699 p.resolvedUri = resolveUrl(baseUri, p.uri);
1700 });
1701 }
1702};
1703
1704var getAllSegments = function getAllSegments(media) {
1705 var segments = media.segments || [];
1706 var preloadSegment = media.preloadSegment; // a preloadSegment with only preloadHints is not currently
1707 // a usable segment, only include a preloadSegment that has
1708 // parts.
1709
1710 if (preloadSegment && preloadSegment.parts && preloadSegment.parts.length) {
1711 // if preloadHints has a MAP that means that the
1712 // init segment is going to change. We cannot use any of the parts
1713 // from this preload segment.
1714 if (preloadSegment.preloadHints) {
1715 for (var i = 0; i < preloadSegment.preloadHints.length; i++) {
1716 if (preloadSegment.preloadHints[i].type === 'MAP') {
1717 return segments;
1718 }
1719 }
1720 } // set the duration for our preload segment to target duration.
1721
1722
1723 preloadSegment.duration = media.targetDuration;
1724 preloadSegment.preload = true;
1725 segments.push(preloadSegment);
1726 }
1727
1728 return segments;
1729}; // consider the playlist unchanged if the playlist object is the same or
1730// the number of segments is equal, the media sequence number is unchanged,
1731// and this playlist hasn't become the end of the playlist
1732
1733
1734var isPlaylistUnchanged = function isPlaylistUnchanged(a, b) {
1735 return a === b || a.segments && b.segments && a.segments.length === b.segments.length && a.endList === b.endList && a.mediaSequence === b.mediaSequence && a.preloadSegment === b.preloadSegment;
1736};
1737/**
1738 * Returns a new master playlist that is the result of merging an
1739 * updated media playlist into the original version. If the
1740 * updated media playlist does not match any of the playlist
1741 * entries in the original master playlist, null is returned.
1742 *
1743 * @param {Object} master a parsed master M3U8 object
1744 * @param {Object} media a parsed media M3U8 object
1745 * @return {Object} a new object that represents the original
1746 * master playlist with the updated media playlist merged in, or
1747 * null if the merge produced no change.
1748 */
1749
1750var updateMaster$1 = function updateMaster(master, newMedia, unchangedCheck) {
1751 if (unchangedCheck === void 0) {
1752 unchangedCheck = isPlaylistUnchanged;
1753 }
1754
1755 var result = mergeOptions$2(master, {});
1756 var oldMedia = result.playlists[newMedia.id];
1757
1758 if (!oldMedia) {
1759 return null;
1760 }
1761
1762 if (unchangedCheck(oldMedia, newMedia)) {
1763 return null;
1764 }
1765
1766 newMedia.segments = getAllSegments(newMedia);
1767 var mergedPlaylist = mergeOptions$2(oldMedia, newMedia); // always use the new media's preload segment
1768
1769 if (mergedPlaylist.preloadSegment && !newMedia.preloadSegment) {
1770 delete mergedPlaylist.preloadSegment;
1771 } // if the update could overlap existing segment information, merge the two segment lists
1772
1773
1774 if (oldMedia.segments) {
1775 if (newMedia.skip) {
1776 newMedia.segments = newMedia.segments || []; // add back in objects for skipped segments, so that we merge
1777 // old properties into the new segments
1778
1779 for (var i = 0; i < newMedia.skip.skippedSegments; i++) {
1780 newMedia.segments.unshift({
1781 skipped: true
1782 });
1783 }
1784 }
1785
1786 mergedPlaylist.segments = updateSegments(oldMedia.segments, newMedia.segments, newMedia.mediaSequence - oldMedia.mediaSequence);
1787 } // resolve any segment URIs to prevent us from having to do it later
1788
1789
1790 mergedPlaylist.segments.forEach(function (segment) {
1791 resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
1792 }); // TODO Right now in the playlists array there are two references to each playlist, one
1793 // that is referenced by index, and one by URI. The index reference may no longer be
1794 // necessary.
1795
1796 for (var _i = 0; _i < result.playlists.length; _i++) {
1797 if (result.playlists[_i].id === newMedia.id) {
1798 result.playlists[_i] = mergedPlaylist;
1799 }
1800 }
1801
1802 result.playlists[newMedia.id] = mergedPlaylist; // URI reference added for backwards compatibility
1803
1804 result.playlists[newMedia.uri] = mergedPlaylist; // update media group playlist references.
1805
1806 forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
1807 if (!properties.playlists) {
1808 return;
1809 }
1810
1811 for (var _i2 = 0; _i2 < properties.playlists.length; _i2++) {
1812 if (newMedia.id === properties.playlists[_i2].id) {
1813 properties.playlists[_i2] = mergedPlaylist;
1814 }
1815 }
1816 });
1817 return result;
1818};
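// A minimal usage sketch (object shapes assumed): merging a refreshed media
// playlist into the master yields a new master object, or null when nothing
// changed:
//
//   var update = updateMaster(master, refreshedMediaPlaylist);
//   if (update) {
//     master = update; // the refreshed rendition has been merged in
//   } // a null result means the playlist was unchanged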
1819/**
1820 * Calculates the time to wait before refreshing a live playlist
1821 *
1822 * @param {Object} media
1823 * The current media
1824 * @param {boolean} update
1825 * True if there were any updates from the last refresh, false otherwise
1826 * @return {number}
1827 * The time in ms to wait before refreshing the live playlist
1828 */
1829
1830var refreshDelay = function refreshDelay(media, update) {
1831 var segments = media.segments || [];
1832 var lastSegment = segments[segments.length - 1];
1833 var lastPart = lastSegment && lastSegment.parts && lastSegment.parts[lastSegment.parts.length - 1];
1834 var lastDuration = lastPart && lastPart.duration || lastSegment && lastSegment.duration;
1835
1836 if (update && lastDuration) {
1837 return lastDuration * 1000;
1838 } // if the playlist is unchanged since the last reload, or the last segment
1839 // duration cannot be determined, try again after half the target duration
1840
1841
1842 return (media.partTargetDuration || media.targetDuration || 10) * 500;
1843};
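// Illustrative timings (durations assumed): when the playlist updated and the
// last segment lasted 4 seconds, reload after 4000ms; when nothing changed,
// retry after half the 6 second target duration:
//
//   refreshDelay({ targetDuration: 6, segments: [{ duration: 4 }] }, true);  // 4000
//   refreshDelay({ targetDuration: 6, segments: [{ duration: 4 }] }, false); // 3000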
1844/**
1845 * Load a playlist from a remote location
1846 *
1847 * @class PlaylistLoader
1848 * @extends EventTarget
1849 * @param {string|Object} src url or object of manifest
1850 * @param {Object} vhs the VHS instance whose xhr and options are used
1851 * @param {Object} [options] withCredentials and handleManifestRedirects
1852 */
1853
1854var PlaylistLoader = /*#__PURE__*/function (_EventTarget) {
1855 _inheritsLoose(PlaylistLoader, _EventTarget);
1856
1857 function PlaylistLoader(src, vhs, options) {
1858 var _this;
1859
1860 if (options === void 0) {
1861 options = {};
1862 }
1863
1864 _this = _EventTarget.call(this) || this;
1865
1866 if (!src) {
1867 throw new Error('A non-empty playlist URL or object is required');
1868 }
1869
1870 _this.logger_ = logger('PlaylistLoader');
1871 var _options = options,
1872 _options$withCredenti = _options.withCredentials,
1873 withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
1874 _options$handleManife = _options.handleManifestRedirects,
1875 handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
1876 _this.src = src;
1877 _this.vhs_ = vhs;
1878 _this.withCredentials = withCredentials;
1879 _this.handleManifestRedirects = handleManifestRedirects;
1880 var vhsOptions = vhs.options_;
1881 _this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];
1882 _this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || [];
1883 _this.experimentalLLHLS = vhsOptions && vhsOptions.experimentalLLHLS || false; // force experimentalLLHLS off for IE 11
1884
1885 if (videojs.browser.IE_VERSION) {
1886 _this.experimentalLLHLS = false;
1887 } // initialize the loader state
1888
1889
1890 _this.state = 'HAVE_NOTHING'; // live playlist staleness timeout
1891
1892 _this.handleMediaupdatetimeout_ = _this.handleMediaupdatetimeout_.bind(_assertThisInitialized(_this));
1893
1894 _this.on('mediaupdatetimeout', _this.handleMediaupdatetimeout_);
1895
1896 return _this;
1897 }
1898
1899 var _proto = PlaylistLoader.prototype;
1900
1901 _proto.handleMediaupdatetimeout_ = function handleMediaupdatetimeout_() {
1902 var _this2 = this;
1903
1904 if (this.state !== 'HAVE_METADATA') {
1905 // only refresh the media playlist if no other activity is going on
1906 return;
1907 }
1908
1909 var media = this.media();
1910 var uri = resolveUrl(this.master.uri, media.uri);
1911
1912 if (this.experimentalLLHLS) {
1913 uri = addLLHLSQueryDirectives(uri, media);
1914 }
1915
1916 this.state = 'HAVE_CURRENT_METADATA';
1917 this.request = this.vhs_.xhr({
1918 uri: uri,
1919 withCredentials: this.withCredentials
1920 }, function (error, req) {
1921 // disposed
1922 if (!_this2.request) {
1923 return;
1924 }
1925
1926 if (error) {
1927 return _this2.playlistRequestError(_this2.request, _this2.media(), 'HAVE_METADATA');
1928 }
1929
1930 _this2.haveMetadata({
1931 playlistString: _this2.request.responseText,
1932 url: _this2.media().uri,
1933 id: _this2.media().id
1934 });
1935 });
1936 };
1937
1938 _proto.playlistRequestError = function playlistRequestError(xhr, playlist, startingState) {
1939 var uri = playlist.uri,
1940 id = playlist.id; // any in-flight request is now finished
1941
1942 this.request = null;
1943
1944 if (startingState) {
1945 this.state = startingState;
1946 }
1947
1948 this.error = {
1949 playlist: this.master.playlists[id],
1950 status: xhr.status,
1951 message: "HLS playlist request error at URL: " + uri + ".",
1952 responseText: xhr.responseText,
1953 code: xhr.status >= 500 ? 4 : 2
1954 };
1955 this.trigger('error');
1956 };
1957
1958 _proto.parseManifest_ = function parseManifest_(_ref) {
1959 var _this3 = this;
1960
1961 var url = _ref.url,
1962 manifestString = _ref.manifestString;
1963 return parseManifest({
1964 onwarn: function onwarn(_ref2) {
1965 var message = _ref2.message;
1966 return _this3.logger_("m3u8-parser warn for " + url + ": " + message);
1967 },
1968 oninfo: function oninfo(_ref3) {
1969 var message = _ref3.message;
1970 return _this3.logger_("m3u8-parser info for " + url + ": " + message);
1971 },
1972 manifestString: manifestString,
1973 customTagParsers: this.customTagParsers,
1974 customTagMappers: this.customTagMappers,
1975 experimentalLLHLS: this.experimentalLLHLS
1976 });
1977 }
1978 /**
1979 * Update the playlist loader's state in response to a new or updated playlist.
1980 *
1981 * @param {string} [playlistString]
1982 * Playlist string (if playlistObject is not provided)
1983 * @param {Object} [playlistObject]
1984 * Playlist object (if playlistString is not provided)
1985 * @param {string} url
1986 * URL of playlist
1987 * @param {string} id
1988 * ID to use for playlist
1989 */
1990 ;
1991
1992 _proto.haveMetadata = function haveMetadata(_ref4) {
1993 var playlistString = _ref4.playlistString,
1994 playlistObject = _ref4.playlistObject,
1995 url = _ref4.url,
1996 id = _ref4.id;
1997 // any in-flight request is now finished
1998 this.request = null;
1999 this.state = 'HAVE_METADATA';
2000 var playlist = playlistObject || this.parseManifest_({
2001 url: url,
2002 manifestString: playlistString
2003 });
2004 playlist.lastRequest = Date.now();
2005 setupMediaPlaylist({
2006 playlist: playlist,
2007 uri: url,
2008 id: id
2009 }); // merge this playlist into the master
2010
2011 var update = updateMaster$1(this.master, playlist);
2012 this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;
2013 this.pendingMedia_ = null;
2014
2015 if (update) {
2016 this.master = update;
2017 this.media_ = this.master.playlists[id];
2018 } else {
2019 this.trigger('playlistunchanged');
2020 }
2021
2022 this.updateMediaUpdateTimeout_(refreshDelay(this.media(), !!update));
2023 this.trigger('loadedplaylist');
2024 }
2025 /**
2026 * Abort any outstanding work and clean up.
2027 */
2028 ;
2029
2030 _proto.dispose = function dispose() {
2031 this.trigger('dispose');
2032 this.stopRequest();
2033 window$1.clearTimeout(this.mediaUpdateTimeout);
2034 window$1.clearTimeout(this.finalRenditionTimeout);
2035 this.off();
2036 };
2037
2038 _proto.stopRequest = function stopRequest() {
2039 if (this.request) {
2040 var oldRequest = this.request;
2041 this.request = null;
2042 oldRequest.onreadystatechange = null;
2043 oldRequest.abort();
2044 }
2045 }
2046 /**
2047 * When called without any arguments, returns the currently
2048 * active media playlist. When called with a single argument,
2049 * triggers the playlist loader to asynchronously switch to the
2050 * specified media playlist. Calling this method while the
2051 * loader is in the HAVE_NOTHING state causes an error to be
2052 * thrown but otherwise has no effect.
2053 *
2054 * @param {Object=} playlist the parsed media playlist
2055 * object to switch to
2056 * @param {boolean=} shouldDelay whether we should delay the request by half target duration
2057 *
2058 * @return {Playlist} the current loaded media
2059 */
2060 ;
2061
2062 _proto.media = function media(playlist, shouldDelay) {
2063 var _this4 = this;
2064
2065 // getter
2066 if (!playlist) {
2067 return this.media_;
2068 } // setter
2069
2070
2071 if (this.state === 'HAVE_NOTHING') {
2072 throw new Error('Cannot switch media playlist from ' + this.state);
2073 } // find the playlist object if the target playlist has been
2074 // specified by URI
2075
2076
2077 if (typeof playlist === 'string') {
2078 if (!this.master.playlists[playlist]) {
2079 throw new Error('Unknown playlist URI: ' + playlist);
2080 }
2081
2082 playlist = this.master.playlists[playlist];
2083 }
2084
2085 window$1.clearTimeout(this.finalRenditionTimeout);
2086
2087 if (shouldDelay) {
2088 var delay = (playlist.partTargetDuration || playlist.targetDuration) / 2 * 1000 || 5 * 1000;
2089 this.finalRenditionTimeout = window$1.setTimeout(this.media.bind(this, playlist, false), delay);
2090 return;
2091 }
2092
2093 var startingState = this.state;
2094 var mediaChange = !this.media_ || playlist.id !== this.media_.id;
2095 var masterPlaylistRef = this.master.playlists[playlist.id]; // switch to fully loaded playlists immediately
2096
2097 if (masterPlaylistRef && masterPlaylistRef.endList || // handle the case of a playlist object (e.g., if using vhs-json with a resolved
2098 // media playlist or, for the case of demuxed audio, a resolved audio media group)
2099 playlist.endList && playlist.segments.length) {
2100 // abort outstanding playlist requests
2101 if (this.request) {
2102 this.request.onreadystatechange = null;
2103 this.request.abort();
2104 this.request = null;
2105 }
2106
2107 this.state = 'HAVE_METADATA';
2108 this.media_ = playlist; // trigger media change if the active media has been updated
2109
2110 if (mediaChange) {
2111 this.trigger('mediachanging');
2112
2113 if (startingState === 'HAVE_MASTER') {
2114 // The initial playlist was a master manifest, and the first media selected was
2115 // also provided (in the form of a resolved playlist object) as part of the
2116 // source object (rather than just a URL). Therefore, since the media playlist
2117 // doesn't need to be requested, loadedmetadata won't trigger as part of the
2118 // normal flow, and needs an explicit trigger here.
2119 this.trigger('loadedmetadata');
2120 } else {
2121 this.trigger('mediachange');
2122 }
2123 }
2124
2125 return;
2126 } // We update/set the timeout here so that live playlists
2127 // that are not a media change will "start" the loader as expected.
2128 // We expect that this function will start the media update timeout
2129 // cycle again. This also prevents a playlist switch failure from
2130 // causing us to stall during live.
2131
2132
2133 this.updateMediaUpdateTimeout_(refreshDelay(playlist, true)); // switching to the active playlist is a no-op
2134
2135 if (!mediaChange) {
2136 return;
2137 }
2138
2139 this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist request
2140
2141 if (this.request) {
2142 if (playlist.resolvedUri === this.request.url) {
2143 // requesting to switch to the same playlist multiple times
2144 // has no effect after the first
2145 return;
2146 }
2147
2148 this.request.onreadystatechange = null;
2149 this.request.abort();
2150 this.request = null;
2151 } // request the new playlist
2152
2153
2154 if (this.media_) {
2155 this.trigger('mediachanging');
2156 }
2157
2158 this.pendingMedia_ = playlist;
2159 this.request = this.vhs_.xhr({
2160 uri: playlist.resolvedUri,
2161 withCredentials: this.withCredentials
2162 }, function (error, req) {
2163 // disposed
2164 if (!_this4.request) {
2165 return;
2166 }
2167
2168 playlist.lastRequest = Date.now();
2169 playlist.resolvedUri = resolveManifestRedirect(_this4.handleManifestRedirects, playlist.resolvedUri, req);
2170
2171 if (error) {
2172 return _this4.playlistRequestError(_this4.request, playlist, startingState);
2173 }
2174
2175 _this4.haveMetadata({
2176 playlistString: req.responseText,
2177 url: playlist.uri,
2178 id: playlist.id
2179 }); // fire loadedmetadata the first time a media playlist is loaded
2180
2181
2182 if (startingState === 'HAVE_MASTER') {
2183 _this4.trigger('loadedmetadata');
2184 } else {
2185 _this4.trigger('mediachange');
2186 }
2187 });
2188 }
2189 /**
2190 * pause loading of the playlist
2191 */
2192 ;
2193
2194 _proto.pause = function pause() {
2195 if (this.mediaUpdateTimeout) {
2196 window$1.clearTimeout(this.mediaUpdateTimeout);
2197 this.mediaUpdateTimeout = null;
2198 }
2199
2200 this.stopRequest();
2201
2202 if (this.state === 'HAVE_NOTHING') {
2203 // If we pause the loader before any data has been retrieved, it's as if we never
2204 // started, so reset to an unstarted state.
2205 this.started = false;
2206 } // Need to restore state now that no activity is happening
2207
2208
2209 if (this.state === 'SWITCHING_MEDIA') {
2210 // if the loader was in the process of switching media, it should either return to
2211 // HAVE_MASTER or HAVE_METADATA depending on if the loader has loaded a media
2212 // playlist yet. This is determined by the existence of loader.media_
2213 if (this.media_) {
2214 this.state = 'HAVE_METADATA';
2215 } else {
2216 this.state = 'HAVE_MASTER';
2217 }
2218 } else if (this.state === 'HAVE_CURRENT_METADATA') {
2219 this.state = 'HAVE_METADATA';
2220 }
2221 }
2222 /**
2223 * start loading of the playlist
2224 */
2225 ;
2226
2227 _proto.load = function load(shouldDelay) {
2228 var _this5 = this;
2229
2230 if (this.mediaUpdateTimeout) {
2231 window$1.clearTimeout(this.mediaUpdateTimeout);
2232 this.mediaUpdateTimeout = null;
2233 }
2234
2235 var media = this.media();
2236
2237 if (shouldDelay) {
2238 var delay = media ? (media.partTargetDuration || media.targetDuration) / 2 * 1000 : 5 * 1000;
2239 this.mediaUpdateTimeout = window$1.setTimeout(function () {
2240 _this5.mediaUpdateTimeout = null;
2241
2242 _this5.load();
2243 }, delay);
2244 return;
2245 }
2246
2247 if (!this.started) {
2248 this.start();
2249 return;
2250 }
2251
2252 if (media && !media.endList) {
2253 this.trigger('mediaupdatetimeout');
2254 } else {
2255 this.trigger('loadedplaylist');
2256 }
2257 };
2258
2259 _proto.updateMediaUpdateTimeout_ = function updateMediaUpdateTimeout_(delay) {
2260 var _this6 = this;
2261
2262 if (this.mediaUpdateTimeout) {
2263 window$1.clearTimeout(this.mediaUpdateTimeout);
2264 this.mediaUpdateTimeout = null;
2265 } // we only use the mediaupdatetimeout for live playlists.
2266
2267
2268 if (!this.media() || this.media().endList) {
2269 return;
2270 }
2271
2272 this.mediaUpdateTimeout = window$1.setTimeout(function () {
2273 _this6.mediaUpdateTimeout = null;
2274
2275 _this6.trigger('mediaupdatetimeout');
2276
2277 _this6.updateMediaUpdateTimeout_(delay);
2278 }, delay);
2279 }
2280 /**
2281 * start loading of the playlist
2282 */
2283 ;
2284
2285 _proto.start = function start() {
2286 var _this7 = this;
2287
2288 this.started = true;
2289
2290 if (typeof this.src === 'object') {
2291 // in the case of an entirely constructed manifest object (meaning there's no actual
2292 // manifest on a server), default the uri to the page's href
2293 if (!this.src.uri) {
2294 this.src.uri = window$1.location.href;
2295 } // resolvedUri is added on internally after the initial request. Since there's no
2296 // request for pre-resolved manifests, add on resolvedUri here.
2297
2298
2299 this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first
2300 // request can be skipped (since the top level of the manifest, at a minimum, is
2301 // already available as a parsed manifest object). However, if the manifest object
2302 // represents a master playlist, some media playlists may need to be resolved before
2303 // the starting segment list is available. Therefore, go directly to setup of the
2304 // initial playlist, and let the normal flow continue from there.
2305 //
2306 // Note that the call to setup is asynchronous, as other sections of VHS may assume
2307 // that the first request is asynchronous.
2308
2309 setTimeout(function () {
2310 _this7.setupInitialPlaylist(_this7.src);
2311 }, 0);
2312 return;
2313 } // request the specified URL
2314
2315
2316 this.request = this.vhs_.xhr({
2317 uri: this.src,
2318 withCredentials: this.withCredentials
2319 }, function (error, req) {
2320 // disposed
2321 if (!_this7.request) {
2322 return;
2323 } // clear the loader's request reference
2324
2325
2326 _this7.request = null;
2327
2328 if (error) {
2329 _this7.error = {
2330 status: req.status,
2331 message: "HLS playlist request error at URL: " + _this7.src + ".",
2332 responseText: req.responseText,
2333 // MEDIA_ERR_NETWORK
2334 code: 2
2335 };
2336
2337 if (_this7.state === 'HAVE_NOTHING') {
2338 _this7.started = false;
2339 }
2340
2341 return _this7.trigger('error');
2342 }
2343
2344 _this7.src = resolveManifestRedirect(_this7.handleManifestRedirects, _this7.src, req);
2345
2346 var manifest = _this7.parseManifest_({
2347 manifestString: req.responseText,
2348 url: _this7.src
2349 });
2350
2351 _this7.setupInitialPlaylist(manifest);
2352 });
2353 };
2354
2355 _proto.srcUri = function srcUri() {
2356 return typeof this.src === 'string' ? this.src : this.src.uri;
2357 }
2358 /**
2359 * Given a manifest object that's either a master or media playlist, trigger the proper
2360 * events and set the state of the playlist loader.
2361 *
2362 * If the manifest object represents a master playlist, `loadedplaylist` will be
2363 * triggered to allow listeners to select a playlist. If none is selected, the loader
2364 * will default to the first one in the playlists array.
2365 *
2366 * If the manifest object represents a media playlist, `loadedplaylist` will be
2367 * triggered followed by `loadedmetadata`, as the only available playlist is loaded.
2368 *
2369 * In the case of a media playlist, a master playlist object wrapper with one playlist
2370 * will be created so that all logic can handle playlists in the same fashion (as an
2371 * assumed manifest object schema).
2372 *
2373 * @param {Object} manifest
2374 * The parsed manifest object
2375 */
2376 ;
2377
2378 _proto.setupInitialPlaylist = function setupInitialPlaylist(manifest) {
2379 this.state = 'HAVE_MASTER';
2380
2381 if (manifest.playlists) {
2382 this.master = manifest;
2383 addPropertiesToMaster(this.master, this.srcUri()); // If the initial master playlist has playlists with segments already resolved,
2384 // then resolve URIs in advance, as that is usually done after a playlist
2385 // request, which may not happen if the playlist is already resolved.
2386
2387 manifest.playlists.forEach(function (playlist) {
2388 playlist.segments = getAllSegments(playlist);
2389 playlist.segments.forEach(function (segment) {
2390 resolveSegmentUris(segment, playlist.resolvedUri);
2391 });
2392 });
2393 this.trigger('loadedplaylist');
2394
2395 if (!this.request) {
2396 // no media playlist was specifically selected so start
2397 // from the first listed one
2398 this.media(this.master.playlists[0]);
2399 }
2400
2401 return;
2402 } // In order to support media playlists passed in as vhs-json, the case where the uri
2403 // is not provided as part of the manifest should be considered, and an appropriate
2404 // default used.
2405
2406
2407 var uri = this.srcUri() || window$1.location.href;
2408 this.master = masterForMedia(manifest, uri);
2409 this.haveMetadata({
2410 playlistObject: manifest,
2411 url: uri,
2412 id: this.master.playlists[0].id
2413 });
2414 this.trigger('loadedmetadata');
2415 };
2416
2417 return PlaylistLoader;
2418}(EventTarget$1);
2419
2420/**
2421 * @file xhr.js
2422 */
2423var videojsXHR = videojs.xhr,
2424 mergeOptions$1 = videojs.mergeOptions;
2425
2426var callbackWrapper = function callbackWrapper(request, error, response, callback) {
2427 var reqResponse = request.responseType === 'arraybuffer' ? request.response : request.responseText;
2428
2429 if (!error && reqResponse) {
2430 request.responseTime = Date.now();
2431 request.roundTripTime = request.responseTime - request.requestTime;
2432 request.bytesReceived = reqResponse.byteLength || reqResponse.length;
2433
2434 if (!request.bandwidth) {
2435 request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
2436 }
2437 }
2438
2439 if (response.headers) {
2440 request.responseHeaders = response.headers;
2441 } // videojs.xhr now uses a specific code on the error
2442 // object to signal that a request has timed out instead
2443 // of setting a boolean on the request object
2444
2445
2446 if (error && error.code === 'ETIMEDOUT') {
2447 request.timedout = true;
2448 } // videojs.xhr no longer considers status codes outside of 200 and 0
2449 // (for file uris) to be errors, but the old XHR did, so emulate that
2450 // behavior. Status 206 may be used in response to byterange requests.
2451
2452
2453 if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
2454 error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
2455 }
2456
2457 callback(error, request);
2458};
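// A worked example of the bandwidth estimate above (numbers assumed):
// 1,000,000 bytes received over a 500ms round trip gives
// Math.floor(1000000 / 500 * 8 * 1000) = 16,000,000 bits per second.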
2459
2460var xhrFactory = function xhrFactory() {
2461 var xhr = function XhrFunction(options, callback) {
2462 // Add a default timeout
2463 options = mergeOptions$1({
2464 timeout: 45e3
2465 }, options); // Allow an optional user-specified function to modify the option
2466 // object before we construct the xhr request
2467
2468 var beforeRequest = XhrFunction.beforeRequest || videojs.Vhs.xhr.beforeRequest;
2469
2470 if (beforeRequest && typeof beforeRequest === 'function') {
2471 var newOptions = beforeRequest(options);
2472
2473 if (newOptions) {
2474 options = newOptions;
2475 }
2476 } // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overridden
2477 // TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11
2478
2479
2480 var xhrMethod = videojs.Vhs.xhr.original === true ? videojsXHR : videojs.Vhs.xhr;
2481 var request = xhrMethod(options, function (error, response) {
2482 return callbackWrapper(request, error, response, callback);
2483 });
2484 var originalAbort = request.abort;
2485
2486 request.abort = function () {
2487 request.aborted = true;
2488 return originalAbort.apply(request, arguments);
2489 };
2490
2491 request.uri = options.uri;
2492 request.requestTime = Date.now();
2493 return request;
2494 };
2495
2496 xhr.original = true;
2497 return xhr;
2498};
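// A sketch of the optional beforeRequest hook mentioned above (the header
// name is hypothetical): the hook may return a modified options object,
// which then replaces the original before the request is made:
//
//   var xhr = xhrFactory();
//   xhr.beforeRequest = function (options) {
//     options.headers = options.headers || {};
//     options.headers['X-Example-Token'] = 'abc123'; // hypothetical header
//     return options;
//   };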
2499/**
2500 * Turns segment byterange into a string suitable for use in
2501 * HTTP Range requests
2502 *
2503 * @param {Object} byterange - an object with two values defining the start and end
2504 * of a byte-range
2505 */
2506
2507
2508var byterangeStr = function byterangeStr(byterange) {
2509 // `byterangeEnd` is one less than `offset + length` because the HTTP range
2510 // header uses inclusive ranges
2511 var byterangeEnd;
2512 var byterangeStart = byterange.offset;
2513
2514 if (typeof byterange.offset === 'bigint' || typeof byterange.length === 'bigint') {
2515 byterangeEnd = window$1.BigInt(byterange.offset) + window$1.BigInt(byterange.length) - window$1.BigInt(1);
2516 } else {
2517 byterangeEnd = byterange.offset + byterange.length - 1;
2518 }
2519
2520 return 'bytes=' + byterangeStart + '-' + byterangeEnd;
2521};
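// For example (values assumed), HTTP Range headers are inclusive, so:
//
//   byterangeStr({ offset: 0, length: 100 });   // 'bytes=0-99'
//   byterangeStr({ offset: 500, length: 200 }); // 'bytes=500-699'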
2522/**
2523 * Defines headers for use in the xhr request for a particular segment.
2524 *
2525 * @param {Object} segment - a simplified copy of the segmentInfo object
2526 * from SegmentLoader
2527 */
2528
2529var segmentXhrHeaders = function segmentXhrHeaders(segment) {
2530 var headers = {};
2531
2532 if (segment.byterange) {
2533 headers.Range = byterangeStr(segment.byterange);
2534 }
2535
2536 return headers;
2537};
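// For example (byterange assumed), only byterange segments get a header:
//
//   segmentXhrHeaders({ byterange: { offset: 0, length: 100 } });
//   // -> { Range: 'bytes=0-99' }
//   segmentXhrHeaders({}); // -> {}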
2538
2539/**
2540 * @file bin-utils.js
2541 */
2542
2543/**
2544 * convert a TimeRange to text
2545 *
2546 * @param {TimeRange} range the timerange to use for conversion
2547 * @param {number} i the iterator on the range to convert
2548 * @return {string} the range in string format
2549 */
2550
2551var textRange = function textRange(range, i) {
2552 return range.start(i) + '-' + range.end(i);
2553};
2554/**
2555 * format a number as hex string
2556 *
2557 * @param {number} e The number
2558 * @param {number} i the iterator
2559 * @return {string} the hex formatted number as a string
2560 */
2561
2562
2563var formatHexString = function formatHexString(e, i) {
2564 var value = e.toString(16);
2565 return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
2566};
2567
2568var formatAsciiString = function formatAsciiString(e) {
2569 if (e >= 0x20 && e < 0x7e) {
2570 return String.fromCharCode(e);
2571 }
2572
2573 return '.';
2574};
2575/**
2576 * Creates an object for sending to a web worker, modifying properties that are TypedArrays
2577 * into a new object with separate properties for the buffer, byteOffset, and byteLength.
2578 *
2579 * @param {Object} message
2580 * Object of properties and values to send to the web worker
2581 * @return {Object}
2582 * Modified message with TypedArray values expanded
2583 * @function createTransferableMessage
2584 */
2585
2586
2587var createTransferableMessage = function createTransferableMessage(message) {
2588 var transferable = {};
2589 Object.keys(message).forEach(function (key) {
2590 var value = message[key];
2591
2592 if (isArrayBufferView(value)) {
2593 transferable[key] = {
2594 bytes: value.buffer,
2595 byteOffset: value.byteOffset,
2596 byteLength: value.byteLength
2597 };
2598 } else {
2599 transferable[key] = value;
2600 }
2601 });
2602 return transferable;
2603};
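// For example (message shape assumed), TypedArray values are expanded so the
// underlying ArrayBuffer can be listed as a transferable:
//
//   var data = new Uint8Array([1, 2, 3]);
//   createTransferableMessage({ action: 'push', data: data });
//   // -> { action: 'push',
//   //      data: { bytes: data.buffer, byteOffset: 0, byteLength: 3 } }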
2604/**
2605 * Returns a unique string identifier for a media initialization
2606 * segment.
2607 *
2608 * @param {Object} initSegment
2609 * the init segment object.
2610 *
2611 * @return {string} the generated init segment id
2612 */
2613
2614var initSegmentId = function initSegmentId(initSegment) {
2615 var byterange = initSegment.byterange || {
2616 length: Infinity,
2617 offset: 0
2618 };
2619 return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
2620};
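// For example (URI assumed), an init segment without a byterange falls back
// to the Infinity/0 defaults:
//
//   initSegmentId({ resolvedUri: 'https://example.com/init.mp4' });
//   // -> 'Infinity,0,https://example.com/init.mp4'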
2621/**
2622 * Returns a unique string identifier for a media segment key.
2623 *
2624 * @param {Object} key the encryption key
2625 * @return {string} the unique id for the media segment key.
2626 */
2627
2628var segmentKeyId = function segmentKeyId(key) {
2629 return key.resolvedUri;
2630};
2631/**
2632 * utils to help dump binary data to the console
2633 *
2634 * @param {Array|TypedArray} data
2635 * data to dump to a string
2636 *
2637 * @return {string} the data as a hex string.
2638 */
2639
2640var hexDump = function hexDump(data) {
2641 var bytes = Array.prototype.slice.call(data);
2642 var step = 16;
2643 var result = '';
2644 var hex;
2645 var ascii;
2646
2647 for (var j = 0; j < bytes.length / step; j++) {
2648 hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
2649 ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
2650 result += hex + ' ' + ascii + '\n';
2651 }
2652
2653 return result;
2654};
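// For example (bytes assumed), each 16-byte row is rendered as hex pairs
// followed by their printable ASCII:
//
//   hexDump(new Uint8Array([0x49, 0x44, 0x33]));
//   // -> '4944 33 ID3\n'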
2655var tagDump = function tagDump(_ref) {
2656 var bytes = _ref.bytes;
2657 return hexDump(bytes);
2658};
2659var textRanges = function textRanges(ranges) {
2660 var result = '';
2661 var i;
2662
2663 for (i = 0; i < ranges.length; i++) {
2664 result += textRange(ranges, i) + ' ';
2665 }
2666
2667 return result;
2668};
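// For example (ranges assumed):
//
//   textRanges(videojs.createTimeRanges([[0, 10], [15, 20]]));
//   // -> '0-10 15-20 '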
2669
2670var utils = /*#__PURE__*/Object.freeze({
2671 __proto__: null,
2672 createTransferableMessage: createTransferableMessage,
2673 initSegmentId: initSegmentId,
2674 segmentKeyId: segmentKeyId,
2675 hexDump: hexDump,
2676 tagDump: tagDump,
2677 textRanges: textRanges
2678});
2679
2680// TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
2681// 25% was arbitrarily chosen, and may need to be refined over time.
2682
2683var SEGMENT_END_FUDGE_PERCENT = 0.25;
2684/**
2685 * Converts a player time (any time that can be gotten/set from player.currentTime(),
2686 * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
2687 * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
2688 *
2689 * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
2690 * point" (a point where we have a mapping from program time to player time, with player
2691 * time being the post transmux start of the segment).
2692 *
2693 * For more details, see [this doc](../../docs/program-time-from-player-time.md).
2694 *
2695 * @param {number} playerTime the player time
2696 * @param {Object} segment the segment which contains the player time
2697 * @return {Date} program time
2698 */
2699
2700var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
2701 if (!segment.dateTimeObject) {
2702 // Can't convert without an "anchor point" for the program time (i.e., a time that can
2703 // be used to map the start of a segment with a real world time).
2704 return null;
2705 }
2706
2707 var transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
2708 var transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart; // get the start of the content from before old content is prepended
2709
2710 var startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
2711 var offsetFromSegmentStart = playerTime - startOfSegment;
2712 return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
2713};
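// A worked example (all values assumed): with a segment anchored at
// 2022-01-01T00:00:00Z whose post-transmux content starts at 8s with 2s of
// prepended content, the segment's real start is 8 + 2 = 10s, so player
// time 12 lands 2 seconds past the anchor:
//
//   playerTimeToProgramTime(12, {
//     dateTimeObject: new Date('2022-01-01T00:00:00Z'),
//     videoTimingInfo: {
//       transmuxerPrependedSeconds: 2,
//       transmuxedPresentationStart: 8
//     }
//   });
//   // -> Date for 2022-01-01T00:00:02.000Z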
2714var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
2715 return videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart - videoTimingInfo.transmuxerPrependedSeconds;
2716};
2717/**
2718 * Finds a segment that contains the time requested given as an ISO-8601 string. The
2719 * returned segment might be an estimate or an accurate match.
2720 *
2721 * @param {string} programTime The ISO-8601 programTime to find a match for
2722 * @param {Object} playlist A playlist object to search within
2723 */
2724
2725var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
2726 // Assumptions:
2727 // - verifyProgramDateTimeTags has already been run
2728 // - live streams have been started
2729 var dateTimeObject;
2730
2731 try {
2732 dateTimeObject = new Date(programTime);
2733 } catch (e) {
2734 return null;
2735 }
2736
2737 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
2738 return null;
2739 }
2740
2741 var segment = playlist.segments[0];
2742
2743 if (dateTimeObject < segment.dateTimeObject) {
2744 // Requested time is before stream start.
2745 return null;
2746 }
2747
2748 for (var i = 0; i < playlist.segments.length - 1; i++) {
2749 segment = playlist.segments[i];
2750 var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;
2751
2752 if (dateTimeObject < nextSegmentStart) {
2753 break;
2754 }
2755 }
2756
2757 var lastSegment = playlist.segments[playlist.segments.length - 1];
2758 var lastSegmentStart = lastSegment.dateTimeObject;
2759 var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
2760 var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
2761
2762 if (dateTimeObject > lastSegmentEnd) {
2763 // Beyond the end of the stream, or our best guess of the end of the stream.
2764 return null;
2765 }
2766
2767 if (dateTimeObject > lastSegmentStart) {
2768 segment = lastSegment;
2769 }
2770
2771 return {
2772 segment: segment,
2773 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
2774 // Although the selected segment should be accurate, given that all segments
2775 // have accurate date time objects, the segment's "player time" (its start
2776 // time in the player) can't be considered accurate unless the video has been
2777 // transmuxed at some point (determined by the presence of videoTimingInfo).
2778 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
2779 };
2780};
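// Typical usage (playlist assumed to have passed verifyProgramDateTimeTags):
//
//   var match = findSegmentForProgramTime('2022-01-01T00:00:12Z', playlist);
//   // -> null, or { segment, estimatedStart, type: 'accurate'|'estimate' }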
2781/**
2782 * Finds a segment that contains the given player time(in seconds).
2783 *
2784 * @param {number} time The player time to find a match for
2785 * @param {Object} playlist A playlist object to search within
2786 */
2787
2788var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
2789 // Assumptions:
2790 // - there will always be a segment.duration
2791 // - we can start from zero
2792 // - segments are in time order
2793 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
2794 return null;
2795 }
2796
2797 var segmentEnd = 0;
2798 var segment;
2799
2800 for (var i = 0; i < playlist.segments.length; i++) {
2801 segment = playlist.segments[i]; // videoTimingInfo is set after the segment is downloaded and transmuxed, and
2802 // should contain the most accurate values we have for the segment's player times.
2803 //
2804 // Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
2805 // back to an estimate based on the manifest derived (inaccurate) segment.duration, to
2806 // calculate an end value.
2807
2808 segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
2809
2810 if (time <= segmentEnd) {
2811 break;
2812 }
2813 }
2814
2815 var lastSegment = playlist.segments[playlist.segments.length - 1];
2816
2817 if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
2818 // The time requested is beyond the stream end.
2819 return null;
2820 }
2821
2822 if (time > segmentEnd) {
2823 // The time is within or beyond the last segment.
2824 //
2825 // Check to see if the time is beyond a reasonable guess of the end of the stream.
2826 if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
2827 // Technically, because the duration value is only an estimate, the time may still
2828 // exist in the last segment, however, there isn't enough information to make even
2829 // a reasonable estimate.
2830 return null;
2831 }
2832
2833 segment = lastSegment;
2834 }
2835
2836 return {
2837 segment: segment,
2838 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
2839 // Because videoTimingInfo is only set after transmux, it is the only way to get
2840 // accurate timing values.
2841 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
2842 };
2843};
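// A worked example (durations assumed): with two 10 second segments and no
// videoTimingInfo yet, time 12 falls in the second segment, whose start is
// estimated from the summed manifest durations:
//
//   findSegmentForPlayerTime(12, {
//     segments: [{ duration: 10 }, { duration: 10 }]
//   });
//   // -> { segment: <the second one>, estimatedStart: 10, type: 'estimate' }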
2844/**
2845 * Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
2846 * If the offset returned is positive, the programTime occurs after the
2847 * comparisonTimestamp.
2848 * If the offset is negative, the programTime occurs before the comparisonTimestamp.
2849 *
2850 * @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
2851 * @param {string} programTime The programTime as an ISO-8601 string
2852 * @return {number} offset
2853 */
2854
2855var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
2856 var segmentDateTime;
2857 var programDateTime;
2858
2859 try {
2860 segmentDateTime = new Date(comparisonTimeStamp);
2861 programDateTime = new Date(programTime);
2862 } catch (e) { // TODO handle error
2863 }
2864
2865 var segmentTimeEpoch = segmentDateTime.getTime();
2866 var programTimeEpoch = programDateTime.getTime();
2867 return (programTimeEpoch - segmentTimeEpoch) / 1000;
2868};
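// For example (timestamps assumed), a programTime 30 seconds after the
// comparison timestamp yields a positive offset:
//
//   getOffsetFromTimestamp('2022-01-01T00:00:00Z', '2022-01-01T00:00:30Z');
//   // -> 30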
2869/**
2870 * Checks that all segments in this playlist have programDateTime tags.
2871 *
2872 * @param {Object} playlist A playlist object
2873 */
2874
2875var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
2876 if (!playlist.segments || playlist.segments.length === 0) {
2877 return false;
2878 }
2879
2880 for (var i = 0; i < playlist.segments.length; i++) {
2881 var segment = playlist.segments[i];
2882
2883 if (!segment.dateTimeObject) {
2884 return false;
2885 }
2886 }
2887
2888 return true;
2889};
2890/**
2891 * Returns the programTime of the media given a playlist and a playerTime.
2892 * The playlist must have programDateTime tags for a programDateTime tag to be returned.
2893 * If the segments containing the time requested have not been buffered yet, an estimate
2894 * may be returned to the callback.
2895 *
2896 * @param {Object} args
2897 * @param {Object} args.playlist A playlist object to search within
2898 * @param {number} args.time A playerTime in seconds
2899 * @param {Function} args.callback Callback of the form callback(err, programTime)
2900 * @return {string} err.message A detailed error message
2901 * @return {Object} programTime
2902 * @return {number} programTime.mediaSeconds The streamTime in seconds
2903 * @return {string} programTime.programDateTime The programTime as an ISO-8601 String
2904 */
2905
2906var getProgramTime = function getProgramTime(_ref) {
2907 var playlist = _ref.playlist,
2908 _ref$time = _ref.time,
2909 time = _ref$time === void 0 ? undefined : _ref$time,
2910 callback = _ref.callback;
2911
2912 if (!callback) {
2913 throw new Error('getProgramTime: callback must be provided');
2914 }
2915
2916 if (!playlist || time === undefined) {
2917 return callback({
2918 message: 'getProgramTime: playlist and time must be provided'
2919 });
2920 }
2921
2922 var matchedSegment = findSegmentForPlayerTime(time, playlist);
2923
2924 if (!matchedSegment) {
2925 return callback({
2926 message: 'valid programTime was not found'
2927 });
2928 }
2929
2930 if (matchedSegment.type === 'estimate') {
2931 return callback({
2932 message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
2933 seekTime: matchedSegment.estimatedStart
2934 });
2935 }
2936
2937 var programTimeObject = {
2938 mediaSeconds: time
2939 };
2940 var programTime = playerTimeToProgramTime(time, matchedSegment.segment);
2941
2942 if (programTime) {
2943 programTimeObject.programDateTime = programTime.toISOString();
2944 }
2945
2946 return callback(null, programTimeObject);
2947};
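// A usage sketch (the player and loader variables are assumed):
//
//   getProgramTime({
//     playlist: loader.media(),
//     time: player.currentTime(),
//     callback: function (err, programTime) {
//       if (err) {
//         return videojs.log(err.message);
//       }
//       videojs.log(programTime.mediaSeconds, programTime.programDateTime);
//     }
//   });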
2948/**
2949 * Seeks in the player to a time that matches the given programTime ISO-8601 string.
2950 *
2951 * @param {Object} args
2952 * @param {string} args.programTime A programTime to seek to as an ISO-8601 String
2953 * @param {Object} args.playlist A playlist to look within
2954 * @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
2955 * @param {Function} args.seekTo A method to perform a seek
2956 * @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
2957 * @param {Object} args.tech The tech to seek on
2958 * @param {Function} args.callback(err, newTime) A callback to return the new time to
2959 * @return {string} err.message A detailed error message
2960 * @return {number} newTime The exact time that was seeked to in seconds
2961 */
2962
2963var seekToProgramTime = function seekToProgramTime(_ref2) {
2964 var programTime = _ref2.programTime,
2965 playlist = _ref2.playlist,
2966 _ref2$retryCount = _ref2.retryCount,
2967 retryCount = _ref2$retryCount === void 0 ? 2 : _ref2$retryCount,
2968 seekTo = _ref2.seekTo,
2969 _ref2$pauseAfterSeek = _ref2.pauseAfterSeek,
2970 pauseAfterSeek = _ref2$pauseAfterSeek === void 0 ? true : _ref2$pauseAfterSeek,
2971 tech = _ref2.tech,
2972 callback = _ref2.callback;
2973
2974 if (!callback) {
2975 throw new Error('seekToProgramTime: callback must be provided');
2976 }
2977
2978 if (typeof programTime === 'undefined' || !playlist || !seekTo) {
2979 return callback({
2980 message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
2981 });
2982 }
2983
2984 if (!playlist.endList && !tech.hasStarted_) {
2985 return callback({
2986 message: 'player must be playing a live stream to start buffering'
2987 });
2988 }
2989
2990 if (!verifyProgramDateTimeTags(playlist)) {
2991 return callback({
2992 message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
2993 });
2994 }
2995
2996 var matchedSegment = findSegmentForProgramTime(programTime, playlist); // no match
2997
2998 if (!matchedSegment) {
2999 return callback({
3000 message: programTime + " was not found in the stream"
3001 });
3002 }
3003
3004 var segment = matchedSegment.segment;
3005 var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);
3006
3007 if (matchedSegment.type === 'estimate') {
3008 // we've run out of retries
3009 if (retryCount === 0) {
3010 return callback({
3011 message: programTime + " is not buffered yet. Try again"
3012 });
3013 }
3014
3015 seekTo(matchedSegment.estimatedStart + mediaOffset);
3016 tech.one('seeked', function () {
3017 seekToProgramTime({
3018 programTime: programTime,
3019 playlist: playlist,
3020 retryCount: retryCount - 1,
3021 seekTo: seekTo,
3022 pauseAfterSeek: pauseAfterSeek,
3023 tech: tech,
3024 callback: callback
3025 });
3026 });
3027 return;
3028 } // Since the segment.start value is determined from the buffered end or ending time
3029 // of the prior segment, the seekToTime doesn't need to account for any transmuxer
3030 // modifications.
3031
3032
3033 var seekToTime = segment.start + mediaOffset;
3034
3035 var seekedCallback = function seekedCallback() {
3036 return callback(null, tech.currentTime());
3037 }; // listen for seeked event
3038
3039
3040 tech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this state
3041
3042 if (pauseAfterSeek) {
3043 tech.pause();
3044 }
3045
3046 seekTo(seekToTime);
3047};
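// A usage sketch (the player and loader variables are assumed; seekTo must
// actually move the playhead, e.g. a bound currentTime setter):
//
//   seekToProgramTime({
//     programTime: '2022-01-01T00:00:30Z',
//     playlist: loader.media(),
//     seekTo: player.currentTime.bind(player),
//     tech: player.tech(true),
//     callback: function (err, newTime) {
//       if (err) {
//         return videojs.log(err.message);
//       }
//       videojs.log('seeked to', newTime);
//     }
//   });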
3048
3049// calls back if the request readyState is DONE, which will only happen if the request is complete.
3050
3051var callbackOnCompleted = function callbackOnCompleted(request, cb) {
3052 if (request.readyState === 4) {
3053 return cb();
3054 }
3055
3056 return;
3057};
3058
3059var containerRequest = function containerRequest(uri, xhr, cb) {
3060 var bytes = [];
3061 var id3Offset;
3062 var finished = false;
3063
3064 var endRequestAndCallback = function endRequestAndCallback(err, req, type, _bytes) {
3065 req.abort();
3066 finished = true;
3067 return cb(err, req, type, _bytes);
3068 };
3069
3070 var progressListener = function progressListener(error, request) {
3071 if (finished) {
3072 return;
3073 }
3074
3075 if (error) {
3076 return endRequestAndCallback(error, request, '', bytes);
3077 } // grab the new part of content that was just downloaded
3078
3079
3080 var newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytes
3081
3082 bytes = concatTypedArrays(bytes, stringToBytes(newPart, true));
3083 id3Offset = id3Offset || getId3Offset(bytes); // we need at least 10 bytes to determine a type
3084 // or we need at least two bytes after an id3Offset
3085
3086 if (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {
3087 return callbackOnCompleted(request, function () {
3088 return endRequestAndCallback(error, request, '', bytes);
3089 });
3090 }
3091
3092 var type = detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data
3093 // to see the second sync byte, wait until we have enough data
3094 // before declaring it ts
3095
3096 if (type === 'ts' && bytes.length < 188) {
3097 return callbackOnCompleted(request, function () {
3098 return endRequestAndCallback(error, request, '', bytes);
3099 });
3100 } // this may be an unsynced ts segment
3101 // wait for 376 bytes before detecting no container
3102
3103
3104 if (!type && bytes.length < 376) {
3105 return callbackOnCompleted(request, function () {
3106 return endRequestAndCallback(error, request, '', bytes);
3107 });
3108 }
3109
3110 return endRequestAndCallback(null, request, type, bytes);
3111 };
3112
3113 var options = {
3114 uri: uri,
3115 beforeSend: function beforeSend(request) {
3116 // this forces the browser to pass the bytes to us unprocessed
3117 request.overrideMimeType('text/plain; charset=x-user-defined');
3118 request.addEventListener('progress', function (_ref) {
3119 // the progress event's total and loaded values are unused here;
3120 // progress is measured from request.responseText in progressListener
3121 return callbackWrapper(request, null, {
3122 statusCode: request.status
3123 }, progressListener);
3124 });
3125 }
3126 };
3127 var request = xhr(options, function (error, response) {
3128 return callbackWrapper(request, error, response, progressListener);
3129 });
3130 return request;
3131};
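// A usage sketch (URI assumed; xhr is an xhrFactory-style function such as
// vhs.xhr): the request is aborted as soon as enough bytes have arrived to
// identify the container:
//
//   containerRequest('https://example.com/init.mp4', vhs.xhr,
//     function (err, request, container, bytes) {
//       // container is a string such as 'mp4' or 'ts', or '' when
//       // the type could not be detected
//     });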
3132
3133var EventTarget = videojs.EventTarget,
3134 mergeOptions = videojs.mergeOptions;
3135
3136var dashPlaylistUnchanged = function dashPlaylistUnchanged(a, b) {
3137 if (!isPlaylistUnchanged(a, b)) {
3138 return false;
3139 } // for dash, the above check will often return true in scenarios where
3140 // the playlist has actually changed, because mediaSequence isn't a dash
3141 // concept and we often set it to 1, so playlists with the same number
3142 // of segments pass the check.
3143 // For dash we therefore need to make sure the underlying segments differ;
3144 // if the sidx changed, then the playlists are different.
3145
3146
3147 if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
3148 return false;
3149 } else if (!a.sidx && b.sidx || a.sidx && !b.sidx) {
3150 return false;
3151 } // if one playlist has segments and the other does not,
3152 // there was a change.
3153
3154
3155 if (a.segments && !b.segments || !a.segments && b.segments) {
3156 return false;
3157 } // neither has segments, so nothing changed
3158
3159
3160 if (!a.segments && !b.segments) {
3161 return true;
3162 } // check segments themselves
3163
3164
3165 for (var i = 0; i < a.segments.length; i++) {
3166 var aSegment = a.segments[i];
3167 var bSegment = b.segments[i]; // if uris are different between segments there was a change
3168
3169 if (aSegment.uri !== bSegment.uri) {
3170 return false;
3171 } // neither segment has a byterange, there will be no byterange change.
3172
3173
3174 if (!aSegment.byterange && !bSegment.byterange) {
3175 continue;
3176 }
3177
3178 var aByterange = aSegment.byterange;
3179 var bByterange = bSegment.byterange; // if byterange only exists on one of the segments, there was a change.
3180
3181 if (aByterange && !bByterange || !aByterange && bByterange) {
3182 return false;
3183 } // if both segments have byterange with different offsets, there was a change.
3184
3185
3186 if (aByterange.offset !== bByterange.offset || aByterange.length !== bByterange.length) {
3187 return false;
3188 }
3189 } // if everything was the same with segments, this is the same playlist.
3190
3191
3192 return true;
3193};
3194/**
3195 * Parses the master XML string and updates playlist URI references.
3196 *
3197 * @param {Object} config
3198 * Object of arguments
3199 * @param {string} config.masterXml
3200 * The mpd XML
3201 * @param {string} config.srcUrl
3202 * The mpd URL
3203 * @param {number} config.clientOffset
3204 * A time difference between server and client
3205 * @param {Object} config.sidxMapping
3206 * SIDX mappings for moof/mdat URIs and byte ranges
3207 * @param {Object} config.previousManifest The previously parsed manifest
3208 * @return {Object} The parsed mpd manifest object
3209 */
3210
3211
3212var parseMasterXml = function parseMasterXml(_ref) {
3213 var masterXml = _ref.masterXml,
3214 srcUrl = _ref.srcUrl,
3215 clientOffset = _ref.clientOffset,
3216 sidxMapping = _ref.sidxMapping,
3217 previousManifest = _ref.previousManifest;
3218 var manifest = parse(masterXml, {
3219 manifestUri: srcUrl,
3220 clientOffset: clientOffset,
3221 sidxMapping: sidxMapping,
3222 previousManifest: previousManifest
3223 });
3224 addPropertiesToMaster(manifest, srcUrl);
3225 return manifest;
3226};
3227/**
3228 * Returns a new master manifest that is the result of merging an updated master manifest
3229 * into the original version.
3230 *
3231 * @param {Object} oldMaster
3232 * The old parsed mpd object
3233 * @param {Object} newMaster
3234 * The updated parsed mpd object
3235 * @param {Object} sidxMapping
3236 * SIDX mappings for moof/mdat URIs and byte ranges
3237 * @return {Object} the merged master manifest, or null when nothing changed
3238 */
3239
3240var updateMaster = function updateMaster(oldMaster, newMaster, sidxMapping) {
3241 var noChanges = true;
3242 var update = mergeOptions(oldMaster, {
3243 // These are top level properties that can be updated
3244 duration: newMaster.duration,
3245 minimumUpdatePeriod: newMaster.minimumUpdatePeriod,
3246 timelineStarts: newMaster.timelineStarts
3247 }); // First update the playlists in playlist list
3248
3249 for (var i = 0; i < newMaster.playlists.length; i++) {
3250 var playlist = newMaster.playlists[i];
3251
3252 if (playlist.sidx) {
3253 var sidxKey = generateSidxKey(playlist.sidx); // add sidx segments to the playlist if we have all the sidx info already
3254
3255 if (sidxMapping && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx) {
3256 addSidxSegmentsToPlaylist(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
3257 }
3258 }
3259
3260 var playlistUpdate = updateMaster$1(update, playlist, dashPlaylistUnchanged);
3261
3262 if (playlistUpdate) {
3263 update = playlistUpdate;
3264 noChanges = false;
3265 }
3266 } // Then update media group playlists
3267
3268
3269 forEachMediaGroup(newMaster, function (properties, type, group, label) {
3270 if (properties.playlists && properties.playlists.length) {
3271 var id = properties.playlists[0].id;
3272
3273 var _playlistUpdate = updateMaster$1(update, properties.playlists[0], dashPlaylistUnchanged);
3274
3275 if (_playlistUpdate) {
3276 update = _playlistUpdate; // update the playlist reference within media groups
3277
3278 update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
3279 noChanges = false;
3280 }
3281 }
3282 });
3283
3284 if (newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
3285 noChanges = false;
3286 }
3287
3288 if (noChanges) {
3289 return null;
3290 }
3291
3292 return update;
3293}; // SIDXs should be equivalent if their URIs and byteranges match.
3294// If the SIDXs have maps, the two maps should match;
3295// both `a` and `b` missing maps is considered matching.
3296// If `a` or `b` but not both have a map, they aren't matching.
3297
3298var equivalentSidx = function equivalentSidx(a, b) {
3299 var neitherMap = Boolean(!a.map && !b.map);
3300 var equivalentMap = neitherMap || Boolean(a.map && b.map && a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length);
3301 return equivalentMap && a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;
3302}; // exported for testing
3303
3304
3305var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
3306 var newSidxMapping = {};
3307
3308 for (var id in playlists) {
3309 var playlist = playlists[id];
3310 var currentSidxInfo = playlist.sidx;
3311
3312 if (currentSidxInfo) {
3313 var key = generateSidxKey(currentSidxInfo);
3314
3315 if (!oldSidxMapping[key]) {
3316 break;
3317 }
3318
3319 var savedSidxInfo = oldSidxMapping[key].sidxInfo;
3320
3321 if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
3322 newSidxMapping[key] = oldSidxMapping[key];
3323 }
3324 }
3325 }
3326
3327 return newSidxMapping;
3328};
3329/**
3330 * A function that filters out changed items as they need to be requested separately.
3331 *
3332 * The method is exported for testing
3333 *
3334 * @param {Object} master the parsed mpd XML returned via mpd-parser
3335 * @param {Object} oldSidxMapping the SIDX to compare against
3336 */
3337
3338var filterChangedSidxMappings = function filterChangedSidxMappings(master, oldSidxMapping) {
3339 var videoSidx = compareSidxEntry(master.playlists, oldSidxMapping);
3340 var mediaGroupSidx = videoSidx;
3341 forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
3342 if (properties.playlists && properties.playlists.length) {
3343 var playlists = properties.playlists;
3344 mediaGroupSidx = mergeOptions(mediaGroupSidx, compareSidxEntry(playlists, oldSidxMapping));
3345 }
3346 });
3347 return mediaGroupSidx;
3348};
3349
3350var DashPlaylistLoader = /*#__PURE__*/function (_EventTarget) {
3351 _inheritsLoose(DashPlaylistLoader, _EventTarget);
3352
3353 // DashPlaylistLoader must accept either a src url or a playlist because subsequent
3354 // playlist loader setups from media groups will expect to be able to pass a playlist
3355 // (since there aren't external URLs to media playlists with DASH)
3356 function DashPlaylistLoader(srcUrlOrPlaylist, vhs, options, masterPlaylistLoader) {
3357 var _this;
3358
3359 if (options === void 0) {
3360 options = {};
3361 }
3362
3363 _this = _EventTarget.call(this) || this;
3364 _this.masterPlaylistLoader_ = masterPlaylistLoader || _assertThisInitialized(_this);
3365
3366 if (!masterPlaylistLoader) {
3367 _this.isMaster_ = true;
3368 }
3369
3370 var _options = options,
3371 _options$withCredenti = _options.withCredentials,
3372 withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
3373 _options$handleManife = _options.handleManifestRedirects,
3374 handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
3375 _this.vhs_ = vhs;
3376 _this.withCredentials = withCredentials;
3377 _this.handleManifestRedirects = handleManifestRedirects;
3378
3379 if (!srcUrlOrPlaylist) {
3380 throw new Error('A non-empty playlist URL or object is required');
3381 } // event naming?
3382
3383
3384 _this.on('minimumUpdatePeriod', function () {
3385 _this.refreshXml_();
3386 }); // live playlist staleness timeout
3387
3388
3389 _this.on('mediaupdatetimeout', function () {
3390 _this.refreshMedia_(_this.media().id);
3391 });
3392
3393 _this.state = 'HAVE_NOTHING';
3394 _this.loadedPlaylists_ = {};
3395 _this.logger_ = logger('DashPlaylistLoader'); // initialize the loader state
3396 // The masterPlaylistLoader will be created with a string
3397
3398 if (_this.isMaster_) {
3399 _this.masterPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes
3400 // once multi-period is refactored
3401
3402 _this.masterPlaylistLoader_.sidxMapping_ = {};
3403 } else {
3404 _this.childPlaylist_ = srcUrlOrPlaylist;
3405 }
3406
3407 return _this;
3408 }
3409
3410 var _proto = DashPlaylistLoader.prototype;
3411
3412 _proto.requestErrored_ = function requestErrored_(err, request, startingState) {
3413 // disposed
3414 if (!this.request) {
3415 return true;
3416 } // pending request is cleared
3417
3418
3419 this.request = null;
3420
3421 if (err) {
3422 // use the provided error object or create one
3423 // based on the request/response
3424 this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
3425 status: request.status,
3426 message: 'DASH request error at URL: ' + request.uri,
3427 response: request.response,
3428 // MEDIA_ERR_NETWORK
3429 code: 2
3430 };
3431
3432 if (startingState) {
3433 this.state = startingState;
3434 }
3435
3436 this.trigger('error');
3437 return true;
3438 }
3439 }
3440 /**
3441 * Verify that the container of the sidx segment can be parsed
3442 * and if it can, get and parse that segment.
3443 */
3444 ;
3445
3446 _proto.addSidxSegments_ = function addSidxSegments_(playlist, startingState, cb) {
3447 var _this2 = this;
3448
3449 var sidxKey = playlist.sidx && generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.
3450
3451 if (!playlist.sidx || !sidxKey || this.masterPlaylistLoader_.sidxMapping_[sidxKey]) {
3452 // keep this function async
3453 this.mediaRequest_ = window$1.setTimeout(function () {
3454 return cb(false);
3455 }, 0);
3456 return;
3457 } // resolve the segment URL relative to the playlist
3458
3459
3460 var uri = resolveManifestRedirect(this.handleManifestRedirects, playlist.sidx.resolvedUri);
3461
3462 var fin = function fin(err, request) {
3463 if (_this2.requestErrored_(err, request, startingState)) {
3464 return;
3465 }
3466
3467 var sidxMapping = _this2.masterPlaylistLoader_.sidxMapping_;
3468 var sidx;
3469
3470 try {
3471 sidx = parseSidx(toUint8(request.response).subarray(8));
3472 } catch (e) {
3473 // sidx parsing failed.
3474 _this2.requestErrored_(e, request, startingState);
3475
3476 return;
3477 }
3478
3479 sidxMapping[sidxKey] = {
3480 sidxInfo: playlist.sidx,
3481 sidx: sidx
3482 };
3483 addSidxSegmentsToPlaylist(playlist, sidx, playlist.sidx.resolvedUri);
3484 return cb(true);
3485 };
3486
3487 this.request = containerRequest(uri, this.vhs_.xhr, function (err, request, container, bytes) {
3488 if (err) {
3489 return fin(err, request);
3490 }
3491
3492 if (!container || container !== 'mp4') {
3493 return fin({
3494 status: request.status,
3495 message: "Unsupported " + (container || 'unknown') + " container type for sidx segment at URL: " + uri,
3496 // response is just bytes in this case
3497 // but we really don't want to return that.
3498 response: '',
3499 playlist: playlist,
3500 internal: true,
3501 blacklistDuration: Infinity,
3502 // MEDIA_ERR_NETWORK
3503 code: 2
3504 }, request);
3505 } // if we already downloaded the sidx bytes in the container request, use them
3506
3507
3508 var _playlist$sidx$bytera = playlist.sidx.byterange,
3509 offset = _playlist$sidx$bytera.offset,
3510 length = _playlist$sidx$bytera.length;
3511
3512 if (bytes.length >= length + offset) {
3513 return fin(err, {
3514 response: bytes.subarray(offset, offset + length),
3515 status: request.status,
3516 uri: request.uri
3517 });
3518 } // otherwise request sidx bytes
3519
3520
3521 _this2.request = _this2.vhs_.xhr({
3522 uri: uri,
3523 responseType: 'arraybuffer',
3524 headers: segmentXhrHeaders({
3525 byterange: playlist.sidx.byterange
3526 })
3527 }, fin);
3528 });
3529 };
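/**
 * The byterange arithmetic above with concrete numbers: for a sidx byterange
 * of { offset: 200, length: 100 } the sidx bytes occupy [200, 300). If the
 * container probe already buffered at least 300 bytes, they are sliced out
 * locally; otherwise a second, ranged request is issued (segmentXhrHeaders is
 * expected to translate the byterange into an HTTP Range header):
 *
 *   // bytes.length >= 100 + 200, so no extra request is needed
 *   var sidxBytes = bytes.subarray(200, 200 + 100);
 */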
3530
3531 _proto.dispose = function dispose() {
3532 this.trigger('dispose');
3533 this.stopRequest();
3534 this.loadedPlaylists_ = {};
3535 window$1.clearTimeout(this.minimumUpdatePeriodTimeout_);
3536 window$1.clearTimeout(this.mediaRequest_);
3537 window$1.clearTimeout(this.mediaUpdateTimeout);
3538 this.mediaUpdateTimeout = null;
3539 this.mediaRequest_ = null;
3540 this.minimumUpdatePeriodTimeout_ = null;
3541
3542 if (this.masterPlaylistLoader_.createMupOnMedia_) {
3543 this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
3544 this.masterPlaylistLoader_.createMupOnMedia_ = null;
3545 }
3546
3547 this.off();
3548 };
3549
3550 _proto.hasPendingRequest = function hasPendingRequest() {
3551 return this.request || this.mediaRequest_;
3552 };
3553
3554 _proto.stopRequest = function stopRequest() {
3555 if (this.request) {
3556 var oldRequest = this.request;
3557 this.request = null;
3558 oldRequest.onreadystatechange = null;
3559 oldRequest.abort();
3560 }
3561 };
3562
3563 _proto.media = function media(playlist) {
3564 var _this3 = this;
3565
3566 // getter
3567 if (!playlist) {
3568 return this.media_;
3569 } // setter
3570
3571
3572 if (this.state === 'HAVE_NOTHING') {
3573 throw new Error('Cannot switch media playlist from ' + this.state);
3574 }
3575
3576 var startingState = this.state; // find the playlist object if the target playlist has been specified by URI
3577
3578 if (typeof playlist === 'string') {
3579 if (!this.masterPlaylistLoader_.master.playlists[playlist]) {
3580 throw new Error('Unknown playlist URI: ' + playlist);
3581 }
3582
3583 playlist = this.masterPlaylistLoader_.master.playlists[playlist];
3584 }
3585
3586 var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediately
3587
3588 if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
3589 this.state = 'HAVE_METADATA';
3590 this.media_ = playlist; // trigger media change if the active media has been updated
3591
3592 if (mediaChange) {
3593 this.trigger('mediachanging');
3594 this.trigger('mediachange');
3595 }
3596
3597 return;
3598 } // switching to the active playlist is a no-op
3599
3600
3601 if (!mediaChange) {
3602 return;
3603 } // switching from an already loaded playlist
3604
3605
3606 if (this.media_) {
3607 this.trigger('mediachanging');
3608 }
3609
3610 this.addSidxSegments_(playlist, startingState, function (sidxChanged) {
3611 // everything is ready; just continue to haveMetadata
3612 _this3.haveMetadata({
3613 startingState: startingState,
3614 playlist: playlist
3615 });
3616 });
3617 };
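/**
 * Usage sketch for media(): with no argument it acts as a getter, with an
 * argument as a setter that may first fetch sidx segments. Strings are looked
 * up on master.playlists, so they must name a known playlist id/URI:
 *
 *   loader.media();            // -> the active media playlist, if any
 *   loader.media(playlistObj); // switch by object
 *   loader.media('0-placeholder-uri-0'); // switch by id (illustrative key)
 */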
3618
3619 _proto.haveMetadata = function haveMetadata(_ref2) {
3620 var startingState = _ref2.startingState,
3621 playlist = _ref2.playlist;
3622 this.state = 'HAVE_METADATA';
3623 this.loadedPlaylists_[playlist.id] = playlist;
3624 this.mediaRequest_ = null; // This will trigger loadedplaylist
3625
3626 this.refreshMedia_(playlist.id); // fire loadedmetadata the first time a media playlist is loaded
3627 // to resolve setup of media groups
3628
3629 if (startingState === 'HAVE_MASTER') {
3630 this.trigger('loadedmetadata');
3631 } else {
3632 // trigger media change if the active media has been updated
3633 this.trigger('mediachange');
3634 }
3635 };
3636
3637 _proto.pause = function pause() {
3638 if (this.masterPlaylistLoader_.createMupOnMedia_) {
3639 this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
3640 this.masterPlaylistLoader_.createMupOnMedia_ = null;
3641 }
3642
3643 this.stopRequest();
3644 window$1.clearTimeout(this.mediaUpdateTimeout);
3645 this.mediaUpdateTimeout = null;
3646
3647 if (this.isMaster_) {
3648 window$1.clearTimeout(this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_);
3649 this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;
3650 }
3651
3652 if (this.state === 'HAVE_NOTHING') {
3653 // If we pause the loader before any data has been retrieved, it's as if we
3654 // never started, so reset to an unstarted state.
3655 this.started = false;
3656 }
3657 };
3658
3659 _proto.load = function load(isFinalRendition) {
3660 var _this4 = this;
3661
3662 window$1.clearTimeout(this.mediaUpdateTimeout);
3663 this.mediaUpdateTimeout = null;
3664 var media = this.media();
3665
3666 if (isFinalRendition) {
3667 var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
3668 this.mediaUpdateTimeout = window$1.setTimeout(function () {
3669 return _this4.load();
3670 }, delay);
3671 return;
3672 } // because the playlists are internal to the manifest, load should either load the
3673 // main manifest, or do nothing but trigger an event
3674
3675
3676 if (!this.started) {
3677 this.start();
3678 return;
3679 }
3680
3681 if (media && !media.endList) {
3682 // Check to see if this is the master loader and the MUP was cleared (this happens
3683 // when the loader was paused). `media` should be set at this point since one is always
3684 // set during `start()`.
3685 if (this.isMaster_ && !this.minimumUpdatePeriodTimeout_) {
3686 // Trigger minimumUpdatePeriod to refresh the master manifest
3687 this.trigger('minimumUpdatePeriod'); // Since there was no prior minimumUpdatePeriodTimeout it should be recreated
3688
3689 this.updateMinimumUpdatePeriodTimeout_();
3690 }
3691
3692 this.trigger('mediaupdatetimeout');
3693 } else {
3694 this.trigger('loadedplaylist');
3695 }
3696 };
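// Concrete numbers for the final-rendition backoff above: with a
// targetDuration of 10 seconds the retry is scheduled after
// 10 / 2 * 1000 = 5000ms; with no media playlist yet, the flat
// 5 * 1000 = 5000ms default applies.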
3697
3698 _proto.start = function start() {
3699 var _this5 = this;
3700
3701 this.started = true; // We don't need to request the master manifest again
3702 // Call this asynchronously to match the xhr request behavior below
3703
3704 if (!this.isMaster_) {
3705 this.mediaRequest_ = window$1.setTimeout(function () {
3706 return _this5.haveMaster_();
3707 }, 0);
3708 return;
3709 }
3710
3711 this.requestMaster_(function (req, masterChanged) {
3712 _this5.haveMaster_();
3713
3714 if (!_this5.hasPendingRequest() && !_this5.media_) {
3715 _this5.media(_this5.masterPlaylistLoader_.master.playlists[0]);
3716 }
3717 });
3718 };
3719
3720 _proto.requestMaster_ = function requestMaster_(cb) {
3721 var _this6 = this;
3722
3723 this.request = this.vhs_.xhr({
3724 uri: this.masterPlaylistLoader_.srcUrl,
3725 withCredentials: this.withCredentials
3726 }, function (error, req) {
3727 if (_this6.requestErrored_(error, req)) {
3728 if (_this6.state === 'HAVE_NOTHING') {
3729 _this6.started = false;
3730 }
3731
3732 return;
3733 }
3734
3735 var masterChanged = req.responseText !== _this6.masterPlaylistLoader_.masterXml_;
3736 _this6.masterPlaylistLoader_.masterXml_ = req.responseText;
3737
3738 if (req.responseHeaders && req.responseHeaders.date) {
3739 _this6.masterLoaded_ = Date.parse(req.responseHeaders.date);
3740 } else {
3741 _this6.masterLoaded_ = Date.now();
3742 }
3743
3744 _this6.masterPlaylistLoader_.srcUrl = resolveManifestRedirect(_this6.handleManifestRedirects, _this6.masterPlaylistLoader_.srcUrl, req);
3745
3746 if (masterChanged) {
3747 _this6.handleMaster_();
3748
3749 _this6.syncClientServerClock_(function () {
3750 return cb(req, masterChanged);
3751 });
3752
3753 return;
3754 }
3755
3756 return cb(req, masterChanged);
3757 });
3758 }
3759 /**
3760 * Parses the master xml for UTCTiming node to sync the client clock to the server
3761 * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
3762 *
3763 * @param {Function} done
3764 * Function to call when clock sync has completed
3765 */
3766 ;
3767
3768 _proto.syncClientServerClock_ = function syncClientServerClock_(done) {
3769 var _this7 = this;
3770
3771 var utcTiming = parseUTCTiming(this.masterPlaylistLoader_.masterXml_); // No UTCTiming element found in the mpd. Use Date header from mpd request as the
3772 // server clock
3773
3774 if (utcTiming === null) {
3775 this.masterPlaylistLoader_.clientOffset_ = this.masterLoaded_ - Date.now();
3776 return done();
3777 }
3778
3779 if (utcTiming.method === 'DIRECT') {
3780 this.masterPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
3781 return done();
3782 }
3783
3784 this.request = this.vhs_.xhr({
3785 uri: resolveUrl(this.masterPlaylistLoader_.srcUrl, utcTiming.value),
3786 method: utcTiming.method,
3787 withCredentials: this.withCredentials
3788 }, function (error, req) {
3789 // disposed
3790 if (!_this7.request) {
3791 return;
3792 }
3793
3794 if (error) {
3795 // sync request failed, fall back to using date header from mpd
3796 // TODO: log warning
3797 _this7.masterPlaylistLoader_.clientOffset_ = _this7.masterLoaded_ - Date.now();
3798 return done();
3799 }
3800
3801 var serverTime;
3802
3803 if (utcTiming.method === 'HEAD') {
3804 if (!req.responseHeaders || !req.responseHeaders.date) {
3805 // expected date header not present, fall back to using date header from mpd
3806 // TODO: log warning
3807 serverTime = _this7.masterLoaded_;
3808 } else {
3809 serverTime = Date.parse(req.responseHeaders.date);
3810 }
3811 } else {
3812 serverTime = Date.parse(req.responseText);
3813 }
3814
3815 _this7.masterPlaylistLoader_.clientOffset_ = serverTime - Date.now();
3816 done();
3817 });
3818 };
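/**
 * A sketch of the three UTCTiming cases handled above (values illustrative):
 *
 *   // no <UTCTiming> element: offset derived from the MPD response itself
 *   clientOffset_ = masterLoaded_ - Date.now();
 *   // method 'DIRECT': mpd-parser supplies the server time as a timestamp
 *   clientOffset_ = utcTiming.value - Date.now();
 *   // method 'HEAD' or 'GET': one extra request; server time is read from
 *   // the Date response header (HEAD) or the response body (GET)
 */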
3819
3820 _proto.haveMaster_ = function haveMaster_() {
3821 this.state = 'HAVE_MASTER';
3822
3823 if (this.isMaster_) {
3824 // We have the master playlist at this point, so
3825 // trigger this to allow MasterPlaylistController
3826 // to make an initial playlist selection
3827 this.trigger('loadedplaylist');
3828 } else if (!this.media_) {
3829 // no media playlist was specifically selected so select
3830 // the one the child playlist loader was created with
3831 this.media(this.childPlaylist_);
3832 }
3833 };
3834
3835 _proto.handleMaster_ = function handleMaster_() {
3836 // clear media request
3837 this.mediaRequest_ = null;
3838 var oldMaster = this.masterPlaylistLoader_.master;
3839 var newMaster = parseMasterXml({
3840 masterXml: this.masterPlaylistLoader_.masterXml_,
3841 srcUrl: this.masterPlaylistLoader_.srcUrl,
3842 clientOffset: this.masterPlaylistLoader_.clientOffset_,
3843 sidxMapping: this.masterPlaylistLoader_.sidxMapping_,
3844 previousManifest: oldMaster
3845 }); // if we have an old master to compare the new master against
3846
3847 if (oldMaster) {
3848 newMaster = updateMaster(oldMaster, newMaster, this.masterPlaylistLoader_.sidxMapping_);
3849 } // only update master if we have a new master
3850
3851
3852 this.masterPlaylistLoader_.master = newMaster ? newMaster : oldMaster;
3853 var location = this.masterPlaylistLoader_.master.locations && this.masterPlaylistLoader_.master.locations[0];
3854
3855 if (location && location !== this.masterPlaylistLoader_.srcUrl) {
3856 this.masterPlaylistLoader_.srcUrl = location;
3857 }
3858
3859 if (!oldMaster || newMaster && newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
3860 this.updateMinimumUpdatePeriodTimeout_();
3861 }
3862
3863 return Boolean(newMaster);
3864 };
3865
3866 _proto.updateMinimumUpdatePeriodTimeout_ = function updateMinimumUpdatePeriodTimeout_() {
3867 var mpl = this.masterPlaylistLoader_; // cancel any pending creation of mup on media
3868 // a new one will be added if needed.
3869
3870 if (mpl.createMupOnMedia_) {
3871 mpl.off('loadedmetadata', mpl.createMupOnMedia_);
3872 mpl.createMupOnMedia_ = null;
3873 } // clear any pending timeouts
3874
3875
3876 if (mpl.minimumUpdatePeriodTimeout_) {
3877 window$1.clearTimeout(mpl.minimumUpdatePeriodTimeout_);
3878 mpl.minimumUpdatePeriodTimeout_ = null;
3879 }
3880
3881 var mup = mpl.master && mpl.master.minimumUpdatePeriod; // If the minimumUpdatePeriod has a value of 0, that indicates that the current
3882 // MPD has no future validity, so a new one will need to be acquired when new
3883 // media segments are to be made available. Thus, we use the target duration
3884 // in this case
3885
3886 if (mup === 0) {
3887 if (mpl.media()) {
3888 mup = mpl.media().targetDuration * 1000;
3889 } else {
3890 mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;
3891 mpl.one('loadedmetadata', mpl.createMupOnMedia_);
3892 }
3893 } // if minimumUpdatePeriod is invalid or <= zero, which
3894 // can happen when a live video becomes VOD, skip timeout
3895 // creation.
3896
3897
3898 if (typeof mup !== 'number' || mup <= 0) {
3899 if (mup < 0) {
3900 this.logger_("found invalid minimumUpdatePeriod of " + mup + ", not setting a timeout");
3901 }
3902
3903 return;
3904 }
3905
3906 this.createMUPTimeout_(mup);
3907 };
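// Worked example for the minimumUpdatePeriod logic above: an MPD with
// minimumUpdatePeriod="PT0S" parses to mup === 0, so with a current media
// playlist whose targetDuration is 4, the refresh fires every
// 4 * 1000 = 4000ms; a negative or non-numeric mup schedules no timeout.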
3908
3909 _proto.createMUPTimeout_ = function createMUPTimeout_(mup) {
3910 var mpl = this.masterPlaylistLoader_;
3911 mpl.minimumUpdatePeriodTimeout_ = window$1.setTimeout(function () {
3912 mpl.minimumUpdatePeriodTimeout_ = null;
3913 mpl.trigger('minimumUpdatePeriod');
3914 mpl.createMUPTimeout_(mup);
3915 }, mup);
3916 }
3917 /**
3918 * Sends a request to refresh the master xml and updates the parsed master manifest
3919 */
3920 ;
3921
3922 _proto.refreshXml_ = function refreshXml_() {
3923 var _this8 = this;
3924
3925 this.requestMaster_(function (req, masterChanged) {
3926 if (!masterChanged) {
3927 return;
3928 }
3929
3930 if (_this8.media_) {
3931 _this8.media_ = _this8.masterPlaylistLoader_.master.playlists[_this8.media_.id];
3932 } // This will filter out updated sidx info from the mapping
3933
3934
3935 _this8.masterPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(_this8.masterPlaylistLoader_.master, _this8.masterPlaylistLoader_.sidxMapping_);
3936
3937 _this8.addSidxSegments_(_this8.media(), _this8.state, function (sidxChanged) {
3938 // TODO: do we need to reload the current playlist?
3939 _this8.refreshMedia_(_this8.media().id);
3940 });
3941 });
3942 }
3943 /**
3944 * Refreshes the media playlist by re-parsing the master xml and updating playlist
3945 * references. If this is an alternate loader, the updated parsed manifest is retrieved
3946 * from the master loader.
3947 */
3948 ;
3949
3950 _proto.refreshMedia_ = function refreshMedia_(mediaID) {
3951 var _this9 = this;
3952
3953 if (!mediaID) {
3954 throw new Error('refreshMedia_ must take a media id');
3955 } // for master we have to reparse the master xml
3956 // to re-create segments based on current timing values
3957 // which may change media. We only skip updating master
3958 // if this is the first time this.media_ is being set,
3959 // as master was just parsed in that case.
3960
3961
3962 if (this.media_ && this.isMaster_) {
3963 this.handleMaster_();
3964 }
3965
3966 var playlists = this.masterPlaylistLoader_.master.playlists;
3967 var mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];
3968
3969 if (mediaChanged) {
3970 this.media_ = playlists[mediaID];
3971 } else {
3972 this.trigger('playlistunchanged');
3973 }
3974
3975 if (!this.mediaUpdateTimeout) {
3976 var createMediaUpdateTimeout = function createMediaUpdateTimeout() {
3977 if (_this9.media().endList) {
3978 return;
3979 }
3980
3981 _this9.mediaUpdateTimeout = window$1.setTimeout(function () {
3982 _this9.trigger('mediaupdatetimeout');
3983
3984 createMediaUpdateTimeout();
3985 }, refreshDelay(_this9.media(), Boolean(mediaChanged)));
3986 };
3987
3988 createMediaUpdateTimeout();
3989 }
3990
3991 this.trigger('loadedplaylist');
3992 };
3993
3994 return DashPlaylistLoader;
3995}(EventTarget);
3996
3997var Config = {
3998 GOAL_BUFFER_LENGTH: 30,
3999 MAX_GOAL_BUFFER_LENGTH: 60,
4000 BACK_BUFFER_LENGTH: 30,
4001 GOAL_BUFFER_LENGTH_RATE: 1,
4002 // 0.5 MB/s
4003 INITIAL_BANDWIDTH: 4194304,
4004 // A fudge factor to apply to advertised playlist bitrates to account for
4005 // temporary fluctuations in client bandwidth
4006 BANDWIDTH_VARIANCE: 1.2,
4007 // How much of the buffer must be filled before we consider upswitching
4008 BUFFER_LOW_WATER_LINE: 0,
4009 MAX_BUFFER_LOW_WATER_LINE: 30,
4010 // TODO: Remove this when experimentalBufferBasedABR is removed
4011 EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
4012 BUFFER_LOW_WATER_LINE_RATE: 1,
4013 // If the buffer is greater than the high water line, we won't switch down
4014 BUFFER_HIGH_WATER_LINE: 30
4015};
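/**
 * These defaults are tunable at runtime; the VHS documentation exposes them
 * as properties on videojs.Vhs (a sketch assuming that documented surface;
 * set them before playback starts):
 *
 *   videojs.Vhs.GOAL_BUFFER_LENGTH = 60;  // buffer further ahead
 *   videojs.Vhs.BANDWIDTH_VARIANCE = 1.5; // be more conservative upswitching
 */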
4016
4017var stringToArrayBuffer = function stringToArrayBuffer(string) {
4018 var view = new Uint8Array(new ArrayBuffer(string.length));
4019
4020 for (var i = 0; i < string.length; i++) {
4021 view[i] = string.charCodeAt(i);
4022 }
4023
4024 return view.buffer;
4025};
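// Example: stringToArrayBuffer('abc') yields a 3-byte ArrayBuffer viewed as
// [0x61, 0x62, 0x63]. charCodeAt values are truncated to 8 bits on
// assignment, so the input is expected to be a binary (latin1) string.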
4026
4027/* global Blob, BlobBuilder, Worker */
4028// unify worker interface
4029var browserWorkerPolyFill = function browserWorkerPolyFill(workerObj) {
4030 // node only supports on/off
4031 workerObj.on = workerObj.addEventListener;
4032 workerObj.off = workerObj.removeEventListener;
4033 return workerObj;
4034};
4035
4036var createObjectURL = function createObjectURL(str) {
4037 try {
4038 return URL.createObjectURL(new Blob([str], {
4039 type: 'application/javascript'
4040 }));
4041 } catch (e) {
4042 var blob = new BlobBuilder();
4043 blob.append(str);
4044 return URL.createObjectURL(blob.getBlob());
4045 }
4046};
4047
4048var factory = function factory(code) {
4049 return function () {
4050 var objectUrl = createObjectURL(code);
4051 var worker = browserWorkerPolyFill(new Worker(objectUrl));
4052 worker.objURL = objectUrl;
4053 var terminate = worker.terminate;
4054 worker.on = worker.addEventListener;
4055 worker.off = worker.removeEventListener;
4056
4057 worker.terminate = function () {
4058 URL.revokeObjectURL(objectUrl);
4059 return terminate.call(this);
4060 };
4061
4062 return worker;
4063 };
4064};
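/**
 * Usage sketch for factory(): `workerCode` is any stringified worker body,
 * such as the transmuxer code assembled below; `handleMessage` is an
 * illustrative handler:
 *
 *   var createTransmuxer = factory(workerCode);
 *   var worker = createTransmuxer(); // Blob URL + Worker with on/off aliases
 *   worker.on('message', handleMessage);
 *   worker.terminate(); // also revokes the underlying object URL
 */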
4065var transform = function transform(code) {
4066 return "var browserWorkerPolyFill = " + browserWorkerPolyFill.toString() + ";\n" + 'browserWorkerPolyFill(self);\n' + code;
4067};
4068
4069var getWorkerString = function getWorkerString(fn) {
4070 return fn.toString().replace(/^function.+?{/, '').slice(0, -1);
4071};
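// Example: getWorkerString(function () { self.onmessage = handler; })
// strips the wrapper and returns ' self.onmessage = handler; ' -- just the
// body, ready to be wrapped by transform() and loaded via createObjectURL().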
4072
4073/* rollup-plugin-worker-factory start for worker!/Users/bclifford/Code/vhs-release-test/src/transmuxer-worker.js */
4074var workerCode$1 = transform(getWorkerString(function () {
4075 /**
4076 * mux.js
4077 *
4078 * Copyright (c) Brightcove
4079 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4080 *
4081 * A lightweight readable stream implementation that handles event dispatching.
4082 * Objects that inherit from streams should call init in their constructors.
4083 */
4084
4085 var Stream = function Stream() {
4086 this.init = function () {
4087 var listeners = {};
4088 /**
4089 * Add a listener for a specified event type.
4090 * @param type {string} the event name
4091 * @param listener {function} the callback to be invoked when an event of
4092 * the specified type occurs
4093 */
4094
4095 this.on = function (type, listener) {
4096 if (!listeners[type]) {
4097 listeners[type] = [];
4098 }
4099
4100 listeners[type] = listeners[type].concat(listener);
4101 };
4102 /**
4103 * Remove a listener for a specified event type.
4104 * @param type {string} the event name
4105 * @param listener {function} a function previously registered for this
4106 * type of event through `on`
4107 */
4108
4109
4110 this.off = function (type, listener) {
4111 var index;
4112
4113 if (!listeners[type]) {
4114 return false;
4115 }
4116
4117 index = listeners[type].indexOf(listener);
4118 listeners[type] = listeners[type].slice();
4119 listeners[type].splice(index, 1);
4120 return index > -1;
4121 };
4122 /**
4123 * Trigger an event of the specified type on this stream. Any additional
4124 * arguments to this function are passed as parameters to event listeners.
4125 * @param type {string} the event name
4126 */
4127
4128
4129 this.trigger = function (type) {
4130 var callbacks, i, length, args;
4131 callbacks = listeners[type];
4132
4133 if (!callbacks) {
4134 return;
4135 } // Slicing the arguments on every invocation of this method
4136 // can add a significant amount of overhead. Avoid the
4137 // intermediate object creation for the common case of a
4138 // single callback argument
4139
4140
4141 if (arguments.length === 2) {
4142 length = callbacks.length;
4143
4144 for (i = 0; i < length; ++i) {
4145 callbacks[i].call(this, arguments[1]);
4146 }
4147 } else {
4148 args = [];
4149 i = arguments.length;
4150
4151 for (i = 1; i < arguments.length; ++i) {
4152 args.push(arguments[i]);
4153 }
4154
4155 length = callbacks.length;
4156
4157 for (i = 0; i < length; ++i) {
4158 callbacks[i].apply(this, args);
4159 }
4160 }
4161 };
4162 /**
4163 * Destroys the stream and cleans up.
4164 */
4165
4166
4167 this.dispose = function () {
4168 listeners = {};
4169 };
4170 };
4171 };
4172 /**
4173 * Forwards all `data` events on this stream to the destination stream. The
4174 * destination stream should provide a method `push` to receive the data
4175 * events as they arrive.
4176 * @param destination {stream} the stream that will receive all `data` events
4177 * @param autoFlush {boolean} if false, we will not call `flush` on the destination
4178 * when the current stream emits a 'done' event
4179 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
4180 */
4181
4182
4183 Stream.prototype.pipe = function (destination) {
4184 this.on('data', function (data) {
4185 destination.push(data);
4186 });
4187 this.on('done', function (flushSource) {
4188 destination.flush(flushSource);
4189 });
4190 this.on('partialdone', function (flushSource) {
4191 destination.partialFlush(flushSource);
4192 });
4193 this.on('endedtimeline', function (flushSource) {
4194 destination.endTimeline(flushSource);
4195 });
4196 this.on('reset', function (flushSource) {
4197 destination.reset(flushSource);
4198 });
4199 return destination;
4200 }; // Default stream functions that are expected to be overridden to perform
4201 // actual work. These are provided by the prototype as a sort of no-op
4202 // implementation so that we don't have to check for their existence in the
4203 // `pipe` function above.
4204
4205
4206 Stream.prototype.push = function (data) {
4207 this.trigger('data', data);
4208 };
4209
4210 Stream.prototype.flush = function (flushSource) {
4211 this.trigger('done', flushSource);
4212 };
4213
4214 Stream.prototype.partialFlush = function (flushSource) {
4215 this.trigger('partialdone', flushSource);
4216 };
4217
4218 Stream.prototype.endTimeline = function (flushSource) {
4219 this.trigger('endedtimeline', flushSource);
4220 };
4221
4222 Stream.prototype.reset = function (flushSource) {
4223 this.trigger('reset', flushSource);
4224 };
4225
4226 var stream = Stream;
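  /**
   * Usage sketch: stream classes call init() in their constructor and are
   * chained with pipe(). The names below are illustrative, not exports:
   *
   *   var Doubler = function () {
   *     this.init();
   *     this.push = function (n) { this.trigger('data', n * 2); };
   *   };
   *   Doubler.prototype = new Stream();
   *   var doubler = new Doubler(), sink = new Stream();
   *   sink.init();
   *   doubler.pipe(sink);
   *   sink.on('data', console.log);
   *   doubler.push(21); // logs 42
   */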
4227 var MAX_UINT32$1 = Math.pow(2, 32);
4228
4229 var getUint64$2 = function getUint64(uint8) {
4230 var dv = new DataView(uint8.buffer, uint8.byteOffset, uint8.byteLength);
4231 var value;
4232
4233 if (dv.getBigUint64) {
4234 value = dv.getBigUint64(0);
4235
4236 if (value < Number.MAX_SAFE_INTEGER) {
4237 return Number(value);
4238 }
4239
4240 return value;
4241 }
4242
4243 return dv.getUint32(0) * MAX_UINT32$1 + dv.getUint32(4);
4244 };
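  // Example: for the bytes [0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00]
  // the fallback path computes 1 * 2^32 + 0 = 4294967296; the getBigUint64
  // path returns the same value, downcast to a Number while it fits below
  // Number.MAX_SAFE_INTEGER.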
4245
4246 var numbers = {
4247 getUint64: getUint64$2,
4248 MAX_UINT32: MAX_UINT32$1
4249 };
4250 var MAX_UINT32 = numbers.MAX_UINT32;
4251 var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants
4252
4253 (function () {
4254 var i;
4255 types = {
4256 avc1: [],
4257 // codingname
4258 avcC: [],
4259 btrt: [],
4260 dinf: [],
4261 dref: [],
4262 esds: [],
4263 ftyp: [],
4264 hdlr: [],
4265 mdat: [],
4266 mdhd: [],
4267 mdia: [],
4268 mfhd: [],
4269 minf: [],
4270 moof: [],
4271 moov: [],
4272 mp4a: [],
4273 // codingname
4274 mvex: [],
4275 mvhd: [],
4276 pasp: [],
4277 sdtp: [],
4278 smhd: [],
4279 stbl: [],
4280 stco: [],
4281 stsc: [],
4282 stsd: [],
4283 stsz: [],
4284 stts: [],
4285 styp: [],
4286 tfdt: [],
4287 tfhd: [],
4288 traf: [],
4289 trak: [],
4290 trun: [],
4291 trex: [],
4292 tkhd: [],
4293 vmhd: []
4294 }; // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we
4295 // don't throw an error
4296
4297 if (typeof Uint8Array === 'undefined') {
4298 return;
4299 }
4300
4301 for (i in types) {
4302 if (types.hasOwnProperty(i)) {
4303 types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
4304 }
4305 }
4306
4307 MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
4308 AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
4309 MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
4310 VIDEO_HDLR = new Uint8Array([0x00, // version 0
4311 0x00, 0x00, 0x00, // flags
4312 0x00, 0x00, 0x00, 0x00, // pre_defined
4313 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
4314 0x00, 0x00, 0x00, 0x00, // reserved
4315 0x00, 0x00, 0x00, 0x00, // reserved
4316 0x00, 0x00, 0x00, 0x00, // reserved
4317 0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
4318 ]);
4319 AUDIO_HDLR = new Uint8Array([0x00, // version 0
4320 0x00, 0x00, 0x00, // flags
4321 0x00, 0x00, 0x00, 0x00, // pre_defined
4322 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
4323 0x00, 0x00, 0x00, 0x00, // reserved
4324 0x00, 0x00, 0x00, 0x00, // reserved
4325 0x00, 0x00, 0x00, 0x00, // reserved
4326 0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
4327 ]);
4328 HDLR_TYPES = {
4329 video: VIDEO_HDLR,
4330 audio: AUDIO_HDLR
4331 };
4332 DREF = new Uint8Array([0x00, // version 0
4333 0x00, 0x00, 0x00, // flags
4334 0x00, 0x00, 0x00, 0x01, // entry_count
4335 0x00, 0x00, 0x00, 0x0c, // entry_size
4336 0x75, 0x72, 0x6c, 0x20, // 'url' type
4337 0x00, // version 0
4338 0x00, 0x00, 0x01 // entry_flags
4339 ]);
4340 SMHD = new Uint8Array([0x00, // version
4341 0x00, 0x00, 0x00, // flags
4342 0x00, 0x00, // balance, 0 means centered
4343 0x00, 0x00 // reserved
4344 ]);
4345 STCO = new Uint8Array([0x00, // version
4346 0x00, 0x00, 0x00, // flags
4347 0x00, 0x00, 0x00, 0x00 // entry_count
4348 ]);
4349 STSC = STCO;
4350 STSZ = new Uint8Array([0x00, // version
4351 0x00, 0x00, 0x00, // flags
4352 0x00, 0x00, 0x00, 0x00, // sample_size
4353 0x00, 0x00, 0x00, 0x00 // sample_count
4354 ]);
4355 STTS = STCO;
4356 VMHD = new Uint8Array([0x00, // version
4357 0x00, 0x00, 0x01, // flags
4358 0x00, 0x00, // graphicsmode
4359 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
4360 ]);
4361 })();
4362
4363 box = function box(type) {
4364 var payload = [],
4365 size = 0,
4366 i,
4367 result,
4368 view;
4369
4370 for (i = 1; i < arguments.length; i++) {
4371 payload.push(arguments[i]);
4372 }
4373
4374 i = payload.length; // calculate the total size we need to allocate
4375
4376 while (i--) {
4377 size += payload[i].byteLength;
4378 }
4379
4380 result = new Uint8Array(size + 8);
4381 view = new DataView(result.buffer, result.byteOffset, result.byteLength);
4382 view.setUint32(0, result.byteLength);
4383 result.set(type, 4); // copy the payload into the result
4384
4385 for (i = 0, size = 8; i < payload.length; i++) {
4386 result.set(payload[i], size);
4387 size += payload[i].byteLength;
4388 }
4389
4390 return result;
4391 };
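  /**
   * Worked example of the box() layout: box(types.ftyp, new Uint8Array(4))
   * allocates 4 (size) + 4 (fourcc) + 4 (payload) = 12 bytes, writes the
   * uint32 size 12 at offset 0, the 'ftyp' type at offset 4, and copies the
   * payload starting at offset 8.
   */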
4392
4393 dinf = function dinf() {
4394 return box(types.dinf, box(types.dref, DREF));
4395 };
4396
4397 esds = function esds(track) {
4398 return box(types.esds, new Uint8Array([0x00, // version
4399 0x00, 0x00, 0x00, // flags
4400 // ES_Descriptor
4401 0x03, // tag, ES_DescrTag
4402 0x19, // length
4403 0x00, 0x00, // ES_ID
4404 0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
4405 // DecoderConfigDescriptor
4406 0x04, // tag, DecoderConfigDescrTag
4407 0x11, // length
4408 0x40, // object type
4409 0x15, // streamType
4410 0x00, 0x06, 0x00, // bufferSizeDB
4411 0x00, 0x00, 0xda, 0xc0, // maxBitrate
4412 0x00, 0x00, 0xda, 0xc0, // avgBitrate
4413 // DecoderSpecificInfo
4414 0x05, // tag, DecoderSpecificInfoTag
4415 0x02, // length
4416 // ISO/IEC 14496-3, AudioSpecificConfig
4417 // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
4418 track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
4419 ]));
4420 };
4421
4422 ftyp = function ftyp() {
4423 return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
4424 };
4425
4426 hdlr = function hdlr(type) {
4427 return box(types.hdlr, HDLR_TYPES[type]);
4428 };
4429
4430 mdat = function mdat(data) {
4431 return box(types.mdat, data);
4432 };
4433
4434 mdhd = function mdhd(track) {
4435 var result = new Uint8Array([0x00, // version 0
4436 0x00, 0x00, 0x00, // flags
4437 0x00, 0x00, 0x00, 0x02, // creation_time
4438 0x00, 0x00, 0x00, 0x03, // modification_time
4439 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
4440 track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
4441 0x55, 0xc4, // 'und' language (undetermined)
4442 0x00, 0x00]); // Use the sample rate from the track metadata, when it is
4443 // defined. The sample rate can be parsed out of an ADTS header, for
4444 // instance.
4445
4446 if (track.samplerate) {
4447 result[12] = track.samplerate >>> 24 & 0xFF;
4448 result[13] = track.samplerate >>> 16 & 0xFF;
4449 result[14] = track.samplerate >>> 8 & 0xFF;
4450 result[15] = track.samplerate & 0xFF;
4451 }
4452
4453 return box(types.mdhd, result);
4454 };
4455
4456 mdia = function mdia(track) {
4457 return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
4458 };
4459
4460 mfhd = function mfhd(sequenceNumber) {
4461 return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
4462 (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
4463 ]));
4464 };
4465
4466 minf = function minf(track) {
4467 return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
4468 };
4469
4470 moof = function moof(sequenceNumber, tracks) {
4471 var trackFragments = [],
4472 i = tracks.length; // build traf boxes for each track fragment
4473
4474 while (i--) {
4475 trackFragments[i] = traf(tracks[i]);
4476 }
4477
4478 return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
4479 };
4480 /**
4481 * Returns a movie box.
4482 * @param tracks {array} the tracks associated with this movie
4483 * @see ISO/IEC 14496-12:2012(E), section 8.2.1
4484 */
4485
4486
4487 moov = function moov(tracks) {
4488 var i = tracks.length,
4489 boxes = [];
4490
4491 while (i--) {
4492 boxes[i] = trak(tracks[i]);
4493 }
4494
4495 return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
4496 };
4497
4498 mvex = function mvex(tracks) {
4499 var i = tracks.length,
4500 boxes = [];
4501
4502 while (i--) {
4503 boxes[i] = trex(tracks[i]);
4504 }
4505
4506 return box.apply(null, [types.mvex].concat(boxes));
4507 };
4508
4509 mvhd = function mvhd(duration) {
4510 var bytes = new Uint8Array([0x00, // version 0
4511 0x00, 0x00, 0x00, // flags
4512 0x00, 0x00, 0x00, 0x01, // creation_time
4513 0x00, 0x00, 0x00, 0x02, // modification_time
4514 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
4515 (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
4516 0x00, 0x01, 0x00, 0x00, // 1.0 rate
4517 0x01, 0x00, // 1.0 volume
4518 0x00, 0x00, // reserved
4519 0x00, 0x00, 0x00, 0x00, // reserved
4520 0x00, 0x00, 0x00, 0x00, // reserved
4521 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
4522 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
4523 0xff, 0xff, 0xff, 0xff // next_track_ID
4524 ]);
4525 return box(types.mvhd, bytes);
4526 };
4527
4528 sdtp = function sdtp(track) {
4529 var samples = track.samples || [],
4530 bytes = new Uint8Array(4 + samples.length),
4531 flags,
4532 i; // leave the full box header (4 bytes) all zero
4533 // write the sample table
4534
4535 for (i = 0; i < samples.length; i++) {
4536 flags = samples[i].flags;
4537 bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
4538 }
4539
4540 return box(types.sdtp, bytes);
4541 };
4542
4543 stbl = function stbl(track) {
4544 return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
4545 };
4546
4547 (function () {
4548 var videoSample, audioSample;
4549
4550 stsd = function stsd(track) {
4551 return box(types.stsd, new Uint8Array([0x00, // version 0
4552 0x00, 0x00, 0x00, // flags
4553 0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
4554 };
4555
4556 videoSample = function videoSample(track) {
4557 var sps = track.sps || [],
4558 pps = track.pps || [],
4559 sequenceParameterSets = [],
4560 pictureParameterSets = [],
4561 i,
4562 avc1Box; // assemble the SPSs
4563
4564 for (i = 0; i < sps.length; i++) {
4565 sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
4566 sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength
4567
4568 sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
4569 } // assemble the PPSs
4570
4571
4572 for (i = 0; i < pps.length; i++) {
4573 pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
4574 pictureParameterSets.push(pps[i].byteLength & 0xFF);
4575 pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
4576 }
4577
4578 avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
4579 0x00, 0x01, // data_reference_index
4580 0x00, 0x00, // pre_defined
4581 0x00, 0x00, // reserved
4582 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
4583 (track.width & 0xff00) >> 8, track.width & 0xff, // width
4584 (track.height & 0xff00) >> 8, track.height & 0xff, // height
4585 0x00, 0x48, 0x00, 0x00, // horizresolution
4586 0x00, 0x48, 0x00, 0x00, // vertresolution
4587 0x00, 0x00, 0x00, 0x00, // reserved
4588 0x00, 0x01, // frame_count
4589 0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
4590 0x00, 0x18, // depth = 24
4591 0x11, 0x11 // pre_defined = -1
4592 ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
4593 track.profileIdc, // AVCProfileIndication
4594 track.profileCompatibility, // profile_compatibility
4595 track.levelIdc, // AVCLevelIndication
4596 0xff // lengthSizeMinusOne, hard-coded to 4 bytes
4597 ].concat([sps.length], // numOfSequenceParameterSets
4598 sequenceParameterSets, // "SPS"
4599 [pps.length], // numOfPictureParameterSets
4600 pictureParameterSets // "PPS"
4601 ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
4602 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
4603 0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
4604 ]))];
4605
4606 if (track.sarRatio) {
4607 var hSpacing = track.sarRatio[0],
4608 vSpacing = track.sarRatio[1];
4609 avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
4610 }
4611
4612 return box.apply(null, avc1Box);
4613 };
4614
4615 audioSample = function audioSample(track) {
4616 return box(types.mp4a, new Uint8Array([// SampleEntry, ISO/IEC 14496-12
4617 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
4618 0x00, 0x01, // data_reference_index
4619 // AudioSampleEntry, ISO/IEC 14496-12
4620 0x00, 0x00, 0x00, 0x00, // reserved
4621 0x00, 0x00, 0x00, 0x00, // reserved
4622 (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount
4623 (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
4624 0x00, 0x00, // pre_defined
4625 0x00, 0x00, // reserved
4626 (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16
4627 // MP4AudioSampleEntry, ISO/IEC 14496-14
4628 ]), esds(track));
4629 };
4630 })();
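  // Worked example of the SPS/PPS packing in videoSample above: a single SPS
  // of 20 bytes contributes [0x00, 0x14] (its 16-bit big-endian length)
  // followed by the 20 SPS bytes, and the avcC parameter-set count byte is
  // simply sps.length (1 here).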
4631
4632 tkhd = function tkhd(track) {
4633 var result = new Uint8Array([0x00, // version 0
4634 0x00, 0x00, 0x07, // flags
4635 0x00, 0x00, 0x00, 0x00, // creation_time
4636 0x00, 0x00, 0x00, 0x00, // modification_time
4637 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
4638 0x00, 0x00, 0x00, 0x00, // reserved
4639 (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
4640 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
4641 0x00, 0x00, // layer
4642 0x00, 0x00, // alternate_group
4643 0x01, 0x00, // non-audio track volume
4644 0x00, 0x00, // reserved
4645 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
4646 (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
4647 (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
4648 ]);
4649 return box(types.tkhd, result);
4650 };
4651 /**
4652 * Generate a track fragment (traf) box. A traf box collects metadata
4653 * about tracks in a movie fragment (moof) box.
4654 */
4655
4656
4657 traf = function traf(track) {
4658 var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
4659 trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
4660 0x00, 0x00, 0x3a, // flags
4661 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
4662 0x00, 0x00, 0x00, 0x01, // sample_description_index
4663 0x00, 0x00, 0x00, 0x00, // default_sample_duration
4664 0x00, 0x00, 0x00, 0x00, // default_sample_size
4665 0x00, 0x00, 0x00, 0x00 // default_sample_flags
4666 ]));
4667 upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / MAX_UINT32);
4668 lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % MAX_UINT32);
4669 trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
4670 0x00, 0x00, 0x00, // flags
4671 // baseMediaDecodeTime
4672 upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of
4673 // the containing moof to the first payload byte of the associated
4674 // mdat
4675
4676 dataOffset = 32 + // tfhd
4677 20 + // tfdt
4678 8 + // traf header
4679 16 + // mfhd
4680 8 + // moof header
4681 8; // mdat header
4682 // audio tracks require less metadata
4683
4684 if (track.type === 'audio') {
4685 trackFragmentRun = trun$1(track, dataOffset);
4686 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
4687 } // video tracks should contain an independent and disposable samples
4688 // box (sdtp)
4689 // generate one and adjust offsets to match
4690
4691
4692 sampleDependencyTable = sdtp(track);
4693 trackFragmentRun = trun$1(track, sampleDependencyTable.length + dataOffset);
4694 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
4695 };
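  // Worked example of the dataOffset arithmetic above:
  // 32 + 20 + 8 + 16 + 8 + 8 = 92 bytes from the start of the moof to the
  // first mdat payload byte for an audio track; for video, sdtp.length is
  // added on top before building the trun.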
4696 /**
4697 * Generate a track box.
4698 * @param track {object} a track definition
4699 * @return {Uint8Array} the track box
4700 */
4701
4702
4703 trak = function trak(track) {
4704 track.duration = track.duration || 0xffffffff;
4705 return box(types.trak, tkhd(track), mdia(track));
4706 };
4707
4708 trex = function trex(track) {
4709 var result = new Uint8Array([0x00, // version 0
4710 0x00, 0x00, 0x00, // flags
4711 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
4712 0x00, 0x00, 0x00, 0x01, // default_sample_description_index
4713 0x00, 0x00, 0x00, 0x00, // default_sample_duration
4714 0x00, 0x00, 0x00, 0x00, // default_sample_size
4715 0x00, 0x01, 0x00, 0x01 // default_sample_flags
4716 ]); // the last two bytes of default_sample_flags is the sample
4717 // degradation priority, a hint about the importance of this sample
4718 // relative to others. Lower the degradation priority for all sample
4719 // types other than video.
4720
4721 if (track.type !== 'video') {
4722 result[result.length - 1] = 0x00;
4723 }
4724
4725 return box(types.trex, result);
4726 };
4727
4728 (function () {
4729 var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a
4730 // duration is present for the first sample, it will be present for
4731 // all subsequent samples.
4732 // see ISO/IEC 14496-12:2012, Section 8.8.8.1
4733
4734 trunHeader = function trunHeader(samples, offset) {
4735 var durationPresent = 0,
4736 sizePresent = 0,
4737 flagsPresent = 0,
4738 compositionTimeOffset = 0; // trun flag constants
4739
4740 if (samples.length) {
4741 if (samples[0].duration !== undefined) {
4742 durationPresent = 0x1;
4743 }
4744
4745 if (samples[0].size !== undefined) {
4746 sizePresent = 0x2;
4747 }
4748
4749 if (samples[0].flags !== undefined) {
4750 flagsPresent = 0x4;
4751 }
4752
4753 if (samples[0].compositionTimeOffset !== undefined) {
4754 compositionTimeOffset = 0x8;
4755 }
4756 }
4757
4758 return [0x00, // version 0
4759 0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags
4760 (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
4761 (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
4762 ];
4763 };
4764
4765 videoTrun = function videoTrun(track, offset) {
4766 var bytesOffset, bytes, header, samples, sample, i;
4767 samples = track.samples || [];
4768 offset += 8 + 12 + 16 * samples.length;
4769 header = trunHeader(samples, offset);
4770 bytes = new Uint8Array(header.length + samples.length * 16);
4771 bytes.set(header);
4772 bytesOffset = header.length;
4773
4774 for (i = 0; i < samples.length; i++) {
4775 sample = samples[i];
4776 bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
4777 bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
4778 bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
4779 bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
4780
4781 bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
4782 bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
4783 bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
4784 bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
4785
4786 bytes[bytesOffset++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
4787 bytes[bytesOffset++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
4788 bytes[bytesOffset++] = sample.flags.degradationPriority & 0xF0 << 8;
4789 bytes[bytesOffset++] = sample.flags.degradationPriority & 0x0F; // sample_flags
4790
4791 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
4792 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
4793 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
4794 bytes[bytesOffset++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
4795 }
4796
4797 return box(types.trun, bytes);
4798 };
4799
4800 audioTrun = function audioTrun(track, offset) {
4801 var bytes, bytesOffset, header, samples, sample, i;
4802 samples = track.samples || [];
4803 offset += 8 + 12 + 8 * samples.length;
4804 header = trunHeader(samples, offset);
4805 bytes = new Uint8Array(header.length + samples.length * 8);
4806 bytes.set(header);
4807 bytesOffset = header.length;
4808
4809 for (i = 0; i < samples.length; i++) {
4810 sample = samples[i];
4811 bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
4812 bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
4813 bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
4814 bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
4815
4816 bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
4817 bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
4818 bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
4819 bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
4820 }
4821
4822 return box(types.trun, bytes);
4823 };
4824
4825 trun$1 = function trun(track, offset) {
4826 if (track.type === 'audio') {
4827 return audioTrun(track, offset);
4828 }
4829
4830 return videoTrun(track, offset);
4831 };
4832 })();
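  // Worked example of the trun offset adjustment above: videoTrun grows the
  // data offset by 8 (box header) + 12 (version/flags, sample_count,
  // data_offset) + 16 bytes per sample, so 3 video samples add
  // 8 + 12 + 48 = 68 bytes; audio samples are 8 bytes each (duration + size).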
4833
4834 var mp4Generator = {
4835 ftyp: ftyp,
4836 mdat: mdat,
4837 moof: moof,
4838 moov: moov,
4839 initSegment: function initSegment(tracks) {
4840 var fileType = ftyp(),
4841 movie = moov(tracks),
4842 result;
4843 result = new Uint8Array(fileType.byteLength + movie.byteLength);
4844 result.set(fileType);
4845 result.set(movie, fileType.byteLength);
4846 return result;
4847 }
4848 };
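  /**
   * Usage sketch: an init segment is just ftyp followed by moov.
   * `videoTrack` is an illustrative track object (id, duration, width,
   * height, sps/pps, etc.) produced by the transmuxer below:
   *
   *   var init = mp4Generator.initSegment([videoTrack]);
   *   sourceBuffer.appendBuffer(init); // then append moof/mdat fragments
   */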
4849 /**
4850 * mux.js
4851 *
4852 * Copyright (c) Brightcove
4853 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4854 */
4855 // Convert an array of nal units into an array of frames with each frame being
4856 // composed of the nal units that make up that frame
4857 // Also keep track of cumulative data about the frame from the nal units such
4858 // as the frame duration, starting pts, etc.
4859
4860 var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
4861 var i,
4862 currentNal,
4863 currentFrame = [],
4864 frames = []; // TODO added for LHLS, make sure this is OK
4865
4866 frames.byteLength = 0;
4867 frames.nalCount = 0;
4868 frames.duration = 0;
4869 currentFrame.byteLength = 0;
4870
4871 for (i = 0; i < nalUnits.length; i++) {
4872 currentNal = nalUnits[i]; // Split on 'aud'-type nal units
4873
4874 if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
4875 // Since the very first nal unit is expected to be an AUD
4876 // only push to the frames array when currentFrame is not empty
4877 if (currentFrame.length) {
4878 currentFrame.duration = currentNal.dts - currentFrame.dts; // TODO added for LHLS, make sure this is OK
4879
4880 frames.byteLength += currentFrame.byteLength;
4881 frames.nalCount += currentFrame.length;
4882 frames.duration += currentFrame.duration;
4883 frames.push(currentFrame);
4884 }
4885
4886 currentFrame = [currentNal];
4887 currentFrame.byteLength = currentNal.data.byteLength;
4888 currentFrame.pts = currentNal.pts;
4889 currentFrame.dts = currentNal.dts;
4890 } else {
4891 // Specifically flag key frames for ease of use later
4892 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
4893 currentFrame.keyFrame = true;
4894 }
4895
4896 currentFrame.duration = currentNal.dts - currentFrame.dts;
4897 currentFrame.byteLength += currentNal.data.byteLength;
4898 currentFrame.push(currentNal);
4899 }
4900 } // For the last frame, use the duration of the previous frame if we
4901 // have nothing better to go on
4902
4903
4904 if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
4905 currentFrame.duration = frames[frames.length - 1].duration;
4906 } // Push the final frame
4907 // TODO added for LHLS, make sure this is OK
4908
4909
4910 frames.byteLength += currentFrame.byteLength;
4911 frames.nalCount += currentFrame.length;
4912 frames.duration += currentFrame.duration;
4913 frames.push(currentFrame);
4914 return frames;
4915 }; // Convert an array of frames into an array of Gop with each Gop being composed
4916 // of the frames that make up that Gop
4917 // Also keep track of cumulative data about the Gop from the frames such as the
4918 // Gop duration, starting pts, etc.
4919
4920
4921 var groupFramesIntoGops = function groupFramesIntoGops(frames) {
4922 var i,
4923 currentFrame,
4924 currentGop = [],
4925 gops = []; // We must pre-set some of the values on the Gop since we
4926 // keep running totals of these values
4927
4928 currentGop.byteLength = 0;
4929 currentGop.nalCount = 0;
4930 currentGop.duration = 0;
4931 currentGop.pts = frames[0].pts;
4932 currentGop.dts = frames[0].dts; // store some metadata about all the Gops
4933
4934 gops.byteLength = 0;
4935 gops.nalCount = 0;
4936 gops.duration = 0;
4937 gops.pts = frames[0].pts;
4938 gops.dts = frames[0].dts;
4939
4940 for (i = 0; i < frames.length; i++) {
4941 currentFrame = frames[i];
4942
4943 if (currentFrame.keyFrame) {
4944 // Since the very first frame is expected to be a keyframe
4945 // only push to the gops array when currentGop is not empty
4946 if (currentGop.length) {
4947 gops.push(currentGop);
4948 gops.byteLength += currentGop.byteLength;
4949 gops.nalCount += currentGop.nalCount;
4950 gops.duration += currentGop.duration;
4951 }
4952
4953 currentGop = [currentFrame];
4954 currentGop.nalCount = currentFrame.length;
4955 currentGop.byteLength = currentFrame.byteLength;
4956 currentGop.pts = currentFrame.pts;
4957 currentGop.dts = currentFrame.dts;
4958 currentGop.duration = currentFrame.duration;
4959 } else {
4960 currentGop.duration += currentFrame.duration;
4961 currentGop.nalCount += currentFrame.length;
4962 currentGop.byteLength += currentFrame.byteLength;
4963 currentGop.push(currentFrame);
4964 }
4965 }
4966
4967 if (gops.length && currentGop.duration <= 0) {
4968 currentGop.duration = gops[gops.length - 1].duration;
4969 }
4970
4971 gops.byteLength += currentGop.byteLength;
4972 gops.nalCount += currentGop.nalCount;
4973 gops.duration += currentGop.duration; // push the final Gop
4974
4975 gops.push(currentGop);
4976 return gops;
4977 };
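  // Worked example of the grouping above: frames [K, P, P, K, P] (K marks
  // keyFrame) yield two GOPs, [K, P, P] and [K, P]; gops.byteLength, nalCount
  // and duration accumulate the totals across both.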
4978 /*
4979 * Search for the first keyframe in the GOPs and throw away all frames
4980 * until that keyframe. Then extend the duration of the pulled keyframe
4981 * and pull the PTS and DTS of the keyframe so that it covers the time
4982 * range of the frames that were disposed.
4983 *
4984 * @param {Array} gops video GOPs
4985 * @returns {Array} modified video GOPs
4986 */
4987
4988
4989 var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
4990 var currentGop;
4991
4992 if (!gops[0][0].keyFrame && gops.length > 1) {
4993 // Remove the first GOP
4994 currentGop = gops.shift();
4995 gops.byteLength -= currentGop.byteLength;
4996 gops.nalCount -= currentGop.nalCount; // Extend the first frame of what is now the
4997 // first gop to cover the time period of the
4998 // frames we just removed
4999
5000 gops[0][0].dts = currentGop.dts;
5001 gops[0][0].pts = currentGop.pts;
5002 gops[0][0].duration += currentGop.duration;
5003 }
5004
5005 return gops;
5006 };
5007 /**
5008 * Default sample object
5009 * see ISO/IEC 14496-12:2012, section 8.6.4.3
5010 */
5011
5012
5013 var createDefaultSample = function createDefaultSample() {
5014 return {
5015 size: 0,
5016 flags: {
5017 isLeading: 0,
5018 dependsOn: 1,
5019 isDependedOn: 0,
5020 hasRedundancy: 0,
5021 degradationPriority: 0,
5022 isNonSyncSample: 1
5023 }
5024 };
5025 };
5026 /*
5027 * Collates information from a video frame into an object for eventual
5028 * entry into an MP4 sample table.
5029 *
5030 * @param {Object} frame the video frame
5031 * @param {Number} dataOffset the byte offset to position the sample
5032 * @return {Object} object containing sample table info for a frame
5033 */
5034
5035
5036 var sampleForFrame = function sampleForFrame(frame, dataOffset) {
5037 var sample = createDefaultSample();
5038 sample.dataOffset = dataOffset;
5039 sample.compositionTimeOffset = frame.pts - frame.dts;
5040 sample.duration = frame.duration;
5041 sample.size = 4 * frame.length; // Space for nal unit size
5042
5043 sample.size += frame.byteLength;
5044
5045 if (frame.keyFrame) {
5046 sample.flags.dependsOn = 2;
5047 sample.flags.isNonSyncSample = 0;
5048 }
5049
5050 return sample;
5051 }; // generate the track's sample table from an array of gops
5052
5053
5054 var generateSampleTable$1 = function generateSampleTable(gops, baseDataOffset) {
5055 var h,
5056 i,
5057 sample,
5058 currentGop,
5059 currentFrame,
5060 dataOffset = baseDataOffset || 0,
5061 samples = [];
5062
5063 for (h = 0; h < gops.length; h++) {
5064 currentGop = gops[h];
5065
5066 for (i = 0; i < currentGop.length; i++) {
5067 currentFrame = currentGop[i];
5068 sample = sampleForFrame(currentFrame, dataOffset);
5069 dataOffset += sample.size;
5070 samples.push(sample);
5071 }
5072 }
5073
5074 return samples;
5075 }; // generate the track's raw mdat data from an array of gops
5076
5077
5078 var concatenateNalData = function concatenateNalData(gops) {
5079 var h,
5080 i,
5081 j,
5082 currentGop,
5083 currentFrame,
5084 currentNal,
5085 dataOffset = 0,
5086 nalsByteLength = gops.byteLength,
5087 numberOfNals = gops.nalCount,
5088 totalByteLength = nalsByteLength + 4 * numberOfNals,
5089 data = new Uint8Array(totalByteLength),
5090 view = new DataView(data.buffer); // For each Gop..
5091
5092 for (h = 0; h < gops.length; h++) {
5093 currentGop = gops[h]; // For each Frame..
5094
5095 for (i = 0; i < currentGop.length; i++) {
5096 currentFrame = currentGop[i]; // For each NAL..
5097
5098 for (j = 0; j < currentFrame.length; j++) {
5099 currentNal = currentFrame[j];
5100 view.setUint32(dataOffset, currentNal.data.byteLength);
5101 dataOffset += 4;
5102 data.set(currentNal.data, dataOffset);
5103 dataOffset += currentNal.data.byteLength;
5104 }
5105 }
5106 }
5107
5108 return data;
5109 }; // generate the track's sample table from a frame
5110
5111
5112 var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
5113 var sample,
5114 dataOffset = baseDataOffset || 0,
5115 samples = [];
5116 sample = sampleForFrame(frame, dataOffset);
5117 samples.push(sample);
5118 return samples;
5119 }; // generate the track's raw mdat data from a frame
5120
5121
5122 var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
5123 var i,
5124 currentNal,
5125 dataOffset = 0,
5126 nalsByteLength = frame.byteLength,
5127 numberOfNals = frame.length,
5128 totalByteLength = nalsByteLength + 4 * numberOfNals,
5129 data = new Uint8Array(totalByteLength),
5130 view = new DataView(data.buffer); // For each NAL..
5131
5132 for (i = 0; i < frame.length; i++) {
5133 currentNal = frame[i];
5134 view.setUint32(dataOffset, currentNal.data.byteLength);
5135 dataOffset += 4;
5136 data.set(currentNal.data, dataOffset);
5137 dataOffset += currentNal.data.byteLength;
5138 }
5139
5140 return data;
5141 };
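/*
 * Byte-layout sketch (illustrative): both concatenateNalData and
 * concatenateNalDataForFrame emit length-prefixed NAL units, writing each
 * NAL's byte length as a 4-byte big-endian integer immediately before its
 * data (the AVCC/mp4 convention) rather than Annex B start codes. For two
 * NALs of 3 and 2 bytes the output is:
 *
 *   [00 00 00 03][n0 n1 n2][00 00 00 02][m0 m1]
 *
 * which is why totalByteLength is nalsByteLength + 4 * numberOfNals.
 */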
5142
5143 var frameUtils = {
5144 groupNalsIntoFrames: groupNalsIntoFrames,
5145 groupFramesIntoGops: groupFramesIntoGops,
5146 extendFirstKeyFrame: extendFirstKeyFrame,
5147 generateSampleTable: generateSampleTable$1,
5148 concatenateNalData: concatenateNalData,
5149 generateSampleTableForFrame: generateSampleTableForFrame,
5150 concatenateNalDataForFrame: concatenateNalDataForFrame
5151 };
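/*
 * Usage sketch (illustrative; a video segment pipeline is assumed): these
 * helpers are typically chained per segment, turning parsed NAL units into
 * an mp4 sample table plus the raw bytes destined for the mdat box:
 *
 *   var frames = frameUtils.groupNalsIntoFrames(nalUnits);
 *   var gops = frameUtils.groupFramesIntoGops(frames);
 *   gops = frameUtils.extendFirstKeyFrame(gops);
 *   var samples = frameUtils.generateSampleTable(gops, 0);
 *   var mdatBytes = frameUtils.concatenateNalData(gops);
 */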
5152 /**
5153 * mux.js
5154 *
5155 * Copyright (c) Brightcove
5156 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5157 */
5158
5159 var highPrefix = [33, 16, 5, 32, 164, 27];
5160 var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
5161
5162 var zeroFill = function zeroFill(count) {
5163 var a = [];
5164
5165 while (count--) {
5166 a.push(0);
5167 }
5168
5169 return a;
5170 };
5171
5172 var makeTable = function makeTable(metaTable) {
5173 return Object.keys(metaTable).reduce(function (obj, key) {
5174 obj[key] = new Uint8Array(metaTable[key].reduce(function (arr, part) {
5175 return arr.concat(part);
5176 }, []));
5177 return obj;
5178 }, {});
5179 };
5180
5181 var silence;
5182
5183 var silence_1 = function silence_1() {
5184 if (!silence) {
5185 // Frames-of-silence to use for filling in missing AAC frames
5186 var coneOfSilence = {
5187 96000: [highPrefix, [227, 64], zeroFill(154), [56]],
5188 88200: [highPrefix, [231], zeroFill(170), [56]],
5189 64000: [highPrefix, [248, 192], zeroFill(240), [56]],
5190 48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
5191 44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
5192 32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
5193 24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
5194 16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
5195 12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
5196 11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
5197 8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
5198 };
5199 silence = makeTable(coneOfSilence);
5200 }
5201
5202 return silence;
5203 };
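/*
 * Usage example (illustrative): the table is keyed by AAC sample rate, so
 * a lookup either yields a pregenerated silent frame or undefined for a
 * rate with no entry (prefixWithSilence below falls back to a frame from
 * the content in that case):
 *
 *   silence_1()[48000]; // Uint8Array holding one silent AAC frame
 *   silence_1()[22050]; // undefined, no entry for this rate
 */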
5204 /**
5205 * mux.js
5206 *
5207 * Copyright (c) Brightcove
5208 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5209 */
5210
5211
5212 var ONE_SECOND_IN_TS$4 = 90000,
5213 // 90kHz clock
5214 secondsToVideoTs,
5215 secondsToAudioTs,
5216 videoTsToSeconds,
5217 audioTsToSeconds,
5218 audioTsToVideoTs,
5219 videoTsToAudioTs,
5220 metadataTsToSeconds;
5221
5222 secondsToVideoTs = function secondsToVideoTs(seconds) {
5223 return seconds * ONE_SECOND_IN_TS$4;
5224 };
5225
5226 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
5227 return seconds * sampleRate;
5228 };
5229
5230 videoTsToSeconds = function videoTsToSeconds(timestamp) {
5231 return timestamp / ONE_SECOND_IN_TS$4;
5232 };
5233
5234 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
5235 return timestamp / sampleRate;
5236 };
5237
5238 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
5239 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
5240 };
5241
5242 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
5243 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
5244 };
5245 /**
5246 * Adjust ID3 tag or caption timing information by the timeline pts values
5247 * (if keepOriginalTimestamps is false) and convert to seconds
5248 */
5249
5250
5251 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
5252 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
5253 };
5254
5255 var clock = {
5256 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$4,
5257 secondsToVideoTs: secondsToVideoTs,
5258 secondsToAudioTs: secondsToAudioTs,
5259 videoTsToSeconds: videoTsToSeconds,
5260 audioTsToSeconds: audioTsToSeconds,
5261 audioTsToVideoTs: audioTsToVideoTs,
5262 videoTsToAudioTs: videoTsToAudioTs,
5263 metadataTsToSeconds: metadataTsToSeconds
5264 };
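/*
 * Worked examples (illustrative) of the 90kHz clock math above:
 *
 *   clock.secondsToVideoTs(1);            // 90000
 *   clock.videoTsToSeconds(45000);        // 0.5
 *   clock.secondsToAudioTs(1, 44100);     // 44100
 *   clock.audioTsToVideoTs(44100, 44100); // 90000, one second either way
 *   clock.videoTsToAudioTs(90000, 48000); // 48000
 */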
5265 /**
5266 * mux.js
5267 *
5268 * Copyright (c) Brightcove
5269 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5270 */
5271
5272 /**
5273 * Sum the `byteLength` properties of the data in each AAC frame
5274 */
5275
5276 var sumFrameByteLengths = function sumFrameByteLengths(array) {
5277 var i,
5278 currentObj,
5279 sum = 0; // sum the byteLength of the data in each frame
5280
5281 for (i = 0; i < array.length; i++) {
5282 currentObj = array[i];
5283 sum += currentObj.data.byteLength;
5284 }
5285
5286 return sum;
5287 }; // Possibly pad (prefix) the audio track with silence if appending this track
5288 // would lead to the introduction of a gap in the audio buffer
5289
5290
5291 var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
5292 var baseMediaDecodeTimeTs,
5293 frameDuration = 0,
5294 audioGapDuration = 0,
5295 audioFillFrameCount = 0,
5296 audioFillDuration = 0,
5297 silentFrame,
5298 i,
5299 firstFrame;
5300
5301 if (!frames.length) {
5302 return;
5303 }
5304
5305 baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfills
5306
5307 frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));
5308
5309 if (audioAppendStartTs && videoBaseMediaDecodeTime) {
5310 // insert the shortest possible amount (audio gap or audio to video gap)
5311 audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gap
5312
5313 audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
5314 audioFillDuration = audioFillFrameCount * frameDuration;
5315 } // don't attempt to fill gaps smaller than a single frame or larger
5316 // than a half second
5317
5318
5319 if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
5320 return;
5321 }
5322
5323 silentFrame = silence_1()[track.samplerate];
5324
5325 if (!silentFrame) {
5326 // we don't have a silent frame pregenerated for the sample rate, so use a frame
5327 // from the content instead
5328 silentFrame = frames[0].data;
5329 }
5330
5331 for (i = 0; i < audioFillFrameCount; i++) {
5332 firstFrame = frames[0];
5333 frames.splice(0, 0, {
5334 data: silentFrame,
5335 dts: firstFrame.dts - frameDuration,
5336 pts: firstFrame.pts - frameDuration
5337 });
5338 }
5339
5340 track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
5341 return audioFillDuration;
5342 }; // If the audio segment extends before the earliest allowed dts
5343 // value, remove AAC frames until the segment starts at or after the
5344 // earliest allowed DTS so that we don't end up with a negative
5345 // baseMediaDecodeTime for the audio track
5346
5347
5348 var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
5349 if (track.minSegmentDts >= earliestAllowedDts) {
5350 return adtsFrames;
5351 } // We will need to recalculate the earliest segment DTS
5352
5353
5354 track.minSegmentDts = Infinity;
5355 return adtsFrames.filter(function (currentFrame) {
5356 // If this is an allowed frame, keep it and record its DTS
5357 if (currentFrame.dts >= earliestAllowedDts) {
5358 track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
5359 track.minSegmentPts = track.minSegmentDts;
5360 return true;
5361 } // Otherwise, discard it
5362
5363
5364 return false;
5365 });
5366 }; // generate the track's sample table from an array of frames
5367
5368
5369 var generateSampleTable = function generateSampleTable(frames) {
5370 var i,
5371 currentFrame,
5372 samples = [];
5373
5374 for (i = 0; i < frames.length; i++) {
5375 currentFrame = frames[i];
5376 samples.push({
5377 size: currentFrame.data.byteLength,
5378 duration: 1024 // For AAC audio, every mp4 sample covers 1024 audio samples (one frame)
5379
5380 });
5381 }
5382
5383 return samples;
5384 }; // generate the track's raw mdat data from an array of frames
5385
5386
5387 var concatenateFrameData = function concatenateFrameData(frames) {
5388 var i,
5389 currentFrame,
5390 dataOffset = 0,
5391 data = new Uint8Array(sumFrameByteLengths(frames));
5392
5393 for (i = 0; i < frames.length; i++) {
5394 currentFrame = frames[i];
5395 data.set(currentFrame.data, dataOffset);
5396 dataOffset += currentFrame.data.byteLength;
5397 }
5398
5399 return data;
5400 };
5401
5402 var audioFrameUtils = {
5403 prefixWithSilence: prefixWithSilence,
5404 trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
5405 generateSampleTable: generateSampleTable,
5406 concatenateFrameData: concatenateFrameData
5407 };
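/*
 * Worked example (illustrative) of the gap-fill arithmetic in
 * prefixWithSilence above: at 44100Hz each 1024-sample AAC frame lasts
 *
 *   Math.ceil(90000 / (44100 / 1024)); // 2090 ticks, roughly 23.2ms
 *
 * so an audio gap of 10000 ticks is padded with Math.floor(10000 / 2090),
 * i.e. 4 full silent frames (8360 ticks). Gaps shorter than one frame or
 * longer than half a second are left alone.
 */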
5408 /**
5409 * mux.js
5410 *
5411 * Copyright (c) Brightcove
5412 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5413 */
5414
5415 var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS;
5416 /**
5417 * Store information about the start and end of the track and the
5418 * duration for each frame/sample we process in order to calculate
5419 * the baseMediaDecodeTime
5420 */
5421
5422 var collectDtsInfo = function collectDtsInfo(track, data) {
5423 if (typeof data.pts === 'number') {
5424 if (track.timelineStartInfo.pts === undefined) {
5425 track.timelineStartInfo.pts = data.pts;
5426 }
5427
5428 if (track.minSegmentPts === undefined) {
5429 track.minSegmentPts = data.pts;
5430 } else {
5431 track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
5432 }
5433
5434 if (track.maxSegmentPts === undefined) {
5435 track.maxSegmentPts = data.pts;
5436 } else {
5437 track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
5438 }
5439 }
5440
5441 if (typeof data.dts === 'number') {
5442 if (track.timelineStartInfo.dts === undefined) {
5443 track.timelineStartInfo.dts = data.dts;
5444 }
5445
5446 if (track.minSegmentDts === undefined) {
5447 track.minSegmentDts = data.dts;
5448 } else {
5449 track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
5450 }
5451
5452 if (track.maxSegmentDts === undefined) {
5453 track.maxSegmentDts = data.dts;
5454 } else {
5455 track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
5456 }
5457 }
5458 };
5459 /**
5460 * Clear values used to calculate the baseMediaDecodeTime between
5461 * tracks
5462 */
5463
5464
5465 var clearDtsInfo = function clearDtsInfo(track) {
5466 delete track.minSegmentDts;
5467 delete track.maxSegmentDts;
5468 delete track.minSegmentPts;
5469 delete track.maxSegmentPts;
5470 };
5471 /**
5472 * Calculate the track's baseMediaDecodeTime based on the earliest
5473 * DTS the transmuxer has ever seen and the minimum DTS for the
5474 * current track
5475 * @param {object} track track metadata configuration
5476 * @param {boolean} keepOriginalTimestamps If true, keep the timestamps
5477 * in the source; false to adjust the first segment to start at 0.
5478 */
5479
5480
5481 var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
5482 var baseMediaDecodeTime,
5483 scale,
5484 minSegmentDts = track.minSegmentDts; // Optionally adjust the time so the first segment starts at zero.
5485
5486 if (!keepOriginalTimestamps) {
5487 minSegmentDts -= track.timelineStartInfo.dts;
5488 } // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
5489 // we want the start of the first segment to be placed
5490
5491
5492 baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime; // Add to that the distance this segment is from the very first
5493
5494 baseMediaDecodeTime += minSegmentDts; // baseMediaDecodeTime must not become negative
5495
5496 baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
5497
5498 if (track.type === 'audio') {
5499 // Audio has a different clock equal to the sampling_rate so we need to
5500 // scale the PTS values into the clock rate of the track
5501 scale = track.samplerate / ONE_SECOND_IN_TS$3;
5502 baseMediaDecodeTime *= scale;
5503 baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
5504 }
5505
5506 return baseMediaDecodeTime;
5507 };
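/*
 * Worked example (illustrative numbers): with keepOriginalTimestamps false,
 * a timelineStartInfo.baseMediaDecodeTime of 0, a timeline starting at dts
 * 126000 and a segment whose minSegmentDts is 306000:
 *
 *   306000 - 126000;                    // 180000 ticks at 90kHz (2s)
 *   Math.floor(180000 * 44100 / 90000); // 88200, the same 2s rescaled
 *                                       // into a 44100Hz audio track clock
 */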
5508
5509 var trackDecodeInfo = {
5510 clearDtsInfo: clearDtsInfo,
5511 calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
5512 collectDtsInfo: collectDtsInfo
5513 };
5514 /**
5515 * mux.js
5516 *
5517 * Copyright (c) Brightcove
5518 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5519 *
5520 * Reads in-band caption information from a video elementary
5521 * stream. Captions must follow the CEA-708 standard for injection
5522 * into an MPEG-2 transport stream.
5523 * @see https://en.wikipedia.org/wiki/CEA-708
5524 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
5525 */
5526 // Supplemental enhancement information (SEI) NAL units have a payload
5527 // type field to indicate how they are to be interpreted. CEA-708 caption
5528 // content is always transmitted with payload type 0x04.
5529
5530 var USER_DATA_REGISTERED_ITU_T_T35 = 4,
5531 RBSP_TRAILING_BITS = 128;
5532 /**
5533 * Parse a supplemental enhancement information (SEI) NAL unit.
5534 * Stops parsing once a message of type ITU T T35 has been found.
5535 *
5536 * @param {Uint8Array} bytes the bytes of a SEI NAL unit
5537 * @return {object} the parsed SEI payload
5538 * @see Rec. ITU-T H.264, 7.3.2.3.1
5539 */
5540
5541 var parseSei = function parseSei(bytes) {
5542 var i = 0,
5543 result = {
5544 payloadType: -1,
5545 payloadSize: 0
5546 },
5547 payloadType = 0,
5548 payloadSize = 0; // go through the sei_rbsp parsing each individual sei_message
5549
5550 while (i < bytes.byteLength) {
5551 // stop once we have hit the end of the sei_rbsp
5552 if (bytes[i] === RBSP_TRAILING_BITS) {
5553 break;
5554 } // Parse payload type
5555
5556
5557 while (bytes[i] === 0xFF) {
5558 payloadType += 255;
5559 i++;
5560 }
5561
5562 payloadType += bytes[i++]; // Parse payload size
5563
5564 while (bytes[i] === 0xFF) {
5565 payloadSize += 255;
5566 i++;
5567 }
5568
5569 payloadSize += bytes[i++]; // this sei_message is a 608/708 caption so save it and break
5570 // there can only ever be one caption message in a frame's sei
5571
5572 if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
5573 var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);
5574
5575 if (userIdentifier === 'GA94') {
5576 result.payloadType = payloadType;
5577 result.payloadSize = payloadSize;
5578 result.payload = bytes.subarray(i, i + payloadSize);
5579 break;
5580 } else {
5581 result.payload = void 0;
5582 }
5583 } // skip the payload and parse the next message
5584
5585
5586 i += payloadSize;
5587 payloadType = 0;
5588 payloadSize = 0;
5589 }
5590
5591 return result;
5592 }; // see ANSI/SCTE 128-1 (2013), section 8.1
5593
5594
5595 var parseUserData = function parseUserData(sei) {
5596 // itu_t_t35_country_code must be 181 (United States) for
5597 // captions
5598 if (sei.payload[0] !== 181) {
5599 return null;
5600 } // itu_t_t35_provider_code should be 49 (ATSC) for captions
5601
5602
5603 if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
5604 return null;
5605 } // the user_identifier should be "GA94" to indicate ATSC1 data
5606
5607
5608 if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
5609 return null;
5610 } // finally, user_data_type_code should be 0x03 for caption data
5611
5612
5613 if (sei.payload[7] !== 0x03) {
5614 return null;
5615 } // return the user_data_type_structure and strip the trailing
5616 // marker bits
5617
5618
5619 return sei.payload.subarray(8, sei.payload.length - 1);
5620 }; // see CEA-708-D, section 4.4
5621
5622
5623 var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
5624 var results = [],
5625 i,
5626 count,
5627 offset,
5628 data; // if this is just filler, return immediately
5629
5630 if (!(userData[0] & 0x40)) {
5631 return results;
5632 } // parse out the cc_data_1 and cc_data_2 fields
5633
5634
5635 count = userData[0] & 0x1f;
5636
5637 for (i = 0; i < count; i++) {
5638 offset = i * 3;
5639 data = {
5640 type: userData[offset + 2] & 0x03,
5641 pts: pts
5642 }; // capture cc data when cc_valid is 1
5643
5644 if (userData[offset + 2] & 0x04) {
5645 data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
5646 results.push(data);
5647 }
5648 }
5649
5650 return results;
5651 };
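/*
 * Byte-layout sketch (illustrative) of the user_data_type_structure parsed
 * above: userData[0] packs process_cc_data_flag (0x40) and cc_count (0x1f),
 * userData[1] is reserved em_data, and each cc packet that follows is 3
 * bytes: a marker/cc_valid/cc_type byte plus two cc_data bytes. So a first
 * packet of 0xFC 0x94 0x2C (cc_valid set, cc_type 0) is captured as:
 *
 *   { type: 0, pts: pts, ccData: 0x942c }
 *   // 0x142c once the parity bits are stripped: a CEA-608 control pair
 */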
5652
5653 var discardEmulationPreventionBytes$1 = function discardEmulationPreventionBytes(data) {
5654 var length = data.byteLength,
5655 emulationPreventionBytesPositions = [],
5656 i = 1,
5657 newLength,
5658 newData; // Find all `Emulation Prevention Bytes`
5659
5660 while (i < length - 2) {
5661 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
5662 emulationPreventionBytesPositions.push(i + 2);
5663 i += 2;
5664 } else {
5665 i++;
5666 }
5667 } // If no Emulation Prevention Bytes were found just return the original
5668 // array
5669
5670
5671 if (emulationPreventionBytesPositions.length === 0) {
5672 return data;
5673 } // Create a new array to hold the NAL unit data
5674
5675
5676 newLength = length - emulationPreventionBytesPositions.length;
5677 newData = new Uint8Array(newLength);
5678 var sourceIndex = 0;
5679
5680 for (i = 0; i < newLength; sourceIndex++, i++) {
5681 if (sourceIndex === emulationPreventionBytesPositions[0]) {
5682 // Skip this byte
5683 sourceIndex++; // Remove this position index
5684
5685 emulationPreventionBytesPositions.shift();
5686 }
5687
5688 newData[i] = data[sourceIndex];
5689 }
5690
5691 return newData;
5692 }; // exports
5693
5694
5695 var captionPacketParser = {
5696 parseSei: parseSei,
5697 parseUserData: parseUserData,
5698 parseCaptionPackets: parseCaptionPackets,
5699 discardEmulationPreventionBytes: discardEmulationPreventionBytes$1,
5700 USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
5701 }; // Link To Transport
5702 // -----------------
5703
5704 var CaptionStream$1 = function CaptionStream(options) {
5705 options = options || {};
5706 CaptionStream.prototype.init.call(this); // parse708captions flag, default to true
5707
5708 this.parse708captions_ = typeof options.parse708captions === 'boolean' ? options.parse708captions : true;
5709 this.captionPackets_ = [];
5710 this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
5711 new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
5712 new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
5713 new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
5714 ];
5715
5716 if (this.parse708captions_) {
5717 this.cc708Stream_ = new Cea708Stream({
5718 captionServices: options.captionServices
5719 }); // eslint-disable-line no-use-before-define
5720 }
5721
5722 this.reset(); // forward data and done events from CCs to this CaptionStream
5723
5724 this.ccStreams_.forEach(function (cc) {
5725 cc.on('data', this.trigger.bind(this, 'data'));
5726 cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
5727 cc.on('done', this.trigger.bind(this, 'done'));
5728 }, this);
5729
5730 if (this.parse708captions_) {
5731 this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));
5732 this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));
5733 this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));
5734 }
5735 };
5736
5737 CaptionStream$1.prototype = new stream();
5738
5739 CaptionStream$1.prototype.push = function (event) {
5740 var sei, userData, newCaptionPackets; // only examine SEI NALs
5741
5742 if (event.nalUnitType !== 'sei_rbsp') {
5743 return;
5744 } // parse the sei
5745
5746
5747 sei = captionPacketParser.parseSei(event.escapedRBSP); // no payload data, skip
5748
5749 if (!sei.payload) {
5750 return;
5751 } // ignore everything but user_data_registered_itu_t_t35
5752
5753
5754 if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
5755 return;
5756 } // parse out the user data payload
5757
5758
5759 userData = captionPacketParser.parseUserData(sei); // ignore unrecognized userData
5760
5761 if (!userData) {
5762 return;
5763 } // Sometimes, the same segment # will be downloaded twice. To stop the
5764 // caption data from being processed twice, we track the latest dts we've
5765 // received and ignore everything with a dts before that. However, since
5766 // data for a specific dts can be split across packets on either side of
5767 // a segment boundary, we need to make sure we *don't* ignore the packets
5768 // from the *next* segment that have dts === this.latestDts_. By constantly
5769 // tracking the number of packets received with dts === this.latestDts_, we
5770 // know how many should be ignored once we start receiving duplicates.
5771
5772
5773 if (event.dts < this.latestDts_) {
5774 // We've started getting older data, so set the flag.
5775 this.ignoreNextEqualDts_ = true;
5776 return;
5777 } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
5778 this.numSameDts_--;
5779
5780 if (!this.numSameDts_) {
5781 // We've received the last duplicate packet, time to start processing again
5782 this.ignoreNextEqualDts_ = false;
5783 }
5784
5785 return;
5786 } // parse out CC data packets and save them for later
5787
5788
5789 newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
5790 this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
5791
5792 if (this.latestDts_ !== event.dts) {
5793 this.numSameDts_ = 0;
5794 }
5795
5796 this.numSameDts_++;
5797 this.latestDts_ = event.dts;
5798 };
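/*
 * Illustrative trace of the duplicate-segment guard above: say the final
 * packets of a segment arrive at dts 2000, so numSameDts_ ends up 1 and
 * latestDts_ is 2000. If that segment is downloaded again, the replayed
 * packets with dts < 2000 set ignoreNextEqualDts_ and are dropped, and
 * exactly numSameDts_ packets with dts === 2000 are then dropped before
 * processing resumes with genuinely new data.
 */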
5799
5800 CaptionStream$1.prototype.flushCCStreams = function (flushType) {
5801 this.ccStreams_.forEach(function (cc) {
5802 return flushType === 'flush' ? cc.flush() : cc.partialFlush();
5803 }, this);
5804 };
5805
5806 CaptionStream$1.prototype.flushStream = function (flushType) {
5807 // make sure we actually parsed captions before proceeding
5808 if (!this.captionPackets_.length) {
5809 this.flushCCStreams(flushType);
5810 return;
5811 } // In Chrome, the Array#sort function is not stable, so add a
5812 // presortIndex that we can use to ensure a stable sort
5813
5814
5815 this.captionPackets_.forEach(function (elem, idx) {
5816 elem.presortIndex = idx;
5817 }); // sort caption byte-pairs based on their PTS values
5818
5819 this.captionPackets_.sort(function (a, b) {
5820 if (a.pts === b.pts) {
5821 return a.presortIndex - b.presortIndex;
5822 }
5823
5824 return a.pts - b.pts;
5825 });
5826 this.captionPackets_.forEach(function (packet) {
5827 if (packet.type < 2) {
5828 // Dispatch packet to the right Cea608Stream
5829 this.dispatchCea608Packet(packet);
5830 } else {
5831 // Dispatch packet to the Cea708Stream
5832 this.dispatchCea708Packet(packet);
5833 }
5834 }, this);
5835 this.captionPackets_.length = 0;
5836 this.flushCCStreams(flushType);
5837 };
5838
5839 CaptionStream$1.prototype.flush = function () {
5840 return this.flushStream('flush');
5841 }; // Only called if handling partial data
5842
5843
5844 CaptionStream$1.prototype.partialFlush = function () {
5845 return this.flushStream('partialFlush');
5846 };
5847
5848 CaptionStream$1.prototype.reset = function () {
5849 this.latestDts_ = null;
5850 this.ignoreNextEqualDts_ = false;
5851 this.numSameDts_ = 0;
5852 this.activeCea608Channel_ = [null, null];
5853 this.ccStreams_.forEach(function (ccStream) {
5854 ccStream.reset();
5855 });
5856 }; // From the CEA-608 spec:
5857
5858 /*
5859 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
5860 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
5861 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
5862 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
5863 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
5864 * to switch to captioning or Text.
5865 */
5866 // With that in mind, we ignore any data between an XDS control code and a
5867 // subsequent closed-captioning control code.
5868
5869
5870 CaptionStream$1.prototype.dispatchCea608Packet = function (packet) {
5871 // NOTE: packet.type is the CEA608 field
5872 if (this.setsTextOrXDSActive(packet)) {
5873 this.activeCea608Channel_[packet.type] = null;
5874 } else if (this.setsChannel1Active(packet)) {
5875 this.activeCea608Channel_[packet.type] = 0;
5876 } else if (this.setsChannel2Active(packet)) {
5877 this.activeCea608Channel_[packet.type] = 1;
5878 }
5879
5880 if (this.activeCea608Channel_[packet.type] === null) {
5881 // If we haven't received anything to set the active channel, or the
5882 // packets are Text/XDS data, discard the data; we don't want jumbled
5883 // captions
5884 return;
5885 }
5886
5887 this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
5888 };
5889
5890 CaptionStream$1.prototype.setsChannel1Active = function (packet) {
5891 return (packet.ccData & 0x7800) === 0x1000;
5892 };
5893
5894 CaptionStream$1.prototype.setsChannel2Active = function (packet) {
5895 return (packet.ccData & 0x7800) === 0x1800;
5896 };
5897
5898 CaptionStream$1.prototype.setsTextOrXDSActive = function (packet) {
5899 return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
5900 };
5901
5902 CaptionStream$1.prototype.dispatchCea708Packet = function (packet) {
5903 if (this.parse708captions_) {
5904 this.cc708Stream_.push(packet);
5905 }
5906 }; // ----------------------
5907 // Session to Application
5908 // ----------------------
5909 // This hash maps special and extended character codes to their
5910 // proper Unicode equivalent. The first one-byte key is just a
5911 // non-standard character code. The two-byte keys that follow are
5912 // the extended CEA708 character codes, along with the preceding
5913 // 0x10 extended character byte to distinguish these codes from
5914 // non-extended character codes. Every CEA708 character code that
5915 // is not in this object maps directly to a standard unicode
5916 // character code.
5917 // The transparent space and non-breaking transparent space are
5918 // technically not fully supported since there is no code to
5919 // make them transparent, so they have normal non-transparent
5920 // stand-ins.
5921 // The special closed caption (CC) character isn't a standard
5922 // unicode character, so a fairly similar unicode character was
5923 // chosen in its place.
5924
5925
5926 var CHARACTER_TRANSLATION_708 = {
5927 0x7f: 0x266a,
5928 // ♪
5929 0x1020: 0x20,
5930 // Transparent Space
5931 0x1021: 0xa0,
5932 // Non-breaking Transparent Space
5933 0x1025: 0x2026,
5934 // …
5935 0x102a: 0x0160,
5936 // Š
5937 0x102c: 0x0152,
5938 // Œ
5939 0x1030: 0x2588,
5940 // █
5941 0x1031: 0x2018,
5942 // ‘
5943 0x1032: 0x2019,
5944 // ’
5945 0x1033: 0x201c,
5946 // “
5947 0x1034: 0x201d,
5948 // ”
5949 0x1035: 0x2022,
5950 // •
5951 0x1039: 0x2122,
5952 // ™
5953 0x103a: 0x0161,
5954 // š
5955 0x103c: 0x0153,
5956 // œ
5957 0x103d: 0x2120,
5958 // ℠
5959 0x103f: 0x0178,
5960 // Ÿ
5961 0x1076: 0x215b,
5962 // ⅛
5963 0x1077: 0x215c,
5964 // ⅜
5965 0x1078: 0x215d,
5966 // ⅝
5967 0x1079: 0x215e,
5968 // ⅞
5969 0x107a: 0x23d0,
5970 // ⏐
5971 0x107b: 0x23a4,
5972 // ⎤
5973 0x107c: 0x23a3,
5974 // ⎣
5975 0x107d: 0x23af,
5976 // ⎯
5977 0x107e: 0x23a6,
5978 // ⎦
5979 0x107f: 0x23a1,
5980 // ⎡
5981 0x10a0: 0x3138 // ㄸ (CC char)
5982
5983 };
5984
5985 var get708CharFromCode = function get708CharFromCode(code) {
5986 var newCode = CHARACTER_TRANSLATION_708[code] || code;
5987
5988 if (code & 0x1000 && code === newCode) {
5989 // Invalid extended code
5990 return '';
5991 }
5992
5993 return String.fromCharCode(newCode);
5994 };
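/*
 * Examples (illustrative) of the translation above:
 *
 *   get708CharFromCode(0x41);   // 'A', standard code passed through
 *   get708CharFromCode(0x1025); // '…', extended code translated
 *   get708CharFromCode(0x1040); // '', extended code with no mapping
 */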
5995
5996 var within708TextBlock = function within708TextBlock(b) {
5997 return 0x20 <= b && b <= 0x7f || 0xa0 <= b && b <= 0xff;
5998 };
5999
6000 var Cea708Window = function Cea708Window(windowNum) {
6001 this.windowNum = windowNum;
6002 this.reset();
6003 };
6004
6005 Cea708Window.prototype.reset = function () {
6006 this.clearText();
6007 this.pendingNewLine = false;
6008 this.winAttr = {};
6009 this.penAttr = {};
6010 this.penLoc = {};
6011 this.penColor = {}; // These default values are arbitrary,
6012 // defineWindow will usually override them
6013
6014 this.visible = 0;
6015 this.rowLock = 0;
6016 this.columnLock = 0;
6017 this.priority = 0;
6018 this.relativePositioning = 0;
6019 this.anchorVertical = 0;
6020 this.anchorHorizontal = 0;
6021 this.anchorPoint = 0;
6022 this.rowCount = 1;
6023 this.virtualRowCount = this.rowCount + 1;
6024 this.columnCount = 41;
6025 this.windowStyle = 0;
6026 this.penStyle = 0;
6027 };
6028
6029 Cea708Window.prototype.getText = function () {
6030 return this.rows.join('\n');
6031 };
6032
6033 Cea708Window.prototype.clearText = function () {
6034 this.rows = [''];
6035 this.rowIdx = 0;
6036 };
6037
6038 Cea708Window.prototype.newLine = function (pts) {
6039 if (this.rows.length >= this.virtualRowCount && typeof this.beforeRowOverflow === 'function') {
6040 this.beforeRowOverflow(pts);
6041 }
6042
6043 if (this.rows.length > 0) {
6044 this.rows.push('');
6045 this.rowIdx++;
6046 } // Show all virtual rows since there's no visible scrolling
6047
6048
6049 while (this.rows.length > this.virtualRowCount) {
6050 this.rows.shift();
6051 this.rowIdx--;
6052 }
6053 };
6054
6055 Cea708Window.prototype.isEmpty = function () {
6056 if (this.rows.length === 0) {
6057 return true;
6058 } else if (this.rows.length === 1) {
6059 return this.rows[0] === '';
6060 }
6061
6062 return false;
6063 };
6064
6065 Cea708Window.prototype.addText = function (text) {
6066 this.rows[this.rowIdx] += text;
6067 };
6068
6069 Cea708Window.prototype.backspace = function () {
6070 if (!this.isEmpty()) {
6071 var row = this.rows[this.rowIdx];
6072 this.rows[this.rowIdx] = row.substr(0, row.length - 1);
6073 }
6074 };
6075
6076 var Cea708Service = function Cea708Service(serviceNum, encoding, stream) {
6077 this.serviceNum = serviceNum;
6078 this.text = '';
6079 this.currentWindow = new Cea708Window(-1);
6080 this.windows = [];
6081 this.stream = stream; // Try to set up a TextDecoder if an `encoding` value was provided
6082
6083 if (typeof encoding === 'string') {
6084 this.createTextDecoder(encoding);
6085 }
6086 };
6087 /**
6088 * Initialize service windows
6089 * Must be run before service use
6090 *
6091 * @param {Integer} pts PTS value
6092 * @param {Function} beforeRowOverflow Function to execute before row overflow of a window
6093 */
6094
6095
6096 Cea708Service.prototype.init = function (pts, beforeRowOverflow) {
6097 this.startPts = pts;
6098
6099 for (var win = 0; win < 8; win++) {
6100 this.windows[win] = new Cea708Window(win);
6101
6102 if (typeof beforeRowOverflow === 'function') {
6103 this.windows[win].beforeRowOverflow = beforeRowOverflow;
6104 }
6105 }
6106 };
6107 /**
6108 * Set current window of service to be affected by commands
6109 *
6110 * @param {Integer} windowNum Window number
6111 */
6112
6113
6114 Cea708Service.prototype.setCurrentWindow = function (windowNum) {
6115 this.currentWindow = this.windows[windowNum];
6116 };
6117 /**
6118 * Try to create a TextDecoder if it is natively supported
6119 */
6120
6121
6122 Cea708Service.prototype.createTextDecoder = function (encoding) {
6123 if (typeof TextDecoder === 'undefined') {
6124 this.stream.trigger('log', {
6125 level: 'warn',
6126 message: 'The `encoding` option is unsupported without TextDecoder support'
6127 });
6128 } else {
6129 try {
6130 this.textDecoder_ = new TextDecoder(encoding);
6131 } catch (error) {
6132 this.stream.trigger('log', {
6133 level: 'warn',
6134 message: 'TextDecoder could not be created with ' + encoding + ' encoding. ' + error
6135 });
6136 }
6137 }
6138 };
6139
6140 var Cea708Stream = function Cea708Stream(options) {
6141 options = options || {};
6142 Cea708Stream.prototype.init.call(this);
6143 var self = this;
6144 var captionServices = options.captionServices || {};
6145 var captionServiceEncodings = {};
6146 var serviceProps; // Get service encodings from captionServices option block
6147
6148 Object.keys(captionServices).forEach(function (serviceName) {
6149 serviceProps = captionServices[serviceName];
6150
6151 if (/^SERVICE/.test(serviceName)) {
6152 captionServiceEncodings[serviceName] = serviceProps.encoding;
6153 }
6154 });
6155 this.serviceEncodings = captionServiceEncodings;
6156 this.current708Packet = null;
6157 this.services = {};
6158
6159 this.push = function (packet) {
6160 if (packet.type === 3) {
6161 // 708 packet start
6162 self.new708Packet();
6163 self.add708Bytes(packet);
6164 } else {
6165 if (self.current708Packet === null) {
6166 // This should only happen at the start of a file if there's no packet start.
6167 self.new708Packet();
6168 }
6169
6170 self.add708Bytes(packet);
6171 }
6172 };
6173 };
6174
6175 Cea708Stream.prototype = new stream();
6176 /**
6177 * Push current 708 packet, create new 708 packet.
6178 */
6179
6180 Cea708Stream.prototype.new708Packet = function () {
6181 if (this.current708Packet !== null) {
6182 this.push708Packet();
6183 }
6184
6185 this.current708Packet = {
6186 data: [],
6187 ptsVals: []
6188 };
6189 };
6190 /**
6191 * Add pts and both bytes from packet into current 708 packet.
6192 */
6193
6194
6195 Cea708Stream.prototype.add708Bytes = function (packet) {
6196 var data = packet.ccData;
6197 var byte0 = data >>> 8;
6198 var byte1 = data & 0xff; // I would just keep a list of packets instead of bytes, but it isn't clear in the spec
6199 // that service blocks will always line up with byte pairs.
6200
6201 this.current708Packet.ptsVals.push(packet.pts);
6202 this.current708Packet.data.push(byte0);
6203 this.current708Packet.data.push(byte1);
6204 };
6205 /**
6206 * Parse completed 708 packet into service blocks and push each service block.
6207 */
6208
6209
6210 Cea708Stream.prototype.push708Packet = function () {
6211 var packet708 = this.current708Packet;
6212 var packetData = packet708.data;
6213 var serviceNum = null;
6214 var blockSize = null;
6215 var i = 0;
6216 var b = packetData[i++];
6217 packet708.seq = b >> 6;
6218 packet708.sizeCode = b & 0x3f; // 0b00111111;
6219
6220 for (; i < packetData.length; i++) {
6221 b = packetData[i++];
6222 serviceNum = b >> 5;
6223 blockSize = b & 0x1f; // 0b00011111
6224
6225 if (serviceNum === 7 && blockSize > 0) {
6226 // Extended service num
6227 b = packetData[i++];
6228 serviceNum = b;
6229 }
6230
6231 this.pushServiceBlock(serviceNum, i, blockSize);
6232
6233 if (blockSize > 0) {
6234 i += blockSize - 1;
6235 }
6236 }
6237 };
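/*
 * Bit-layout sketch (illustrative) of the DTVCC packet parsed above: the
 * first byte packs sequence_number (2 bits) and packet_size_code (6 bits);
 * each service block header packs service_number (3 bits) and block_size
 * (5 bits), with service_number 7 signalling an extended service number in
 * the next byte. For example:
 *
 *   0xC5 >> 6;   // 3, sequence number
 *   0xC5 & 0x3f; // 5, size code
 *   0x42 >> 5;   // 2, service number
 *   0x42 & 0x1f; // 2, block size
 */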
6238 /**
6239 * Parse service block, execute commands, read text.
6240 *
6241 * Note: While many of these commands serve important purposes,
6242 * many others just parse out the parameters or attributes, but
6243 * nothing is done with them because this is not a full and complete
6244 * implementation of the entire 708 spec.
6245 *
6246 * @param {Integer} serviceNum Service number
6247 * @param {Integer} start Start index of the 708 packet data
6248 * @param {Integer} size Block size
6249 */
6250
6251
6252 Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {
6253 var b;
6254 var i = start;
6255 var packetData = this.current708Packet.data;
6256 var service = this.services[serviceNum];
6257
6258 if (!service) {
6259 service = this.initService(serviceNum, i);
6260 }
6261
6262 for (; i < start + size && i < packetData.length; i++) {
6263 b = packetData[i];
6264
6265 if (within708TextBlock(b)) {
6266 i = this.handleText(i, service);
6267 } else if (b === 0x18) {
6268 i = this.multiByteCharacter(i, service);
6269 } else if (b === 0x10) {
6270 i = this.extendedCommands(i, service);
6271 } else if (0x80 <= b && b <= 0x87) {
6272 i = this.setCurrentWindow(i, service);
6273 } else if (0x98 <= b && b <= 0x9f) {
6274 i = this.defineWindow(i, service);
6275 } else if (b === 0x88) {
6276 i = this.clearWindows(i, service);
6277 } else if (b === 0x8c) {
6278 i = this.deleteWindows(i, service);
6279 } else if (b === 0x89) {
6280 i = this.displayWindows(i, service);
6281 } else if (b === 0x8a) {
6282 i = this.hideWindows(i, service);
6283 } else if (b === 0x8b) {
6284 i = this.toggleWindows(i, service);
6285 } else if (b === 0x97) {
6286 i = this.setWindowAttributes(i, service);
6287 } else if (b === 0x90) {
6288 i = this.setPenAttributes(i, service);
6289 } else if (b === 0x91) {
6290 i = this.setPenColor(i, service);
6291 } else if (b === 0x92) {
6292 i = this.setPenLocation(i, service);
6293 } else if (b === 0x8f) {
6294 service = this.reset(i, service);
6295 } else if (b === 0x08) {
6296 // BS: Backspace
6297 service.currentWindow.backspace();
6298 } else if (b === 0x0c) {
6299 // FF: Form feed
6300 service.currentWindow.clearText();
6301 } else if (b === 0x0d) {
6302 // CR: Carriage return
6303 service.currentWindow.pendingNewLine = true;
6304 } else if (b === 0x0e) {
6305 // HCR: Horizontal carriage return
6306 service.currentWindow.clearText();
6307 } else if (b === 0x8d) {
6308 // DLY: Delay, nothing to do
6309 i++;
6310 } // any other byte is an unknown command; ignore it
6311 }
6312 };
6313 /**
6314 * Execute an extended command
6315 *
6316 * @param {Integer} i Current index in the 708 packet
6317 * @param {Service} service The service object to be affected
6318 * @return {Integer} New index after parsing
6319 */
6320
6321
6322 Cea708Stream.prototype.extendedCommands = function (i, service) {
6323 var packetData = this.current708Packet.data;
6324 var b = packetData[++i];
6325
6326 if (within708TextBlock(b)) {
6327 i = this.handleText(i, service, {
6328 isExtended: true
6329 });
6330 }
6331
6332 return i;
6333 };
6334 /**
6335 * Get PTS value of a given byte index
6336 *
6337 * @param {Integer} byteIndex Index of the byte
6338 * @return {Integer} PTS
6339 */
6340
6341
6342 Cea708Stream.prototype.getPts = function (byteIndex) {
6343 // There's 1 pts value per 2 bytes
6344 return this.current708Packet.ptsVals[Math.floor(byteIndex / 2)];
6345 };
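/*
 * Example (illustrative): add708Bytes pushes one pts value per byte *pair*,
 * so byte indices map onto ptsVals by halving:
 *
 *   this.getPts(0); // ptsVals[0], same as getPts(1)
 *   this.getPts(4); // ptsVals[2], Math.floor(4 / 2)
 */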
6346 /**
6347 * Initializes a service
6348 *
6349 * @param {Integer} serviceNum Service number
6350 * @return {Service} Initialized service object
6351 */
6352
6353
6354 Cea708Stream.prototype.initService = function (serviceNum, i) {
6355 var serviceName = 'SERVICE' + serviceNum;
6356 var self = this;
6358 var encoding;
6359
6360 if (serviceName in this.serviceEncodings) {
6361 encoding = this.serviceEncodings[serviceName];
6362 }
6363
6364 this.services[serviceNum] = new Cea708Service(serviceNum, encoding, self);
6365 this.services[serviceNum].init(this.getPts(i), function (pts) {
6366 self.flushDisplayed(pts, self.services[serviceNum]);
6367 });
6368 return this.services[serviceNum];
6369 };
6370 /**
6371 * Execute text writing to current window
6372 *
6373 * @param {Integer} i Current index in the 708 packet
6374 * @param {Service} service The service object to be affected
6375 * @return {Integer} New index after parsing
6376 */
6377
6378
6379 Cea708Stream.prototype.handleText = function (i, service, options) {
6380 var isExtended = options && options.isExtended;
6381 var isMultiByte = options && options.isMultiByte;
6382 var packetData = this.current708Packet.data;
6383 var extended = isExtended ? 0x1000 : 0x0000;
6384 var currentByte = packetData[i];
6385 var nextByte = packetData[i + 1];
6386 var win = service.currentWindow;
6387 var char;
6388 var charCodeArray; // Use the TextDecoder if one was created for this service
6389
6390 if (service.textDecoder_ && !isExtended) {
6391 if (isMultiByte) {
6392 charCodeArray = [currentByte, nextByte];
6393 i++;
6394 } else {
6395 charCodeArray = [currentByte];
6396 }
6397
6398 char = service.textDecoder_.decode(new Uint8Array(charCodeArray));
6399 } else {
6400 char = get708CharFromCode(extended | currentByte);
6401 }
6402
6403 if (win.pendingNewLine && !win.isEmpty()) {
6404 win.newLine(this.getPts(i));
6405 }
6406
6407 win.pendingNewLine = false;
6408 win.addText(char);
6409 return i;
6410 };
6411 /**
6412 * Handle decoding of multibyte character
6413 *
6414 * @param {Integer} i Current index in the 708 packet
6415 * @param {Service} service The service object to be affected
6416 * @return {Integer} New index after parsing
6417 */
6418
6419
6420 Cea708Stream.prototype.multiByteCharacter = function (i, service) {
6421 var packetData = this.current708Packet.data;
6422 var firstByte = packetData[i + 1];
6423 var secondByte = packetData[i + 2];
6424
6425 if (within708TextBlock(firstByte) && within708TextBlock(secondByte)) {
6426 i = this.handleText(++i, service, {
6427 isMultiByte: true
6428 });
6429 }
6430
6431 return i;
6432 };
6433 /**
6434 * Parse and execute the CW# command.
6435 *
6436 * Set the current window.
6437 *
6438 * @param {Integer} i Current index in the 708 packet
6439 * @param {Service} service The service object to be affected
6440 * @return {Integer} New index after parsing
6441 */
6442
6443
6444 Cea708Stream.prototype.setCurrentWindow = function (i, service) {
6445 var packetData = this.current708Packet.data;
6446 var b = packetData[i];
6447 var windowNum = b & 0x07;
6448 service.setCurrentWindow(windowNum);
6449 return i;
6450 };
6451 /**
6452 * Parse and execute the DF# command.
6453 *
6454 * Define a window and set it as the current window.
6455 *
6456 * @param {Integer} i Current index in the 708 packet
6457 * @param {Service} service The service object to be affected
6458 * @return {Integer} New index after parsing
6459 */
6460
6461
6462 Cea708Stream.prototype.defineWindow = function (i, service) {
6463 var packetData = this.current708Packet.data;
6464 var b = packetData[i];
6465 var windowNum = b & 0x07;
6466 service.setCurrentWindow(windowNum);
6467 var win = service.currentWindow;
6468 b = packetData[++i];
6469 win.visible = (b & 0x20) >> 5; // v
6470
6471 win.rowLock = (b & 0x10) >> 4; // rl
6472
6473 win.columnLock = (b & 0x08) >> 3; // cl
6474
6475 win.priority = b & 0x07; // p
6476
6477 b = packetData[++i];
6478 win.relativePositioning = (b & 0x80) >> 7; // rp
6479
6480 win.anchorVertical = b & 0x7f; // av
6481
6482 b = packetData[++i];
6483 win.anchorHorizontal = b; // ah
6484
6485 b = packetData[++i];
6486 win.anchorPoint = (b & 0xf0) >> 4; // ap
6487
6488 win.rowCount = b & 0x0f; // rc
6489
6490 b = packetData[++i];
6491 win.columnCount = b & 0x3f; // cc
6492
6493 b = packetData[++i];
6494 win.windowStyle = (b & 0x38) >> 3; // ws
6495
6496 win.penStyle = b & 0x07; // ps
6497 // The spec says there are (rowCount+1) "virtual rows"
6498
6499 win.virtualRowCount = win.rowCount + 1;
6500 return i;
6501 };
6502 /**
6503 * Parse and execute the SWA command.
6504 *
6505 * Set attributes of the current window.
6506 *
6507 * @param {Integer} i Current index in the 708 packet
6508 * @param {Service} service The service object to be affected
6509 * @return {Integer} New index after parsing
6510 */
6511
6512
6513 Cea708Stream.prototype.setWindowAttributes = function (i, service) {
6514 var packetData = this.current708Packet.data;
6515 var b = packetData[i];
6516 var winAttr = service.currentWindow.winAttr;
6517 b = packetData[++i];
6518 winAttr.fillOpacity = (b & 0xc0) >> 6; // fo
6519
6520 winAttr.fillRed = (b & 0x30) >> 4; // fr
6521
6522 winAttr.fillGreen = (b & 0x0c) >> 2; // fg
6523
6524 winAttr.fillBlue = b & 0x03; // fb
6525
6526 b = packetData[++i];
6527 winAttr.borderType = (b & 0xc0) >> 6; // bt
6528
6529 winAttr.borderRed = (b & 0x30) >> 4; // br
6530
6531 winAttr.borderGreen = (b & 0x0c) >> 2; // bg
6532
6533 winAttr.borderBlue = b & 0x03; // bb
6534
6535 b = packetData[++i];
6536 winAttr.borderType += (b & 0x80) >> 5; // bt
6537
6538 winAttr.wordWrap = (b & 0x40) >> 6; // ww
6539
6540 winAttr.printDirection = (b & 0x30) >> 4; // pd
6541
6542 winAttr.scrollDirection = (b & 0x0c) >> 2; // sd
6543
6544 winAttr.justify = b & 0x03; // j
6545
6546 b = packetData[++i];
6547 winAttr.effectSpeed = (b & 0xf0) >> 4; // es
6548
6549 winAttr.effectDirection = (b & 0x0c) >> 2; // ed
6550
6551 winAttr.displayEffect = b & 0x03; // de
6552
6553 return i;
6554 };
6555 /**
6556 * Gather text from all displayed windows and push a caption to output.
6557 *
6558 * @param {Integer} pts Current PTS value
6559 * @param {Service} service The service object to be affected
6560 */
6561
6562
6563 Cea708Stream.prototype.flushDisplayed = function (pts, service) {
6564 var displayedText = []; // TODO: Positioning not supported, displaying multiple windows will not necessarily
6565 // display text in the correct order, but sample files so far have not shown any issue.
6566
6567 for (var winId = 0; winId < 8; winId++) {
6568 if (service.windows[winId].visible && !service.windows[winId].isEmpty()) {
6569 displayedText.push(service.windows[winId].getText());
6570 }
6571 }
6572
6573 service.endPts = pts;
6574 service.text = displayedText.join('\n\n');
6575 this.pushCaption(service);
6576 service.startPts = pts;
6577 };
6578 /**
6579 * Push a caption to output if the caption contains text.
6580 *
6581 * @param {Service} service The service object to be affected
6582 */
6583
6584
6585 Cea708Stream.prototype.pushCaption = function (service) {
6586 if (service.text !== '') {
6587 this.trigger('data', {
6588 startPts: service.startPts,
6589 endPts: service.endPts,
6590 text: service.text,
6591 stream: 'cc708_' + service.serviceNum
6592 });
6593 service.text = '';
6594 service.startPts = service.endPts;
6595 }
6596 };
6597 /**
6598 * Parse and execute the DSW command.
6599 *
6600 * Set visible property of windows based on the parsed bitmask.
6601 *
6602 * @param {Integer} i Current index in the 708 packet
6603 * @param {Service} service The service object to be affected
6604 * @return {Integer} New index after parsing
6605 */
6606
6607
6608 Cea708Stream.prototype.displayWindows = function (i, service) {
6609 var packetData = this.current708Packet.data;
6610 var b = packetData[++i];
6611 var pts = this.getPts(i);
6612 this.flushDisplayed(pts, service);
6613
6614 for (var winId = 0; winId < 8; winId++) {
6615 if (b & 0x01 << winId) {
6616 service.windows[winId].visible = 1;
6617 }
6618 }
6619
6620 return i;
6621 };
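/*
 * Bitmask example (illustrative), shared by the DSW/HDW/TGW/CLW/DLW
 * handlers: bit N of the parameter byte selects window N, so a byte of
 * 0x05 (0b00000101) targets windows 0 and 2:
 *
 *   0x05 & (0x01 << 0); // 1, window 0 selected
 *   0x05 & (0x01 << 2); // 4, window 2 selected
 *   0x05 & (0x01 << 1); // 0, window 1 untouched
 */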
6622 /**
6623 * Parse and execute the HDW command.
6624 *
6625 * Set visible property of windows based on the parsed bitmask.
6626 *
6627 * @param {Integer} i Current index in the 708 packet
6628 * @param {Service} service The service object to be affected
6629 * @return {Integer} New index after parsing
6630 */
6631
6632
6633 Cea708Stream.prototype.hideWindows = function (i, service) {
6634 var packetData = this.current708Packet.data;
6635 var b = packetData[++i];
6636 var pts = this.getPts(i);
6637 this.flushDisplayed(pts, service);
6638
6639 for (var winId = 0; winId < 8; winId++) {
6640 if (b & 0x01 << winId) {
6641 service.windows[winId].visible = 0;
6642 }
6643 }
6644
6645 return i;
6646 };
6647 /**
6648 * Parse and execute the TGW command.
6649 *
6650 * Set visible property of windows based on the parsed bitmask.
6651 *
6652 * @param {Integer} i Current index in the 708 packet
6653 * @param {Service} service The service object to be affected
6654 * @return {Integer} New index after parsing
6655 */
6656
6657
6658 Cea708Stream.prototype.toggleWindows = function (i, service) {
6659 var packetData = this.current708Packet.data;
6660 var b = packetData[++i];
6661 var pts = this.getPts(i);
6662 this.flushDisplayed(pts, service);
6663
6664 for (var winId = 0; winId < 8; winId++) {
6665 if (b & 0x01 << winId) {
6666 service.windows[winId].visible ^= 1;
6667 }
6668 }
6669
6670 return i;
6671 };
6672 /**
6673 * Parse and execute the CLW command.
6674 *
6675 * Clear text of windows based on the parsed bitmask.
6676 *
6677 * @param {Integer} i Current index in the 708 packet
6678 * @param {Service} service The service object to be affected
6679 * @return {Integer} New index after parsing
6680 */
6681
6682
6683 Cea708Stream.prototype.clearWindows = function (i, service) {
6684 var packetData = this.current708Packet.data;
6685 var b = packetData[++i];
6686 var pts = this.getPts(i);
6687 this.flushDisplayed(pts, service);
6688
6689 for (var winId = 0; winId < 8; winId++) {
6690 if (b & 0x01 << winId) {
6691 service.windows[winId].clearText();
6692 }
6693 }
6694
6695 return i;
6696 };
6697 /**
6698 * Parse and execute the DLW command.
6699 *
6700 * Re-initialize windows based on the parsed bitmask.
6701 *
6702 * @param {Integer} i Current index in the 708 packet
6703 * @param {Service} service The service object to be affected
6704 * @return {Integer} New index after parsing
6705 */
6706
6707
6708 Cea708Stream.prototype.deleteWindows = function (i, service) {
6709 var packetData = this.current708Packet.data;
6710 var b = packetData[++i];
6711 var pts = this.getPts(i);
6712 this.flushDisplayed(pts, service);
6713
6714 for (var winId = 0; winId < 8; winId++) {
6715 if (b & 0x01 << winId) {
6716 service.windows[winId].reset();
6717 }
6718 }
6719
6720 return i;
6721 };
6722 /**
6723 * Parse and execute the SPA command.
6724 *
6725 * Set pen attributes of the current window.
6726 *
6727 * @param {Integer} i Current index in the 708 packet
6728 * @param {Service} service The service object to be affected
6729 * @return {Integer} New index after parsing
6730 */
6731
6732
6733 Cea708Stream.prototype.setPenAttributes = function (i, service) {
6734 var packetData = this.current708Packet.data;
6735 var b = packetData[i];
6736 var penAttr = service.currentWindow.penAttr;
6737 b = packetData[++i];
6738 penAttr.textTag = (b & 0xf0) >> 4; // tt
6739
6740 penAttr.offset = (b & 0x0c) >> 2; // o
6741
6742 penAttr.penSize = b & 0x03; // s
6743
6744 b = packetData[++i];
6745 penAttr.italics = (b & 0x80) >> 7; // i
6746
6747 penAttr.underline = (b & 0x40) >> 6; // u
6748
6749 penAttr.edgeType = (b & 0x38) >> 3; // et
6750
6751 penAttr.fontStyle = b & 0x07; // fs
6752
6753 return i;
6754 };
6755 /**
6756 * Parse and execute the SPC command.
6757 *
6758 * Set pen color of the current window.
6759 *
6760 * @param {Integer} i Current index in the 708 packet
6761 * @param {Service} service The service object to be affected
6762 * @return {Integer} New index after parsing
6763 */
6764
6765
6766 Cea708Stream.prototype.setPenColor = function (i, service) {
6767 var packetData = this.current708Packet.data;
6768 var b = packetData[i];
6769 var penColor = service.currentWindow.penColor;
6770 b = packetData[++i];
6771 penColor.fgOpacity = (b & 0xc0) >> 6; // fo
6772
6773 penColor.fgRed = (b & 0x30) >> 4; // fr
6774
6775 penColor.fgGreen = (b & 0x0c) >> 2; // fg
6776
6777 penColor.fgBlue = b & 0x03; // fb
6778
6779 b = packetData[++i];
6780 penColor.bgOpacity = (b & 0xc0) >> 6; // bo
6781
6782 penColor.bgRed = (b & 0x30) >> 4; // br
6783
6784 penColor.bgGreen = (b & 0x0c) >> 2; // bg
6785
6786 penColor.bgBlue = b & 0x03; // bb
6787
6788 b = packetData[++i];
6789 penColor.edgeRed = (b & 0x30) >> 4; // er
6790
6791 penColor.edgeGreen = (b & 0x0c) >> 2; // eg
6792
6793 penColor.edgeBlue = b & 0x03; // eb
6794
6795 return i;
6796 };
6797 /**
6798 * Parse and execute the SPL command.
6799 *
6800 * Set pen location of the current window.
6801 *
6802 * @param {Integer} i Current index in the 708 packet
6803 * @param {Service} service The service object to be affected
6804 * @return {Integer} New index after parsing
6805 */
6806
6807
6808 Cea708Stream.prototype.setPenLocation = function (i, service) {
6809 var packetData = this.current708Packet.data;
6810 var b = packetData[i];
6811 var penLoc = service.currentWindow.penLoc; // Positioning isn't really supported at the moment, so this essentially just inserts a linebreak
6812
6813 service.currentWindow.pendingNewLine = true;
6814 b = packetData[++i];
6815 penLoc.row = b & 0x0f; // r
6816
6817 b = packetData[++i];
6818 penLoc.column = b & 0x3f; // c
6819
6820 return i;
6821 };
6822 /**
6823 * Execute the RST command.
6824 *
6825 * Reset service to a clean slate. Re-initialize.
6826 *
6827 * @param {Integer} i Current index in the 708 packet
6828 * @param {Service} service The service object to be affected
6829 * @return {Service} Re-initialized service
6830 */
6831
6832
6833 Cea708Stream.prototype.reset = function (i, service) {
6834 var pts = this.getPts(i);
6835 this.flushDisplayed(pts, service);
6836 return this.initService(service.serviceNum, i);
6837 }; // This hash maps non-ASCII, special, and extended character codes to their
6838 // proper Unicode equivalent. The first keys that are only a single byte
6839 // are the non-standard ASCII characters, which simply map the CEA608 byte
6840 // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
6841 // character codes, but have their MSB bitmasked with 0x03 so that a lookup
6842 // can be performed regardless of the field and data channel on which the
6843 // character code was received.
6844
6845
6846 var CHARACTER_TRANSLATION = {
6847 0x2a: 0xe1,
6848 // á
6849 0x5c: 0xe9,
6850 // é
6851 0x5e: 0xed,
6852 // í
6853 0x5f: 0xf3,
6854 // ó
6855 0x60: 0xfa,
6856 // ú
6857 0x7b: 0xe7,
6858 // ç
6859 0x7c: 0xf7,
6860 // ÷
6861 0x7d: 0xd1,
6862 // Ñ
6863 0x7e: 0xf1,
6864 // ñ
6865 0x7f: 0x2588,
6866 // █
6867 0x0130: 0xae,
6868 // ®
6869 0x0131: 0xb0,
6870 // °
6871 0x0132: 0xbd,
6872 // ½
6873 0x0133: 0xbf,
6874 // ¿
6875 0x0134: 0x2122,
6876 // ™
6877 0x0135: 0xa2,
6878 // ¢
6879 0x0136: 0xa3,
6880 // £
6881 0x0137: 0x266a,
6882 // ♪
6883 0x0138: 0xe0,
6884 // à
6885 0x0139: 0xa0,
6886 // (non-breaking space)
6887 0x013a: 0xe8,
6888 // è
6889 0x013b: 0xe2,
6890 // â
6891 0x013c: 0xea,
6892 // ê
6893 0x013d: 0xee,
6894 // î
6895 0x013e: 0xf4,
6896 // ô
6897 0x013f: 0xfb,
6898 // û
6899 0x0220: 0xc1,
6900 // Á
6901 0x0221: 0xc9,
6902 // É
6903 0x0222: 0xd3,
6904 // Ó
6905 0x0223: 0xda,
6906 // Ú
6907 0x0224: 0xdc,
6908 // Ü
6909 0x0225: 0xfc,
6910 // ü
6911 0x0226: 0x2018,
6912 // ‘
6913 0x0227: 0xa1,
6914 // ¡
6915 0x0228: 0x2a,
6916 // *
6917 0x0229: 0x27,
6918 // '
6919 0x022a: 0x2014,
6920 // —
6921 0x022b: 0xa9,
6922 // ©
6923 0x022c: 0x2120,
6924 // ℠
6925 0x022d: 0x2022,
6926 // •
6927 0x022e: 0x201c,
6928 // “
6929 0x022f: 0x201d,
6930 // ”
6931 0x0230: 0xc0,
6932 // À
6933 0x0231: 0xc2,
6934 // Â
6935 0x0232: 0xc7,
6936 // Ç
6937 0x0233: 0xc8,
6938 // È
6939 0x0234: 0xca,
6940 // Ê
6941 0x0235: 0xcb,
6942 // Ë
6943 0x0236: 0xeb,
6944 // ë
6945 0x0237: 0xce,
6946 // Î
6947 0x0238: 0xcf,
6948 // Ï
6949 0x0239: 0xef,
6950 // ï
6951 0x023a: 0xd4,
6952 // Ô
6953 0x023b: 0xd9,
6954 // Ù
6955 0x023c: 0xf9,
6956 // ù
6957 0x023d: 0xdb,
6958 // Û
6959 0x023e: 0xab,
6960 // «
6961 0x023f: 0xbb,
6962 // »
6963 0x0320: 0xc3,
6964 // Ã
6965 0x0321: 0xe3,
6966 // ã
6967 0x0322: 0xcd,
6968 // Í
6969 0x0323: 0xcc,
6970 // Ì
6971 0x0324: 0xec,
6972 // ì
6973 0x0325: 0xd2,
6974 // Ò
6975 0x0326: 0xf2,
6976 // ò
6977 0x0327: 0xd5,
6978 // Õ
6979 0x0328: 0xf5,
6980 // õ
6981 0x0329: 0x7b,
6982 // {
6983 0x032a: 0x7d,
6984 // }
6985 0x032b: 0x5c,
6986 // \
6987 0x032c: 0x5e,
6988 // ^
6989 0x032d: 0x5f,
6990 // _
6991 0x032e: 0x7c,
6992 // |
6993 0x032f: 0x7e,
6994 // ~
6995 0x0330: 0xc4,
6996 // Ä
6997 0x0331: 0xe4,
6998 // ä
6999 0x0332: 0xd6,
7000 // Ö
7001 0x0333: 0xf6,
7002 // ö
7003 0x0334: 0xdf,
7004 // ß
7005 0x0335: 0xa5,
7006 // ¥
7007 0x0336: 0xa4,
7008 // ¤
7009 0x0337: 0x2502,
7010 // │
7011 0x0338: 0xc5,
7012 // Å
7013 0x0339: 0xe5,
7014 // å
7015 0x033a: 0xd8,
7016 // Ø
7017 0x033b: 0xf8,
7018 // ø
7019 0x033c: 0x250c,
7020 // ┌
7021 0x033d: 0x2510,
7022 // ┐
7023 0x033e: 0x2514,
7024 // └
7025 0x033f: 0x2518 // ┘
7026
7027 };
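// Illustrative example of the two-byte key scheme above (bytes assumed for
// illustration): a special-character pair received as (0x19, 0x38) on data
// channel 2 is masked to (0x19 & 0x03) << 8 | 0x38 === 0x0138, the same key
// a channel-1 pair (0x11, 0x38) produces, and maps to 0xe0 ("à").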
7028
7029 var getCharFromCode = function getCharFromCode(code) {
7030 if (code === null) {
7031 return '';
7032 }
7033
7034 code = CHARACTER_TRANSLATION[code] || code;
7035 return String.fromCharCode(code);
7036 }; // the index of the last row in a CEA-608 display buffer
7037
7038
7039 var BOTTOM_ROW = 14; // This array is used for mapping PACs -> row #, since there's no way of
7040 // getting it through bit logic.
7041
7042 var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420]; // CEA-608 captions are rendered onto a 34x15 matrix of character
7043 // cells. The "bottom" row is the last element in the outer array.
7044
7045 var createDisplayBuffer = function createDisplayBuffer() {
7046 var result = [],
7047 i = BOTTOM_ROW + 1;
7048
7049 while (i--) {
7050 result.push('');
7051 }
7052
7053 return result;
7054 };
7055
7056 var Cea608Stream = function Cea608Stream(field, dataChannel) {
7057 Cea608Stream.prototype.init.call(this);
7058 this.field_ = field || 0;
7059 this.dataChannel_ = dataChannel || 0;
7060 this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);
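// For reference, the name works out to: field 0 / channel 0 -> 'CC1',
// field 0 / channel 1 -> 'CC2', field 1 / channel 0 -> 'CC3',
// field 1 / channel 1 -> 'CC4'.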
7061 this.setConstants();
7062 this.reset();
7063
7064 this.push = function (packet) {
7065 var data, swap, char0, char1, text; // remove the parity bits
7066
7067 data = packet.ccData & 0x7f7f; // ignore duplicate control codes; the spec demands they're sent twice
7068
7069 if (data === this.lastControlCode_) {
7070 this.lastControlCode_ = null;
7071 return;
7072 } // Store control codes
7073
7074
7075 if ((data & 0xf000) === 0x1000) {
7076 this.lastControlCode_ = data;
7077 } else if (data !== this.PADDING_) {
7078 this.lastControlCode_ = null;
7079 }
7080
7081 char0 = data >>> 8;
7082 char1 = data & 0xff;
7083
7084 if (data === this.PADDING_) {
7085 return;
7086 } else if (data === this.RESUME_CAPTION_LOADING_) {
7087 this.mode_ = 'popOn';
7088 } else if (data === this.END_OF_CAPTION_) {
7089 // If an EOC is received while in paint-on mode, the displayed caption
7090 // text should be swapped to non-displayed memory as if it was a pop-on
7091 // caption. Because of that, we should explicitly switch back to pop-on
7092 // mode
7093 this.mode_ = 'popOn';
7094 this.clearFormatting(packet.pts); // if a caption was being displayed, it's gone now
7095
7096 this.flushDisplayed(packet.pts); // flip memory
7097
7098 swap = this.displayed_;
7099 this.displayed_ = this.nonDisplayed_;
7100 this.nonDisplayed_ = swap; // start measuring the time to display the caption
7101
7102 this.startPts_ = packet.pts;
7103 } else if (data === this.ROLL_UP_2_ROWS_) {
7104 this.rollUpRows_ = 2;
7105 this.setRollUp(packet.pts);
7106 } else if (data === this.ROLL_UP_3_ROWS_) {
7107 this.rollUpRows_ = 3;
7108 this.setRollUp(packet.pts);
7109 } else if (data === this.ROLL_UP_4_ROWS_) {
7110 this.rollUpRows_ = 4;
7111 this.setRollUp(packet.pts);
7112 } else if (data === this.CARRIAGE_RETURN_) {
7113 this.clearFormatting(packet.pts);
7114 this.flushDisplayed(packet.pts);
7115 this.shiftRowsUp_();
7116 this.startPts_ = packet.pts;
7117 } else if (data === this.BACKSPACE_) {
7118 if (this.mode_ === 'popOn') {
7119 this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
7120 } else {
7121 this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
7122 }
7123 } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
7124 this.flushDisplayed(packet.pts);
7125 this.displayed_ = createDisplayBuffer();
7126 } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
7127 this.nonDisplayed_ = createDisplayBuffer();
7128 } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
7129 if (this.mode_ !== 'paintOn') {
7130 // NOTE: This should be removed when proper caption positioning is
7131 // implemented
7132 this.flushDisplayed(packet.pts);
7133 this.displayed_ = createDisplayBuffer();
7134 }
7135
7136 this.mode_ = 'paintOn';
7137 this.startPts_ = packet.pts; // Append special characters to caption text
7138 } else if (this.isSpecialCharacter(char0, char1)) {
7139 // Bitmask char0 so that we can apply character transformations
7140 // regardless of field and data channel.
7141 // Then byte-shift to the left and OR with char1 so we can pass the
7142 // entire character code to `getCharFromCode`.
7143 char0 = (char0 & 0x03) << 8;
7144 text = getCharFromCode(char0 | char1);
7145 this[this.mode_](packet.pts, text);
7146 this.column_++; // Append extended characters to caption text
7147 } else if (this.isExtCharacter(char0, char1)) {
7148 // Extended characters always follow their "non-extended" equivalents.
7149 // i.e. if a "è" is desired, you'll always receive "eè"; non-compliant
7150 // decoders are supposed to drop the "è", while compliant decoders
7151 // backspace the "e" and insert "è".
7152 // Delete the previous character
7153 if (this.mode_ === 'popOn') {
7154 this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
7155 } else {
7156 this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
7157 } // Bitmask char0 so that we can apply character transformations
7158 // regardless of field and data channel.
7159 // Then byte-shift to the left and OR with char1 so we can pass the
7160 // entire character code to `getCharFromCode`.
7161
7162
7163 char0 = (char0 & 0x03) << 8;
7164 text = getCharFromCode(char0 | char1);
7165 this[this.mode_](packet.pts, text);
7166 this.column_++; // Process mid-row codes
7167 } else if (this.isMidRowCode(char0, char1)) {
7168 // Attributes are not additive, so clear all formatting
7169 this.clearFormatting(packet.pts); // According to the standard, mid-row codes
7170 // should be replaced with spaces, so add one now
7171
7172 this[this.mode_](packet.pts, ' ');
7173 this.column_++;
7174
7175 if ((char1 & 0xe) === 0xe) {
7176 this.addFormatting(packet.pts, ['i']);
7177 }
7178
7179 if ((char1 & 0x1) === 0x1) {
7180 this.addFormatting(packet.pts, ['u']);
7181 } // Detect offset control codes and adjust cursor
7182
7183 } else if (this.isOffsetControlCode(char0, char1)) {
7184 // Cursor position is set by indent PAC (see below) in 4-column
7185 // increments, with an additional offset code of 1-3 to reach any
7186 // of the 32 columns specified by CEA-608. So all we need to do
7187 // here is increment the column cursor by the given offset.
7188 this.column_ += char1 & 0x03; // Detect PACs (Preamble Address Codes)
7189 } else if (this.isPAC(char0, char1)) {
7190 // There's no logic for PAC -> row mapping, so we have to just
7191 // find the row code in an array and use its index :(
7192 var row = ROWS.indexOf(data & 0x1f20); // Configure the caption window if we're in roll-up mode
7193
7194 if (this.mode_ === 'rollUp') {
7195 // This implies that the base row is incorrectly set.
7196 // As per the recommendation in CEA-608 (Base Row Implementation), defer to the number
7197 // of roll-up rows set.
7198 if (row - this.rollUpRows_ + 1 < 0) {
7199 row = this.rollUpRows_ - 1;
7200 }
7201
7202 this.setRollUp(packet.pts, row);
7203 }
7204
7205 if (row !== this.row_) {
7206 // formatting is only persistent for current row
7207 this.clearFormatting(packet.pts);
7208 this.row_ = row;
7209 } // All PACs can apply underline, so detect and apply
7210 // (All odd-numbered second bytes set underline)
7211
7212
7213 if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
7214 this.addFormatting(packet.pts, ['u']);
7215 }
7216
7217 if ((data & 0x10) === 0x10) {
7218 // We've got an indent level code. Each successive even number
7219 // increments the column cursor by 4, so we can get the desired
7220 // column position by bit-shifting to the right (to get n/2)
7221 // and multiplying by 4.
7222 this.column_ = ((data & 0xe) >> 1) * 4;
7223 }
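// Worked example (values assumed for illustration): a PAC with
// (data & 0xe) === 0x4 sets column_ to (0x4 >> 1) * 4 === 8; a following
// tab offset code whose second byte is 0x22 then adds 0x22 & 0x03 === 2,
// landing the cursor on column 10.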
7224
7225 if (this.isColorPAC(char1)) {
7226 // it's a color code, though we only support white, which
7227 // can be either normal or italicized. white italics can be
7228 // either 0x4e or 0x6e depending on the row, so we just
7229 // bitwise-and with 0xe to see if italics should be turned on
7230 if ((char1 & 0xe) === 0xe) {
7231 this.addFormatting(packet.pts, ['i']);
7232 }
7233 } // We have a normal character in char0, and possibly one in char1
7234
7235 } else if (this.isNormalChar(char0)) {
7236 if (char1 === 0x00) {
7237 char1 = null;
7238 }
7239
7240 text = getCharFromCode(char0);
7241 text += getCharFromCode(char1);
7242 this[this.mode_](packet.pts, text);
7243 this.column_ += text.length;
7244 } // finish data processing
7245
7246 };
7247 };
7248
7249 Cea608Stream.prototype = new stream(); // Trigger a cue point that captures the current state of the
7250 // display buffer
7251
7252 Cea608Stream.prototype.flushDisplayed = function (pts) {
7253 var content = this.displayed_ // remove spaces from the start and end of the string
7254 .map(function (row, index) {
7255 try {
7256 return row.trim();
7257 } catch (e) {
7258 // Ordinarily, this shouldn't happen. However, caption
7259 // parsing errors should not throw exceptions and
7260 // break playback.
7261 this.trigger('log', {
7262 level: 'warn',
7263 message: 'Skipping a malformed 608 caption at index ' + index + '.'
7264 });
7265 return '';
7266 }
7267 }, this) // combine all text rows to display in one cue
7268 .join('\n') // and remove blank rows from the start and end, but not the middle
7269 .replace(/^\n+|\n+$/g, '');
7270
7271 if (content.length) {
7272 this.trigger('data', {
7273 startPts: this.startPts_,
7274 endPts: pts,
7275 text: content,
7276 stream: this.name_
7277 });
7278 }
7279 };
7280 /**
7281 * Zero out the data, used for startup and on seek
7282 */
7283
7284
7285 Cea608Stream.prototype.reset = function () {
7286 this.mode_ = 'popOn'; // When in roll-up mode, the index of the last row that will
7287 // actually display captions. If a caption is shifted to a row
7288 // with a lower index than this, it is cleared from the display
7289 // buffer
7290
7291 this.topRow_ = 0;
7292 this.startPts_ = 0;
7293 this.displayed_ = createDisplayBuffer();
7294 this.nonDisplayed_ = createDisplayBuffer();
7295 this.lastControlCode_ = null; // Track row and column for proper line-breaking and spacing
7296
7297 this.column_ = 0;
7298 this.row_ = BOTTOM_ROW;
7299 this.rollUpRows_ = 2; // This variable holds currently-applied formatting
7300
7301 this.formatting_ = [];
7302 };
7303 /**
7304 * Sets up control code and related constants for this instance
7305 */
7306
7307
7308 Cea608Stream.prototype.setConstants = function () {
7309 // The following attributes have these uses:
7310 // ext_ : char0 for mid-row codes, and the base for extended
7311 // chars (ext_+0, ext_+1, and ext_+2 are char0s for
7312 // extended codes)
7313 // control_: char0 for control codes, except byte-shifted to the
7314 // left so that we can do this.control_ | CONTROL_CODE
7315 // offset_: char0 for tab offset codes
7316 //
7317 // It's also worth noting that control codes, and _only_ control codes,
7318 // differ between field 1 and field 2. Field 2 control codes are always
7319 // their field 1 value plus 1. That's why there's the "| field" on the
7320 // control value.
7321 if (this.dataChannel_ === 0) {
7322 this.BASE_ = 0x10;
7323 this.EXT_ = 0x11;
7324 this.CONTROL_ = (0x14 | this.field_) << 8;
7325 this.OFFSET_ = 0x17;
7326 } else if (this.dataChannel_ === 1) {
7327 this.BASE_ = 0x18;
7328 this.EXT_ = 0x19;
7329 this.CONTROL_ = (0x1c | this.field_) << 8;
7330 this.OFFSET_ = 0x1f;
7331 } // Constants for the LSByte command codes recognized by Cea608Stream. This
7332 // list is not exhaustive. For a more comprehensive listing and semantics see
7333 // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
7334 // Padding
7335
7336
7337 this.PADDING_ = 0x0000; // Pop-on Mode
7338
7339 this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
7340 this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f; // Roll-up Mode
7341
7342 this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
7343 this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
7344 this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
7345 this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d; // paint-on mode
7346
7347 this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29; // Erasure
7348
7349 this.BACKSPACE_ = this.CONTROL_ | 0x21;
7350 this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
7351 this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
7352 };
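// For example, on field 1 (field_ === 0) with data channel 1
// (dataChannel_ === 0), CONTROL_ is 0x1400, so RESUME_CAPTION_LOADING_ is
// 0x1420 and END_OF_CAPTION_ is 0x142f; the same commands on field 2
// (field_ === 1) arrive as 0x1520 and 0x152f, which is why the field bit
// is OR'd into the control value above.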
7353 /**
7354 * Detects if the 2-byte packet data is a special character
7355 *
7356 * Special characters have a second byte in the range 0x30 to 0x3f,
7357 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
7358 * data channel 2).
7359 *
7360 * @param {Integer} char0 The first byte
7361 * @param {Integer} char1 The second byte
7362 * @return {Boolean} Whether the 2 bytes are a special character
7363 */
7364
7365
7366 Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
7367 return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
7368 };
7369 /**
7370 * Detects if the 2-byte packet data is an extended character
7371 *
7372 * Extended characters have a second byte in the range 0x20 to 0x3f,
7373 * with the first byte being 0x12 or 0x13 (for data channel 1) or
7374 * 0x1a or 0x1b (for data channel 2).
7375 *
7376 * @param {Integer} char0 The first byte
7377 * @param {Integer} char1 The second byte
7378 * @return {Boolean} Whether the 2 bytes are an extended character
7379 */
7380
7381
7382 Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
7383 return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
7384 };
7385 /**
7386 * Detects if the 2-byte packet is a mid-row code
7387 *
7388 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
7389 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
7390 * channel 2).
7391 *
7392 * @param {Integer} char0 The first byte
7393 * @param {Integer} char1 The second byte
7394 * @return {Boolean} Whether the 2 bytes are a mid-row code
7395 */
7396
7397
7398 Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
7399 return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
7400 };
7401 /**
7402 * Detects if the 2-byte packet is an offset control code
7403 *
7404 * Offset control codes have a second byte in the range 0x21 to 0x23,
7405 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
7406 * data channel 2).
7407 *
7408 * @param {Integer} char0 The first byte
7409 * @param {Integer} char1 The second byte
7410 * @return {Boolean} Whether the 2 bytes are an offset control code
7411 */
7412
7413
7414 Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
7415 return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
7416 };
7417 /**
7418 * Detects if the 2-byte packet is a Preamble Address Code
7419 *
7420 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
7421 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
7422 * range 0x40 to 0x7f.
7423 *
7424 * @param {Integer} char0 The first byte
7425 * @param {Integer} char1 The second byte
7426 * @return {Boolean} Whether the 2 bytes are a PAC
7427 */
7428
7429
7430 Cea608Stream.prototype.isPAC = function (char0, char1) {
7431 return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
7432 };
7433 /**
7434 * Detects if a packet's second byte is in the range of a PAC color code
7435 *
7436 * PAC color codes have a second byte in the range 0x40 to 0x4f, or
7437 * 0x60 to 0x7f.
7438 *
7439 * @param {Integer} char1 The second byte
7440 * @return {Boolean} Whether the byte is a color PAC
7441 */
7442
7443
7444 Cea608Stream.prototype.isColorPAC = function (char1) {
7445 return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
7446 };
7447 /**
7448 * Detects if a single byte is in the range of a normal character
7449 *
7450 * Normal text bytes are in the range 0x20 to 0x7f.
7451 *
7452 * @param {Integer} char The byte
7453 * @return {Boolean} Whether the byte is a normal character
7454 */
7455
7456
7457 Cea608Stream.prototype.isNormalChar = function (char) {
7458 return char >= 0x20 && char <= 0x7f;
7459 };
7460 /**
7461 * Configures roll-up
7462 *
7463 * @param {Integer} pts Current PTS
7464 * @param {Integer} newBaseRow Used by PACs to slide the current window to
7465 * a new position
7466 */
7467
7468
7469 Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
7470 // Reset the base row to the bottom row when switching modes
7471 if (this.mode_ !== 'rollUp') {
7472 this.row_ = BOTTOM_ROW;
7473 this.mode_ = 'rollUp'; // Spec says to wipe memories when switching to roll-up
7474
7475 this.flushDisplayed(pts);
7476 this.nonDisplayed_ = createDisplayBuffer();
7477 this.displayed_ = createDisplayBuffer();
7478 }
7479
7480 if (newBaseRow !== undefined && newBaseRow !== this.row_) {
7481 // move currently displayed captions (up or down) to the new base row
7482 for (var i = 0; i < this.rollUpRows_; i++) {
7483 this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
7484 this.displayed_[this.row_ - i] = '';
7485 }
7486 }
7487
7488 if (newBaseRow === undefined) {
7489 newBaseRow = this.row_;
7490 }
7491
7492 this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
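// e.g. with rollUpRows_ === 3 and a PAC selecting base row 10, rows 8
// through 10 stay active and topRow_ becomes 10 - 3 + 1 === 8.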
7493 }; // Adds the opening HTML tag(s) for the passed formatting to the caption text,
7494 // and keeps track of it for later closing
7495
7496
7497 Cea608Stream.prototype.addFormatting = function (pts, format) {
7498 this.formatting_ = this.formatting_.concat(format);
7499 var text = format.reduce(function (text, format) {
7500 return text + '<' + format + '>';
7501 }, '');
7502 this[this.mode_](pts, text);
7503 }; // Adds HTML closing tags for current formatting to caption text and
7504 // clears remembered formatting
7505
7506
7507 Cea608Stream.prototype.clearFormatting = function (pts) {
7508 if (!this.formatting_.length) {
7509 return;
7510 }
7511
7512 var text = this.formatting_.reverse().reduce(function (text, format) {
7513 return text + '</' + format + '>';
7514 }, '');
7515 this.formatting_ = [];
7516 this[this.mode_](pts, text);
7517 }; // Mode Implementations
7518
7519
7520 Cea608Stream.prototype.popOn = function (pts, text) {
7521 var baseRow = this.nonDisplayed_[this.row_]; // buffer characters
7522
7523 baseRow += text;
7524 this.nonDisplayed_[this.row_] = baseRow;
7525 };
7526
7527 Cea608Stream.prototype.rollUp = function (pts, text) {
7528 var baseRow = this.displayed_[this.row_];
7529 baseRow += text;
7530 this.displayed_[this.row_] = baseRow;
7531 };
7532
7533 Cea608Stream.prototype.shiftRowsUp_ = function () {
7534 var i; // clear out inactive rows
7535
7536 for (i = 0; i < this.topRow_; i++) {
7537 this.displayed_[i] = '';
7538 }
7539
7540 for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
7541 this.displayed_[i] = '';
7542 } // shift displayed rows up
7543
7544
7545 for (i = this.topRow_; i < this.row_; i++) {
7546 this.displayed_[i] = this.displayed_[i + 1];
7547 } // clear out the bottom row
7548
7549
7550 this.displayed_[this.row_] = '';
7551 };
7552
7553 Cea608Stream.prototype.paintOn = function (pts, text) {
7554 var baseRow = this.displayed_[this.row_];
7555 baseRow += text;
7556 this.displayed_[this.row_] = baseRow;
7557 }; // exports
7558
7559
7560 var captionStream = {
7561 CaptionStream: CaptionStream$1,
7562 Cea608Stream: Cea608Stream,
7563 Cea708Stream: Cea708Stream
7564 };
7565 /**
7566 * mux.js
7567 *
7568 * Copyright (c) Brightcove
7569 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
7570 */
7571
7572 var streamTypes = {
7573 H264_STREAM_TYPE: 0x1B,
7574 ADTS_STREAM_TYPE: 0x0F,
7575 METADATA_STREAM_TYPE: 0x15
7576 };
7577 var MAX_TS = 8589934592;
7578 var RO_THRESH = 4294967296;
7579 var TYPE_SHARED = 'shared';
7580
7581 var handleRollover$1 = function handleRollover(value, reference) {
7582 var direction = 1;
7583
7584 if (value > reference) {
7585 // If the current timestamp value is greater than our reference timestamp and we detect a
7586 // timestamp rollover, this means the roll over is happening in the opposite direction.
7587 // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
7588 // point will be set to a small number, e.g. 1. The user then seeks backwards over the
7589 // rollover point. In loading this segment, the timestamp values will be very large,
7590 // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
7591 // the time stamp to be `value - 2^33`.
7592 direction = -1;
7593 } // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
7594 // cause an incorrect adjustment.
7595
7596
7597 while (Math.abs(reference - value) > RO_THRESH) {
7598 value += direction * MAX_TS;
7599 }
7600
7601 return value;
7602 };
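// Illustrative example (values assumed): with reference === 90000 (one
// second past a rollover) and value === MAX_TS - 90000 (one second before
// it), the difference exceeds RO_THRESH, so one iteration subtracts MAX_TS
// and yields -90000: one second before the reference, as intended.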
7603
7604 var TimestampRolloverStream$1 = function TimestampRolloverStream(type) {
7605 var lastDTS, referenceDTS;
7606 TimestampRolloverStream.prototype.init.call(this); // The "shared" type is used in cases where a stream will contain muxed
7607 // video and audio. We could use `undefined` here, but having a string
7608 // makes debugging a little clearer.
7609
7610 this.type_ = type || TYPE_SHARED;
7611
7612 this.push = function (data) {
7613 // Any "shared" rollover streams will accept _all_ data. Otherwise,
7614 // streams will only accept data that matches their type.
7615 if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
7616 return;
7617 }
7618
7619 if (referenceDTS === undefined) {
7620 referenceDTS = data.dts;
7621 }
7622
7623 data.dts = handleRollover$1(data.dts, referenceDTS);
7624 data.pts = handleRollover$1(data.pts, referenceDTS);
7625 lastDTS = data.dts;
7626 this.trigger('data', data);
7627 };
7628
7629 this.flush = function () {
7630 referenceDTS = lastDTS;
7631 this.trigger('done');
7632 };
7633
7634 this.endTimeline = function () {
7635 this.flush();
7636 this.trigger('endedtimeline');
7637 };
7638
7639 this.discontinuity = function () {
7640 referenceDTS = void 0;
7641 lastDTS = void 0;
7642 };
7643
7644 this.reset = function () {
7645 this.discontinuity();
7646 this.trigger('reset');
7647 };
7648 };
7649
7650 TimestampRolloverStream$1.prototype = new stream();
7651 var timestampRolloverStream = {
7652 TimestampRolloverStream: TimestampRolloverStream$1,
7653 handleRollover: handleRollover$1
7654 };
7655
7656 var percentEncode$1 = function percentEncode(bytes, start, end) {
7657 var i,
7658 result = '';
7659
7660 for (i = start; i < end; i++) {
7661 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
7662 }
7663
7664 return result;
7665 },
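// e.g. percentEncode([0xc3, 0xa9], 0, 2) === '%c3%a9', which
// decodeURIComponent() below turns back into the single character 'é'.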
7666 // return the string representation of the specified byte range,
7667 // interpreted as UTF-8.
7668 parseUtf8 = function parseUtf8(bytes, start, end) {
7669 return decodeURIComponent(percentEncode$1(bytes, start, end));
7670 },
7671 // return the string representation of the specified byte range,
7672 // interpreted as ISO-8859-1.
7673 parseIso88591$1 = function parseIso88591(bytes, start, end) {
7674 return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
7675 },
7676 parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
7677 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
7678 },
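// e.g. the syncsafe bytes [0x00, 0x00, 0x02, 0x01] decode to
// (0x02 << 7) | 0x01 === 257, since each byte contributes only 7 bits.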
7679 tagParsers = {
7680 TXXX: function TXXX(tag) {
7681 var i;
7682
7683 if (tag.data[0] !== 3) {
7684 // ignore frames with unrecognized character encodings
7685 return;
7686 }
7687
7688 for (i = 1; i < tag.data.length; i++) {
7689 if (tag.data[i] === 0) {
7690 // parse the text fields
7691 tag.description = parseUtf8(tag.data, 1, i); // do not include the null terminator in the tag value
7692
7693 tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
7694 break;
7695 }
7696 }
7697
7698 tag.data = tag.value;
7699 },
7700 WXXX: function WXXX(tag) {
7701 var i;
7702
7703 if (tag.data[0] !== 3) {
7704 // ignore frames with unrecognized character encodings
7705 return;
7706 }
7707
7708 for (i = 1; i < tag.data.length; i++) {
7709 if (tag.data[i] === 0) {
7710 // parse the description and URL fields
7711 tag.description = parseUtf8(tag.data, 1, i);
7712 tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
7713 break;
7714 }
7715 }
7716 },
7717 PRIV: function PRIV(tag) {
7718 var i;
7719
7720 for (i = 0; i < tag.data.length; i++) {
7721 if (tag.data[i] === 0) {
7722 // parse the description and URL fields
7723 tag.owner = parseIso88591$1(tag.data, 0, i);
7724 break;
7725 }
7726 }
7727
7728 tag.privateData = tag.data.subarray(i + 1);
7729 tag.data = tag.privateData;
7730 }
7731 },
7732 _MetadataStream;
7733
7734 _MetadataStream = function MetadataStream(options) {
7735 var settings = {
7736 // the bytes of the program-level descriptor field in MP2T
7737 // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
7738 // program element descriptors"
7739 descriptor: options && options.descriptor
7740 },
7741 // the total size in bytes of the ID3 tag being parsed
7742 tagSize = 0,
7743 // tag data that is not complete enough to be parsed
7744 buffer = [],
7745 // the total number of bytes currently in the buffer
7746 bufferSize = 0,
7747 i;
7748
7749 _MetadataStream.prototype.init.call(this); // calculate the text track in-band metadata track dispatch type
7750 // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
7751
7752
7753 this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);
7754
7755 if (settings.descriptor) {
7756 for (i = 0; i < settings.descriptor.length; i++) {
7757 this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
7758 }
7759 }
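// Illustrative example: with no descriptor, dispatchType is just '15'
// (METADATA_STREAM_TYPE in hex); a descriptor of [0x49, 0x44, 0x33]
// ("ID3", assumed here for illustration) would extend it to '15494433'.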
7760
7761 this.push = function (chunk) {
7762 var tag, frameStart, frameSize, frame, i, frameHeader;
7763
7764 if (chunk.type !== 'timed-metadata') {
7765 return;
7766 } // if data_alignment_indicator is set in the PES header,
7767 // we must have the start of a new ID3 tag. Assume anything
7768 // remaining in the buffer was malformed and throw it out
7769
7770
7771 if (chunk.dataAlignmentIndicator) {
7772 bufferSize = 0;
7773 buffer.length = 0;
7774 } // ignore events that don't look like ID3 data
7775
7776
7777 if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
7778 this.trigger('log', {
7779 level: 'warn',
7780 message: 'Skipping unrecognized metadata packet'
7781 });
7782 return;
7783 } // add this chunk to the data we've collected so far
7784
7785
7786 buffer.push(chunk);
7787 bufferSize += chunk.data.byteLength; // grab the size of the entire frame from the ID3 header
7788
7789 if (buffer.length === 1) {
7790 // the frame size is transmitted as a 28-bit integer in the
7791 // last four bytes of the ID3 header.
7792 // The most significant bit of each byte is dropped and the
7793 // results concatenated to recover the actual value.
7794 tagSize = parseSyncSafeInteger$1(chunk.data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more
7795 // convenient for our comparisons to include it
7796
7797 tagSize += 10;
7798 } // if the entire frame has not arrived, wait for more data
7799
7800
7801 if (bufferSize < tagSize) {
7802 return;
7803 } // collect the entire frame so it can be parsed
7804
7805
7806 tag = {
7807 data: new Uint8Array(tagSize),
7808 frames: [],
7809 pts: buffer[0].pts,
7810 dts: buffer[0].dts
7811 };
7812
7813 for (i = 0; i < tagSize;) {
7814 tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
7815 i += buffer[0].data.byteLength;
7816 bufferSize -= buffer[0].data.byteLength;
7817 buffer.shift();
7818 } // find the start of the first frame and the end of the tag
7819
7820
7821 frameStart = 10;
7822
7823 if (tag.data[5] & 0x40) {
7824 // advance the frame start past the extended header
7825 frameStart += 4; // header size field
7826
7827 frameStart += parseSyncSafeInteger$1(tag.data.subarray(10, 14)); // clip any padding off the end
7828
7829 tagSize -= parseSyncSafeInteger$1(tag.data.subarray(16, 20));
7830 } // parse one or more ID3 frames
7831 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
7832
7833
7834 do {
7835 // determine the number of bytes in this frame
7836 frameSize = parseSyncSafeInteger$1(tag.data.subarray(frameStart + 4, frameStart + 8));
7837
7838 if (frameSize < 1) {
7839 this.trigger('log', {
7840 level: 'warn',
7841 message: 'Malformed ID3 frame encountered. Skipping metadata parsing.'
7842 });
7843 return;
7844 }
7845
7846 frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);
7847 frame = {
7848 id: frameHeader,
7849 data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
7850 };
7851 frame.key = frame.id;
7852
7853 if (tagParsers[frame.id]) {
7854 tagParsers[frame.id](frame); // handle the special PRIV frame used to indicate the start
7855 // time for raw AAC data
7856
7857 if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
7858 var d = frame.data,
7859 size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
7860 size *= 4;
7861 size += d[7] & 0x03;
7862 frame.timeStamp = size; // in raw AAC, all subsequent data will be timestamped based
7863 // on the value of this frame
7864 // we couldn't have known the appropriate pts and dts before
7865 // parsing this ID3 tag so set those values now
7866
7867 if (tag.pts === undefined && tag.dts === undefined) {
7868 tag.pts = frame.timeStamp;
7869 tag.dts = frame.timeStamp;
7870 }
7871
7872 this.trigger('timestamp', frame);
7873 }
7874 }
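// Illustrative example (frame contents assumed): for a
// transportStreamTimestamp of 900000 (ten seconds at 90 kHz), the 8-byte
// privateData ends in the big-endian bytes 0x00 0x0d 0xbb 0xa0; the math
// above rebuilds the upper 31 bits (225000), multiplies by 4, and adds the
// low 2 bits (0) to recover 900000.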
7875
7876 tag.frames.push(frame);
7877 frameStart += 10; // advance past the frame header
7878
7879 frameStart += frameSize; // advance past the frame body
7880 } while (frameStart < tagSize);
7881
7882 this.trigger('data', tag);
7883 };
7884 };
7885
7886 _MetadataStream.prototype = new stream();
7887 var metadataStream = _MetadataStream;
7888 var TimestampRolloverStream = timestampRolloverStream.TimestampRolloverStream; // object types
7889
7890 var _TransportPacketStream, _TransportParseStream, _ElementaryStream; // constants
7891
7892
7893 var MP2T_PACKET_LENGTH$1 = 188,
7894 // bytes
7895 SYNC_BYTE$1 = 0x47;
7896 /**
7897 * Splits an incoming stream of binary data into MPEG-2 Transport
7898 * Stream packets.
7899 */
7900
7901 _TransportPacketStream = function TransportPacketStream() {
7902 var buffer = new Uint8Array(MP2T_PACKET_LENGTH$1),
7903 bytesInBuffer = 0;
7904
7905 _TransportPacketStream.prototype.init.call(this); // Deliver new bytes to the stream.
7906
7907 /**
7908 * Split a stream of data into M2TS packets
7909 **/
7910
7911
7912 this.push = function (bytes) {
7913 var startIndex = 0,
7914 endIndex = MP2T_PACKET_LENGTH$1,
7915 everything; // If there are bytes remaining from the last segment, prepend them to the
7916 // bytes that were pushed in
7917
7918 if (bytesInBuffer) {
7919 everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
7920 everything.set(buffer.subarray(0, bytesInBuffer));
7921 everything.set(bytes, bytesInBuffer);
7922 bytesInBuffer = 0;
7923 } else {
7924 everything = bytes;
7925 } // While we have enough data for a packet
7926
7927
7928 while (endIndex < everything.byteLength) {
7929 // Look for a pair of start and end sync bytes in the data.
7930 if (everything[startIndex] === SYNC_BYTE$1 && everything[endIndex] === SYNC_BYTE$1) {
7931 // We found a packet so emit it and jump one whole packet forward in
7932 // the stream
7933 this.trigger('data', everything.subarray(startIndex, endIndex));
7934 startIndex += MP2T_PACKET_LENGTH$1;
7935 endIndex += MP2T_PACKET_LENGTH$1;
7936 continue;
7937 } // If we get here, we have somehow become de-synchronized and we need to step
7938 // forward one byte at a time until we find a pair of sync bytes that denote
7939 // a packet
7940
7941
7942 startIndex++;
7943 endIndex++;
7944 } // If there was some data left over at the end of the segment that couldn't
7945 // possibly be a whole packet, keep it because it might be the start of a packet
7946 // that continues in the next segment
7947
7948
7949 if (startIndex < everything.byteLength) {
7950 buffer.set(everything.subarray(startIndex), 0);
7951 bytesInBuffer = everything.byteLength - startIndex;
7952 }
7953 };
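// Illustrative example (input assumed): pushing 400 bytes containing two
// aligned packets plus the first 24 bytes of a third (so bytes 0, 188 and
// 376 are all 0x47) emits two 188-byte packets and buffers the trailing
// 24 bytes until the next push or flush.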
7954 /**
7955 * Passes identified M2TS packets to the TransportParseStream to be parsed
7956 **/
7957
7958
7959 this.flush = function () {
7960 // If the buffer contains a whole packet when we are being flushed, emit it
7961 // and empty the buffer. Otherwise hold onto the data because it may be
7962 // important for decoding the next segment
7963 if (bytesInBuffer === MP2T_PACKET_LENGTH$1 && buffer[0] === SYNC_BYTE$1) {
7964 this.trigger('data', buffer);
7965 bytesInBuffer = 0;
7966 }
7967
7968 this.trigger('done');
7969 };
7970
7971 this.endTimeline = function () {
7972 this.flush();
7973 this.trigger('endedtimeline');
7974 };
7975
7976 this.reset = function () {
7977 bytesInBuffer = 0;
7978 this.trigger('reset');
7979 };
7980 };
7981
7982 _TransportPacketStream.prototype = new stream();
7983 /**
7984 * Accepts an MP2T TransportPacketStream and emits data events with parsed
7985 * forms of the individual transport stream packets.
7986 */
7987
7988 _TransportParseStream = function TransportParseStream() {
7989 var parsePsi, parsePat, parsePmt, self;
7990
7991 _TransportParseStream.prototype.init.call(this);
7992
7993 self = this;
7994 this.packetsWaitingForPmt = [];
7995 this.programMapTable = undefined;
7996
7997 parsePsi = function parsePsi(payload, psi) {
7998 var offset = 0; // PSI packets may be split into multiple sections and those
7999 // sections may be split into multiple packets. If a PSI
8000 // section starts in this packet, the payload_unit_start_indicator
8001 // will be true and the first byte of the payload will indicate
8002 // the offset from the current position to the start of the
8003 // section.
8004
8005 if (psi.payloadUnitStartIndicator) {
8006 offset += payload[offset] + 1;
8007 }
8008
8009 if (psi.type === 'pat') {
8010 parsePat(payload.subarray(offset), psi);
8011 } else {
8012 parsePmt(payload.subarray(offset), psi);
8013 }
8014 };
8015
8016 parsePat = function parsePat(payload, pat) {
8017 pat.section_number = payload[7]; // eslint-disable-line camelcase
8018
8019 pat.last_section_number = payload[8]; // eslint-disable-line camelcase
8020 // skip the PSI header and parse the first PMT entry
8021
8022 self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
8023 pat.pmtPid = self.pmtPid;
8024 };
8025 /**
8026 * Parse out the relevant fields of a Program Map Table (PMT).
8027 * @param payload {Uint8Array} the PMT-specific portion of an MP2T
8028 * packet. The first byte in this array should be the table_id
8029 * field.
8030 * @param pmt {object} the object that should be decorated with
8031 * fields parsed from the PMT.
8032 */
8033
8034
8035 parsePmt = function parsePmt(payload, pmt) {
8036 var sectionLength, tableEnd, programInfoLength, offset; // PMTs can be sent ahead of the time when they should actually
8037 // take effect. We don't believe this should ever be the case
8038 // for HLS but we'll ignore "forward" PMT declarations if we see
8039 // them. Future PMT declarations have the current_next_indicator
8040 // set to zero.
8041
8042 if (!(payload[5] & 0x01)) {
8043 return;
8044 } // overwrite any existing program map table
8045
8046
8047 self.programMapTable = {
8048 video: null,
8049 audio: null,
8050 'timed-metadata': {}
8051 }; // the mapping table ends at the end of the current section
8052
8053 sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
8054 tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
8055 // long the program info descriptors are
8056
8057 programInfoLength = (payload[10] & 0x0f) << 8 | payload[11]; // advance the offset to the first entry in the mapping table
8058
8059 offset = 12 + programInfoLength;
8060
8061 while (offset < tableEnd) {
8062 var streamType = payload[offset];
8063 var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]; // only map a single elementary_pid for audio and video stream types
8064 // TODO: should this be done for metadata too? for now maintain behavior of
8065 // multiple metadata streams
8066
8067 if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
8068 self.programMapTable.video = pid;
8069 } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
8070 self.programMapTable.audio = pid;
8071 } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
8072 // map pid to stream type for metadata streams
8073 self.programMapTable['timed-metadata'][pid] = streamType;
8074 } // move to the next table entry
8075 // skip past the elementary stream descriptors, if present
8076
8077
8078 offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
8079 } // record the map on the packet as well
8080
8081
8082 pmt.programMapTable = self.programMapTable;
8083 };
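// Illustrative example (entry bytes assumed): a mapping-table entry
// beginning 0x1b 0xe1 0x00 describes an H.264 stream (0x1b) on PID
// ((0xe1 & 0x1f) << 8) | 0x00 === 0x100, so programMapTable.video becomes
// 0x100.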
8084 /**
8085 * Deliver a new MP2T packet to the next stream in the pipeline.
8086 */
8087
8088
8089 this.push = function (packet) {
8090 var result = {},
8091 offset = 4;
8092 result.payloadUnitStartIndicator = !!(packet[1] & 0x40); // pid is a 13-bit field starting at the last bit of packet[1]
8093
8094 result.pid = packet[1] & 0x1f;
8095 result.pid <<= 8;
8096 result.pid |= packet[2]; // if an adaptation field is present, its length is specified by the
8097 // fifth byte of the TS packet header. The adaptation field is
8098 // used to add stuffing to PES packets that don't fill a complete
8099 // TS packet, and to specify some forms of timing and control data
8100 // that we do not currently use.
8101
8102 if ((packet[3] & 0x30) >>> 4 > 0x01) {
8103 offset += packet[offset] + 1;
8104 } // parse the rest of the packet based on the type
8105
8106
8107 if (result.pid === 0) {
8108 result.type = 'pat';
8109 parsePsi(packet.subarray(offset), result);
8110 this.trigger('data', result);
8111 } else if (result.pid === this.pmtPid) {
8112 result.type = 'pmt';
8113 parsePsi(packet.subarray(offset), result);
8114 this.trigger('data', result); // if there are any packets waiting for a PMT to be found, process them now
8115
8116 while (this.packetsWaitingForPmt.length) {
8117 this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
8118 }
8119 } else if (this.programMapTable === undefined) {
8120 // When we have not seen a PMT yet, defer further processing of
8121 // PES packets until one has been parsed
8122 this.packetsWaitingForPmt.push([packet, offset, result]);
8123 } else {
8124 this.processPes_(packet, offset, result);
8125 }
8126 };
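// Worked example of the header math above (packet bytes assumed): if
// bytes 1-3 of a packet are 0x41 0x00 0x30, payloadUnitStartIndicator is
// true (0x41 & 0x40), the PID is ((0x41 & 0x1f) << 8) | 0x00 === 0x100,
// and the adaptation_field_control of (0x30 & 0x30) >>> 4 === 0x03 means
// an adaptation field precedes the payload, so offset advances past it.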
8127
8128 this.processPes_ = function (packet, offset, result) {
8129 // set the appropriate stream type
8130 if (result.pid === this.programMapTable.video) {
8131 result.streamType = streamTypes.H264_STREAM_TYPE;
8132 } else if (result.pid === this.programMapTable.audio) {
8133 result.streamType = streamTypes.ADTS_STREAM_TYPE;
8134 } else {
8135 // if not video or audio, it is timed-metadata or unknown
8136 // if unknown, streamType will be undefined
8137 result.streamType = this.programMapTable['timed-metadata'][result.pid];
8138 }
8139
8140 result.type = 'pes';
8141 result.data = packet.subarray(offset);
8142 this.trigger('data', result);
8143 };
8144 };
8145
8146 _TransportParseStream.prototype = new stream();
8147 _TransportParseStream.STREAM_TYPES = {
8148 h264: 0x1b,
8149 adts: 0x0f
8150 };
8151 /**
8152 * Reconstitutes program elementary stream (PES) packets from parsed
8153 * transport stream packets. That is, if you pipe an
8154 * mp2t.TransportParseStream into an mp2t.ElementaryStream, the output
8155 * events will capture the bytes for individual PES
8156 * packets plus relevant metadata that has been extracted from the
8157 * container.
8158 */
8159
8160 _ElementaryStream = function ElementaryStream() {
8161 var self = this,
8162 segmentHadPmt = false,
8163 // PES packet fragments
8164 video = {
8165 data: [],
8166 size: 0
8167 },
8168 audio = {
8169 data: [],
8170 size: 0
8171 },
8172 timedMetadata = {
8173 data: [],
8174 size: 0
8175 },
8176 programMapTable,
8177 parsePes = function parsePes(payload, pes) {
8178 var ptsDtsFlags;
8179 var startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2]; // default to an empty array
8180
8181 pes.data = new Uint8Array(); // In certain live streams, the start of a TS fragment has TS packets
8182 // carrying frame data that continues from the previous fragment. This check
8183 // ensures the PES data really is the start of a new PES payload
8184
8185 if (startPrefix !== 1) {
8186 return;
8187 } // get the packet length, this will be 0 for video
8188
8189
8190 pes.packetLength = 6 + (payload[4] << 8 | payload[5]); // find out if this packet starts a new keyframe
8191
8192 pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0; // PES packets may be annotated with a PTS value, or a PTS value
8193 // and a DTS value. Determine what combination of values is
8194 // available to work with.
8195
8196 ptsDtsFlags = payload[7]; // PTS and DTS are normally stored as 33-bit numbers. JavaScript
8197 // performs all bitwise operations on 32-bit integers, but it
8198 // supports a much greater integer range (53 bits) using standard
8199 // mathematical operations.
8200 // We construct a 31-bit value using bitwise operators over the 31
8201 // most significant bits and then multiply by 4 (equal to a left-shift
8202 // of 2) before we add the final 2 least significant bits of the
8203 // timestamp (equal to an OR.)
8204
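// Worked example (value assumed): for the maximum PTS of 2^33 - 1, the
// bitwise step below recovers the top 31 bits (2^31 - 1); multiplying by 4
// and adding the low 2 bits (3) gives 8589934591 === 2^33 - 1, still well
// inside JavaScript's 53-bit safe-integer range.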
8205 if (ptsDtsFlags & 0xC0) {
8206 // the PTS and DTS are not written out directly. For information
8207 // on how they are encoded, see
8208 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
8209 pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
8210 pes.pts *= 4; // Left shift by 2
8211
8212 pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
8213
8214 pes.dts = pes.pts;
8215
8216 if (ptsDtsFlags & 0x40) {
8217 pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
8218 pes.dts *= 4; // Left shift by 2
8219
8220 pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
8221 }
8222 } // the data section starts immediately after the PES header.
8223 // pes_header_data_length specifies the number of header bytes
8224 // that follow the last byte of the field.
8225
8226
8227 pes.data = payload.subarray(9 + payload[8]);
8228 },
8229
8230 /**
8231 * Pass completely parsed PES packets to the next stream in the pipeline
8232 **/
8233 flushStream = function flushStream(stream, type, forceFlush) {
8234 var packetData = new Uint8Array(stream.size),
8235 event = {
8236 type: type
8237 },
8238 i = 0,
8239 offset = 0,
8240 packetFlushable = false,
8241 fragment; // do nothing if there is not enough buffered data for a complete
8242 // PES header
8243
8244 if (!stream.data.length || stream.size < 9) {
8245 return;
8246 }
8247
8248 event.trackId = stream.data[0].pid; // reassemble the packet
8249
8250 for (i = 0; i < stream.data.length; i++) {
8251 fragment = stream.data[i];
8252 packetData.set(fragment.data, offset);
8253 offset += fragment.data.byteLength;
8254 } // parse assembled packet's PES header
8255
8256
8257 parsePes(packetData, event); // non-video PES packets MUST have a non-zero PES_packet_length
8258 // check that there is enough stream data to fill the packet
8259
8260 packetFlushable = type === 'video' || event.packetLength <= stream.size; // flush pending packets if the conditions are right
8261
8262 if (forceFlush || packetFlushable) {
8263 stream.size = 0;
8264 stream.data.length = 0;
8265 } // only emit packets that are complete. this is to avoid assembling
8266 // incomplete PES packets due to poor segmentation
8267
8268
8269 if (packetFlushable) {
8270 self.trigger('data', event);
8271 }
8272 };
8273
8274 _ElementaryStream.prototype.init.call(this);
8275 /**
8276 * Identifies M2TS packet types and parses PES packets using metadata
8277 * parsed from the PMT
8278 **/
8279
8280
8281 this.push = function (data) {
8282 ({
8283 pat: function pat() {// we have to wait for the PMT to arrive as well before we
8284 // have any meaningful metadata
8285 },
8286 pes: function pes() {
8287 var stream, streamType;
8288
8289 switch (data.streamType) {
8290 case streamTypes.H264_STREAM_TYPE:
8291 stream = video;
8292 streamType = 'video';
8293 break;
8294
8295 case streamTypes.ADTS_STREAM_TYPE:
8296 stream = audio;
8297 streamType = 'audio';
8298 break;
8299
8300 case streamTypes.METADATA_STREAM_TYPE:
8301 stream = timedMetadata;
8302 streamType = 'timed-metadata';
8303 break;
8304
8305 default:
8306 // ignore unknown stream types
8307 return;
8308 } // if a new packet is starting, we can flush the completed
8309 // packet
8310
8311
8312 if (data.payloadUnitStartIndicator) {
8313 flushStream(stream, streamType, true);
8314 } // buffer this fragment until we are sure we've received the
8315 // complete payload
8316
8317
8318 stream.data.push(data);
8319 stream.size += data.data.byteLength;
8320 },
8321 pmt: function pmt() {
8322 var event = {
8323 type: 'metadata',
8324 tracks: []
8325 };
8326 programMapTable = data.programMapTable; // translate audio and video streams to tracks
8327
8328 if (programMapTable.video !== null) {
8329 event.tracks.push({
8330 timelineStartInfo: {
8331 baseMediaDecodeTime: 0
8332 },
8333 id: +programMapTable.video,
8334 codec: 'avc',
8335 type: 'video'
8336 });
8337 }
8338
8339 if (programMapTable.audio !== null) {
8340 event.tracks.push({
8341 timelineStartInfo: {
8342 baseMediaDecodeTime: 0
8343 },
8344 id: +programMapTable.audio,
8345 codec: 'adts',
8346 type: 'audio'
8347 });
8348 }
8349
8350 segmentHadPmt = true;
8351 self.trigger('data', event);
8352 }
8353 })[data.type]();
8354 };
8355
8356 this.reset = function () {
8357 video.size = 0;
8358 video.data.length = 0;
8359 audio.size = 0;
8360 audio.data.length = 0;
8361 this.trigger('reset');
8362 };
8363 /**
8364 * Flush any remaining input. Video PES packets may be of variable
8365 * length. Normally, the start of a new video packet can trigger the
8366 * finalization of the previous packet. That is not possible if no
8367 * more video is forthcoming, however. In that case, some other
8368 * mechanism (like the end of the file) has to be employed. When it is
8369 * clear that no additional data is forthcoming, calling this method
8370 * will flush the buffered packets.
8371 */
8372
8373
8374 this.flushStreams_ = function () {
8375 // !!THIS ORDER IS IMPORTANT!!
8376 // video first then audio
8377 flushStream(video, 'video');
8378 flushStream(audio, 'audio');
8379 flushStream(timedMetadata, 'timed-metadata');
8380 };
8381
8382 this.flush = function () {
8383 // if on flush we haven't had a pmt emitted
8384 // and we have a pmt to emit. emit the pmt
8385 // so that we trigger a trackinfo downstream.
8386 if (!segmentHadPmt && programMapTable) {
8387 var pmt = {
8388 type: 'metadata',
8389 tracks: []
8390 }; // translate audio and video streams to tracks
8391
8392 if (programMapTable.video !== null) {
8393 pmt.tracks.push({
8394 timelineStartInfo: {
8395 baseMediaDecodeTime: 0
8396 },
8397 id: +programMapTable.video,
8398 codec: 'avc',
8399 type: 'video'
8400 });
8401 }
8402
8403 if (programMapTable.audio !== null) {
8404 pmt.tracks.push({
8405 timelineStartInfo: {
8406 baseMediaDecodeTime: 0
8407 },
8408 id: +programMapTable.audio,
8409 codec: 'adts',
8410 type: 'audio'
8411 });
8412 }
8413
8414 self.trigger('data', pmt);
8415 }
8416
8417 segmentHadPmt = false;
8418 this.flushStreams_();
8419 this.trigger('done');
8420 };
8421 };
8422
8423 _ElementaryStream.prototype = new stream();
8424 var m2ts = {
8425 PAT_PID: 0x0000,
8426 MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH$1,
8427 TransportPacketStream: _TransportPacketStream,
8428 TransportParseStream: _TransportParseStream,
8429 ElementaryStream: _ElementaryStream,
8430 TimestampRolloverStream: TimestampRolloverStream,
8431 CaptionStream: captionStream.CaptionStream,
8432 Cea608Stream: captionStream.Cea608Stream,
8433 Cea708Stream: captionStream.Cea708Stream,
8434 MetadataStream: metadataStream
8435 };
8436
8437 for (var type in streamTypes) {
8438 if (streamTypes.hasOwnProperty(type)) {
8439 m2ts[type] = streamTypes[type];
8440 }
8441 }
8442
8443 var m2ts_1 = m2ts;
8444 var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;
8445
8446 var _AdtsStream;
8447
8448 var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
8449 /*
8450 * Accepts an ElementaryStream and emits data events with parsed
8451 * AAC audio frames from the individual packets. Input audio in ADTS
8452 * format is unpacked and re-emitted as AAC frames.
8453 *
8454 * @see http://wiki.multimedia.cx/index.php?title=ADTS
8455 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
8456 */
8457
8458 _AdtsStream = function AdtsStream(handlePartialSegments) {
8459 var buffer,
8460 frameNum = 0;
8461
8462 _AdtsStream.prototype.init.call(this);
8463
8464 this.skipWarn_ = function (start, end) {
8465 this.trigger('log', {
8466 level: 'warn',
8467 message: "adts skiping bytes " + start + " to " + end + " in frame " + frameNum + " outside syncword"
8468 });
8469 };
8470
8471 this.push = function (packet) {
8472 var i = 0,
8473 frameLength,
8474 protectionSkipBytes,
8475 oldBuffer,
8476 sampleCount,
8477 adtsFrameDuration;
8478
8479 if (!handlePartialSegments) {
8480 frameNum = 0;
8481 }
8482
8483 if (packet.type !== 'audio') {
8484 // ignore non-audio data
8485 return;
8486 } // Prepend any data in the buffer to the input data so that we can parse
8487 // AAC frames that cross a PES packet boundary
8488
8489
8490 if (buffer && buffer.length) {
8491 oldBuffer = buffer;
8492 buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
8493 buffer.set(oldBuffer);
8494 buffer.set(packet.data, oldBuffer.byteLength);
8495 } else {
8496 buffer = packet.data;
8497 } // unpack any ADTS frames which have been fully received
8498 // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
8499
8500
8501 var skip; // We use i + 7 here because we want to be able to parse the entire header.
8502 // If we don't have enough bytes to do that, then we definitely won't have a full frame.
8503
8504 while (i + 7 < buffer.length) {
8505 // Look for the start of an ADTS header.
8506 if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
8507 if (typeof skip !== 'number') {
8508 skip = i;
8509 } // If a valid header was not found, jump one forward and attempt to
8510 // find a valid ADTS header starting at the next byte
8511
8512
8513 i++;
8514 continue;
8515 }
8516
8517 if (typeof skip === 'number') {
8518 this.skipWarn_(skip, i);
8519 skip = null;
8520 } // The protection skip bit tells us if we have 2 bytes of CRC data at the
8521 // end of the ADTS header
8522
8523
8524 protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2; // Frame length is a 13 bit integer starting 16 bits from the
8525 // end of the sync sequence
8526 // NOTE: frame length includes the size of the header
8527
8528 frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
8529 sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
8530 adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2]; // If we don't have enough data to actually finish this ADTS frame,
8531 // then we have to wait for more data
8532
8533 if (buffer.byteLength - i < frameLength) {
8534 break;
8535 } // Otherwise, deliver the complete AAC frame
8536
8537
8538 this.trigger('data', {
8539 pts: packet.pts + frameNum * adtsFrameDuration,
8540 dts: packet.dts + frameNum * adtsFrameDuration,
8541 sampleCount: sampleCount,
8542 audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
8543 channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
8544 samplerate: ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2],
8545 samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
8546 // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
8547 samplesize: 16,
8548 // data is the frame without its header
8549 data: buffer.subarray(i + 7 + protectionSkipBytes, i + frameLength)
8550 });
8551 frameNum++;
8552 i += frameLength;
8553 }
8554
8555 if (typeof skip === 'number') {
8556 this.skipWarn_(skip, i);
8557 skip = null;
8558 } // remove processed bytes from the buffer.
8559
8560
8561 buffer = buffer.subarray(i);
8562 };
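// Worked example (header bytes assumed): for the ADTS header
// 0xff 0xf1 0x50 0x80 0x27 0x1f 0xfc, protection_absent is set, so
// protectionSkipBytes === 0; the sampling-frequency index is
// (0x50 & 0x3c) >>> 2 === 4 (44100 Hz); frameLength === 0x27 << 3 === 312
// bytes; sampleCount === 1024; and each frame therefore spans
// 1024 * 90000 / 44100, roughly 2090 clock ticks (about 23 ms).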
8563
8564 this.flush = function () {
8565 frameNum = 0;
8566 this.trigger('done');
8567 };
8568
8569 this.reset = function () {
8570 buffer = void 0;
8571 this.trigger('reset');
8572 };
8573
8574 this.endTimeline = function () {
8575 buffer = void 0;
8576 this.trigger('endedtimeline');
8577 };
8578 };
8579
8580 _AdtsStream.prototype = new stream();
8581 var adts = _AdtsStream;
8582 /**
8583 * mux.js
8584 *
8585 * Copyright (c) Brightcove
8586 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
8587 */
8588
8589 var ExpGolomb;
8590 /**
8591 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
8592 * scheme used by h264.
8593 */
8594
8595 ExpGolomb = function ExpGolomb(workingData) {
8596 var // the number of bytes left to examine in workingData
8597 workingBytesAvailable = workingData.byteLength,
8598 // the current word being examined
8599 workingWord = 0,
8600 // :uint
8601 // the number of bits left to examine in the current word
8602 workingBitsAvailable = 0; // :uint;
8603 // ():uint
8604
8605 this.length = function () {
8606 return 8 * workingBytesAvailable;
8607 }; // ():uint
8608
8609
8610 this.bitsAvailable = function () {
8611 return 8 * workingBytesAvailable + workingBitsAvailable;
8612 }; // ():void
8613
8614
8615 this.loadWord = function () {
8616 var position = workingData.byteLength - workingBytesAvailable,
8617 workingBytes = new Uint8Array(4),
8618 availableBytes = Math.min(4, workingBytesAvailable);
8619
8620 if (availableBytes === 0) {
8621 throw new Error('no bytes available');
8622 }
8623
8624 workingBytes.set(workingData.subarray(position, position + availableBytes));
8625 workingWord = new DataView(workingBytes.buffer).getUint32(0); // track the amount of workingData that has been processed
8626
8627 workingBitsAvailable = availableBytes * 8;
8628 workingBytesAvailable -= availableBytes;
8629 }; // (count:int):void
8630
8631
8632 this.skipBits = function (count) {
8633 var skipBytes; // :int
8634
8635 if (workingBitsAvailable > count) {
8636 workingWord <<= count;
8637 workingBitsAvailable -= count;
8638 } else {
8639 count -= workingBitsAvailable;
8640 skipBytes = Math.floor(count / 8);
8641 count -= skipBytes * 8;
8642 workingBytesAvailable -= skipBytes;
8643 this.loadWord();
8644 workingWord <<= count;
8645 workingBitsAvailable -= count;
8646 }
8647 }; // (size:int):uint
8648
8649
8650 this.readBits = function (size) {
8651 var bits = Math.min(workingBitsAvailable, size),
8652 // :uint
8653 valu = workingWord >>> 32 - bits; // :uint
8654 // if size > 31, handle error
8655
8656 workingBitsAvailable -= bits;
8657
8658 if (workingBitsAvailable > 0) {
8659 workingWord <<= bits;
8660 } else if (workingBytesAvailable > 0) {
8661 this.loadWord();
8662 }
8663
8664 bits = size - bits;
8665
8666 if (bits > 0) {
8667 return valu << bits | this.readBits(bits);
8668 }
8669
8670 return valu;
8671 }; // ():uint
8672
8673
8674 this.skipLeadingZeros = function () {
8675 var leadingZeroCount; // :uint
8676
8677 for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
8678 if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {
8679 // the first bit of working word is 1
8680 workingWord <<= leadingZeroCount;
8681 workingBitsAvailable -= leadingZeroCount;
8682 return leadingZeroCount;
8683 }
8684 } // we exhausted workingWord and still have not found a 1
8685
8686
8687 this.loadWord();
8688 return leadingZeroCount + this.skipLeadingZeros();
8689 }; // ():void
8690
8691
8692 this.skipUnsignedExpGolomb = function () {
8693 this.skipBits(1 + this.skipLeadingZeros());
8694 }; // ():void
8695
8696
8697 this.skipExpGolomb = function () {
8698 this.skipBits(1 + this.skipLeadingZeros());
8699 }; // ():uint
8700
8701
8702 this.readUnsignedExpGolomb = function () {
8703 var clz = this.skipLeadingZeros(); // :uint
8704
8705 return this.readBits(clz + 1) - 1;
8706 }; // ():int
8707
8708
8709 this.readExpGolomb = function () {
8710 var valu = this.readUnsignedExpGolomb(); // :int
8711
8712 if (0x01 & valu) {
8713 // the number is odd if the low order bit is set
8714 return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
8715 }
8716
8717 return -1 * (valu >>> 1); // divide by two then make it negative
8718 }; // Some convenience functions
8719 // :Boolean
8720
8721
8722 this.readBoolean = function () {
8723 return this.readBits(1) === 1;
8724 }; // ():int
8725
8726
8727 this.readUnsignedByte = function () {
8728 return this.readBits(8);
8729 };
8730
8731 this.loadWord();
8732 };
8733
8734 var expGolomb = ExpGolomb;
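// A minimal decoding sketch for the parser above, kept inside a comment so the
// bundle's runtime behavior is unchanged; the input byte is illustrative, not
// taken from a real stream. ue(v) encodes a value v as (v + 1) in binary,
// preceded by one fewer zero bit than (v + 1) has bits, so v = 4 becomes
// "00101". Packed into the top of a byte, that is 0b00101000 = 0x28:
//
//   var dec = new ExpGolomb(new Uint8Array([0x28, 0x00, 0x00, 0x00]));
//   dec.readUnsignedExpGolomb(); // -> 4 (two leading zeros, readBits(3) -> 5, minus 1)
//
// readExpGolomb maps the same code numbers onto signed values: 1 -> 1,
// 2 -> -1, 3 -> 2, 4 -> -2, and so on.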
8735
8736 var _H264Stream, _NalByteStream;
8737
8738 var PROFILES_WITH_OPTIONAL_SPS_DATA;
8739 /**
8740 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
8741 */
8742
8743 _NalByteStream = function NalByteStream() {
8744 var syncPoint = 0,
8745 i,
8746 buffer;
8747
8748 _NalByteStream.prototype.init.call(this);
8749 /*
8750 * Scans a byte stream and triggers a data event with the NAL units found.
8751 * @param {Object} data Event received from H264Stream
8752 * @param {Uint8Array} data.data The h264 byte stream to be scanned
8753 *
8754 * @see H264Stream.push
8755 */
8756
8757
8758 this.push = function (data) {
8759 var swapBuffer;
8760
8761 if (!buffer) {
8762 buffer = data.data;
8763 } else {
8764 swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
8765 swapBuffer.set(buffer);
8766 swapBuffer.set(data.data, buffer.byteLength);
8767 buffer = swapBuffer;
8768 }
8769
8770 var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B
8771 // scan for NAL unit boundaries
8772 // a match looks like this:
8773 // 0 0 1 .. NAL .. 0 0 1
8774 // ^ sync point ^ i
8775 // or this:
8776 // 0 0 1 .. NAL .. 0 0 0
8777 // ^ sync point ^ i
8778 // advance the sync point to a NAL start, if necessary
8779
8780 for (; syncPoint < len - 3; syncPoint++) {
8781 if (buffer[syncPoint + 2] === 1) {
8782 // the sync point is properly aligned
8783 i = syncPoint + 5;
8784 break;
8785 }
8786 }
8787
8788 while (i < len) {
8789 // look at the current byte to determine if we've hit the end of
8790 // a NAL unit boundary
8791 switch (buffer[i]) {
8792 case 0:
8793 // skip past non-sync sequences
8794 if (buffer[i - 1] !== 0) {
8795 i += 2;
8796 break;
8797 } else if (buffer[i - 2] !== 0) {
8798 i++;
8799 break;
8800 } // deliver the NAL unit if it isn't empty
8801
8802
8803 if (syncPoint + 3 !== i - 2) {
8804 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
8805 } // drop trailing zeroes
8806
8807
8808 do {
8809 i++;
8810 } while (buffer[i] !== 1 && i < len);
8811
8812 syncPoint = i - 2;
8813 i += 3;
8814 break;
8815
8816 case 1:
8817 // skip past non-sync sequences
8818 if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
8819 i += 3;
8820 break;
8821 } // deliver the NAL unit
8822
8823
8824 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
8825 syncPoint = i - 2;
8826 i += 3;
8827 break;
8828
8829 default:
8830 // the current byte isn't a one or zero, so it cannot be part
8831 // of a sync sequence
8832 i += 3;
8833 break;
8834 }
8835 } // filter out the NAL units that were delivered
8836
8837
8838 buffer = buffer.subarray(syncPoint);
8839 i -= syncPoint;
8840 syncPoint = 0;
8841 };
8842
8843 this.reset = function () {
8844 buffer = null;
8845 syncPoint = 0;
8846 this.trigger('reset');
8847 };
8848
8849 this.flush = function () {
8850 // deliver the last buffered NAL unit
8851 if (buffer && buffer.byteLength > 3) {
8852 this.trigger('data', buffer.subarray(syncPoint + 3));
8853 } // reset the stream state
8854
8855
8856 buffer = null;
8857 syncPoint = 0;
8858 this.trigger('done');
8859 };
8860
8861 this.endTimeline = function () {
8862 this.flush();
8863 this.trigger('endedtimeline');
8864 };
8865 };
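// A usage sketch for the scanner above (commented out so it does not run in
// the bundle), assuming the on/push/flush API provided by the mux.js stream
// prototype. Annex B separates NAL units with 0x000001 start codes; 'data'
// events carry each unit with its start code stripped, and the final unit is
// only delivered by flush():
//
//   var nbs = new _NalByteStream();
//   nbs.on('data', function (nal) { /* [0x67, 0xAA] on push, [0x68, 0xBB] on flush */ });
//   nbs.push({ data: new Uint8Array([0, 0, 1, 0x67, 0xAA, 0, 0, 1, 0x68, 0xBB]) });
//   nbs.flush();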
8866
8867 _NalByteStream.prototype = new stream(); // values of profile_idc that indicate additional fields are included in the SPS
8868 // see Recommendation ITU-T H.264 (4/2013),
8869 // 7.3.2.1.1 Sequence parameter set data syntax
8870
8871 PROFILES_WITH_OPTIONAL_SPS_DATA = {
8872 100: true,
8873 110: true,
8874 122: true,
8875 244: true,
8876 44: true,
8877 83: true,
8878 86: true,
8879 118: true,
8880 128: true,
8881 // TODO: the three profiles below don't
8882 // appear to have sps data in the specification anymore?
8883 138: true,
8884 139: true,
8885 134: true
8886 };
8887 /**
8888 * Accepts input from a ElementaryStream and produces H.264 NAL unit data
8889 * events.
8890 */
8891
8892 _H264Stream = function H264Stream() {
8893 var nalByteStream = new _NalByteStream(),
8894 self,
8895 trackId,
8896 currentPts,
8897 currentDts,
8898 discardEmulationPreventionBytes,
8899 readSequenceParameterSet,
8900 skipScalingList;
8901
8902 _H264Stream.prototype.init.call(this);
8903
8904 self = this;
8905 /*
8906 * Pushes a packet from a stream onto the NalByteStream
8907 *
8908 * @param {Object} packet - A packet received from a stream
8909 * @param {Uint8Array} packet.data - The raw bytes of the packet
8910 * @param {Number} packet.dts - Decode timestamp of the packet
8911 * @param {Number} packet.pts - Presentation timestamp of the packet
8912 * @param {Number} packet.trackId - The id of the h264 track this packet came from
8913 * @param {('video'|'audio')} packet.type - The type of packet
8914 *
8915 */
8916
8917 this.push = function (packet) {
8918 if (packet.type !== 'video') {
8919 return;
8920 }
8921
8922 trackId = packet.trackId;
8923 currentPts = packet.pts;
8924 currentDts = packet.dts;
8925 nalByteStream.push(packet);
8926 };
8927 /*
8928 * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
8929 * for the NALUs to the next stream component.
8930 * Also, preprocess caption and sequence parameter NALUs.
8931 *
8932 * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
8933 * @see NalByteStream.push
8934 */
8935
8936
8937 nalByteStream.on('data', function (data) {
8938 var event = {
8939 trackId: trackId,
8940 pts: currentPts,
8941 dts: currentDts,
8942 data: data,
8943 nalUnitTypeCode: data[0] & 0x1f
8944 };
8945
8946 switch (event.nalUnitTypeCode) {
8947 case 0x05:
8948 event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
8949 break;
8950
8951 case 0x06:
8952 event.nalUnitType = 'sei_rbsp';
8953 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
8954 break;
8955
8956 case 0x07:
8957 event.nalUnitType = 'seq_parameter_set_rbsp';
8958 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
8959 event.config = readSequenceParameterSet(event.escapedRBSP);
8960 break;
8961
8962 case 0x08:
8963 event.nalUnitType = 'pic_parameter_set_rbsp';
8964 break;
8965
8966 case 0x09:
8967 event.nalUnitType = 'access_unit_delimiter_rbsp';
8968 break;
8969 } // This triggers data on the H264Stream
8970
8971
8972 self.trigger('data', event);
8973 });
8974 nalByteStream.on('done', function () {
8975 self.trigger('done');
8976 });
8977 nalByteStream.on('partialdone', function () {
8978 self.trigger('partialdone');
8979 });
8980 nalByteStream.on('reset', function () {
8981 self.trigger('reset');
8982 });
8983 nalByteStream.on('endedtimeline', function () {
8984 self.trigger('endedtimeline');
8985 });
8986
8987 this.flush = function () {
8988 nalByteStream.flush();
8989 };
8990
8991 this.partialFlush = function () {
8992 nalByteStream.partialFlush();
8993 };
8994
8995 this.reset = function () {
8996 nalByteStream.reset();
8997 };
8998
8999 this.endTimeline = function () {
9000 nalByteStream.endTimeline();
9001 };
9002 /**
9003 * Advance the ExpGolomb decoder past a scaling list. The scaling
9004 * list is optionally transmitted as part of a sequence parameter
9005 * set and is not relevant to transmuxing.
9006 * @param count {number} the number of entries in this scaling list
9007 * @param expGolombDecoder {object} an ExpGolomb pointed to the
9008 * start of a scaling list
9009 * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
9010 */
9011
9012
9013 skipScalingList = function skipScalingList(count, expGolombDecoder) {
9014 var lastScale = 8,
9015 nextScale = 8,
9016 j,
9017 deltaScale;
9018
9019 for (j = 0; j < count; j++) {
9020 if (nextScale !== 0) {
9021 deltaScale = expGolombDecoder.readExpGolomb();
9022 nextScale = (lastScale + deltaScale + 256) % 256;
9023 }
9024
9025 lastScale = nextScale === 0 ? lastScale : nextScale;
9026 }
9027 };
9028 /**
9029 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
9030 * Sequence Payload"
9031 * @param data {Uint8Array} the bytes of a RBSP from a NAL
9032 * unit
9033 * @return {Uint8Array} the RBSP without any Emulation
9034 * Prevention Bytes
9035 */
9036
9037
9038 discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
9039 var length = data.byteLength,
9040 emulationPreventionBytesPositions = [],
9041 i = 1,
9042 newLength,
9043 newData; // Find all `Emulation Prevention Bytes`
9044
9045 while (i < length - 2) {
9046 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
9047 emulationPreventionBytesPositions.push(i + 2);
9048 i += 2;
9049 } else {
9050 i++;
9051 }
9052 } // If no Emulation Prevention Bytes were found just return the original
9053 // array
9054
9055
9056 if (emulationPreventionBytesPositions.length === 0) {
9057 return data;
9058 } // Create a new array to hold the NAL unit data
9059
9060
9061 newLength = length - emulationPreventionBytesPositions.length;
9062 newData = new Uint8Array(newLength);
9063 var sourceIndex = 0;
9064
9065 for (i = 0; i < newLength; sourceIndex++, i++) {
9066 if (sourceIndex === emulationPreventionBytesPositions[0]) {
9067 // Skip this byte
9068 sourceIndex++; // Remove this position index
9069
9070 emulationPreventionBytesPositions.shift();
9071 }
9072
9073 newData[i] = data[sourceIndex];
9074 }
9075
9076 return newData;
9077 };
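// A worked example for the routine above, with illustrative bytes: in the
// escaped RBSP [0x25, 0x00, 0x00, 0x03, 0x01], the 0x03 follows two zero
// bytes, so it is an emulation prevention byte and is dropped:
//
//   discardEmulationPreventionBytes(new Uint8Array([0x25, 0x00, 0x00, 0x03, 0x01]))
//     -> Uint8Array [0x25, 0x00, 0x00, 0x01]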
9078 /**
9079 * Read a sequence parameter set and return some interesting video
9080 * properties. A sequence parameter set is the H264 metadata that
9081 * describes the properties of upcoming video frames.
9082 * @param data {Uint8Array} the bytes of a sequence parameter set
9083 * @return {object} an object with configuration parsed from the
9084 * sequence parameter set, including the dimensions of the
9085 * associated video frames.
9086 */
9087
9088
9089 readSequenceParameterSet = function readSequenceParameterSet(data) {
9090 var frameCropLeftOffset = 0,
9091 frameCropRightOffset = 0,
9092 frameCropTopOffset = 0,
9093 frameCropBottomOffset = 0,
9094 expGolombDecoder,
9095 profileIdc,
9096 levelIdc,
9097 profileCompatibility,
9098 chromaFormatIdc,
9099 picOrderCntType,
9100 numRefFramesInPicOrderCntCycle,
9101 picWidthInMbsMinus1,
9102 picHeightInMapUnitsMinus1,
9103 frameMbsOnlyFlag,
9104 scalingListCount,
9105 sarRatio = [1, 1],
9106 aspectRatioIdc,
9107 i;
9108 expGolombDecoder = new expGolomb(data);
9109 profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
9110
9111 profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
9112
9113 levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
9114
9115 expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
9116 // some profiles have more optional data we don't need
9117
9118 if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
9119 chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
9120
9121 if (chromaFormatIdc === 3) {
9122 expGolombDecoder.skipBits(1); // separate_colour_plane_flag
9123 }
9124
9125 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
9126
9127 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
9128
9129 expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
9130
9131 if (expGolombDecoder.readBoolean()) {
9132 // seq_scaling_matrix_present_flag
9133 scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
9134
9135 for (i = 0; i < scalingListCount; i++) {
9136 if (expGolombDecoder.readBoolean()) {
9137 // seq_scaling_list_present_flag[ i ]
9138 if (i < 6) {
9139 skipScalingList(16, expGolombDecoder);
9140 } else {
9141 skipScalingList(64, expGolombDecoder);
9142 }
9143 }
9144 }
9145 }
9146 }
9147
9148 expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
9149
9150 picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
9151
9152 if (picOrderCntType === 0) {
9153 expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
9154 } else if (picOrderCntType === 1) {
9155 expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
9156
9157 expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
9158
9159 expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
9160
9161 numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
9162
9163 for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
9164 expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
9165 }
9166 }
9167
9168 expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
9169
9170 expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
9171
9172 picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
9173 picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
9174 frameMbsOnlyFlag = expGolombDecoder.readBits(1);
9175
9176 if (frameMbsOnlyFlag === 0) {
9177 expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
9178 }
9179
9180 expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
9181
9182 if (expGolombDecoder.readBoolean()) {
9183 // frame_cropping_flag
9184 frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
9185 frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
9186 frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
9187 frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
9188 }
9189
9190 if (expGolombDecoder.readBoolean()) {
9191 // vui_parameters_present_flag
9192 if (expGolombDecoder.readBoolean()) {
9193 // aspect_ratio_info_present_flag
9194 aspectRatioIdc = expGolombDecoder.readUnsignedByte();
9195
9196 switch (aspectRatioIdc) {
9197 case 1:
9198 sarRatio = [1, 1];
9199 break;
9200
9201 case 2:
9202 sarRatio = [12, 11];
9203 break;
9204
9205 case 3:
9206 sarRatio = [10, 11];
9207 break;
9208
9209 case 4:
9210 sarRatio = [16, 11];
9211 break;
9212
9213 case 5:
9214 sarRatio = [40, 33];
9215 break;
9216
9217 case 6:
9218 sarRatio = [24, 11];
9219 break;
9220
9221 case 7:
9222 sarRatio = [20, 11];
9223 break;
9224
9225 case 8:
9226 sarRatio = [32, 11];
9227 break;
9228
9229 case 9:
9230 sarRatio = [80, 33];
9231 break;
9232
9233 case 10:
9234 sarRatio = [18, 11];
9235 break;
9236
9237 case 11:
9238 sarRatio = [15, 11];
9239 break;
9240
9241 case 12:
9242 sarRatio = [64, 33];
9243 break;
9244
9245 case 13:
9246 sarRatio = [160, 99];
9247 break;
9248
9249 case 14:
9250 sarRatio = [4, 3];
9251 break;
9252
9253 case 15:
9254 sarRatio = [3, 2];
9255 break;
9256
9257 case 16:
9258 sarRatio = [2, 1];
9259 break;
9260
9261 case 255:
9262 {
9263 sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
9264 break;
9265 }
9266 }
9267
9271 }
9272 }
9273
9274 return {
9275 profileIdc: profileIdc,
9276 levelIdc: levelIdc,
9277 profileCompatibility: profileCompatibility,
9278 width: (picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2,
9279 height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
9280 // sar is sample aspect ratio
9281 sarRatio: sarRatio
9282 };
9283 };
9284 };
9285
9286 _H264Stream.prototype = new stream();
9287 var h264 = {
9288 H264Stream: _H264Stream,
9289 NalByteStream: _NalByteStream
9290 };
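// Worked example of the dimension math in readSequenceParameterSet above,
// using typical 1080p SPS values (illustrative, not captured from a stream):
//
//   pic_width_in_mbs_minus1        = 119 -> width  = (119 + 1) * 16 = 1920
//   pic_height_in_map_units_minus1 = 67
//   frame_mbs_only_flag            = 1
//   frame_crop_bottom_offset       = 4   -> height = (2 - 1) * (67 + 1) * 16 - 4 * 2
//                                                  = 1088 - 8 = 1080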
9291 /**
9292 * mux.js
9293 *
9294 * Copyright (c) Brightcove
9295 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
9296 *
9297 * Utilities to detect basic properties and metadata about AAC data.
9298 */
9299
9300 var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
9301
9302 var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
9303 var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],
9304 flags = header[byteIndex + 5],
9305 footerPresent = (flags & 16) >> 4; // if we get a negative returnSize clamp it to 0
9306
9307 returnSize = returnSize >= 0 ? returnSize : 0;
9308
9309 if (footerPresent) {
9310 return returnSize + 20;
9311 }
9312
9313 return returnSize + 10;
9314 };
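// ID3v2 sizes are "syncsafe" integers: four bytes, seven data bits each, so
// the high bit of every byte stays clear. A worked example with illustrative
// size bytes 0x00 0x00 0x02 0x01: (2 << 7) | 1 = 257 payload bytes, and with
// no footer flag set the tag occupies 257 + 10 header bytes = 267 in total.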
9315
9316 var getId3Offset = function getId3Offset(data, offset) {
9317 if (data.length - offset < 10 || data[offset] !== 'I'.charCodeAt(0) || data[offset + 1] !== 'D'.charCodeAt(0) || data[offset + 2] !== '3'.charCodeAt(0)) {
9318 return offset;
9319 }
9320
9321 offset += parseId3TagSize(data, offset);
9322 return getId3Offset(data, offset);
9323 }; // TODO: use vhs-utils
9324
9325
9326 var isLikelyAacData$1 = function isLikelyAacData(data) {
9327 var offset = getId3Offset(data, 0);
9328 return data.length >= offset + 2 && (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0 && // verify that the two layer bits are 0, i.e. this
9329 // is AAC data rather than mp3 data.
9330 (data[offset + 1] & 0x16) === 0x10;
9331 };
9332
9333 var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
9334 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
9335 }; // return a percent-encoded representation of the specified byte range
9336 // @see http://en.wikipedia.org/wiki/Percent-encoding
9337
9338
9339 var percentEncode = function percentEncode(bytes, start, end) {
9340 var i,
9341 result = '';
9342
9343 for (i = start; i < end; i++) {
9344 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
9345 }
9346
9347 return result;
9348 }; // return the string representation of the specified byte range,
9349 // interpreted as ISO-8859-1.
9350
9351
9352 var parseIso88591 = function parseIso88591(bytes, start, end) {
9353 return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
9354 };
9355
9356 var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
9357 var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
9358 middle = header[byteIndex + 4] << 3,
9359 highTwo = (header[byteIndex + 3] & 0x3) << 11; // the low 2 bits of byte 3 are the top of the 13-bit frame_length
9360 return highTwo | middle | lowThree;
9361 };
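// The 13-bit ADTS frame_length spans header bytes 3-5: the low 2 bits of
// byte 3, all 8 bits of byte 4, and the top 3 bits of byte 5. A worked
// example with illustrative header bytes hdr[3] = 0x01, hdr[4] = 0x40,
// hdr[5] = 0x20:
//
//   ((0x01 & 0x3) << 11) | (0x40 << 3) | ((0x20 & 0xE0) >> 5)
//     = 2048 | 512 | 1 = 2561 bytes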
9362
9363 var parseType$2 = function parseType(header, byteIndex) {
9364 if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
9365 return 'timed-metadata';
9366 } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
9367 return 'audio';
9368 }
9369
9370 return null;
9371 };
9372
9373 var parseSampleRate = function parseSampleRate(packet) {
9374 var i = 0;
9375
9376 while (i + 5 < packet.length) {
9377 if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
9378 // If a valid header was not found, jump one forward and attempt to
9379 // find a valid ADTS header starting at the next byte
9380 i++;
9381 continue;
9382 }
9383
9384 return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
9385 }
9386
9387 return null;
9388 };
9389
9390 var parseAacTimestamp = function parseAacTimestamp(packet) {
9391 var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag
9392
9393 frameStart = 10;
9394
9395 if (packet[5] & 0x40) {
9396 // advance the frame start past the extended header
9397 frameStart += 4; // header size field
9398
9399 frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
9400 } // parse one or more ID3 frames
9401 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
9402
9403
9404 do {
9405 // determine the number of bytes in this frame
9406 frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
9407
9408 if (frameSize < 1) {
9409 return null;
9410 }
9411
9412 frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);
9413
9414 if (frameHeader === 'PRIV') {
9415 frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
9416
9417 for (var i = 0; i < frame.byteLength; i++) {
9418 if (frame[i] === 0) {
9419 var owner = parseIso88591(frame, 0, i);
9420
9421 if (owner === 'com.apple.streaming.transportStreamTimestamp') {
9422 var d = frame.subarray(i + 1);
9423 var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
9424 size *= 4;
9425 size += d[7] & 0x03;
9426 return size;
9427 }
9428
9429 break;
9430 }
9431 }
9432 }
9433
9434 frameStart += 10; // advance past the frame header
9435
9436 frameStart += frameSize; // advance past the frame body
9437 } while (frameStart < packet.byteLength);
9438
9439 return null;
9440 };
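// The PRIV payload parsed above carries a 33-bit MPEG-2 timestamp, one bit
// too wide for JavaScript's 32-bit bitwise operators, so the code assembles
// the top 31 bits with shifts, multiplies by 4, and adds back the low 2 bits.
// With illustrative payload bytes d[3..7] = 0x01 0x00 0x00 0x00 0x00:
//
//   ((0x01 & 0x01) << 30) * 4 + 0 = 1073741824 * 4 = 4294967296 (i.e. 2^32)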
9441
9442 var utils = {
9443 isLikelyAacData: isLikelyAacData$1,
9444 parseId3TagSize: parseId3TagSize,
9445 parseAdtsSize: parseAdtsSize,
9446 parseType: parseType$2,
9447 parseSampleRate: parseSampleRate,
9448 parseAacTimestamp: parseAacTimestamp
9449 };
9450
9451 var _AacStream;
9452 /**
9453 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
9454 */
9455
9456
9457 _AacStream = function AacStream() {
9458 var everything = new Uint8Array(),
9459 timeStamp = 0;
9460
9461 _AacStream.prototype.init.call(this);
9462
9463 this.setTimestamp = function (timestamp) {
9464 timeStamp = timestamp;
9465 };
9466
9467 this.push = function (bytes) {
9468 var frameSize = 0,
9469 byteIndex = 0,
9470 bytesLeft,
9471 chunk,
9472 packet,
9473 tempLength; // If there are bytes remaining from the last segment, prepend them to the
9474 // bytes that were pushed in
9475
9476 if (everything.length) {
9477 tempLength = everything.length; chunk = everything; // stash the leftover bytes
9478 everything = new Uint8Array(bytes.byteLength + tempLength);
9479 everything.set(chunk.subarray(0, tempLength)); // copy the stashed leftovers, not the new empty buffer
9480 everything.set(bytes, tempLength);
9481 } else {
9482 everything = bytes;
9483 }
9484
9485 while (everything.length - byteIndex >= 3) {
9486 if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
9487 // Exit early because we don't have enough to parse
9488 // the ID3 tag header
9489 if (everything.length - byteIndex < 10) {
9490 break;
9491 } // check framesize
9492
9493
9494 frameSize = utils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
9495 // to emit a full packet
9496 // Add to byteIndex to support multiple ID3 tags in sequence
9497
9498 if (byteIndex + frameSize > everything.length) {
9499 break;
9500 }
9501
9502 chunk = {
9503 type: 'timed-metadata',
9504 data: everything.subarray(byteIndex, byteIndex + frameSize)
9505 };
9506 this.trigger('data', chunk);
9507 byteIndex += frameSize;
9508 continue;
9509 } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
9510 // Exit early because we don't have enough to parse
9511 // the ADTS frame header
9512 if (everything.length - byteIndex < 7) {
9513 break;
9514 }
9515
9516 frameSize = utils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
9517 // to emit a full packet
9518
9519 if (byteIndex + frameSize > everything.length) {
9520 break;
9521 }
9522
9523 packet = {
9524 type: 'audio',
9525 data: everything.subarray(byteIndex, byteIndex + frameSize),
9526 pts: timeStamp,
9527 dts: timeStamp
9528 };
9529 this.trigger('data', packet);
9530 byteIndex += frameSize;
9531 continue;
9532 }
9533
9534 byteIndex++;
9535 }
9536
9537 bytesLeft = everything.length - byteIndex;
9538
9539 if (bytesLeft > 0) {
9540 everything = everything.subarray(byteIndex);
9541 } else {
9542 everything = new Uint8Array();
9543 }
9544 };
9545
9546 this.reset = function () {
9547 everything = new Uint8Array();
9548 this.trigger('reset');
9549 };
9550
9551 this.endTimeline = function () {
9552 everything = new Uint8Array();
9553 this.trigger('endedtimeline');
9554 };
9555 };
9556
9557 _AacStream.prototype = new stream();
9558 var aac = _AacStream; // constants
9559
9560 var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];
9561 var audioProperties = AUDIO_PROPERTIES;
9562 var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];
9563 var videoProperties = VIDEO_PROPERTIES;
9564 var H264Stream = h264.H264Stream;
9565 var isLikelyAacData = utils.isLikelyAacData;
9566 var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS; // object types
9567
9568 var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
9569
9570 var retriggerForStream = function retriggerForStream(key, event) {
9571 event.stream = key;
9572 this.trigger('log', event);
9573 };
9574
9575 var addPipelineLogRetriggers = function addPipelineLogRetriggers(transmuxer, pipeline) {
9576 var keys = Object.keys(pipeline);
9577
9578 for (var i = 0; i < keys.length; i++) {
9579 var key = keys[i]; // skip non-stream keys and headOfPipeline
9580 // which is just a duplicate
9581
9582 if (key === 'headOfPipeline' || !pipeline[key].on) {
9583 continue;
9584 }
9585
9586 pipeline[key].on('log', retriggerForStream.bind(transmuxer, key));
9587 }
9588 };
9589 /**
9590 * Compare two arrays (even typed) for same-ness
9591 */
9592
9593
9594 var arrayEquals = function arrayEquals(a, b) {
9595 var i;
9596
9597 if (a.length !== b.length) {
9598 return false;
9599 } // compare the value of each element in the array
9600
9601
9602 for (i = 0; i < a.length; i++) {
9603 if (a[i] !== b[i]) {
9604 return false;
9605 }
9606 }
9607
9608 return true;
9609 };
9610
9611 var generateSegmentTimingInfo = function generateSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
9612 var ptsOffsetFromDts = startPts - startDts,
9613 decodeDuration = endDts - startDts,
9614 presentationDuration = endPts - startPts; // The PTS and DTS values are based on the actual stream times from the segment,
9615 // however, the player time values will reflect a start from the baseMediaDecodeTime.
9616 // In order to provide relevant values for the player times, base timing info on the
9617 // baseMediaDecodeTime and the DTS and PTS durations of the segment.
9618
9619 return {
9620 start: {
9621 dts: baseMediaDecodeTime,
9622 pts: baseMediaDecodeTime + ptsOffsetFromDts
9623 },
9624 end: {
9625 dts: baseMediaDecodeTime + decodeDuration,
9626 pts: baseMediaDecodeTime + presentationDuration
9627 },
9628 prependedContentDuration: prependedContentDuration,
9629 baseMediaDecodeTime: baseMediaDecodeTime
9630 };
9631 };
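// A worked example with illustrative 90kHz values: a segment whose stream
// timestamps run from dts 900000 / pts 903000 to dts 990000 / pts 993000,
// rebased onto a baseMediaDecodeTime of 0:
//
//   generateSegmentTimingInfo(0, 900000, 903000, 990000, 993000, 0)
//     -> { start: { dts: 0, pts: 3000 },
//          end: { dts: 90000, pts: 90000 },
//          prependedContentDuration: 0,
//          baseMediaDecodeTime: 0 }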
9632 /**
9633 * Constructs a single-track, ISO BMFF media segment from AAC data
9634 * events. The output of this stream can be fed to a SourceBuffer
9635 * configured with a suitable initialization segment.
9636 * @param track {object} track metadata configuration
9637 * @param options {object} transmuxer options object
9638 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
9639 * in the source; false to adjust the first segment to start at 0.
9640 */
9641
9642
9643 _AudioSegmentStream = function AudioSegmentStream(track, options) {
9644 var adtsFrames = [],
9645 sequenceNumber,
9646 earliestAllowedDts = 0,
9647 audioAppendStartTs = 0,
9648 videoBaseMediaDecodeTime = Infinity;
9649 options = options || {};
9650 sequenceNumber = options.firstSequenceNumber || 0;
9651
9652 _AudioSegmentStream.prototype.init.call(this);
9653
9654 this.push = function (data) {
9655 trackDecodeInfo.collectDtsInfo(track, data);
9656
9657 if (track) {
9658 audioProperties.forEach(function (prop) {
9659 track[prop] = data[prop];
9660 });
9661 } // buffer audio data until end() is called
9662
9663
9664 adtsFrames.push(data);
9665 };
9666
9667 this.setEarliestDts = function (earliestDts) {
9668 earliestAllowedDts = earliestDts;
9669 };
9670
9671 this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
9672 videoBaseMediaDecodeTime = baseMediaDecodeTime;
9673 };
9674
9675 this.setAudioAppendStart = function (timestamp) {
9676 audioAppendStartTs = timestamp;
9677 };
9678
9679 this.flush = function () {
9680 var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed; // return early if no audio data has been observed
9681
9682 if (adtsFrames.length === 0) {
9683 this.trigger('done', 'AudioSegmentStream');
9684 return;
9685 }
9686
9687 frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
9688 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps); // the amount of silence prefixed below is measured in the video clock rather than the audio clock
9689
9690 videoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
9691 // samples (that is, adts frames) in the audio data
9692
9693 track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to construct the mdat
9694
9695 mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
9696 adtsFrames = [];
9697 moof = mp4Generator.moof(sequenceNumber, [track]);
9698 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // bump the sequence number for next time
9699
9700 sequenceNumber++;
9701 boxes.set(moof);
9702 boxes.set(mdat, moof.byteLength);
9703 trackDecodeInfo.clearDtsInfo(track);
9704 frameDuration = Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / track.samplerate); // TODO this check was added to maintain backwards compatibility (particularly with
9705 // tests) on adding the timingInfo event. However, it seems unlikely that there's a
9706 // valid use-case where an init segment/data should be triggered without associated
9707 // frames. Leaving for now, but should be looked into.
9708
9709 if (frames.length) {
9710 segmentDuration = frames.length * frameDuration;
9711 this.trigger('segmentTimingInfo', generateSegmentTimingInfo( // The audio track's baseMediaDecodeTime is in audio clock cycles, but the
9712 // frame info is in video clock cycles. Convert to match expectation of
9713 // listeners (that all timestamps will be based on video clock cycles).
9714 clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate), // frame times are already in video clock, as is segment duration
9715 frames[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));
9716 this.trigger('timingInfo', {
9717 start: frames[0].pts,
9718 end: frames[0].pts + segmentDuration
9719 });
9720 }
9721
9722 this.trigger('data', {
9723 track: track,
9724 boxes: boxes
9725 });
9726 this.trigger('done', 'AudioSegmentStream');
9727 };
9728
9729 this.reset = function () {
9730 trackDecodeInfo.clearDtsInfo(track);
9731 adtsFrames = [];
9732 this.trigger('reset');
9733 };
9734 };
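// The frameDuration computation in flush() above, worked through: an AAC
// frame always carries 1024 samples, so at an illustrative 48000 Hz sample
// rate each frame spans Math.ceil(90000 * 1024 / 48000) = 1920 ticks of the
// 90kHz MPEG-2 clock (ONE_SECOND_IN_TS$1).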
9735
9736 _AudioSegmentStream.prototype = new stream();
9737 /**
9738 * Constructs a single-track, ISO BMFF media segment from H264 data
9739 * events. The output of this stream can be fed to a SourceBuffer
9740 * configured with a suitable initialization segment.
9741 * @param track {object} track metadata configuration
9742 * @param options {object} transmuxer options object
9743 * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
9744 * gopsToAlignWith list when attempting to align gop pts
9745 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
9746 * in the source; false to adjust the first segment to start at 0.
9747 */
9748
9749 _VideoSegmentStream = function VideoSegmentStream(track, options) {
9750 var sequenceNumber,
9751 nalUnits = [],
9752 gopsToAlignWith = [],
9753 config,
9754 pps;
9755 options = options || {};
9756 sequenceNumber = options.firstSequenceNumber || 0;
9757
9758 _VideoSegmentStream.prototype.init.call(this);
9759
9760 delete track.minPTS;
9761 this.gopCache_ = [];
9762 /**
9763 * Constructs a ISO BMFF segment given H264 nalUnits
9764 * @param {Object} nalUnit A data event representing a nalUnit
9765 * @param {String} nalUnit.nalUnitType
9766 * @param {Object} nalUnit.config Properties for a mp4 track
9767 * @param {Uint8Array} nalUnit.data The nalUnit bytes
9768 * @see lib/codecs/h264.js
9769 **/
9770
9771 this.push = function (nalUnit) {
9772 trackDecodeInfo.collectDtsInfo(track, nalUnit); // record the track config
9773
9774 if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
9775 config = nalUnit.config;
9776 track.sps = [nalUnit.data];
9777 videoProperties.forEach(function (prop) {
9778 track[prop] = config[prop];
9779 }, this);
9780 }
9781
9782 if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
9783 pps = nalUnit.data;
9784 track.pps = [nalUnit.data];
9785 } // buffer video until flush() is called
9786
9787
9788 nalUnits.push(nalUnit);
9789 };
9790 /**
9791 * Pass constructed ISO BMFF track and boxes on to the
9792 * next stream in the pipeline
9793 **/
9794
9795
9796 this.flush = function () {
9797 var frames,
9798 gopForFusion,
9799 gops,
9800 moof,
9801 mdat,
9802 boxes,
9803 prependedContentDuration = 0,
9804 firstGop,
9805 lastGop; // Throw away nalUnits at the start of the byte stream until
9806 // we find the first AUD
9807
9808 while (nalUnits.length) {
9809 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
9810 break;
9811 }
9812
9813 nalUnits.shift();
9814 } // Return early if no video data has been observed
9815
9816
9817 if (nalUnits.length === 0) {
9818 this.resetStream_();
9819 this.trigger('done', 'VideoSegmentStream');
9820 return;
9821 } // Organize the raw nal-units into arrays that represent
9822 // higher-level constructs such as frames and gops
9823 // (group-of-pictures)
9824
9825
9826 frames = frameUtils.groupNalsIntoFrames(nalUnits);
9827 gops = frameUtils.groupFramesIntoGops(frames); // If the first frame of this fragment is not a keyframe we have
9828 // a problem since MSE (on Chrome) requires a leading keyframe.
9829 //
9830 // We have two approaches to repairing this situation:
9831 // 1) GOP-FUSION:
9832 // This is where we keep track of the GOPS (group-of-pictures)
9833 // from previous fragments and attempt to find one that we can
9834 // prepend to the current fragment in order to create a valid
9835 // fragment.
9836 // 2) KEYFRAME-PULLING:
9837 // Here we search for the first keyframe in the fragment and
9838 // throw away all the frames between the start of the fragment
9839 // and that keyframe. We then extend the duration and pull the
9840 // PTS of the keyframe forward so that it covers the time range
9841 // of the frames that were disposed of.
9842 //
9843 // #1 is far preferable to #2 (which can cause "stuttering") but
9844 // requires more things to be just right.
9845
9846 if (!gops[0][0].keyFrame) {
9847 // Search for a gop for fusion from our gopCache
9848 gopForFusion = this.getGopForFusion_(nalUnits[0], track);
9849
9850 if (gopForFusion) {
9851 // in order to provide more accurate timing information about the segment, save
9852 // the number of seconds prepended to the original segment due to GOP fusion
9853 prependedContentDuration = gopForFusion.duration;
9854 gops.unshift(gopForFusion); // Adjust Gops' metadata to account for the inclusion of the
9855 // new gop at the beginning
9856
9857 gops.byteLength += gopForFusion.byteLength;
9858 gops.nalCount += gopForFusion.nalCount;
9859 gops.pts = gopForFusion.pts;
9860 gops.dts = gopForFusion.dts;
9861 gops.duration += gopForFusion.duration;
9862 } else {
9863 // If we didn't find a candidate gop fall back to keyframe-pulling
9864 gops = frameUtils.extendFirstKeyFrame(gops);
9865 }
9866 } // Trim gops to align with gopsToAlignWith
9867
9868
9869 if (gopsToAlignWith.length) {
9870 var alignedGops;
9871
9872 if (options.alignGopsAtEnd) {
9873 alignedGops = this.alignGopsAtEnd_(gops);
9874 } else {
9875 alignedGops = this.alignGopsAtStart_(gops);
9876 }
9877
9878 if (!alignedGops) {
9879 // save all the nals in the last GOP into the gop cache
9880 this.gopCache_.unshift({
9881 gop: gops.pop(),
9882 pps: track.pps,
9883 sps: track.sps
9884 }); // Keep a maximum of 6 GOPs in the cache
9885
9886 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
9887
9888 nalUnits = []; // return early; no gops can be aligned with the desired gopsToAlignWith
9889
9890 this.resetStream_();
9891 this.trigger('done', 'VideoSegmentStream');
9892 return;
9893 } // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
9894 // when recalculated before sending off to CoalesceStream
9895
9896
9897 trackDecodeInfo.clearDtsInfo(track);
9898 gops = alignedGops;
9899 }
9900
9901 trackDecodeInfo.collectDtsInfo(track, gops); // First, we have to build the index from byte locations to
9902 // samples (that is, frames) in the video data
9903
9904 track.samples = frameUtils.generateSampleTable(gops); // Concatenate the video data and construct the mdat
9905
9906 mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));
9907 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
9908 this.trigger('processedGopsInfo', gops.map(function (gop) {
9909 return {
9910 pts: gop.pts,
9911 dts: gop.dts,
9912 byteLength: gop.byteLength
9913 };
9914 }));
9915 firstGop = gops[0];
9916 lastGop = gops[gops.length - 1];
9917 this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));
9918 this.trigger('timingInfo', {
9919 start: gops[0].pts,
9920 end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
9921 }); // save all the nals in the last GOP into the gop cache
9922
9923 this.gopCache_.unshift({
9924 gop: gops.pop(),
9925 pps: track.pps,
9926 sps: track.sps
9927 }); // Keep a maximum of 6 GOPs in the cache
9928
9929 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
9930
9931 nalUnits = [];
9932 this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
9933 this.trigger('timelineStartInfo', track.timelineStartInfo);
9934 moof = mp4Generator.moof(sequenceNumber, [track]); // it would be great to allocate this array up front instead of
9935 // throwing away hundreds of media segment fragments
9936
9937 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // Bump the sequence number for next time
9938
9939 sequenceNumber++;
9940 boxes.set(moof);
9941 boxes.set(mdat, moof.byteLength);
9942 this.trigger('data', {
9943 track: track,
9944 boxes: boxes
9945 });
9946 this.resetStream_(); // Continue with the flush process now
9947
9948 this.trigger('done', 'VideoSegmentStream');
9949 };
9950
9951 this.reset = function () {
9952 this.resetStream_();
9953 nalUnits = [];
9954 this.gopCache_.length = 0;
9955 gopsToAlignWith.length = 0;
9956 this.trigger('reset');
9957 };
9958
9959 this.resetStream_ = function () {
9960 trackDecodeInfo.clearDtsInfo(track); // reset config and pps because they may differ across segments
9961 // for instance, when we are rendition switching
9962
9963 config = undefined;
9964 pps = undefined;
9965 }; // Search for a candidate Gop for gop-fusion from the gop cache and
9966 // return it or return null if no good candidate was found
9967
9968
9969 this.getGopForFusion_ = function (nalUnit) {
9970 var halfSecond = 45000,
9971 // Half-a-second in a 90khz clock
9972 allowableOverlap = 10000,
9973 // About 3 frames @ 30fps
9974 nearestDistance = Infinity,
9975 dtsDistance,
9976 nearestGopObj,
9977 currentGop,
9978 currentGopObj,
9979 i; // Search for the GOP nearest to the beginning of this nal unit
9980
9981 for (i = 0; i < this.gopCache_.length; i++) {
9982 currentGopObj = this.gopCache_[i];
9983 currentGop = currentGopObj.gop; // Reject Gops with different SPS or PPS
9984
9985 if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
9986 continue;
9987 } // Reject Gops that would require a negative baseMediaDecodeTime
9988
9989
9990 if (currentGop.dts < track.timelineStartInfo.dts) {
9991 continue;
9992 } // The distance between the end of the gop and the start of the nalUnit
9993
9994
9995 dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration; // Only consider GOPS that start before the nal unit and end within
9996 // a half-second of the nal unit
9997
9998 if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {
9999 // Always use the closest GOP we found if there is more than
10000 // one candidate
10001 if (!nearestGopObj || nearestDistance > dtsDistance) {
10002 nearestGopObj = currentGopObj;
10003 nearestDistance = dtsDistance;
10004 }
10005 }
10006 }
10007
10008 if (nearestGopObj) {
10009 return nearestGopObj.gop;
10010 }
10011
10012 return null;
10013 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
10014 // of gopsToAlignWith starting from the START of the list
10015
10016
10017 this.alignGopsAtStart_ = function (gops) {
10018 var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
10019 byteLength = gops.byteLength;
10020 nalCount = gops.nalCount;
10021 duration = gops.duration;
10022 alignIndex = gopIndex = 0;
10023
10024 while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
10025 align = gopsToAlignWith[alignIndex];
10026 gop = gops[gopIndex];
10027
10028 if (align.pts === gop.pts) {
10029 break;
10030 }
10031
10032 if (gop.pts > align.pts) {
10033 // this gop starts after the gop we want to align on, so increment the
10034 // align index
10035 alignIndex++;
10036 continue;
10037 } // current gop starts before the gop we want to align on, so increment the gop
10038 // index
10039
10040
10041 gopIndex++;
10042 byteLength -= gop.byteLength;
10043 nalCount -= gop.nalCount;
10044 duration -= gop.duration;
10045 }
10046
10047 if (gopIndex === 0) {
10048 // no gops to trim
10049 return gops;
10050 }
10051
10052 if (gopIndex === gops.length) {
10053 // all gops trimmed, skip appending all gops
10054 return null;
10055 }
10056
10057 alignedGops = gops.slice(gopIndex);
10058 alignedGops.byteLength = byteLength;
10059 alignedGops.duration = duration;
10060 alignedGops.nalCount = nalCount;
10061 alignedGops.pts = alignedGops[0].pts;
10062 alignedGops.dts = alignedGops[0].dts;
10063 return alignedGops;
10064 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
10065 // of gopsToAlignWith starting from the END of the list
10066
10067
10068 this.alignGopsAtEnd_ = function (gops) {
10069 var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
10070 alignIndex = gopsToAlignWith.length - 1;
10071 gopIndex = gops.length - 1;
10072 alignEndIndex = null;
10073 matchFound = false;
10074
10075 while (alignIndex >= 0 && gopIndex >= 0) {
10076 align = gopsToAlignWith[alignIndex];
10077 gop = gops[gopIndex];
10078
10079 if (align.pts === gop.pts) {
10080 matchFound = true;
10081 break;
10082 }
10083
10084 if (align.pts > gop.pts) {
10085 alignIndex--;
10086 continue;
10087 }
10088
10089 if (alignIndex === gopsToAlignWith.length - 1) {
10090 // gop.pts is greater than the last alignment candidate. If no match is found
10091 // by the end of this loop, we still want to append gops that come after this
10092 // point
10093 alignEndIndex = gopIndex;
10094 }
10095
10096 gopIndex--;
10097 }
10098
10099 if (!matchFound && alignEndIndex === null) {
10100 return null;
10101 }
10102
10103 var trimIndex;
10104
10105 if (matchFound) {
10106 trimIndex = gopIndex;
10107 } else {
10108 trimIndex = alignEndIndex;
10109 }
10110
10111 if (trimIndex === 0) {
10112 return gops;
10113 }
10114
10115 var alignedGops = gops.slice(trimIndex);
10116 var metadata = alignedGops.reduce(function (total, gop) {
10117 total.byteLength += gop.byteLength;
10118 total.duration += gop.duration;
10119 total.nalCount += gop.nalCount;
10120 return total;
10121 }, {
10122 byteLength: 0,
10123 duration: 0,
10124 nalCount: 0
10125 });
10126 alignedGops.byteLength = metadata.byteLength;
10127 alignedGops.duration = metadata.duration;
10128 alignedGops.nalCount = metadata.nalCount;
10129 alignedGops.pts = alignedGops[0].pts;
10130 alignedGops.dts = alignedGops[0].dts;
10131 return alignedGops;
10132 };
10133
10134 this.alignGopsWith = function (newGopsToAlignWith) {
10135 gopsToAlignWith = newGopsToAlignWith;
10136 };
10137 };
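// The fusion window checked by getGopForFusion_ above, in 90kHz ticks: a
// cached GOP qualifies when nalUnit.dts - gop.dts - gop.duration falls in
// [-10000, 45000], i.e. the GOP may overlap the new nal unit by at most
// ~0.11s and may end at most half a second before it. With illustrative
// numbers, a cached GOP at dts 0 with duration 180000 (two seconds)
// qualifies for a nal unit at dts 200000 (gap 20000) but not for one at
// dts 300000 (gap 120000 > 45000).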
10138
10139 _VideoSegmentStream.prototype = new stream();
10140 /**
10141 * A Stream that can combine multiple streams (ie. audio & video)
10142 * into a single output segment for MSE. Also supports audio-only
10143 * and video-only streams.
10144 * @param options {object} transmuxer options object
10145 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
10146 * in the source; false to adjust the first segment to start at media timeline start.
10147 */
10148
10149 _CoalesceStream = function CoalesceStream(options, metadataStream) {
10150 // Number of Tracks per output segment
10151 // If greater than 1, we combine multiple
10152 // tracks into a single segment
10153 this.numberOfTracks = 0;
10154 this.metadataStream = metadataStream;
10155 options = options || {};
10156
10157 if (typeof options.remux !== 'undefined') {
10158 this.remuxTracks = !!options.remux;
10159 } else {
10160 this.remuxTracks = true;
10161 }
10162
10163 if (typeof options.keepOriginalTimestamps === 'boolean') {
10164 this.keepOriginalTimestamps = options.keepOriginalTimestamps;
10165 } else {
10166 this.keepOriginalTimestamps = false;
10167 }
10168
10169 this.pendingTracks = [];
10170 this.videoTrack = null;
10171 this.pendingBoxes = [];
10172 this.pendingCaptions = [];
10173 this.pendingMetadata = [];
10174 this.pendingBytes = 0;
10175 this.emittedTracks = 0;
10176
10177 _CoalesceStream.prototype.init.call(this); // Take output from multiple
10178
10179
10180 this.push = function (output) {
10181 // buffer incoming captions until the associated video segment
10182 // finishes
10183 if (output.text) {
10184 return this.pendingCaptions.push(output);
10185 } // buffer incoming id3 tags until the final flush
10186
10187
10188 if (output.frames) {
10189 return this.pendingMetadata.push(output);
10190 } // Add this track to the list of pending tracks and store
10191 // important information required for the construction of
10192 // the final segment
10193
10194
10195 this.pendingTracks.push(output.track);
10196 this.pendingBytes += output.boxes.byteLength; // TODO: is there an issue for this against chrome?
10197 // We unshift audio and push video because,
10198 // as of Chrome 75, when switching from
10199 // one init segment to another, if the video
10200 // mdat does not appear after the audio mdat,
10201 // only audio will play for the duration of our transmux.
10202
10203 if (output.track.type === 'video') {
10204 this.videoTrack = output.track;
10205 this.pendingBoxes.push(output.boxes);
10206 }
10207
10208 if (output.track.type === 'audio') {
10209 this.audioTrack = output.track;
10210 this.pendingBoxes.unshift(output.boxes);
10211 }
10212 };
10213 };
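// A sketch of the ordering rule above: because audio boxes are unshifted and
// video boxes are pushed, a remuxed flush emits one event whose payload is
// laid out [audio moof+mdat][video moof+mdat], with the init segment carried
// separately on event.initSegment (the Chrome 75 workaround described in the
// comment above).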
10214
10215 _CoalesceStream.prototype = new stream();
10216
10217 _CoalesceStream.prototype.flush = function (flushSource) {
10218 var offset = 0,
10219 event = {
10220 captions: [],
10221 captionStreams: {},
10222 metadata: [],
10223 info: {}
10224 },
10225 caption,
10226 id3,
10227 initSegment,
10228 timelineStartPts = 0,
10229 i;
10230
10231 if (this.pendingTracks.length < this.numberOfTracks) {
10232 if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
10233 // Return because we haven't received a flush from a data-generating
10234 // portion of the segment (meaning that we have only received meta-data
10235 // or captions.)
10236 return;
10237 } else if (this.remuxTracks) {
10238 // Return until we have enough tracks from the pipeline to remux (if we
10239 // are remuxing audio and video into a single MP4)
10240 return;
10241 } else if (this.pendingTracks.length === 0) {
10242 // In the case where we receive a flush without any data having been
10243 // received we consider it an emitted track for the purposes of coalescing
10244 // `done` events.
10245 // We do this for the case where there is an audio and video track in the
10246 // segment but no audio data. (seen in several playlists with alternate
10247 // audio tracks and no audio present in the main TS segments.)
10248 this.emittedTracks++;
10249
10250 if (this.emittedTracks >= this.numberOfTracks) {
10251 this.trigger('done');
10252 this.emittedTracks = 0;
10253 }
10254
10255 return;
10256 }
10257 }
10258
10259 if (this.videoTrack) {
10260 timelineStartPts = this.videoTrack.timelineStartInfo.pts;
10261 videoProperties.forEach(function (prop) {
10262 event.info[prop] = this.videoTrack[prop];
10263 }, this);
10264 } else if (this.audioTrack) {
10265 timelineStartPts = this.audioTrack.timelineStartInfo.pts;
10266 audioProperties.forEach(function (prop) {
10267 event.info[prop] = this.audioTrack[prop];
10268 }, this);
10269 }
10270
10271 if (this.videoTrack || this.audioTrack) {
10272 if (this.pendingTracks.length === 1) {
10273 event.type = this.pendingTracks[0].type;
10274 } else {
10275 event.type = 'combined';
10276 }
10277
10278 this.emittedTracks += this.pendingTracks.length;
10279 initSegment = mp4Generator.initSegment(this.pendingTracks); // Create a new typed array to hold the init segment
10280
10281 event.initSegment = new Uint8Array(initSegment.byteLength); // Create an init segment containing a moov
10282 // and track definitions
10283
10284 event.initSegment.set(initSegment); // Create a new typed array to hold the moof+mdats
10285
10286 event.data = new Uint8Array(this.pendingBytes); // Append each moof+mdat (one per track) together
10287
10288 for (i = 0; i < this.pendingBoxes.length; i++) {
10289 event.data.set(this.pendingBoxes[i], offset);
10290 offset += this.pendingBoxes[i].byteLength;
10291 } // Translate caption PTS times into second offsets to match the
10292 // video timeline for the segment, and add track info
10293
10294
10295 for (i = 0; i < this.pendingCaptions.length; i++) {
10296 caption = this.pendingCaptions[i];
10297 caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
10298 caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);
10299 event.captionStreams[caption.stream] = true;
10300 event.captions.push(caption);
10301 } // Translate ID3 frame PTS times into second offsets to match the
10302 // video timeline for the segment
10303
10304
10305 for (i = 0; i < this.pendingMetadata.length; i++) {
10306 id3 = this.pendingMetadata[i];
10307 id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);
10308 event.metadata.push(id3);
10309 } // We add this to every single emitted segment even though we only need
10310 // it for the first
10311
10312
10313 event.metadata.dispatchType = this.metadataStream.dispatchType; // Reset stream state
10314
10315 this.pendingTracks.length = 0;
10316 this.videoTrack = null;
10317 this.pendingBoxes.length = 0;
10318 this.pendingCaptions.length = 0;
10319 this.pendingBytes = 0;
10320 this.pendingMetadata.length = 0; // Emit the built segment
10321 // We include captions and ID3 tags for backwards compatibility,
10322 // ideally we should send only video and audio in the data event
10323
10324 this.trigger('data', event); // Emit each caption to the outside world
10325 // Ideally, this would happen immediately on parsing captions,
10326 // but we need to ensure that video data is sent back first
10327 // so that caption timing can be adjusted to match video timing
10328
10329 for (i = 0; i < event.captions.length; i++) {
10330 caption = event.captions[i];
10331 this.trigger('caption', caption);
10332 } // Emit each id3 tag to the outside world
10333 // Ideally, this would happen immediately on parsing the tag,
10334 // but we need to ensure that video data is sent back first
10335 // so that ID3 frame timing can be adjusted to match video timing
10336
10337
10338 for (i = 0; i < event.metadata.length; i++) {
10339 id3 = event.metadata[i];
10340 this.trigger('id3Frame', id3);
10341 }
10342 } // Only emit `done` if all tracks have been flushed and emitted
10343
10344
10345 if (this.emittedTracks >= this.numberOfTracks) {
10346 this.trigger('done');
10347 this.emittedTracks = 0;
10348 }
10349 };
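// Caption and ID3 time conversion in flush() above, with illustrative
// numbers and keepOriginalTimestamps off: a caption at startPts 903000 in a
// segment whose timelineStartPts is 900000 maps to
// (903000 - 900000) / 90000 = 0.0333... seconds on the player timeline,
// since clock.metadataTsToSeconds subtracts the timeline start and divides
// by the 90kHz clock rate.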
10350
10351 _CoalesceStream.prototype.setRemux = function (val) {
10352 this.remuxTracks = val;
10353 };
10354 /**
10355 * A Stream that expects MP2T binary data as input and produces
10356 * corresponding media segments, suitable for use with Media Source
10357 * Extension (MSE) implementations that support the ISO BMFF byte
10358 * stream format, like Chrome.
10359 */
10360
10361
10362 _Transmuxer = function Transmuxer(options) {
10363 var self = this,
10364 hasFlushed = true,
10365 videoTrack,
10366 audioTrack;
10367
10368 _Transmuxer.prototype.init.call(this);
10369
10370 options = options || {};
10371 this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
10372 this.transmuxPipeline_ = {};
10373
10374 this.setupAacPipeline = function () {
10375 var pipeline = {};
10376 this.transmuxPipeline_ = pipeline;
10377 pipeline.type = 'aac';
10378 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
10379
10380 pipeline.aacStream = new aac();
10381 pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
10382 pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
10383 pipeline.adtsStream = new adts();
10384 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
10385 pipeline.headOfPipeline = pipeline.aacStream;
10386 pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
10387 pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
10388 pipeline.metadataStream.on('timestamp', function (frame) {
10389 pipeline.aacStream.setTimestamp(frame.timeStamp);
10390 });
10391 pipeline.aacStream.on('data', function (data) {
10392 if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
10393 return;
10394 }
10395
10396 audioTrack = audioTrack || {
10397 timelineStartInfo: {
10398 baseMediaDecodeTime: self.baseMediaDecodeTime
10399 },
10400 codec: 'adts',
10401 type: 'audio'
10402 }; // hook up the audio segment stream to the first track with aac data
10403
10404 pipeline.coalesceStream.numberOfTracks++;
10405 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
10406 pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
10407 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo')); // Set up the final part of the audio pipeline
10408
10409 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream); // emit pmt info
10410
10411 self.trigger('trackinfo', {
10412 hasAudio: !!audioTrack,
10413 hasVideo: !!videoTrack
10414 });
10415 }); // Re-emit any data coming from the coalesce stream to the outside world
10416
10417 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data')); // Let the consumer know we have finished flushing the entire pipeline
10418
10419 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
10420 addPipelineLogRetriggers(this, pipeline);
10421 };
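// Shape of the AAC pipeline wired above (data flows left to right):
//
//   aacStream -> audioTimestampRolloverStream -> adtsStream
//     -> audioSegmentStream -> coalesceStream
//   aacStream -> timedMetadataTimestampRolloverStream -> metadataStream
//     -> coalesceStream
//
// audioSegmentStream is only attached on the first 'data' event that carries
// audio or timed metadata, which is why coalesceStream.numberOfTracks is
// bumped at the same time.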
10422
10423 this.setupTsPipeline = function () {
10424 var pipeline = {};
10425 this.transmuxPipeline_ = pipeline;
10426 pipeline.type = 'ts';
10427 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
10428
10429 pipeline.packetStream = new m2ts_1.TransportPacketStream();
10430 pipeline.parseStream = new m2ts_1.TransportParseStream();
10431 pipeline.elementaryStream = new m2ts_1.ElementaryStream();
10432 pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
10433 pipeline.adtsStream = new adts();
10434 pipeline.h264Stream = new H264Stream();
10435 pipeline.captionStream = new m2ts_1.CaptionStream(options);
10436 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
10437 pipeline.headOfPipeline = pipeline.packetStream; // disassemble MPEG2-TS packets into elementary streams
10438
10439 pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream); // !!THIS ORDER IS IMPORTANT!!
10440 // demux the streams
10441
10442 pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
10443 pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
10444 pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream); // Hook up CEA-608/708 caption stream
10445
10446 pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
10447 pipeline.elementaryStream.on('data', function (data) {
10448 var i;
10449
10450 if (data.type === 'metadata') {
10451 i = data.tracks.length; // scan the tracks listed in the metadata
10452
10453 while (i--) {
10454 if (!videoTrack && data.tracks[i].type === 'video') {
10455 videoTrack = data.tracks[i];
10456 videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
10457 } else if (!audioTrack && data.tracks[i].type === 'audio') {
10458 audioTrack = data.tracks[i];
10459 audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
10460 }
10461 } // hook up the video segment stream to the first track with h264 data
10462
10463
10464 if (videoTrack && !pipeline.videoSegmentStream) {
10465 pipeline.coalesceStream.numberOfTracks++;
10466 pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);
10467 pipeline.videoSegmentStream.on('log', self.getLogTrigger_('videoSegmentStream'));
10468 pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
10469 // When video emits timelineStartInfo data after a flush, we forward that
10470 // info to the AudioSegmentStream, if it exists, because video timeline
10471 // data takes precedence. Do not do this if keepOriginalTimestamps is set,
10472 // because this is a particularly subtle form of timestamp alteration.
10473 if (audioTrack && !options.keepOriginalTimestamps) {
10474 audioTrack.timelineStartInfo = timelineStartInfo; // On the first segment we trim AAC frames that exist before the
10475 // very earliest DTS we have seen in video because Chrome will
10476 // interpret any video track with a baseMediaDecodeTime that is
10477 // non-zero as a gap.
10478
10479 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
10480 }
10481 });
10482 pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
10483 pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
10484 pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
10485 if (audioTrack) {
10486 pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
10487 }
10488 });
10489 pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo')); // Set up the final part of the video pipeline
10490
10491 pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
10492 }
10493
10494 if (audioTrack && !pipeline.audioSegmentStream) {
10495 // hook up the audio segment stream to the first track with aac data
10496 pipeline.coalesceStream.numberOfTracks++;
10497 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
10498 pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
10499 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
10500 pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo')); // Set up the final part of the audio pipeline
10501
10502 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
10503 } // emit pmt info
10504
10505
10506 self.trigger('trackinfo', {
10507 hasAudio: !!audioTrack,
10508 hasVideo: !!videoTrack
10509 });
10510 }
10511 }); // Re-emit any data coming from the coalesce stream to the outside world
10512
10513 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
10514 pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
10515 id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
10516 self.trigger('id3Frame', id3Frame);
10517 });
10518 pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption')); // Let the consumer know we have finished flushing the entire pipeline
10519
10520 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
10521 addPipelineLogRetriggers(this, pipeline);
10522 }; // hook up the segment streams once track metadata is delivered
10523
10524
10525 this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
10526 var pipeline = this.transmuxPipeline_;
10527
10528 if (!options.keepOriginalTimestamps) {
10529 this.baseMediaDecodeTime = baseMediaDecodeTime;
10530 }
10531
10532 if (audioTrack) {
10533 audioTrack.timelineStartInfo.dts = undefined;
10534 audioTrack.timelineStartInfo.pts = undefined;
10535 trackDecodeInfo.clearDtsInfo(audioTrack);
10536
10537 if (pipeline.audioTimestampRolloverStream) {
10538 pipeline.audioTimestampRolloverStream.discontinuity();
10539 }
10540 }
10541
10542 if (videoTrack) {
10543 if (pipeline.videoSegmentStream) {
10544 pipeline.videoSegmentStream.gopCache_ = [];
10545 }
10546
10547 videoTrack.timelineStartInfo.dts = undefined;
10548 videoTrack.timelineStartInfo.pts = undefined;
10549 trackDecodeInfo.clearDtsInfo(videoTrack);
10550 pipeline.captionStream.reset();
10551 }
10552
10553 if (pipeline.timestampRolloverStream) {
10554 pipeline.timestampRolloverStream.discontinuity();
10555 }
10556 };
10557
10558 this.setAudioAppendStart = function (timestamp) {
10559 if (audioTrack) {
10560 this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
10561 }
10562 };
10563
10564 this.setRemux = function (val) {
10565 var pipeline = this.transmuxPipeline_;
10566 options.remux = val;
10567
10568 if (pipeline && pipeline.coalesceStream) {
10569 pipeline.coalesceStream.setRemux(val);
10570 }
10571 };
10572
10573 this.alignGopsWith = function (gopsToAlignWith) {
10574 if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
10575 this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
10576 }
10577 };
10578
10579 this.getLogTrigger_ = function (key) {
10580 var self = this;
10581 return function (event) {
10582 event.stream = key;
10583 self.trigger('log', event);
10584 };
10585 }; // feed incoming data to the front of the parsing pipeline
10586
10587
10588 this.push = function (data) {
10589 if (hasFlushed) {
10590 var isAac = isLikelyAacData(data);
10591
10592 if (isAac && this.transmuxPipeline_.type !== 'aac') {
10593 this.setupAacPipeline();
10594 } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
10595 this.setupTsPipeline();
10596 }
10597
10598 hasFlushed = false;
10599 }
10600
10601 this.transmuxPipeline_.headOfPipeline.push(data);
10602 }; // flush any buffered data
10603
10604
10605 this.flush = function () {
10606 hasFlushed = true; // Start at the top of the pipeline and flush all pending work
10607
10608 this.transmuxPipeline_.headOfPipeline.flush();
10609 };
10610
10611 this.endTimeline = function () {
10612 this.transmuxPipeline_.headOfPipeline.endTimeline();
10613 };
10614
10615 this.reset = function () {
10616 if (this.transmuxPipeline_.headOfPipeline) {
10617 this.transmuxPipeline_.headOfPipeline.reset();
10618 }
10619 }; // Caption data has to be reset when seeking outside buffered range
10620
10621
10622 this.resetCaptions = function () {
10623 if (this.transmuxPipeline_.captionStream) {
10624 this.transmuxPipeline_.captionStream.reset();
10625 }
10626 };
10627 };
10628
10629 _Transmuxer.prototype = new stream();
10630 var transmuxer = {
10631 Transmuxer: _Transmuxer,
10632 VideoSegmentStream: _VideoSegmentStream,
10633 AudioSegmentStream: _AudioSegmentStream,
10634 AUDIO_PROPERTIES: audioProperties,
10635 VIDEO_PROPERTIES: videoProperties,
10636 // exported for testing
10637 generateSegmentTimingInfo: generateSegmentTimingInfo
10638 };
10639 /**
10640 * mux.js
10641 *
10642 * Copyright (c) Brightcove
10643 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10644 */
10645
10646 var toUnsigned$3 = function toUnsigned(value) {
10647 return value >>> 0;
10648 };
10649
10650 var toHexString$1 = function toHexString(value) {
10651 return ('00' + value.toString(16)).slice(-2);
10652 };
10653
10654 var bin = {
10655 toUnsigned: toUnsigned$3,
10656 toHexString: toHexString$1
10657 };
10658
10659 var parseType$1 = function parseType(buffer) {
10660 var result = '';
10661 result += String.fromCharCode(buffer[0]);
10662 result += String.fromCharCode(buffer[1]);
10663 result += String.fromCharCode(buffer[2]);
10664 result += String.fromCharCode(buffer[3]);
10665 return result;
10666 };
10667
10668 var parseType_1 = parseType$1;
10669 var toUnsigned$2 = bin.toUnsigned;
10670
10671 var findBox = function findBox(data, path) {
10672 var results = [],
10673 i,
10674 size,
10675 type,
10676 end,
10677 subresults;
10678
10679 if (!path.length) {
10680 // short-circuit the search for empty paths
10681 return null;
10682 }
10683
10684 for (i = 0; i < data.byteLength;) {
10685 size = toUnsigned$2(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);
10686 type = parseType_1(data.subarray(i + 4, i + 8));
10687 end = size > 1 ? i + size : data.byteLength;
10688
10689 if (type === path[0]) {
10690 if (path.length === 1) {
10691 // this is the end of the path and we've found the box we were
10692 // looking for
10693 results.push(data.subarray(i + 8, end));
10694 } else {
10695 // recursively search for the next box along the path
10696 subresults = findBox(data.subarray(i + 8, end), path.slice(1));
10697
10698 if (subresults.length) {
10699 results = results.concat(subresults);
10700 }
10701 }
10702 }
10703
10704 i = end;
10705 } // we've finished searching all of data
10706
10707
10708 return results;
10709 };
10710
10711 var findBox_1 = findBox;
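// Illustrative sketch (not part of the library): driving findBox_1 by hand
// with a synthetic 'moov' box that wraps a 'trak' box whose payload is the
// three bytes [1, 2, 3]. Box layout is a 32-bit big-endian size, a 4-byte
// ASCII type, then the payload; the helper name below is hypothetical.
// eslint-disable-next-line no-unused-vars
var exampleFindBox = function () {
  var trak = [0x00, 0x00, 0x00, 0x0b, 0x74, 0x72, 0x61, 0x6b, 1, 2, 3]; // 11-byte 'trak'
  var moov = [0x00, 0x00, 0x00, 0x13, 0x6d, 0x6f, 0x6f, 0x76].concat(trak); // 19-byte 'moov'
  // returns [Uint8Array [1, 2, 3]] -- the payload of each moov > trak found
  return findBox_1(new Uint8Array(moov), ['moov', 'trak']);
};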
10712 var toUnsigned$1 = bin.toUnsigned;
10713 var getUint64$1 = numbers.getUint64;
10714
10715 var tfdt = function tfdt(data) {
10716 var result = {
10717 version: data[0],
10718 flags: new Uint8Array(data.subarray(1, 4))
10719 };
10720
10721 if (result.version === 1) {
10722 result.baseMediaDecodeTime = getUint64$1(data.subarray(4));
10723 } else {
10724 result.baseMediaDecodeTime = toUnsigned$1(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]);
10725 }
10726
10727 return result;
10728 };
10729
10730 var parseTfdt = tfdt;
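// A quick sketch of the two tfdt layouts parseTfdt handles: version 0 stores
// baseMediaDecodeTime in 32 bits, version 1 in 64 bits (which getUint64 may
// hand back as a BigInt when the value cannot fit a double exactly). The
// helper name is hypothetical.
// eslint-disable-next-line no-unused-vars
var exampleParseTfdt = function () {
  // version 0: [version, 3 flag bytes, uint32 baseMediaDecodeTime]
  var v0 = new Uint8Array([0, 0, 0, 0, 0x00, 0x01, 0x5f, 0x90]);
  return parseTfdt(v0).baseMediaDecodeTime; // 90000 -- one second at 90kHz
};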
10731
10732 var parseSampleFlags = function parseSampleFlags(flags) {
10733 return {
10734 isLeading: (flags[0] & 0x0c) >>> 2,
10735 dependsOn: flags[0] & 0x03,
10736 isDependedOn: (flags[1] & 0xc0) >>> 6,
10737 hasRedundancy: (flags[1] & 0x30) >>> 4,
10738 paddingValue: (flags[1] & 0x0e) >>> 1,
10739 isNonSyncSample: flags[1] & 0x01,
10740 degradationPriority: flags[2] << 8 | flags[3]
10741 };
10742 };
10743
10744 var parseSampleFlags_1 = parseSampleFlags;
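// Spot check for the bit layout above (illustrative only): byte 1's low bit
// is sample_is_non_sync_sample, so these flag bytes describe a non-keyframe.
// eslint-disable-next-line no-unused-vars
var exampleSampleFlags = function () {
  return parseSampleFlags_1(new Uint8Array([0x00, 0x01, 0x00, 0x00])).isNonSyncSample; // 1
};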
10745
10746 var trun = function trun(data) {
10747 var result = {
10748 version: data[0],
10749 flags: new Uint8Array(data.subarray(1, 4)),
10750 samples: []
10751 },
10752 view = new DataView(data.buffer, data.byteOffset, data.byteLength),
10753 // Flag interpretation
10754 dataOffsetPresent = result.flags[2] & 0x01,
10755 // trun flag 0x000001 (data-offset-present); flags[2] is the least significant flag byte
10756 firstSampleFlagsPresent = result.flags[2] & 0x04,
10757 // trun flag 0x000004 (first-sample-flags-present)
10758 sampleDurationPresent = result.flags[1] & 0x01,
10759 // trun flag 0x000100 (sample-duration-present); flags[1] is the middle flag byte
10760 sampleSizePresent = result.flags[1] & 0x02,
10761 // trun flag 0x000200 (sample-size-present)
10762 sampleFlagsPresent = result.flags[1] & 0x04,
10763 // trun flag 0x000400 (sample-flags-present)
10764 sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
10765 // trun flag 0x000800 (sample-composition-time-offsets-present)
10766 sampleCount = view.getUint32(4),
10767 offset = 8,
10768 sample;
10769
10770 if (dataOffsetPresent) {
10771 // 32 bit signed integer
10772 result.dataOffset = view.getInt32(offset);
10773 offset += 4;
10774 } // Overrides the flags for the first sample only. The order of
10775 // optional values will be: duration, size, compositionTimeOffset
10776
10777
10778 if (firstSampleFlagsPresent && sampleCount) {
10779 sample = {
10780 flags: parseSampleFlags_1(data.subarray(offset, offset + 4))
10781 };
10782 offset += 4;
10783
10784 if (sampleDurationPresent) {
10785 sample.duration = view.getUint32(offset);
10786 offset += 4;
10787 }
10788
10789 if (sampleSizePresent) {
10790 sample.size = view.getUint32(offset);
10791 offset += 4;
10792 }
10793
10794 if (sampleCompositionTimeOffsetPresent) {
10795 if (result.version === 1) {
10796 sample.compositionTimeOffset = view.getInt32(offset);
10797 } else {
10798 sample.compositionTimeOffset = view.getUint32(offset);
10799 }
10800
10801 offset += 4;
10802 }
10803
10804 result.samples.push(sample);
10805 sampleCount--;
10806 }
10807
10808 while (sampleCount--) {
10809 sample = {};
10810
10811 if (sampleDurationPresent) {
10812 sample.duration = view.getUint32(offset);
10813 offset += 4;
10814 }
10815
10816 if (sampleSizePresent) {
10817 sample.size = view.getUint32(offset);
10818 offset += 4;
10819 }
10820
10821 if (sampleFlagsPresent) {
10822 sample.flags = parseSampleFlags_1(data.subarray(offset, offset + 4));
10823 offset += 4;
10824 }
10825
10826 if (sampleCompositionTimeOffsetPresent) {
10827 if (result.version === 1) {
10828 sample.compositionTimeOffset = view.getInt32(offset);
10829 } else {
10830 sample.compositionTimeOffset = view.getUint32(offset);
10831 }
10832
10833 offset += 4;
10834 }
10835
10836 result.samples.push(sample);
10837 }
10838
10839 return result;
10840 };
10841
10842 var parseTrun = trun;
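// A hand-rolled trun to exercise the parser above (illustrative only):
// version 0, flags 0x000300 (sample-duration-present | sample-size-present),
// sample_count = 1, then one (duration, size) pair.
// eslint-disable-next-line no-unused-vars
var exampleParseTrun = function () {
  var bytes = new Uint8Array([
    0x00, 0x00, 0x03, 0x00, // version + 24-bit flags
    0x00, 0x00, 0x00, 0x01, // sample_count
    0x00, 0x00, 0x0b, 0xb8, // sample_duration = 3000
    0x00, 0x00, 0x01, 0x00 // sample_size = 256
  ]);
  return parseTrun(bytes).samples; // [{ duration: 3000, size: 256 }]
};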
10843
10844 var tfhd = function tfhd(data) {
10845 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
10846 result = {
10847 version: data[0],
10848 flags: new Uint8Array(data.subarray(1, 4)),
10849 trackId: view.getUint32(4)
10850 },
10851 baseDataOffsetPresent = result.flags[2] & 0x01,
10852 sampleDescriptionIndexPresent = result.flags[2] & 0x02,
10853 defaultSampleDurationPresent = result.flags[2] & 0x08,
10854 defaultSampleSizePresent = result.flags[2] & 0x10,
10855 defaultSampleFlagsPresent = result.flags[2] & 0x20,
10856 durationIsEmpty = result.flags[0] & 0x010000,
10857 defaultBaseIsMoof = result.flags[0] & 0x020000,
10858 i;
10859 i = 8;
10860
10861 if (baseDataOffsetPresent) {
10862 i += 4; // truncate top 4 bytes
10863 // FIXME: should we read the full 64 bits?
10864
10865 result.baseDataOffset = view.getUint32(12);
10866 i += 4;
10867 }
10868
10869 if (sampleDescriptionIndexPresent) {
10870 result.sampleDescriptionIndex = view.getUint32(i);
10871 i += 4;
10872 }
10873
10874 if (defaultSampleDurationPresent) {
10875 result.defaultSampleDuration = view.getUint32(i);
10876 i += 4;
10877 }
10878
10879 if (defaultSampleSizePresent) {
10880 result.defaultSampleSize = view.getUint32(i);
10881 i += 4;
10882 }
10883
10884 if (defaultSampleFlagsPresent) {
10885 result.defaultSampleFlags = view.getUint32(i);
10886 }
10887
10888 if (durationIsEmpty) {
10889 result.durationIsEmpty = true;
10890 }
10891
10892 if (!baseDataOffsetPresent && defaultBaseIsMoof) {
10893 result.baseDataOffsetIsMoof = true;
10894 }
10895
10896 return result;
10897 };
10898
10899 var parseTfhd = tfhd;
10900 var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
10901 var win;
10902
10903 if (typeof window !== "undefined") {
10904 win = window;
10905 } else if (typeof commonjsGlobal !== "undefined") {
10906 win = commonjsGlobal;
10907 } else if (typeof self !== "undefined") {
10908 win = self;
10909 } else {
10910 win = {};
10911 }
10912
10913 var window_1 = win;
10914 var discardEmulationPreventionBytes = captionPacketParser.discardEmulationPreventionBytes;
10915 var CaptionStream = captionStream.CaptionStream;
10916 /**
10917 * Maps an offset in the mdat to a sample based on the size of the samples.
10918 * Assumes that `parseSamples` has been called first.
10919 *
10920 * @param {Number} offset - The offset into the mdat
10921 * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
10922 * @return {?Object} The matching sample, or null if no match was found.
10923 *
10924 * @see ISO-BMFF-12/2015, Section 8.8.8
10925 **/
10926
10927 var mapToSample = function mapToSample(offset, samples) {
10928 var approximateOffset = offset;
10929
10930 for (var i = 0; i < samples.length; i++) {
10931 var sample = samples[i];
10932
10933 if (approximateOffset < sample.size) {
10934 return sample;
10935 }
10936
10937 approximateOffset -= sample.size;
10938 }
10939
10940 return null;
10941 };
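// Sketch of the offset walk above (illustrative only): with sample sizes
// [100, 200, 300], an mdat offset of 250 lands inside the second sample.
// eslint-disable-next-line no-unused-vars
var exampleMapToSample = function () {
  return mapToSample(250, [{ size: 100 }, { size: 200 }, { size: 300 }]); // { size: 200 }
};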
10942 /**
10943 * Finds SEI nal units contained in a Media Data Box.
10944 * Assumes that `parseSamples` has been called first.
10945 *
10946 * @param {Uint8Array} avcStream - The bytes of the mdat
10947 * @param {Object[]} samples - The samples parsed out by `parseSamples`
10948 * @param {Number} trackId - The trackId of this video track
10949 * @return {Object[]} seiNals - the parsed SEI NALUs found.
10950 * The contents of the seiNal should match what is expected by
10951 * CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
10952 *
10953 * @see ISO-BMFF-12/2015, Section 8.1.1
10954 * @see Rec. ITU-T H.264, 7.3.2.3.1
10955 **/
10956
10957
10958 var findSeiNals = function findSeiNals(avcStream, samples, trackId) {
10959 var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
10960 result = {
10961 logs: [],
10962 seiNals: []
10963 },
10964 seiNal,
10965 i,
10966 length,
10967 lastMatchedSample;
10968
10969 for (i = 0; i + 4 < avcStream.length; i += length) {
10970 length = avcView.getUint32(i);
10971 i += 4; // Bail if this doesn't appear to be an H264 stream
10972
10973 if (length <= 0) {
10974 continue;
10975 }
10976
10977 switch (avcStream[i] & 0x1F) {
10978 case 0x06:
10979 var data = avcStream.subarray(i + 1, i + 1 + length);
10980 var matchingSample = mapToSample(i, samples);
10981 seiNal = {
10982 nalUnitType: 'sei_rbsp',
10983 size: length,
10984 data: data,
10985 escapedRBSP: discardEmulationPreventionBytes(data),
10986 trackId: trackId
10987 };
10988
10989 if (matchingSample) {
10990 seiNal.pts = matchingSample.pts;
10991 seiNal.dts = matchingSample.dts;
10992 lastMatchedSample = matchingSample;
10993 } else if (lastMatchedSample) {
10994 // If a matching sample cannot be found, use the last
10995 // sample's values as they should be as close as possible
10996 seiNal.pts = lastMatchedSample.pts;
10997 seiNal.dts = lastMatchedSample.dts;
10998 } else {
10999 result.logs.push({
11000 level: 'warn',
11001 message: 'We\'ve encountered a nal unit without data at ' + i + ' for trackId ' + trackId + '. See mux.js#223.'
11002 });
11003 break;
11004 }
11005
11006 result.seiNals.push(seiNal);
11007 break;
11008 }
11009 }
11010
11011 return result;
11012 };
11013 /**
11014 * Parses sample information out of Track Run Boxes and calculates
11015 * the absolute presentation and decode timestamps of each sample.
11016 *
11017 * @param {Array<Uint8Array>} truns - The Track Run boxes to be parsed
11018 * @param {Number|BigInt} baseMediaDecodeTime - base media decode time from tfdt
11019 * @see ISO-BMFF-12/2015, Section 8.8.12
11020 * @param {Object} tfhd - The parsed Track Fragment Header
11021 * @see inspect.parseTfhd
11022 * @return {Object[]} the parsed samples
11023 *
11024 * @see ISO-BMFF-12/2015, Section 8.8.8
11025 **/
11026
11027
11028 var parseSamples = function parseSamples(truns, baseMediaDecodeTime, tfhd) {
11029 var currentDts = baseMediaDecodeTime;
11030 var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
11031 var defaultSampleSize = tfhd.defaultSampleSize || 0;
11032 var trackId = tfhd.trackId;
11033 var allSamples = [];
11034 truns.forEach(function (trun) {
11035 // Note: We currently do not parse the sample table as well
11036 // as the trun. It's possible some sources will require this.
11037 // moov > trak > mdia > minf > stbl
11038 var trackRun = parseTrun(trun);
11039 var samples = trackRun.samples;
11040 samples.forEach(function (sample) {
11041 if (sample.duration === undefined) {
11042 sample.duration = defaultSampleDuration;
11043 }
11044
11045 if (sample.size === undefined) {
11046 sample.size = defaultSampleSize;
11047 }
11048
11049 sample.trackId = trackId;
11050 sample.dts = currentDts;
11051
11052 if (sample.compositionTimeOffset === undefined) {
11053 sample.compositionTimeOffset = 0;
11054 }
11055
11056 if (typeof currentDts === 'bigint') {
11057 sample.pts = currentDts + window_1.BigInt(sample.compositionTimeOffset);
11058 currentDts += window_1.BigInt(sample.duration);
11059 } else {
11060 sample.pts = currentDts + sample.compositionTimeOffset;
11061 currentDts += sample.duration;
11062 }
11063 });
11064 allSamples = allSamples.concat(samples);
11065 });
11066 return allSamples;
11067 };
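// Sketch of the timing math above (illustrative only): reusing the trun bytes
// from the parseTrun sketch with a baseMediaDecodeTime of 90000 ticks gives
// the single sample dts = 90000 and pts = dts + compositionTimeOffset (0).
// eslint-disable-next-line no-unused-vars
var exampleParseSamples = function () {
  var trunBytes = new Uint8Array([
    0x00, 0x00, 0x03, 0x00,
    0x00, 0x00, 0x00, 0x01,
    0x00, 0x00, 0x0b, 0xb8,
    0x00, 0x00, 0x01, 0x00
  ]);
  // -> [{ duration: 3000, size: 256, trackId: 1, compositionTimeOffset: 0, dts: 90000, pts: 90000 }]
  return parseSamples([trunBytes], 90000, { trackId: 1 });
};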
11068 /**
11069 * Parses out caption nals from an FMP4 segment's video tracks.
11070 *
11071 * @param {Uint8Array} segment - The bytes of a single segment
11072 * @param {Number} videoTrackId - The trackId of a video track in the segment
11073 * @return {Object.<Number, Object[]>} A mapping of video trackId to
11074 * a list of seiNals found in that track
11075 **/
11076
11077
11078 var parseCaptionNals = function parseCaptionNals(segment, videoTrackId) {
11079 // To get the samples
11080 var trafs = findBox_1(segment, ['moof', 'traf']); // To get SEI NAL units
11081
11082 var mdats = findBox_1(segment, ['mdat']);
11083 var captionNals = {};
11084 var mdatTrafPairs = []; // Pair up each traf with a mdat as moofs and mdats are in pairs
11085
11086 mdats.forEach(function (mdat, index) {
11087 var matchingTraf = trafs[index];
11088 mdatTrafPairs.push({
11089 mdat: mdat,
11090 traf: matchingTraf
11091 });
11092 });
11093 mdatTrafPairs.forEach(function (pair) {
11094 var mdat = pair.mdat;
11095 var traf = pair.traf;
11096 var tfhd = findBox_1(traf, ['tfhd']); // Exactly 1 tfhd per traf
11097
11098 var headerInfo = parseTfhd(tfhd[0]);
11099 var trackId = headerInfo.trackId;
11100 var tfdt = findBox_1(traf, ['tfdt']); // Either 0 or 1 tfdt per traf
11101
11102 var baseMediaDecodeTime = tfdt.length > 0 ? parseTfdt(tfdt[0]).baseMediaDecodeTime : 0;
11103 var truns = findBox_1(traf, ['trun']);
11104 var samples;
11105 var result; // Only parse video data for the chosen video track
11106
11107 if (videoTrackId === trackId && truns.length > 0) {
11108 samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
11109 result = findSeiNals(mdat, samples, trackId);
11110
11111 if (!captionNals[trackId]) {
11112 captionNals[trackId] = {
11113 seiNals: [],
11114 logs: []
11115 };
11116 }
11117
11118 captionNals[trackId].seiNals = captionNals[trackId].seiNals.concat(result.seiNals);
11119 captionNals[trackId].logs = captionNals[trackId].logs.concat(result.logs);
11120 }
11121 });
11122 return captionNals;
11123 };
11124 /**
11125 * Parses out inband captions from an MP4 container and returns
11126 * caption objects that can be used by WebVTT and the TextTrack API.
11127 * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
11128 * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
11129 * Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
11130 *
11131 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
11132 * @param {Number} trackId - The id of the video track to parse
11133 * @param {Number} timescale - The timescale for the video track from the init segment
11134 *
11135 * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
11136 * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
11137 * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
11138 * @return {String} parsedCaptions[].text - The visible content of the caption
11139 **/
11140
11141
11142 var parseEmbeddedCaptions = function parseEmbeddedCaptions(segment, trackId, timescale) {
11143 var captionNals; // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
11144
11145 if (trackId === null) {
11146 return null;
11147 }
11148
11149 captionNals = parseCaptionNals(segment, trackId);
11150 var trackNals = captionNals[trackId] || {};
11151 return {
11152 seiNals: trackNals.seiNals,
11153 logs: trackNals.logs,
11154 timescale: timescale
11155 };
11156 };
11157 /**
11158 * Converts SEI NALUs into captions that can be used by video.js
11159 **/
11160
11161
11162 var CaptionParser = function CaptionParser() {
11163 var isInitialized = false;
11164 var captionStream; // Stores segments seen before trackId and timescale are set
11165
11166 var segmentCache; // Stores video track ID of the track being parsed
11167
11168 var trackId; // Stores the timescale of the track being parsed
11169
11170 var timescale; // Stores captions parsed so far
11171
11172 var parsedCaptions; // Stores whether we are receiving partial data or not
11173
11174 var parsingPartial;
11175 /**
11176 * A method to indicate whether a CaptionParser has been initialized
11177 * @returns {Boolean}
11178 **/
11179
11180 this.isInitialized = function () {
11181 return isInitialized;
11182 };
11183 /**
11184 * Initializes the underlying CaptionStream, SEI NAL parsing
11185 * and management, and caption collection
11186 **/
11187
11188
11189 this.init = function (options) {
11190 captionStream = new CaptionStream();
11191 isInitialized = true;
11192 parsingPartial = options ? options.isPartial : false; // Collect dispatched captions
11193
11194 captionStream.on('data', function (event) {
11195 // Convert to seconds in the source's timescale
11196 event.startTime = event.startPts / timescale;
11197 event.endTime = event.endPts / timescale;
11198 parsedCaptions.captions.push(event);
11199 parsedCaptions.captionStreams[event.stream] = true;
11200 });
11201 captionStream.on('log', function (log) {
11202 parsedCaptions.logs.push(log);
11203 });
11204 };
11205 /**
11206 * Determines if a new video track will be selected
11207 * or if the timescale changed
11208 * @return {Boolean}
11209 **/
11210
11211
11212 this.isNewInit = function (videoTrackIds, timescales) {
11213 if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
11214 return false;
11215 }
11216
11217 return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
11218 };
11219 /**
11220 * Parses out SEI captions and interacts with underlying
11221 * CaptionStream to return dispatched captions
11222 *
11223 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
11224 * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
11225 * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
11226 * @see parseEmbeddedCaptions
11227 * @see m2ts/caption-stream.js
11228 **/
11229
11230
11231 this.parse = function (segment, videoTrackIds, timescales) {
11232 var parsedData;
11233
11234 if (!this.isInitialized()) {
11235 return null; // This is not likely to be a video segment
11236 } else if (!videoTrackIds || !timescales) {
11237 return null;
11238 } else if (this.isNewInit(videoTrackIds, timescales)) {
11239 // Use the first video track only as there is no
11240 // mechanism to switch to other video tracks
11241 trackId = videoTrackIds[0];
11242 timescale = timescales[trackId]; // If an init segment has not been seen yet, hold onto segment
11243 // data until we have one.
11244 // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
11245 } else if (trackId === null || !timescale) {
11246 segmentCache.push(segment);
11247 return null;
11248 } // Now that a timescale and trackId is set, parse cached segments
11249
11250
11251 while (segmentCache.length > 0) {
11252 var cachedSegment = segmentCache.shift();
11253 this.parse(cachedSegment, videoTrackIds, timescales);
11254 }
11255
11256 parsedData = parseEmbeddedCaptions(segment, trackId, timescale);
11257
11258 if (parsedData && parsedData.logs) {
11259 parsedCaptions.logs = parsedCaptions.logs.concat(parsedData.logs);
11260 }
11261
11262 if (parsedData === null || !parsedData.seiNals) {
11263 if (parsedCaptions.logs.length) {
11264 return {
11265 logs: parsedCaptions.logs,
11266 captions: [],
11267 captionStreams: []
11268 };
11269 }
11270
11271 return null;
11272 }
11273
11274 this.pushNals(parsedData.seiNals); // Force the parsed captions to be dispatched
11275
11276 this.flushStream();
11277 return parsedCaptions;
11278 };
11279 /**
11280 * Pushes SEI NALUs onto CaptionStream
11281 * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
11282 * Assumes that `parseCaptionNals` has been called first
11283 * @see m2ts/caption-stream.js
11284 **/
11285
11286
11287 this.pushNals = function (nals) {
11288 if (!this.isInitialized() || !nals || nals.length === 0) {
11289 return null;
11290 }
11291
11292 nals.forEach(function (nal) {
11293 captionStream.push(nal);
11294 });
11295 };
11296 /**
11297 * Flushes underlying CaptionStream to dispatch processed, displayable captions
11298 * @see m2ts/caption-stream.js
11299 **/
11300
11301
11302 this.flushStream = function () {
11303 if (!this.isInitialized()) {
11304 return null;
11305 }
11306
11307 if (!parsingPartial) {
11308 captionStream.flush();
11309 } else {
11310 captionStream.partialFlush();
11311 }
11312 };
11313 /**
11314 * Reset caption buckets for new data
11315 **/
11316
11317
11318 this.clearParsedCaptions = function () {
11319 parsedCaptions.captions = [];
11320 parsedCaptions.captionStreams = {};
11321 parsedCaptions.logs = [];
11322 };
11323 /**
11324 * Resets underlying CaptionStream
11325 * @see m2ts/caption-stream.js
11326 **/
11327
11328
11329 this.resetCaptionStream = function () {
11330 if (!this.isInitialized()) {
11331 return null;
11332 }
11333
11334 captionStream.reset();
11335 };
11336 /**
11337 * Convenience method to clear all captions flushed from the
11338 * CaptionStream and still being parsed
11339 * @see m2ts/caption-stream.js
11340 **/
11341
11342
11343 this.clearAllCaptions = function () {
11344 this.clearParsedCaptions();
11345 this.resetCaptionStream();
11346 };
11347 /**
11348 * Reset caption parser
11349 **/
11350
11351
11352 this.reset = function () {
11353 segmentCache = [];
11354 trackId = null;
11355 timescale = null;
11356
11357 if (!parsedCaptions) {
11358 parsedCaptions = {
11359 captions: [],
11360 // CC1, CC2, CC3, CC4
11361 captionStreams: {},
11362 logs: []
11363 };
11364 } else {
11365 this.clearParsedCaptions();
11366 }
11367
11368 this.resetCaptionStream();
11369 };
11370
11371 this.reset();
11372 };
11373
11374 var captionParser = CaptionParser;
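// A minimal usage sketch for the caption parser above; the function name and
// arguments are hypothetical stand-ins for bytes and metadata you already
// have (track ids and timescales normally come from probing the init segment).
// eslint-disable-next-line no-unused-vars
var exampleCaptionParse = function (mediaSegment, videoTrackIds, timescales) {
  var parser = new captionParser();
  parser.init();
  // returns { captions, captionStreams, logs } once enough data has been
  // seen, or null (e.g. before an init segment establishes a timescale)
  return parser.parse(mediaSegment, videoTrackIds, timescales);
};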
11375 var toUnsigned = bin.toUnsigned;
11376 var toHexString = bin.toHexString;
11377 var getUint64 = numbers.getUint64;
11378 var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks, getTimescaleFromMediaHeader;
11379 /**
11380 * Parses an MP4 initialization segment and extracts the timescale
11381 * values for any declared tracks. Timescale values indicate the
11382 * number of clock ticks per second to assume for time-based values
11383 * elsewhere in the MP4.
11384 *
11385 * To determine the start time of an MP4, you need two pieces of
11386 * information: the timescale unit and the earliest base media decode
11387 * time. Multiple timescales can be specified within an MP4 but the
11388 * base media decode time is always expressed in the timescale from
11389 * the media header box for the track:
11390 * ```
11391 * moov > trak > mdia > mdhd.timescale
11392 * ```
11393 * @param init {Uint8Array} the bytes of the init segment
11394 * @return {object} a hash of track ids to timescale values or null if
11395 * the init segment is malformed.
11396 */
11397
11398 timescale = function timescale(init) {
11399 var result = {},
11400 traks = findBox_1(init, ['moov', 'trak']); // mdhd timescale
11401
11402 return traks.reduce(function (result, trak) {
11403 var tkhd, version, index, id, mdhd;
11404 tkhd = findBox_1(trak, ['tkhd'])[0];
11405
11406 if (!tkhd) {
11407 return null;
11408 }
11409
11410 version = tkhd[0];
11411 index = version === 0 ? 12 : 20;
11412 id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);
11413 mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
11414
11415 if (!mdhd) {
11416 return null;
11417 }
11418
11419 version = mdhd[0];
11420 index = version === 0 ? 12 : 20;
11421 result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
11422 return result;
11423 }, result);
11424 };
11425 /**
11426 * Determine the base media decode start time, in seconds, for an MP4
11427 * fragment. If multiple fragments are specified, the earliest time is
11428 * returned.
11429 *
11430 * The base media decode time can be parsed from track fragment
11431 * metadata:
11432 * ```
11433 * moof > traf > tfdt.baseMediaDecodeTime
11434 * ```
11435 * It requires the timescale value from the mdhd to interpret.
11436 *
11437 * @param timescale {object} a hash of track ids to timescale values.
11438 * @return {number} the earliest base media decode start time for the
11439 * fragment, in seconds
11440 */
11441
11442
11443 startTime = function startTime(timescale, fragment) {
11444 var trafs; // we need info from two children of each track fragment box
11445
11446 trafs = findBox_1(fragment, ['moof', 'traf']); // determine the start times for each track
11447
11448 var lowestTime = trafs.reduce(function (acc, traf) {
11449 var tfhd = findBox_1(traf, ['tfhd'])[0]; // get the track id from the tfhd
11450
11451 var id = toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]); // assume a 90kHz clock if no timescale was specified
11452
11453 var scale = timescale[id] || 90e3; // get the base media decode time from the tfdt
11454
11455 var tfdt = findBox_1(traf, ['tfdt'])[0];
11456 var dv = new DataView(tfdt.buffer, tfdt.byteOffset, tfdt.byteLength);
11457 var baseTime; // version 1 is 64 bit
11458
11459 if (tfdt[0] === 1) {
11460 baseTime = getUint64(tfdt.subarray(4, 12));
11461 } else {
11462 baseTime = dv.getUint32(4);
11463 } // convert base time to seconds if it is a valid number.
11464
11465
11466 var seconds;
11467
11468 if (typeof baseTime === 'bigint') {
11469 seconds = baseTime / window_1.BigInt(scale);
11470 } else if (typeof baseTime === 'number' && !isNaN(baseTime)) {
11471 seconds = baseTime / scale;
11472 }
11473
11474 if (seconds < Number.MAX_SAFE_INTEGER) {
11475 seconds = Number(seconds);
11476 }
11477
11478 if (seconds < acc) {
11479 acc = seconds;
11480 }
11481
11482 return acc;
11483 }, Infinity);
11484 return typeof lowestTime === 'bigint' || isFinite(lowestTime) ? lowestTime : 0;
11485 };
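// Worked example of the reduce above (illustrative only): two trafs whose
// baseMediaDecodeTimes are 180000 and 90000 ticks on 90kHz tracks resolve
// to min(2s, 1s) = 1 second.
// eslint-disable-next-line no-unused-vars
var exampleStartTime = function () {
  return Math.min(180000 / 90e3, 90000 / 90e3); // 1 -- earliest start, in seconds
};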
11486 /**
11487 * Determine the composition start, in seconds, for an MP4
11488 * fragment.
11489 *
11490 * The composition start time of a fragment can be calculated using the base
11491 * media decode time, composition time offset, and timescale, as follows:
11492 *
11493 * compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale
11494 *
11495 * All of the aforementioned information is contained within a media fragment's
11496 * `traf` box, except for timescale info, which comes from the initialization
11497 * segment, so a track id (also contained within a `traf`) is also necessary to
11498 * associate it with a timescale
11499 *
11500 *
11501 * @param timescales {object} - a hash of track ids to timescale values.
11502 * @param fragment {Uint8Array} - the bytes of a media segment
11503 * @return {number} the composition start time for the fragment, in seconds
11504 **/
11505
11506
11507 compositionStartTime = function compositionStartTime(timescales, fragment) {
11508 var trafBoxes = findBox_1(fragment, ['moof', 'traf']);
11509 var baseMediaDecodeTime = 0;
11510 var compositionTimeOffset = 0;
11511 var trackId;
11512
11513 if (trafBoxes && trafBoxes.length) {
11514 // The spec states that track run samples contained within a `traf` box are contiguous, but
11515 // it does not explicitly state whether the `traf` boxes themselves are contiguous.
11516 // We will assume that they are, so we only need the first to calculate start time.
11517 var tfhd = findBox_1(trafBoxes[0], ['tfhd'])[0];
11518 var trun = findBox_1(trafBoxes[0], ['trun'])[0];
11519 var tfdt = findBox_1(trafBoxes[0], ['tfdt'])[0];
11520
11521 if (tfhd) {
11522 var parsedTfhd = parseTfhd(tfhd);
11523 trackId = parsedTfhd.trackId;
11524 }
11525
11526 if (tfdt) {
11527 var parsedTfdt = parseTfdt(tfdt);
11528 baseMediaDecodeTime = parsedTfdt.baseMediaDecodeTime;
11529 }
11530
11531 if (trun) {
11532 var parsedTrun = parseTrun(trun);
11533
11534 if (parsedTrun.samples && parsedTrun.samples.length) {
11535 compositionTimeOffset = parsedTrun.samples[0].compositionTimeOffset || 0;
11536 }
11537 }
11538 } // Get timescale for this specific track. Assume a 90kHz clock if no timescale was
11539 // specified.
11540
11541
11542 var timescale = timescales[trackId] || 90e3; // return the composition start time, in seconds
11543
11544 if (typeof baseMediaDecodeTime === 'bigint') {
11545 compositionTimeOffset = window_1.BigInt(compositionTimeOffset);
11546 timescale = window_1.BigInt(timescale);
11547 }
11548
11549 var result = (baseMediaDecodeTime + compositionTimeOffset) / timescale;
11550
11551 if (typeof result === 'bigint' && result < Number.MAX_SAFE_INTEGER) {
11552 result = Number(result);
11553 }
11554
11555 return result;
11556 };
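// The formula above, with concrete numbers (illustrative only): a
// baseMediaDecodeTime of 180000, a first-sample compositionTimeOffset of
// 3000, and a 90kHz timescale put the composition start at ~2.033 seconds.
// eslint-disable-next-line no-unused-vars
var exampleCompositionStart = function () {
  return (180000 + 3000) / 90000; // 2.0333...
};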
11557 /**
11558 * Find the trackIds of the video tracks in this source.
11559 * Found by parsing the Handler Reference and Track Header Boxes:
11560 * moov > trak > mdia > hdlr
11561 * moov > trak > tkhd
11562 *
11563 * @param {Uint8Array} init - The bytes of the init segment for this source
11564 * @return {Number[]} A list of trackIds
11565 *
11566 * @see ISO-BMFF-12/2015, Section 8.4.3
11567 **/
11568
11569
11570 getVideoTrackIds = function getVideoTrackIds(init) {
11571 var traks = findBox_1(init, ['moov', 'trak']);
11572 var videoTrackIds = [];
11573 traks.forEach(function (trak) {
11574 var hdlrs = findBox_1(trak, ['mdia', 'hdlr']);
11575 var tkhds = findBox_1(trak, ['tkhd']);
11576 hdlrs.forEach(function (hdlr, index) {
11577 var handlerType = parseType_1(hdlr.subarray(8, 12));
11578 var tkhd = tkhds[index];
11579 var view;
11580 var version;
11581 var trackId;
11582
11583 if (handlerType === 'vide') {
11584 view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
11585 version = view.getUint8(0);
11586 trackId = version === 0 ? view.getUint32(12) : view.getUint32(20);
11587 videoTrackIds.push(trackId);
11588 }
11589 });
11590 });
11591 return videoTrackIds;
11592 };
11593
11594 getTimescaleFromMediaHeader = function getTimescaleFromMediaHeader(mdhd) {
11595 // mdhd is a FullBox, meaning it will have its own version as the first byte
11596 var version = mdhd[0];
11597 var index = version === 0 ? 12 : 20;
11598 return toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
11599 };
11600 /**
11601 * Get all the video, audio, and hint tracks from a non fragmented
11602 * mp4 segment
11603 */
11604
11605
11606 getTracks = function getTracks(init) {
11607 var traks = findBox_1(init, ['moov', 'trak']);
11608 var tracks = [];
11609 traks.forEach(function (trak) {
11610 var track = {};
11611 var tkhd = findBox_1(trak, ['tkhd'])[0];
11612 var view, tkhdVersion; // id
11613
11614 if (tkhd) {
11615 view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
11616 tkhdVersion = view.getUint8(0);
11617 track.id = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
11618 }
11619
11620 var hdlr = findBox_1(trak, ['mdia', 'hdlr'])[0]; // type
11621
11622 if (hdlr) {
11623 var type = parseType_1(hdlr.subarray(8, 12));
11624
11625 if (type === 'vide') {
11626 track.type = 'video';
11627 } else if (type === 'soun') {
11628 track.type = 'audio';
11629 } else {
11630 track.type = type;
11631 }
11632 } // codec
11633
11634
11635 var stsd = findBox_1(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];
11636
11637 if (stsd) {
11638 var sampleDescriptions = stsd.subarray(8); // gives the codec type string
11639
11640 track.codec = parseType_1(sampleDescriptions.subarray(4, 8));
11641 var codecBox = findBox_1(sampleDescriptions, [track.codec])[0];
11642 var codecConfig, codecConfigType;
11643
11644 if (codecBox) {
11645 // https://tools.ietf.org/html/rfc6381#section-3.3
11646 if (/^[asm]vc[1-9]$/i.test(track.codec)) {
11647 // we don't need anything but the "config" parameter of the
11648 // avc1 codecBox
11649 codecConfig = codecBox.subarray(78);
11650 codecConfigType = parseType_1(codecConfig.subarray(4, 8));
11651
11652 if (codecConfigType === 'avcC' && codecConfig.length > 11) {
11653 track.codec += '.'; // left padded with zeroes for single digit hex
11654 // profile idc
11655
11656 track.codec += toHexString(codecConfig[9]); // the byte containing the constraint_set flags
11657
11658 track.codec += toHexString(codecConfig[10]); // level idc
11659
11660 track.codec += toHexString(codecConfig[11]);
11661 } else {
11662 // TODO: show a warning that we couldn't parse the codec
11663 // and are using the default
11664 track.codec = 'avc1.4d400d';
11665 }
11666 } else if (/^mp4[a,v]$/i.test(track.codec)) {
11667 // we do not need anything but the streamDescriptor of the mp4a codecBox
11668 codecConfig = codecBox.subarray(28);
11669 codecConfigType = parseType_1(codecConfig.subarray(4, 8));
11670
11671 if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {
11672 track.codec += '.' + toHexString(codecConfig[19]); // this value is only a single digit
11673
11674 track.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');
11675 } else {
11676 // TODO: show a warning that we couldn't parse the codec
11677 // and are using the default
11678 track.codec = 'mp4a.40.2';
11679 }
11680 } else {
11681 // flac, opus, etc
11682 track.codec = track.codec.toLowerCase();
11683 }
11684 }
11685 }
11686
11687 var mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
11688
11689 if (mdhd) {
11690 track.timescale = getTimescaleFromMediaHeader(mdhd);
11691 }
11692
11693 tracks.push(track);
11694 });
11695 return tracks;
11696 };
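// Worked example of the RFC 6381 codec-string assembly above (illustrative
// only): avcC bytes 0x64, 0x00, 0x1f for profile_idc, the constraint flags,
// and level_idc concatenate to 'avc1.64001f'.
// eslint-disable-next-line no-unused-vars
var exampleAvcCodecString = function () {
  return 'avc1.' + toHexString(0x64) + toHexString(0x00) + toHexString(0x1f); // 'avc1.64001f'
};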
11697
11698 var probe$2 = {
11699 // export mp4 inspector's findBox and parseType for backwards compatibility
11700 findBox: findBox_1,
11701 parseType: parseType_1,
11702 timescale: timescale,
11703 startTime: startTime,
11704 compositionStartTime: compositionStartTime,
11705 videoTrackIds: getVideoTrackIds,
11706 tracks: getTracks,
11707 getTimescaleFromMediaHeader: getTimescaleFromMediaHeader
11708 };
11709
11710 var parsePid = function parsePid(packet) {
11711 var pid = packet[1] & 0x1f;
11712 pid <<= 8;
11713 pid |= packet[2];
11714 return pid;
11715 };
11716
11717 var parsePayloadUnitStartIndicator = function parsePayloadUnitStartIndicator(packet) {
11718 return !!(packet[1] & 0x40);
11719 };
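// Sketch of the TS header bits these helpers read (illustrative only):
// byte 0 is the 0x47 sync byte, bit 0x40 of byte 1 is the
// payload_unit_start_indicator, and the 13-bit PID spans the low 5 bits of
// byte 1 plus all of byte 2.
// eslint-disable-next-line no-unused-vars
var exampleTsHeader = function () {
  var packet = new Uint8Array(188);
  packet[0] = 0x47; // sync byte
  packet[1] = 0x41; // PUSI set, high PID bits = 0x01
  packet[2] = 0x00; // low PID bits
  return { pid: parsePid(packet), pusi: parsePayloadUnitStartIndicator(packet) }; // { pid: 256, pusi: true }
};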
11720
11721 var parseAdaptionField = function parseAdaptionField(packet) {
11722 var offset = 0; // if an adaptation field is present, its length is specified by the
11723 // fifth byte of the TS packet header. The adaptation field is
11724 // used to add stuffing to PES packets that don't fill a complete
11725 // TS packet, and to specify some forms of timing and control data
11726 // that we do not currently use.
11727
11728 if ((packet[3] & 0x30) >>> 4 > 0x01) {
11729 offset += packet[4] + 1;
11730 }
11731
11732 return offset;
11733 };
11734
11735 var parseType = function parseType(packet, pmtPid) {
11736 var pid = parsePid(packet);
11737
11738 if (pid === 0) {
11739 return 'pat';
11740 } else if (pid === pmtPid) {
11741 return 'pmt';
11742 } else if (pmtPid) {
11743 return 'pes';
11744 }
11745
11746 return null;
11747 };
11748
11749 var parsePat = function parsePat(packet) {
11750 var pusi = parsePayloadUnitStartIndicator(packet);
11751 var offset = 4 + parseAdaptionField(packet);
11752
11753 if (pusi) {
11754 offset += packet[offset] + 1;
11755 }
11756
11757 return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
11758 };
11759
11760 var parsePmt = function parsePmt(packet) {
11761 var programMapTable = {};
11762 var pusi = parsePayloadUnitStartIndicator(packet);
11763 var payloadOffset = 4 + parseAdaptionField(packet);
11764
11765 if (pusi) {
11766 payloadOffset += packet[payloadOffset] + 1;
11767 } // PMTs can be sent ahead of the time when they should actually
11768 // take effect. We don't believe this should ever be the case
11769 // for HLS but we'll ignore "forward" PMT declarations if we see
11770 // them. Future PMT declarations have the current_next_indicator
11771 // set to zero.
11772
11773
11774 if (!(packet[payloadOffset + 5] & 0x01)) {
11775 return;
11776 }
11777
11778 var sectionLength, tableEnd, programInfoLength; // the mapping table ends at the end of the current section
11779
11780 sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
11781 tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
11782 // long the program info descriptors are
11783
11784 programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11]; // advance the offset to the first entry in the mapping table
11785
11786 var offset = 12 + programInfoLength;
11787
11788 while (offset < tableEnd) {
11789 var i = payloadOffset + offset; // add an entry that maps the elementary_pid to the stream_type
11790
11791 programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i]; // move to the next table entry
11792 // skip past the elementary stream descriptors, if present
11793
11794 offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
11795 }
11796
11797 return programMapTable;
11798 };
11799
11800 var parsePesType = function parsePesType(packet, programMapTable) {
11801 var pid = parsePid(packet);
11802 var type = programMapTable[pid];
11803
11804 switch (type) {
11805 case streamTypes.H264_STREAM_TYPE:
11806 return 'video';
11807
11808 case streamTypes.ADTS_STREAM_TYPE:
11809 return 'audio';
11810
11811 case streamTypes.METADATA_STREAM_TYPE:
11812 return 'timed-metadata';
11813
11814 default:
11815 return null;
11816 }
11817 };
11818
11819 var parsePesTime = function parsePesTime(packet) {
11820 var pusi = parsePayloadUnitStartIndicator(packet);
11821
11822 if (!pusi) {
11823 return null;
11824 }
11825
11826 var offset = 4 + parseAdaptionField(packet);
11827
11828 if (offset >= packet.byteLength) {
11829 // From the H.222.0 MPEG-TS spec
11830 // "For transport stream packets carrying PES packets, stuffing is needed when there
11831 // is insufficient PES packet data to completely fill the transport stream packet
11832 // payload bytes. Stuffing is accomplished by defining an adaptation field longer than
11833 // the sum of the lengths of the data elements in it, so that the payload bytes
11834 // remaining after the adaptation field exactly accommodates the available PES packet
11835 // data."
11836 //
11837 // If the offset is >= the length of the packet, then the packet contains no data
11838 // and instead is just adaption field stuffing bytes
11839 return null;
11840 }
11841
11842 var pes = null;
11843 var ptsDtsFlags; // PES packets may be annotated with a PTS value, or a PTS value
11844 // and a DTS value. Determine what combination of values is
11845 // available to work with.
11846
11847 ptsDtsFlags = packet[offset + 7]; // PTS and DTS are normally stored as a 33-bit number. JavaScript
11848 // performs all bitwise operations on 32-bit integers, but it
11849 // supports a much greater range (53 bits) of exact integers using standard
11850 // mathematical operations.
11851 // We construct a 31-bit value using bitwise operators over the 31
11852 // most significant bits and then multiply by 4 (equal to a left-shift
11853 // of 2) before we add the final 2 least significant bits of the
11854 // timestamp (equal to an OR.)
11855
11856 if (ptsDtsFlags & 0xC0) {
11857 pes = {}; // the PTS and DTS are not written out directly. For information
11858 // on how they are encoded, see
11859 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
11860
11861 pes.pts = (packet[offset + 9] & 0x0E) << 27 | (packet[offset + 10] & 0xFF) << 20 | (packet[offset + 11] & 0xFE) << 12 | (packet[offset + 12] & 0xFF) << 5 | (packet[offset + 13] & 0xFE) >>> 3;
11862 pes.pts *= 4; // Left shift by 2
11863
11864 pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
11865
11866 pes.dts = pes.pts;
11867
11868 if (ptsDtsFlags & 0x40) {
11869 pes.dts = (packet[offset + 14] & 0x0E) << 27 | (packet[offset + 15] & 0xFF) << 20 | (packet[offset + 16] & 0xFE) << 12 | (packet[offset + 17] & 0xFF) << 5 | (packet[offset + 18] & 0xFE) >>> 3;
11870 pes.dts *= 4; // Left shift by 2
11871
11872 pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
11873 }
11874 }
11875
11876 return pes;
11877 };
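// The 33-bit arithmetic above, spelled out (illustrative only): multiplying
// by 4 stands in for a left shift of 2, which a 32-bit bitwise shift cannot
// express once the value crosses the 32-bit boundary.
// eslint-disable-next-line no-unused-vars
var examplePts33 = function () {
  var top31 = 0x40000000; // the 31 most significant timestamp bits (2^30)
  var low2 = 0x03; // the 2 least significant bits
  return top31 * 4 + low2; // 4294967299 -- whereas (top31 << 2) overflows to 0
};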
11878
11879 var parseNalUnitType = function parseNalUnitType(type) {
11880 switch (type) {
11881 case 0x05:
11882 return 'slice_layer_without_partitioning_rbsp_idr';
11883
11884 case 0x06:
11885 return 'sei_rbsp';
11886
11887 case 0x07:
11888 return 'seq_parameter_set_rbsp';
11889
11890 case 0x08:
11891 return 'pic_parameter_set_rbsp';
11892
11893 case 0x09:
11894 return 'access_unit_delimiter_rbsp';
11895
11896 default:
11897 return null;
11898 }
11899 };
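// Quick check of the mapping above (illustrative only): nal_unit_type lives
// in the low 5 bits of the first NAL byte, so 0x65 & 0x1f === 0x05, an IDR
// slice (i.e. a keyframe).
// eslint-disable-next-line no-unused-vars
var exampleNalType = function () {
  return parseNalUnitType(0x65 & 0x1f); // 'slice_layer_without_partitioning_rbsp_idr'
};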
11900
11901 var videoPacketContainsKeyFrame = function videoPacketContainsKeyFrame(packet) {
11902 var offset = 4 + parseAdaptionField(packet);
11903 var frameBuffer = packet.subarray(offset);
11904 var frameI = 0;
11905 var frameSyncPoint = 0;
11906 var foundKeyFrame = false;
11907 var nalType; // advance the sync point to a NAL start, if necessary
11908
11909 for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
11910 if (frameBuffer[frameSyncPoint + 2] === 1) {
11911 // the sync point is properly aligned
11912 frameI = frameSyncPoint + 5;
11913 break;
11914 }
11915 }
11916
11917 while (frameI < frameBuffer.byteLength) {
11918 // look at the current byte to determine if we've hit the end of
11919 // a NAL unit boundary
11920 switch (frameBuffer[frameI]) {
11921 case 0:
11922 // skip past non-sync sequences
11923 if (frameBuffer[frameI - 1] !== 0) {
11924 frameI += 2;
11925 break;
11926 } else if (frameBuffer[frameI - 2] !== 0) {
11927 frameI++;
11928 break;
11929 }
11930
11931 if (frameSyncPoint + 3 !== frameI - 2) {
11932 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
11933
11934 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
11935 foundKeyFrame = true;
11936 }
11937 } // drop trailing zeroes
11938
11939
11940 do {
11941 frameI++;
11942 } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
11943
11944 frameSyncPoint = frameI - 2;
11945 frameI += 3;
11946 break;
11947
11948 case 1:
11949 // skip past non-sync sequences
11950 if (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {
11951 frameI += 3;
11952 break;
11953 }
11954
11955 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
11956
11957 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
11958 foundKeyFrame = true;
11959 }
11960
11961 frameSyncPoint = frameI - 2;
11962 frameI += 3;
11963 break;
11964
11965 default:
11966 // the current byte isn't a one or zero, so it cannot be part
11967 // of a sync sequence
11968 frameI += 3;
11969 break;
11970 }
11971 }
11972
11973 frameBuffer = frameBuffer.subarray(frameSyncPoint);
11974 frameI -= frameSyncPoint;
11975 frameSyncPoint = 0; // parse the final nal
11976
11977 if (frameBuffer && frameBuffer.byteLength > 3) {
11978 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
11979
11980 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
11981 foundKeyFrame = true;
11982 }
11983 }
11984
11985 return foundKeyFrame;
11986 };
11987
11988 var probe$1 = {
11989 parseType: parseType,
11990 parsePat: parsePat,
11991 parsePmt: parsePmt,
11992 parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
11993 parsePesType: parsePesType,
11994 parsePesTime: parsePesTime,
11995 videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
11996 };
11997 var handleRollover = timestampRolloverStream.handleRollover;
11998 var probe = {};
11999 probe.ts = probe$1;
12000 probe.aac = utils;
12001 var ONE_SECOND_IN_TS = clock.ONE_SECOND_IN_TS;
12002 var MP2T_PACKET_LENGTH = 188,
12003 // bytes
12004 SYNC_BYTE = 0x47;
12005 /**
12006 * walks through segment data looking for pat and pmt packets to parse out
12007 * program map table information
12008 */
12009
12010 var parsePsi_ = function parsePsi_(bytes, pmt) {
12011 var startIndex = 0,
12012 endIndex = MP2T_PACKET_LENGTH,
12013 packet,
12014 type;
12015
12016 while (endIndex < bytes.byteLength) {
12017 // Look for a pair of start and end sync bytes in the data.
12018 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
12019 // We found a packet
12020 packet = bytes.subarray(startIndex, endIndex);
12021 type = probe.ts.parseType(packet, pmt.pid);
12022
12023 switch (type) {
12024 case 'pat':
12025 pmt.pid = probe.ts.parsePat(packet);
12026 break;
12027
12028 case 'pmt':
12029 var table = probe.ts.parsePmt(packet);
12030 pmt.table = pmt.table || {};
12031 Object.keys(table).forEach(function (key) {
12032 pmt.table[key] = table[key];
12033 });
12034 break;
12035 }
12036
12037 startIndex += MP2T_PACKET_LENGTH;
12038 endIndex += MP2T_PACKET_LENGTH;
12039 continue;
12040 } // If we get here, we have somehow become de-synchronized and we need to step
12041 // forward one byte at a time until we find a pair of sync bytes that denote
12042 // a packet
12043
12044
12045 startIndex++;
12046 endIndex++;
12047 }
12048 };
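// Usage sketch for the PSI walk above (illustrative only): start with an
// empty pmt accumulator and let parsePsi_ fill in the PMT pid and the
// elementary-stream table.
// eslint-disable-next-line no-unused-vars
var exampleParsePsi = function (tsBytes) {
  var pmt = { pid: null, table: null };
  parsePsi_(tsBytes, pmt);
  // pmt.pid is the PMT's PID from the PAT; pmt.table maps elementary PIDs to
  // stream types (0x1b for H.264 video, 0x0f for ADTS audio)
  return pmt;
};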
12049 /**
12050 * walks through the segment data from the start and end to get timing information
12051 * for the first and last audio pes packets
12052 */
12053
12054
12055 var parseAudioPes_ = function parseAudioPes_(bytes, pmt, result) {
12056 var startIndex = 0,
12057 endIndex = MP2T_PACKET_LENGTH,
12058 packet,
12059 type,
12060 pesType,
12061 pusi,
12062 parsed;
12063 var endLoop = false; // Start walking from start of segment to get first audio packet
12064
12065 while (endIndex <= bytes.byteLength) {
12066 // Look for a pair of start and end sync bytes in the data.
12067 if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
12068 // We found a packet
12069 packet = bytes.subarray(startIndex, endIndex);
12070 type = probe.ts.parseType(packet, pmt.pid);
12071
12072 switch (type) {
12073 case 'pes':
12074 pesType = probe.ts.parsePesType(packet, pmt.table);
12075 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
12076
12077 if (pesType === 'audio' && pusi) {
12078 parsed = probe.ts.parsePesTime(packet);
12079
12080 if (parsed) {
12081 parsed.type = 'audio';
12082 result.audio.push(parsed);
12083 endLoop = true;
12084 }
12085 }
12086
12087 break;
12088 }
12089
12090 if (endLoop) {
12091 break;
12092 }
12093
12094 startIndex += MP2T_PACKET_LENGTH;
12095 endIndex += MP2T_PACKET_LENGTH;
12096 continue;
12097 } // If we get here, we have somehow become de-synchronized and we need to step
12098 // forward one byte at a time until we find a pair of sync bytes that denote
12099 // a packet
12100
12101
12102 startIndex++;
12103 endIndex++;
12104 } // Start walking from end of segment to get last audio packet
12105
12106
12107 endIndex = bytes.byteLength;
12108 startIndex = endIndex - MP2T_PACKET_LENGTH;
12109 endLoop = false;
12110
12111 while (startIndex >= 0) {
12112 // Look for a pair of start and end sync bytes in the data.
12113 if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
12114 // We found a packet
12115 packet = bytes.subarray(startIndex, endIndex);
12116 type = probe.ts.parseType(packet, pmt.pid);
12117
12118 switch (type) {
12119 case 'pes':
12120 pesType = probe.ts.parsePesType(packet, pmt.table);
12121 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
12122
12123 if (pesType === 'audio' && pusi) {
12124 parsed = probe.ts.parsePesTime(packet);
12125
12126 if (parsed) {
12127 parsed.type = 'audio';
12128 result.audio.push(parsed);
12129 endLoop = true;
12130 }
12131 }
12132
12133 break;
12134 }
12135
12136 if (endLoop) {
12137 break;
12138 }
12139
12140 startIndex -= MP2T_PACKET_LENGTH;
12141 endIndex -= MP2T_PACKET_LENGTH;
12142 continue;
12143 } // If we get here, we have somehow become de-synchronized and we need to step
12144 // backward one byte at a time until we find a pair of sync bytes that denote
12145 // a packet
12146
12147
12148 startIndex--;
12149 endIndex--;
12150 }
12151 };
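
/**
 * Editor's note: parseAudioPes_ intentionally walks the buffer twice (from
 * the front for the first audio PES timestamp, then from the back for the
 * last), so result.audio ends up as a two-element array shaped roughly like:
 *
 *   [{ type: 'audio', pts: firstPts, dts: firstDts },
 *    { type: 'audio', pts: lastPts, dts: lastDts }]
 *
 * which is why probeTs later checks `timeInfo.audio.length === 2`.
 */
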
12152 /**
12153 * Walks through the segment data from the start and the end to get timing
12154 * information for the first and last video PES packets, as well as timing
12155 * information for the first key frame.
12156 */
12157
12158
12159 var parseVideoPes_ = function parseVideoPes_(bytes, pmt, result) {
12160 var startIndex = 0,
12161 endIndex = MP2T_PACKET_LENGTH,
12162 packet,
12163 type,
12164 pesType,
12165 pusi,
12166 parsed,
12167 frame,
12168 i,
12169 pes;
12170 var endLoop = false;
12171 var currentFrame = {
12172 data: [],
12173 size: 0
12174 }; // Start walking from start of segment to get first video packet
12175
12176 while (endIndex < bytes.byteLength) {
12177 // Look for a pair of start and end sync bytes in the data.
12178 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
12179 // We found a packet
12180 packet = bytes.subarray(startIndex, endIndex);
12181 type = probe.ts.parseType(packet, pmt.pid);
12182
12183 switch (type) {
12184 case 'pes':
12185 pesType = probe.ts.parsePesType(packet, pmt.table);
12186 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
12187
12188 if (pesType === 'video') {
12189 if (pusi && !endLoop) {
12190 parsed = probe.ts.parsePesTime(packet);
12191
12192 if (parsed) {
12193 parsed.type = 'video';
12194 result.video.push(parsed);
12195 endLoop = true;
12196 }
12197 }
12198
12199 if (!result.firstKeyFrame) {
12200 if (pusi) {
12201 if (currentFrame.size !== 0) {
12202 frame = new Uint8Array(currentFrame.size);
12203 i = 0;
12204
12205 while (currentFrame.data.length) {
12206 pes = currentFrame.data.shift();
12207 frame.set(pes, i);
12208 i += pes.byteLength;
12209 }
12210
12211 if (probe.ts.videoPacketContainsKeyFrame(frame)) {
12212 var firstKeyFrame = probe.ts.parsePesTime(frame); // PTS/DTS may not be available. Simply *not* setting
12213 // the keyframe seems to work fine with HLS playback
12214 // and is definitely preferable to a crash with a TypeError.
12215
12216 if (firstKeyFrame) {
12217 result.firstKeyFrame = firstKeyFrame;
12218 result.firstKeyFrame.type = 'video';
12219 } else {
12220 // eslint-disable-next-line
12221 console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
12222 }
12223 }
12224
12225 currentFrame.size = 0;
12226 }
12227 }
12228
12229 currentFrame.data.push(packet);
12230 currentFrame.size += packet.byteLength;
12231 }
12232 }
12233
12234 break;
12235 }
12236
12237 if (endLoop && result.firstKeyFrame) {
12238 break;
12239 }
12240
12241 startIndex += MP2T_PACKET_LENGTH;
12242 endIndex += MP2T_PACKET_LENGTH;
12243 continue;
12244 } // If we get here, we have somehow become de-synchronized and we need to step
12245 // forward one byte at a time until we find a pair of sync bytes that denote
12246 // a packet
12247
12248
12249 startIndex++;
12250 endIndex++;
12251 } // Start walking from end of segment to get last video packet
12252
12253
12254 endIndex = bytes.byteLength;
12255 startIndex = endIndex - MP2T_PACKET_LENGTH;
12256 endLoop = false;
12257
12258 while (startIndex >= 0) {
12259 // Look for a pair of start and end sync bytes in the data.
12260 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
12261 // We found a packet
12262 packet = bytes.subarray(startIndex, endIndex);
12263 type = probe.ts.parseType(packet, pmt.pid);
12264
12265 switch (type) {
12266 case 'pes':
12267 pesType = probe.ts.parsePesType(packet, pmt.table);
12268 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
12269
12270 if (pesType === 'video' && pusi) {
12271 parsed = probe.ts.parsePesTime(packet);
12272
12273 if (parsed) {
12274 parsed.type = 'video';
12275 result.video.push(parsed);
12276 endLoop = true;
12277 }
12278 }
12279
12280 break;
12281 }
12282
12283 if (endLoop) {
12284 break;
12285 }
12286
12287 startIndex -= MP2T_PACKET_LENGTH;
12288 endIndex -= MP2T_PACKET_LENGTH;
12289 continue;
12290 } // If we get here, we have somehow become de-synchronized and we need to step
12291 // backward one byte at a time until we find a pair of sync bytes that denote
12292 // a packet
12293
12294
12295 startIndex--;
12296 endIndex--;
12297 }
12298 };
12299 /**
12300 * Adjusts the timestamp information for the segment to account for
12301 * rollover and converts to seconds based on the PES packet timescale (90kHz clock).
12302 */
12303
12304
12305 var adjustTimestamp_ = function adjustTimestamp_(segmentInfo, baseTimestamp) {
12306 if (segmentInfo.audio && segmentInfo.audio.length) {
12307 var audioBaseTimestamp = baseTimestamp;
12308
12309 if (typeof audioBaseTimestamp === 'undefined' || isNaN(audioBaseTimestamp)) {
12310 audioBaseTimestamp = segmentInfo.audio[0].dts;
12311 }
12312
12313 segmentInfo.audio.forEach(function (info) {
12314 info.dts = handleRollover(info.dts, audioBaseTimestamp);
12315 info.pts = handleRollover(info.pts, audioBaseTimestamp); // time in seconds
12316
12317 info.dtsTime = info.dts / ONE_SECOND_IN_TS;
12318 info.ptsTime = info.pts / ONE_SECOND_IN_TS;
12319 });
12320 }
12321
12322 if (segmentInfo.video && segmentInfo.video.length) {
12323 var videoBaseTimestamp = baseTimestamp;
12324
12325 if (typeof videoBaseTimestamp === 'undefined' || isNaN(videoBaseTimestamp)) {
12326 videoBaseTimestamp = segmentInfo.video[0].dts;
12327 }
12328
12329 segmentInfo.video.forEach(function (info) {
12330 info.dts = handleRollover(info.dts, videoBaseTimestamp);
12331 info.pts = handleRollover(info.pts, videoBaseTimestamp); // time in seconds
12332
12333 info.dtsTime = info.dts / ONE_SECOND_IN_TS;
12334 info.ptsTime = info.pts / ONE_SECOND_IN_TS;
12335 });
12336
12337 if (segmentInfo.firstKeyFrame) {
12338 var frame = segmentInfo.firstKeyFrame;
12339 frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
12340 frame.pts = handleRollover(frame.pts, videoBaseTimestamp); // time in seconds
12341
12342 frame.dtsTime = frame.dts / ONE_SECOND_IN_TS;
12343 frame.ptsTime = frame.pts / ONE_SECOND_IN_TS;
12344 }
12345 }
12346 };
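
/**
 * Editor's note: a simplified, illustrative version of the rollover handling
 * that adjustTimestamp_ delegates to `handleRollover` (defined earlier in
 * this bundle, from mux.js). PTS/DTS values are 33 bits, so they wrap every
 * 2^33 ticks (roughly 26.5 hours at 90kHz). This sketch assumes the true
 * value lies within half a rollover period of the reference timestamp.
 */

var exampleHandleRollover = function (value, reference) {
  var ROLLOVER = Math.pow(2, 33); // 8589934592 ticks per wrap
  var HALF = Math.pow(2, 32);

  // shift by whole rollover periods until the value lands within half a
  // period of the reference
  while (value - reference > HALF) {
    value -= ROLLOVER;
  }

  while (value - reference < -HALF) {
    value += ROLLOVER;
  }

  return value;
};
// e.g. exampleHandleRollover(1000, Math.pow(2, 33) - 1000) returns
// Math.pow(2, 33) + 1000: a small post-wrap PTS maps to just past the
// reference rather than ~26.5 hours before it.
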
12347 /**
12348 * Inspects the AAC data stream for start and end time information.
12349 */
12350
12351
12352 var inspectAac_ = function inspectAac_(bytes) {
12353 var endLoop = false,
12354 audioCount = 0,
12355 sampleRate = null,
12356 timestamp = null,
12357 frameSize = 0,
12358 byteIndex = 0,
12359 packet;
12360
12361 while (bytes.length - byteIndex >= 3) {
12362 var type = probe.aac.parseType(bytes, byteIndex);
12363
12364 switch (type) {
12365 case 'timed-metadata':
12366 // Exit early because we don't have enough to parse
12367 // the ID3 tag header
12368 if (bytes.length - byteIndex < 10) {
12369 endLoop = true;
12370 break;
12371 }
12372
12373 frameSize = probe.aac.parseId3TagSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
12374 // to emit a full packet
12375
12376 if (frameSize > bytes.length) {
12377 endLoop = true;
12378 break;
12379 }
12380
12381 if (timestamp === null) {
12382 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
12383 timestamp = probe.aac.parseAacTimestamp(packet);
12384 }
12385
12386 byteIndex += frameSize;
12387 break;
12388
12389 case 'audio':
12390 // Exit early because we don't have enough to parse
12391 // the ADTS frame header
12392 if (bytes.length - byteIndex < 7) {
12393 endLoop = true;
12394 break;
12395 }
12396
12397 frameSize = probe.aac.parseAdtsSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
12398 // to emit a full packet
12399
12400 if (frameSize > bytes.length) {
12401 endLoop = true;
12402 break;
12403 }
12404
12405 if (sampleRate === null) {
12406 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
12407 sampleRate = probe.aac.parseSampleRate(packet);
12408 }
12409
12410 audioCount++;
12411 byteIndex += frameSize;
12412 break;
12413
12414 default:
12415 byteIndex++;
12416 break;
12417 }
12418
12419 if (endLoop) {
12420 return null;
12421 }
12422 }
12423
12424 if (sampleRate === null || timestamp === null) {
12425 return null;
12426 }
12427
12428 var audioTimescale = ONE_SECOND_IN_TS / sampleRate;
12429 var result = {
12430 audio: [{
12431 type: 'audio',
12432 dts: timestamp,
12433 pts: timestamp
12434 }, {
12435 type: 'audio',
12436 dts: timestamp + audioCount * 1024 * audioTimescale,
12437 pts: timestamp + audioCount * 1024 * audioTimescale
12438 }]
12439 };
12440 return result;
12441 };
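
/**
 * Editor's note: illustrative arithmetic behind the end timestamp computed
 * above. Each ADTS (AAC) frame carries 1024 PCM samples, so one frame lasts
 * 1024 / sampleRate seconds, or 1024 * (90000 / sampleRate) ticks on the
 * 90kHz clock, i.e. the `audioCount * 1024 * audioTimescale` term above. The
 * helper name is hypothetical.
 */

var exampleAacEndPts = function (startPts, frameCount, sampleRate) {
  var ticksPerSample = 90000 / sampleRate; // e.g. 1.875 at 48000 Hz
  return startPts + frameCount * 1024 * ticksPerSample;
};
// e.g. 100 frames at 48kHz span 100 * 1024 * 1.875 = 192000 ticks (~2.13s)
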
12442 /**
12443 * Inspects the transport stream segment data for start and end time information
12444 * of the audio and video tracks (when present) as well as the first key frame's
12445 * start time.
12446 */
12447
12448
12449 var inspectTs_ = function inspectTs_(bytes) {
12450 var pmt = {
12451 pid: null,
12452 table: null
12453 };
12454 var result = {};
12455 parsePsi_(bytes, pmt);
12456
12457 for (var pid in pmt.table) {
12458 if (pmt.table.hasOwnProperty(pid)) {
12459 var type = pmt.table[pid];
12460
12461 switch (type) {
12462 case streamTypes.H264_STREAM_TYPE:
12463 result.video = [];
12464 parseVideoPes_(bytes, pmt, result);
12465
12466 if (result.video.length === 0) {
12467 delete result.video;
12468 }
12469
12470 break;
12471
12472 case streamTypes.ADTS_STREAM_TYPE:
12473 result.audio = [];
12474 parseAudioPes_(bytes, pmt, result);
12475
12476 if (result.audio.length === 0) {
12477 delete result.audio;
12478 }
12479
12480 break;
12481 }
12482 }
12483 }
12484
12485 return result;
12486 };
12487 /**
12488 * Inspects segment byte data and returns an object with start and end timing information
12489 *
12490 * @param {Uint8Array} bytes The segment byte data
12491 * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
12492 * timestamps for rollover. This value must be on the 90kHz clock.
12493 * @return {Object} Object containing start and end frame timing info of segment.
12494 */
12495
12496
12497 var inspect = function inspect(bytes, baseTimestamp) {
12498 var isAacData = probe.aac.isLikelyAacData(bytes);
12499 var result;
12500
12501 if (isAacData) {
12502 result = inspectAac_(bytes);
12503 } else {
12504 result = inspectTs_(bytes);
12505 }
12506
12507 if (!result || !result.audio && !result.video) {
12508 return null;
12509 }
12510
12511 adjustTimestamp_(result, baseTimestamp);
12512 return result;
12513 };
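
/**
 * Editor's note: a hypothetical usage sketch of `inspect`. `segmentBytes`
 * stands in for a Uint8Array of TS or ADTS data, and the second argument is
 * a reference timestamp on the 90kHz clock:
 *
 *   var timing = inspect(segmentBytes, baseStartTimeInSeconds * 90000);
 *
 *   if (timing && timing.video) {
 *     // timing.video[0].ptsTime is the first video PTS in seconds and
 *     // timing.video[timing.video.length - 1].ptsTime is the last
 *   }
 */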
12514
12515 var tsInspector = {
12516 inspect: inspect,
12517 parseAudioPes_: parseAudioPes_
12518 };
12519 /* global self */
12520
12521 /**
12522 * Re-emits transmuxer events by converting them into messages to the
12523 * world outside the worker.
12524 *
12525 * @param {Object} transmuxer the transmuxer to wire events on
12526 * @private
12527 */
12528
12529 var wireTransmuxerEvents = function wireTransmuxerEvents(self, transmuxer) {
12530 transmuxer.on('data', function (segment) {
12531 // transfer ownership of the underlying ArrayBuffer
12532 // instead of doing a copy to save memory
12533 // ArrayBuffers are transferable but generic TypedArrays are not
12534 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
12535 var initArray = segment.initSegment;
12536 segment.initSegment = {
12537 data: initArray.buffer,
12538 byteOffset: initArray.byteOffset,
12539 byteLength: initArray.byteLength
12540 };
12541 var typedArray = segment.data;
12542 segment.data = typedArray.buffer;
12543 self.postMessage({
12544 action: 'data',
12545 segment: segment,
12546 byteOffset: typedArray.byteOffset,
12547 byteLength: typedArray.byteLength
12548 }, [segment.data]);
12549 });
12550 transmuxer.on('done', function (data) {
12551 self.postMessage({
12552 action: 'done'
12553 });
12554 });
12555 transmuxer.on('gopInfo', function (gopInfo) {
12556 self.postMessage({
12557 action: 'gopInfo',
12558 gopInfo: gopInfo
12559 });
12560 });
12561 transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {
12562 var videoSegmentTimingInfo = {
12563 start: {
12564 decode: clock.videoTsToSeconds(timingInfo.start.dts),
12565 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
12566 },
12567 end: {
12568 decode: clock.videoTsToSeconds(timingInfo.end.dts),
12569 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
12570 },
12571 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
12572 };
12573
12574 if (timingInfo.prependedContentDuration) {
12575 videoSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
12576 }
12577
12578 self.postMessage({
12579 action: 'videoSegmentTimingInfo',
12580 videoSegmentTimingInfo: videoSegmentTimingInfo
12581 });
12582 });
12583 transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {
12584 // Note that all times for [audio/video]SegmentTimingInfo events are in video clock
12585 var audioSegmentTimingInfo = {
12586 start: {
12587 decode: clock.videoTsToSeconds(timingInfo.start.dts),
12588 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
12589 },
12590 end: {
12591 decode: clock.videoTsToSeconds(timingInfo.end.dts),
12592 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
12593 },
12594 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
12595 };
12596
12597 if (timingInfo.prependedContentDuration) {
12598 audioSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
12599 }
12600
12601 self.postMessage({
12602 action: 'audioSegmentTimingInfo',
12603 audioSegmentTimingInfo: audioSegmentTimingInfo
12604 });
12605 });
12606 transmuxer.on('id3Frame', function (id3Frame) {
12607 self.postMessage({
12608 action: 'id3Frame',
12609 id3Frame: id3Frame
12610 });
12611 });
12612 transmuxer.on('caption', function (caption) {
12613 self.postMessage({
12614 action: 'caption',
12615 caption: caption
12616 });
12617 });
12618 transmuxer.on('trackinfo', function (trackInfo) {
12619 self.postMessage({
12620 action: 'trackinfo',
12621 trackInfo: trackInfo
12622 });
12623 });
12624 transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
12625 // convert to video TS since we prioritize video time over audio
12626 self.postMessage({
12627 action: 'audioTimingInfo',
12628 audioTimingInfo: {
12629 start: clock.videoTsToSeconds(audioTimingInfo.start),
12630 end: clock.videoTsToSeconds(audioTimingInfo.end)
12631 }
12632 });
12633 });
12634 transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
12635 self.postMessage({
12636 action: 'videoTimingInfo',
12637 videoTimingInfo: {
12638 start: clock.videoTsToSeconds(videoTimingInfo.start),
12639 end: clock.videoTsToSeconds(videoTimingInfo.end)
12640 }
12641 });
12642 });
12643 transmuxer.on('log', function (log) {
12644 self.postMessage({
12645 action: 'log',
12646 log: log
12647 });
12648 });
12649 };
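
/**
 * Editor's note: a minimal, standalone sketch of the transferable-ArrayBuffer
 * pattern used by the 'data' handler above. The helper name is hypothetical
 * and the function is not used by the library.
 */

var examplePostTransferable = function (target, typedArray) {
  // postMessage copies structured data by default; listing the buffer in the
  // transfer list moves ownership instead, detaching it on the sending side
  target.postMessage({
    data: typedArray.buffer,
    byteOffset: typedArray.byteOffset,
    byteLength: typedArray.byteLength
  }, [typedArray.buffer]);
};
// the receiver rebuilds the view with
// new Uint8Array(message.data, message.byteOffset, message.byteLength)
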
12650 /**
12651 * All incoming messages route through this hash. If no function exists
12652 * to handle an incoming message, then we ignore the message.
12653 *
12654 * @class MessageHandlers
12655 * @param {Object} options the options to initialize with
12656 */
12657
12658
12659 var MessageHandlers = /*#__PURE__*/function () {
12660 function MessageHandlers(self, options) {
12661 this.options = options || {};
12662 this.self = self;
12663 this.init();
12664 }
12665 /**
12666 * initialize our web worker and wire all the events.
12667 */
12668
12669
12670 var _proto = MessageHandlers.prototype;
12671
12672 _proto.init = function init() {
12673 if (this.transmuxer) {
12674 this.transmuxer.dispose();
12675 }
12676
12677 this.transmuxer = new transmuxer.Transmuxer(this.options);
12678 wireTransmuxerEvents(this.self, this.transmuxer);
12679 };
12680
12681 _proto.pushMp4Captions = function pushMp4Captions(data) {
12682 if (!this.captionParser) {
12683 this.captionParser = new captionParser();
12684 this.captionParser.init();
12685 }
12686
12687 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
12688 var parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);
12689 this.self.postMessage({
12690 action: 'mp4Captions',
12691 captions: parsed && parsed.captions || [],
12692 logs: parsed && parsed.logs || [],
12693 data: segment.buffer
12694 }, [segment.buffer]);
12695 };
12696
12697 _proto.probeMp4StartTime = function probeMp4StartTime(_ref) {
12698 var timescales = _ref.timescales,
12699 data = _ref.data;
12700 var startTime = probe$2.startTime(timescales, data);
12701 this.self.postMessage({
12702 action: 'probeMp4StartTime',
12703 startTime: startTime,
12704 data: data
12705 }, [data.buffer]);
12706 };
12707
12708 _proto.probeMp4Tracks = function probeMp4Tracks(_ref2) {
12709 var data = _ref2.data;
12710 var tracks = probe$2.tracks(data);
12711 this.self.postMessage({
12712 action: 'probeMp4Tracks',
12713 tracks: tracks,
12714 data: data
12715 }, [data.buffer]);
12716 }
12717 /**
12718 * Probe an mpeg2-ts segment to determine the start time of the segment in its
12719 * internal "media time," as well as whether it contains video and/or audio.
12720 *
12721 * @private
12722 * @param {Uint8Array} bytes - segment bytes
12723 * @param {number} baseStartTime
12724 * Relative reference timestamp used when adjusting frame timestamps for rollover.
12725 * This value should be in seconds, as it's converted to a 90kHz clock within the
12726 * function body.
12727 * @return {Object} The start time of the current segment in "media time" as well as
12728 * whether it contains video and/or audio
12729 */
12730 ;
12731
12732 _proto.probeTs = function probeTs(_ref3) {
12733 var data = _ref3.data,
12734 baseStartTime = _ref3.baseStartTime;
12735 var tsStartTime = typeof baseStartTime === 'number' && !isNaN(baseStartTime) ? baseStartTime * clock.ONE_SECOND_IN_TS : void 0;
12736 var timeInfo = tsInspector.inspect(data, tsStartTime);
12737 var result = null;
12738
12739 if (timeInfo) {
12740 result = {
12741 // each type's time info comes back as an array of 2 times, start and end
12742 hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
12743 hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
12744 };
12745
12746 if (result.hasVideo) {
12747 result.videoStart = timeInfo.video[0].ptsTime;
12748 }
12749
12750 if (result.hasAudio) {
12751 result.audioStart = timeInfo.audio[0].ptsTime;
12752 }
12753 }
12754
12755 this.self.postMessage({
12756 action: 'probeTs',
12757 result: result,
12758 data: data
12759 }, [data.buffer]);
12760 };
12761
12762 _proto.clearAllMp4Captions = function clearAllMp4Captions() {
12763 if (this.captionParser) {
12764 this.captionParser.clearAllCaptions();
12765 }
12766 };
12767
12768 _proto.clearParsedMp4Captions = function clearParsedMp4Captions() {
12769 if (this.captionParser) {
12770 this.captionParser.clearParsedCaptions();
12771 }
12772 }
12773 /**
12774 * Adds data (a ts segment) to the start of the transmuxer pipeline for
12775 * processing.
12776 *
12777 * @param {ArrayBuffer} data data to push into the muxer
12778 */
12779 ;
12780
12781 _proto.push = function push(data) {
12782 // Cast array buffer to correct type for transmuxer
12783 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
12784 this.transmuxer.push(segment);
12785 }
12786 /**
12787 * Recreate the transmuxer so that the next segment added via `push`
12788 * starts with a fresh transmuxer.
12789 */
12790 ;
12791
12792 _proto.reset = function reset() {
12793 this.transmuxer.reset();
12794 }
12795 /**
12796 * Set the value that will be used as the `baseMediaDecodeTime` time for the
12797 * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
12798 * set relative to the first based on the PTS values.
12799 *
12800 * @param {Object} data used to set the timestamp offset in the muxer
12801 */
12802 ;
12803
12804 _proto.setTimestampOffset = function setTimestampOffset(data) {
12805 var timestampOffset = data.timestampOffset || 0;
12806 this.transmuxer.setBaseMediaDecodeTime(Math.round(clock.secondsToVideoTs(timestampOffset)));
12807 };
12808
12809 _proto.setAudioAppendStart = function setAudioAppendStart(data) {
12810 this.transmuxer.setAudioAppendStart(Math.ceil(clock.secondsToVideoTs(data.appendStart)));
12811 };
12812
12813 _proto.setRemux = function setRemux(data) {
12814 this.transmuxer.setRemux(data.remux);
12815 }
12816 /**
12817 * Forces the pipeline to finish processing the last segment and emit its
12818 * results.
12819 *
12820 * @param {Object} data event data, not really used
12821 */
12822 ;
12823
12824 _proto.flush = function flush(data) {
12825 this.transmuxer.flush(); // transmuxed done action is fired after both audio/video pipelines are flushed
12826
12827 self.postMessage({
12828 action: 'done',
12829 type: 'transmuxed'
12830 });
12831 };
12832
12833 _proto.endTimeline = function endTimeline() {
12834 this.transmuxer.endTimeline(); // transmuxed endedtimeline action is fired after both audio/video pipelines end their
12835 // timelines
12836
12837 self.postMessage({
12838 action: 'endedtimeline',
12839 type: 'transmuxed'
12840 });
12841 };
12842
12843 _proto.alignGopsWith = function alignGopsWith(data) {
12844 this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
12845 };
12846
12847 return MessageHandlers;
12848 }();
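
/**
 * Editor's note: a hypothetical sketch of the message protocol this worker
 * expects from the main thread (createTransmuxer, later in this file, is the
 * real producer side):
 *
 *   worker.postMessage({ action: 'init', options: {} });
 *   worker.postMessage({
 *     action: 'push',
 *     data: buffer,        // an ArrayBuffer, listed as a transferable
 *     byteOffset: 0,
 *     byteLength: buffer.byteLength
 *   }, [buffer]);
 *   worker.postMessage({ action: 'flush' });
 *
 * Replies arrive as { action: 'data', ... } messages followed by a final
 * { action: 'done', type: 'transmuxed' }.
 */
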
12849 /**
12850 * Our web worker interface so that things can talk to mux.js
12851 * that will be running in a web worker. the scope is passed to this by
12852 * webworkify.
12853 *
12854 * @param {Object} self the scope for the web worker
12855 */
12856
12857
12858 self.onmessage = function (event) {
12859 if (event.data.action === 'init' && event.data.options) {
12860 this.messageHandlers = new MessageHandlers(self, event.data.options);
12861 return;
12862 }
12863
12864 if (!this.messageHandlers) {
12865 this.messageHandlers = new MessageHandlers(self);
12866 }
12867
12868 if (event.data && event.data.action && event.data.action !== 'init') {
12869 if (this.messageHandlers[event.data.action]) {
12870 this.messageHandlers[event.data.action](event.data);
12871 }
12872 }
12873 };
12874}));
12875var TransmuxWorker = factory(workerCode$1);
12876/* rollup-plugin-worker-factory end for worker!/Users/bclifford/Code/vhs-release-test/src/transmuxer-worker.js */
12877
12878var handleData_ = function handleData_(event, transmuxedData, callback) {
12879 var _event$data$segment = event.data.segment,
12880 type = _event$data$segment.type,
12881 initSegment = _event$data$segment.initSegment,
12882 captions = _event$data$segment.captions,
12883 captionStreams = _event$data$segment.captionStreams,
12884 metadata = _event$data$segment.metadata,
12885 videoFrameDtsTime = _event$data$segment.videoFrameDtsTime,
12886 videoFramePtsTime = _event$data$segment.videoFramePtsTime;
12887 transmuxedData.buffer.push({
12888 captions: captions,
12889 captionStreams: captionStreams,
12890 metadata: metadata
12891 });
12892 var boxes = event.data.segment.boxes || {
12893 data: event.data.segment.data
12894 };
12895 var result = {
12896 type: type,
12897 // cast ArrayBuffer to TypedArray
12898 data: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),
12899 initSegment: new Uint8Array(initSegment.data, initSegment.byteOffset, initSegment.byteLength)
12900 };
12901
12902 if (typeof videoFrameDtsTime !== 'undefined') {
12903 result.videoFrameDtsTime = videoFrameDtsTime;
12904 }
12905
12906 if (typeof videoFramePtsTime !== 'undefined') {
12907 result.videoFramePtsTime = videoFramePtsTime;
12908 }
12909
12910 callback(result);
12911};
12912var handleDone_ = function handleDone_(_ref) {
12913 var transmuxedData = _ref.transmuxedData,
12914 callback = _ref.callback;
12915 // Previously we only returned data on data events,
12916 // not on done events. Clear out the buffer to keep that consistent.
12917 transmuxedData.buffer = []; // all buffers should have been flushed from the muxer, so start processing anything we
12918 // have received
12919
12920 callback(transmuxedData);
12921};
12922var handleGopInfo_ = function handleGopInfo_(event, transmuxedData) {
12923 transmuxedData.gopInfo = event.data.gopInfo;
12924};
12925var processTransmux = function processTransmux(options) {
12926 var transmuxer = options.transmuxer,
12927 bytes = options.bytes,
12928 audioAppendStart = options.audioAppendStart,
12929 gopsToAlignWith = options.gopsToAlignWith,
12930 remux = options.remux,
12931 onData = options.onData,
12932 onTrackInfo = options.onTrackInfo,
12933 onAudioTimingInfo = options.onAudioTimingInfo,
12934 onVideoTimingInfo = options.onVideoTimingInfo,
12935 onVideoSegmentTimingInfo = options.onVideoSegmentTimingInfo,
12936 onAudioSegmentTimingInfo = options.onAudioSegmentTimingInfo,
12937 onId3 = options.onId3,
12938 onCaptions = options.onCaptions,
12939 onDone = options.onDone,
12940 onEndedTimeline = options.onEndedTimeline,
12941 onTransmuxerLog = options.onTransmuxerLog,
12942 isEndOfTimeline = options.isEndOfTimeline;
12943 var transmuxedData = {
12944 buffer: []
12945 };
12946 var waitForEndedTimelineEvent = isEndOfTimeline;
12947
12948 var handleMessage = function handleMessage(event) {
12949 if (transmuxer.currentTransmux !== options) {
12950 // disposed
12951 return;
12952 }
12953
12954 if (event.data.action === 'data') {
12955 handleData_(event, transmuxedData, onData);
12956 }
12957
12958 if (event.data.action === 'trackinfo') {
12959 onTrackInfo(event.data.trackInfo);
12960 }
12961
12962 if (event.data.action === 'gopInfo') {
12963 handleGopInfo_(event, transmuxedData);
12964 }
12965
12966 if (event.data.action === 'audioTimingInfo') {
12967 onAudioTimingInfo(event.data.audioTimingInfo);
12968 }
12969
12970 if (event.data.action === 'videoTimingInfo') {
12971 onVideoTimingInfo(event.data.videoTimingInfo);
12972 }
12973
12974 if (event.data.action === 'videoSegmentTimingInfo') {
12975 onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
12976 }
12977
12978 if (event.data.action === 'audioSegmentTimingInfo') {
12979 onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
12980 }
12981
12982 if (event.data.action === 'id3Frame') {
12983 onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
12984 }
12985
12986 if (event.data.action === 'caption') {
12987 onCaptions(event.data.caption);
12988 }
12989
12990 if (event.data.action === 'endedtimeline') {
12991 waitForEndedTimelineEvent = false;
12992 onEndedTimeline();
12993 }
12994
12995 if (event.data.action === 'log') {
12996 onTransmuxerLog(event.data.log);
12997 } // wait for the transmuxed event since we may have audio and video
12998
12999
13000 if (event.data.type !== 'transmuxed') {
13001 return;
13002 } // If the "endedtimeline" event has not yet fired, and this segment represents the end
13003 // of a timeline, that means there may still be data events before the segment
13004 // processing can be considered complete. In that case, the final event should be
13005 // an "endedtimeline" event with the type "transmuxed."
13006
13007
13008 if (waitForEndedTimelineEvent) {
13009 return;
13010 }
13011
13012 transmuxer.onmessage = null;
13013 handleDone_({
13014 transmuxedData: transmuxedData,
13015 callback: onDone
13016 });
13017 /* eslint-disable no-use-before-define */
13018
13019 dequeue(transmuxer);
13020 /* eslint-enable */
13021 };
13022
13023 transmuxer.onmessage = handleMessage;
13024
13025 if (audioAppendStart) {
13026 transmuxer.postMessage({
13027 action: 'setAudioAppendStart',
13028 appendStart: audioAppendStart
13029 });
13030 } // allow empty arrays to be passed to clear out GOPs
13031
13032
13033 if (Array.isArray(gopsToAlignWith)) {
13034 transmuxer.postMessage({
13035 action: 'alignGopsWith',
13036 gopsToAlignWith: gopsToAlignWith
13037 });
13038 }
13039
13040 if (typeof remux !== 'undefined') {
13041 transmuxer.postMessage({
13042 action: 'setRemux',
13043 remux: remux
13044 });
13045 }
13046
13047 if (bytes.byteLength) {
13048 var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
13049 var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
13050 transmuxer.postMessage({
13051 action: 'push',
13052 // Send the typed-array of data as an ArrayBuffer so that
13053 // it can be sent as a "Transferable" and avoid the costly
13054 // memory copy
13055 data: buffer,
13056 // To recreate the original typed-array, we need information
13057 // about what portion of the ArrayBuffer it was a view into
13058 byteOffset: byteOffset,
13059 byteLength: bytes.byteLength
13060 }, [buffer]);
13061 }
13062
13063 if (isEndOfTimeline) {
13064 transmuxer.postMessage({
13065 action: 'endTimeline'
13066 });
13067 } // even if we didn't push any bytes, we have to make sure we flush in case we reached
13068 // the end of the segment
13069
13070
13071 transmuxer.postMessage({
13072 action: 'flush'
13073 });
13074};
13075var dequeue = function dequeue(transmuxer) {
13076 transmuxer.currentTransmux = null;
13077
13078 if (transmuxer.transmuxQueue.length) {
13079 transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();
13080
13081 if (typeof transmuxer.currentTransmux === 'function') {
13082 transmuxer.currentTransmux();
13083 } else {
13084 processTransmux(transmuxer.currentTransmux);
13085 }
13086 }
13087};
13088var processAction = function processAction(transmuxer, action) {
13089 transmuxer.postMessage({
13090 action: action
13091 });
13092 dequeue(transmuxer);
13093};
13094var enqueueAction = function enqueueAction(action, transmuxer) {
13095 if (!transmuxer.currentTransmux) {
13096 transmuxer.currentTransmux = action;
13097 processAction(transmuxer, action);
13098 return;
13099 }
13100
13101 transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
13102};
13103var reset = function reset(transmuxer) {
13104 enqueueAction('reset', transmuxer);
13105};
13106var endTimeline = function endTimeline(transmuxer) {
13107 enqueueAction('endTimeline', transmuxer);
13108};
13109var transmux = function transmux(options) {
13110 if (!options.transmuxer.currentTransmux) {
13111 options.transmuxer.currentTransmux = options;
13112 processTransmux(options);
13113 return;
13114 }
13115
13116 options.transmuxer.transmuxQueue.push(options);
13117};
13118var createTransmuxer = function createTransmuxer(options) {
13119 var transmuxer = new TransmuxWorker();
13120 transmuxer.currentTransmux = null;
13121 transmuxer.transmuxQueue = [];
13122 var term = transmuxer.terminate;
13123
13124 transmuxer.terminate = function () {
13125 transmuxer.currentTransmux = null;
13126 transmuxer.transmuxQueue.length = 0;
13127 return term.call(transmuxer);
13128 };
13129
13130 transmuxer.postMessage({
13131 action: 'init',
13132 options: options
13133 });
13134 return transmuxer;
13135};
13136var segmentTransmuxer = {
13137 reset: reset,
13138 endTimeline: endTimeline,
13139 transmux: transmux,
13140 createTransmuxer: createTransmuxer
13141};
13142
13143var workerCallback = function workerCallback(options) {
13144 var transmuxer = options.transmuxer;
13145 var endAction = options.endAction || options.action;
13146 var callback = options.callback;
13147
13148 var message = _extends({}, options, {
13149 endAction: null,
13150 transmuxer: null,
13151 callback: null
13152 });
13153
13154 var listenForEndEvent = function listenForEndEvent(event) {
13155 if (event.data.action !== endAction) {
13156 return;
13157 }
13158
13159 transmuxer.removeEventListener('message', listenForEndEvent); // transfer ownership of bytes back to us.
13160
13161 if (event.data.data) {
13162 event.data.data = new Uint8Array(event.data.data, options.byteOffset || 0, options.byteLength || event.data.data.byteLength);
13163
13164 if (options.data) {
13165 options.data = event.data.data;
13166 }
13167 }
13168
13169 callback(event.data);
13170 };
13171
13172 transmuxer.addEventListener('message', listenForEndEvent);
13173
13174 if (options.data) {
13175 var isArrayBuffer = options.data instanceof ArrayBuffer;
13176 message.byteOffset = isArrayBuffer ? 0 : options.data.byteOffset;
13177 message.byteLength = options.data.byteLength;
13178 var transfers = [isArrayBuffer ? options.data : options.data.buffer];
13179 transmuxer.postMessage(message, transfers);
13180 } else {
13181 transmuxer.postMessage(message);
13182 }
13183};
13184
13185var REQUEST_ERRORS = {
13186 FAILURE: 2,
13187 TIMEOUT: -101,
13188 ABORTED: -102
13189};
13190/**
13191 * Abort all requests
13192 *
13193 * @param {Object} activeXhrs - an object that tracks all XHR requests
13194 */
13195
13196var abortAll = function abortAll(activeXhrs) {
13197 activeXhrs.forEach(function (xhr) {
13198 xhr.abort();
13199 });
13200};
13201/**
13202 * Gather important bandwidth stats once a request has completed
13203 *
13204 * @param {Object} request - the XHR request from which to gather stats
13205 */
13206
13207
13208var getRequestStats = function getRequestStats(request) {
13209 return {
13210 bandwidth: request.bandwidth,
13211 bytesReceived: request.bytesReceived || 0,
13212 roundTripTime: request.roundTripTime || 0
13213 };
13214};
13215/**
13216 * If possible gather bandwidth stats as a request is in
13217 * progress
13218 *
13219 * @param {Event} progressEvent - an event object from an XHR's progress event
13220 */
13221
13222
13223var getProgressStats = function getProgressStats(progressEvent) {
13224 var request = progressEvent.target;
13225 var roundTripTime = Date.now() - request.requestTime;
13226 var stats = {
13227 bandwidth: Infinity,
13228 bytesReceived: 0,
13229 roundTripTime: roundTripTime || 0
13230 };
13231 stats.bytesReceived = progressEvent.loaded; // This can result in Infinity if stats.roundTripTime is 0 but that is ok
13232 // because we should only use bandwidth stats on progress to determine when
13233 // to abort a request early due to insufficient bandwidth
13234
13235 stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
13236 return stats;
13237};
13238/**
13239 * Handle all error conditions in one place and return an object
13240 * with all the information
13241 *
13242 * @param {Error|null} error - if non-null signals an error occurred with the XHR
13243 * @param {Object} request - the XHR request that possibly generated the error
13244 */
13245
13246
13247var handleErrors = function handleErrors(error, request) {
13248 if (request.timedout) {
13249 return {
13250 status: request.status,
13251 message: 'HLS request timed-out at URL: ' + request.uri,
13252 code: REQUEST_ERRORS.TIMEOUT,
13253 xhr: request
13254 };
13255 }
13256
13257 if (request.aborted) {
13258 return {
13259 status: request.status,
13260 message: 'HLS request aborted at URL: ' + request.uri,
13261 code: REQUEST_ERRORS.ABORTED,
13262 xhr: request
13263 };
13264 }
13265
13266 if (error) {
13267 return {
13268 status: request.status,
13269 message: 'HLS request errored at URL: ' + request.uri,
13270 code: REQUEST_ERRORS.FAILURE,
13271 xhr: request
13272 };
13273 }
13274
13275 if (request.responseType === 'arraybuffer' && request.response.byteLength === 0) {
13276 return {
13277 status: request.status,
13278 message: 'Empty HLS response at URL: ' + request.uri,
13279 code: REQUEST_ERRORS.FAILURE,
13280 xhr: request
13281 };
13282 }
13283
13284 return null;
13285};
13286/**
13287 * Handle responses for key data and convert the key data to the correct format
13288 * for the decryption step later
13289 *
13290 * @param {Object} segment - a simplified copy of the segmentInfo object
13291 * from SegmentLoader
13292 * @param {Array} objects - objects to add the key bytes to.
13293 * @param {Function} finishProcessingFn - a callback to execute to continue processing
13294 * this request
13295 */
13296
13297
13298var handleKeyResponse = function handleKeyResponse(segment, objects, finishProcessingFn) {
13299 return function (error, request) {
13300 var response = request.response;
13301 var errorObj = handleErrors(error, request);
13302
13303 if (errorObj) {
13304 return finishProcessingFn(errorObj, segment);
13305 }
13306
13307 if (response.byteLength !== 16) {
13308 return finishProcessingFn({
13309 status: request.status,
13310 message: 'Invalid HLS key at URL: ' + request.uri,
13311 code: REQUEST_ERRORS.FAILURE,
13312 xhr: request
13313 }, segment);
13314 }
13315
13316 var view = new DataView(response);
13317 var bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
13318
13319 for (var i = 0; i < objects.length; i++) {
13320 objects[i].bytes = bytes;
13321 }
13322
13323 return finishProcessingFn(null, segment);
13324 };
13325};
13326
13327var parseInitSegment = function parseInitSegment(segment, _callback) {
13328 var type = detectContainerForBytes(segment.map.bytes); // TODO: We should also handle ts init segments here, but we
13329 // only know how to parse mp4 init segments at the moment
13330
13331 if (type !== 'mp4') {
13332 var uri = segment.map.resolvedUri || segment.map.uri;
13333 return _callback({
13334 internal: true,
13335 message: "Found unsupported " + (type || 'unknown') + " container for initialization segment at URL: " + uri,
13336 code: REQUEST_ERRORS.FAILURE
13337 });
13338 }
13339
13340 workerCallback({
13341 action: 'probeMp4Tracks',
13342 data: segment.map.bytes,
13343 transmuxer: segment.transmuxer,
13344 callback: function callback(_ref) {
13345 var tracks = _ref.tracks,
13346 data = _ref.data;
13347 // transfer bytes back to us
13348 segment.map.bytes = data;
13349 tracks.forEach(function (track) {
13350 segment.map.tracks = segment.map.tracks || {}; // only support one track of each type for now
13351
13352 if (segment.map.tracks[track.type]) {
13353 return;
13354 }
13355
13356 segment.map.tracks[track.type] = track;
13357
13358 if (typeof track.id === 'number' && track.timescale) {
13359 segment.map.timescales = segment.map.timescales || {};
13360 segment.map.timescales[track.id] = track.timescale;
13361 }
13362 });
13363 return _callback(null);
13364 }
13365 });
13366};
13367/**
13368 * Handle init-segment responses
13369 *
13370 * @param {Object} segment - a simplified copy of the segmentInfo object
13371 * from SegmentLoader
13372 * @param {Function} finishProcessingFn - a callback to execute to continue processing
13373 * this request
13374 */
13375
13376
13377var handleInitSegmentResponse = function handleInitSegmentResponse(_ref2) {
13378 var segment = _ref2.segment,
13379 finishProcessingFn = _ref2.finishProcessingFn;
13380 return function (error, request) {
13381 var errorObj = handleErrors(error, request);
13382
13383 if (errorObj) {
13384 return finishProcessingFn(errorObj, segment);
13385 }
13386
13387 var bytes = new Uint8Array(request.response); // init segment is encrypted, we will have to wait
13388 // until the key request is done to decrypt.
13389
13390 if (segment.map.key) {
13391 segment.map.encryptedBytes = bytes;
13392 return finishProcessingFn(null, segment);
13393 }
13394
13395 segment.map.bytes = bytes;
13396 parseInitSegment(segment, function (parseError) {
13397 if (parseError) {
13398 parseError.xhr = request;
13399 parseError.status = request.status;
13400 return finishProcessingFn(parseError, segment);
13401 }
13402
13403 finishProcessingFn(null, segment);
13404 });
13405 };
13406};
13407/**
13408 * Response handler for segment-requests being sure to set the correct
13409 * property depending on whether the segment is encrypted or not
13410 * Also records and keeps track of stats that are used for ABR purposes
13411 *
13412 * @param {Object} segment - a simplified copy of the segmentInfo object
13413 * from SegmentLoader
13414 * @param {Function} finishProcessingFn - a callback to execute to continue processing
13415 * this request
13416 */
13417
13418
13419var handleSegmentResponse = function handleSegmentResponse(_ref3) {
13420 var segment = _ref3.segment,
13421 finishProcessingFn = _ref3.finishProcessingFn,
13422 responseType = _ref3.responseType;
13423 return function (error, request) {
13424 var errorObj = handleErrors(error, request);
13425
13426 if (errorObj) {
13427 return finishProcessingFn(errorObj, segment);
13428 }
13429
13430 var newBytes = // although responseText "should" exist, this guard serves to prevent an error being
13431 // thrown for two primary cases:
13432 // 1. the mime type override stops working, or is not implemented for a specific
13433 // browser
13434 // 2. when using mock XHR libraries like sinon that do not allow the override behavior
13435 responseType === 'arraybuffer' || !request.responseText ? request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));
13436 segment.stats = getRequestStats(request);
13437
13438 if (segment.key) {
13439 segment.encryptedBytes = new Uint8Array(newBytes);
13440 } else {
13441 segment.bytes = new Uint8Array(newBytes);
13442 }
13443
13444 return finishProcessingFn(null, segment);
13445 };
13446};
13447
13448var transmuxAndNotify = function transmuxAndNotify(_ref4) {
13449 var segment = _ref4.segment,
13450 bytes = _ref4.bytes,
13451 trackInfoFn = _ref4.trackInfoFn,
13452 timingInfoFn = _ref4.timingInfoFn,
13453 videoSegmentTimingInfoFn = _ref4.videoSegmentTimingInfoFn,
13454 audioSegmentTimingInfoFn = _ref4.audioSegmentTimingInfoFn,
13455 id3Fn = _ref4.id3Fn,
13456 captionsFn = _ref4.captionsFn,
13457 isEndOfTimeline = _ref4.isEndOfTimeline,
13458 endedTimelineFn = _ref4.endedTimelineFn,
13459 dataFn = _ref4.dataFn,
13460 doneFn = _ref4.doneFn,
13461 onTransmuxerLog = _ref4.onTransmuxerLog;
13462 var fmp4Tracks = segment.map && segment.map.tracks || {};
13463 var isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.
13464 // One reason for this is that in the case of full segments, we want to trust start
13465 // times from the probe, rather than the transmuxer.
13466
13467 var audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
13468 var audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
13469 var videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
13470 var videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end');
13471
13472 var finish = function finish() {
13473 return transmux({
13474 bytes: bytes,
13475 transmuxer: segment.transmuxer,
13476 audioAppendStart: segment.audioAppendStart,
13477 gopsToAlignWith: segment.gopsToAlignWith,
13478 remux: isMuxed,
13479 onData: function onData(result) {
13480 result.type = result.type === 'combined' ? 'video' : result.type;
13481 dataFn(segment, result);
13482 },
13483 onTrackInfo: function onTrackInfo(trackInfo) {
13484 if (trackInfoFn) {
13485 if (isMuxed) {
13486 trackInfo.isMuxed = true;
13487 }
13488
13489 trackInfoFn(segment, trackInfo);
13490 }
13491 },
13492 onAudioTimingInfo: function onAudioTimingInfo(audioTimingInfo) {
13493 // we only want the first start value we encounter
13494 if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
13495 audioStartFn(audioTimingInfo.start);
13496 audioStartFn = null;
13497 } // we want to continually update the end time
13498
13499
13500 if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
13501 audioEndFn(audioTimingInfo.end);
13502 }
13503 },
13504 onVideoTimingInfo: function onVideoTimingInfo(videoTimingInfo) {
13505 // we only want the first start value we encounter
13506 if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
13507 videoStartFn(videoTimingInfo.start);
13508 videoStartFn = null;
13509 } // we want to continually update the end time
13510
13511
13512 if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
13513 videoEndFn(videoTimingInfo.end);
13514 }
13515 },
13516 onVideoSegmentTimingInfo: function onVideoSegmentTimingInfo(videoSegmentTimingInfo) {
13517 videoSegmentTimingInfoFn(videoSegmentTimingInfo);
13518 },
13519 onAudioSegmentTimingInfo: function onAudioSegmentTimingInfo(audioSegmentTimingInfo) {
13520 audioSegmentTimingInfoFn(audioSegmentTimingInfo);
13521 },
13522 onId3: function onId3(id3Frames, dispatchType) {
13523 id3Fn(segment, id3Frames, dispatchType);
13524 },
13525 onCaptions: function onCaptions(captions) {
13526 captionsFn(segment, [captions]);
13527 },
13528 isEndOfTimeline: isEndOfTimeline,
13529 onEndedTimeline: function onEndedTimeline() {
13530 endedTimelineFn();
13531 },
13532 onTransmuxerLog: onTransmuxerLog,
13533 onDone: function onDone(result) {
13534 if (!doneFn) {
13535 return;
13536 }
13537
13538 result.type = result.type === 'combined' ? 'video' : result.type;
13539 doneFn(null, segment, result);
13540 }
13541 });
13542 }; // In the transmuxer, we don't yet have the ability to extract a "proper" start time,
13543 // meaning cached frame data may corrupt our notion of where this segment
13544 // really starts. To get around this, probe for the info needed.
13545
13546
13547 workerCallback({
13548 action: 'probeTs',
13549 transmuxer: segment.transmuxer,
13550 data: bytes,
13551 baseStartTime: segment.baseStartTime,
13552 callback: function callback(data) {
13553 segment.bytes = bytes = data.data;
13554 var probeResult = data.result;
13555
13556 if (probeResult) {
13557 trackInfoFn(segment, {
13558 hasAudio: probeResult.hasAudio,
13559 hasVideo: probeResult.hasVideo,
13560 isMuxed: isMuxed
13561 });
13562 trackInfoFn = null;
13563
13564 if (probeResult.hasAudio && !isMuxed) {
13565 audioStartFn(probeResult.audioStart);
13566 }
13567
13568 if (probeResult.hasVideo) {
13569 videoStartFn(probeResult.videoStart);
13570 }
13571
13572 audioStartFn = null;
13573 videoStartFn = null;
13574 }
13575
13576 finish();
13577 }
13578 });
13579};
13580
13581var handleSegmentBytes = function handleSegmentBytes(_ref5) {
13582 var segment = _ref5.segment,
13583 bytes = _ref5.bytes,
13584 trackInfoFn = _ref5.trackInfoFn,
13585 timingInfoFn = _ref5.timingInfoFn,
13586 videoSegmentTimingInfoFn = _ref5.videoSegmentTimingInfoFn,
13587 audioSegmentTimingInfoFn = _ref5.audioSegmentTimingInfoFn,
13588 id3Fn = _ref5.id3Fn,
13589 captionsFn = _ref5.captionsFn,
13590 isEndOfTimeline = _ref5.isEndOfTimeline,
13591 endedTimelineFn = _ref5.endedTimelineFn,
13592 dataFn = _ref5.dataFn,
13593 doneFn = _ref5.doneFn,
13594 onTransmuxerLog = _ref5.onTransmuxerLog;
13595 var bytesAsUint8Array = new Uint8Array(bytes); // TODO:
13596 // We should have a handler that fetches the number of bytes required
13597 // to check if something is fmp4. This will allow us to save bandwidth
13598 // because we can only blacklist a playlist and abort requests
13599 // by codec after trackinfo triggers.
13600
13601 if (isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
13602 segment.isFmp4 = true;
13603 var tracks = segment.map.tracks;
13604 var trackInfo = {
13605 isFmp4: true,
13606 hasVideo: !!tracks.video,
13607 hasAudio: !!tracks.audio
13608 }; // if we have an audio track with a codec that is not set to
13609 // encrypted audio
13610
13611 if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
13612 trackInfo.audioCodec = tracks.audio.codec;
13613 } // if we have a video track with a codec that is not set to
13614 // encrypted video
13615
13616
13617 if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
13618 trackInfo.videoCodec = tracks.video.codec;
13619 }
13620
13621 if (tracks.video && tracks.audio) {
13622 trackInfo.isMuxed = true;
13623 } // since we don't support appending fmp4 data on progress, we know we have the full
13624 // segment here
13625
13626
13627 trackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start
13628 // time. The end time can be roughly calculated by the receiver using the duration.
13629 //
13630 // Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
13631 // that is the true start of the segment (where the playback engine should begin
13632 // decoding).
13633
13634 var finishLoading = function finishLoading(captions) {
13635 // if the track still has audio at this point it is only possible
13636 // for it to be audio only. See `tracks.video && tracks.audio` if statement
13637 // above.
13638 // we make sure to use segment.bytes here, as that reference is kept in sync with the bytes transferred back from the worker below
13639 dataFn(segment, {
13640 data: bytesAsUint8Array,
13641 type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
13642 });
13643
13644 if (captions && captions.length) {
13645 captionsFn(segment, captions);
13646 }
13647
13648 doneFn(null, segment, {});
13649 };
13650
13651 workerCallback({
13652 action: 'probeMp4StartTime',
13653 timescales: segment.map.timescales,
13654 data: bytesAsUint8Array,
13655 transmuxer: segment.transmuxer,
13656 callback: function callback(_ref6) {
13657 var data = _ref6.data,
13658 startTime = _ref6.startTime;
13659 // transfer bytes back to us
13660 bytes = data.buffer;
13661 segment.bytes = bytesAsUint8Array = data;
13662
13663 if (trackInfo.hasAudio && !trackInfo.isMuxed) {
13664 timingInfoFn(segment, 'audio', 'start', startTime);
13665 }
13666
13667 if (trackInfo.hasVideo) {
13668 timingInfoFn(segment, 'video', 'start', startTime);
13669 } // Run through the CaptionParser in case there are captions.
13670 // Initialize CaptionParser if it hasn't been yet
13671
13672
13673 if (!tracks.video || !data.byteLength || !segment.transmuxer) {
13674 finishLoading();
13675 return;
13676 }
13677
13678 workerCallback({
13679 action: 'pushMp4Captions',
13680 endAction: 'mp4Captions',
13681 transmuxer: segment.transmuxer,
13682 data: bytesAsUint8Array,
13683 timescales: segment.map.timescales,
13684 trackIds: [tracks.video.id],
13685 callback: function callback(message) {
13686 // transfer bytes back to us
13687 bytes = message.data.buffer;
13688 segment.bytes = bytesAsUint8Array = message.data;
13689 message.logs.forEach(function (log) {
13690 onTransmuxerLog(videojs.mergeOptions(log, {
13691 stream: 'mp4CaptionParser'
13692 }));
13693 });
13694 finishLoading(message.captions);
13695 }
13696 });
13697 }
13698 });
13699 return;
13700 } // VTT or other segments that don't need processing
13701
13702
13703 if (!segment.transmuxer) {
13704 doneFn(null, segment, {});
13705 return;
13706 }
13707
13708 if (typeof segment.container === 'undefined') {
13709 segment.container = detectContainerForBytes(bytesAsUint8Array);
13710 }
13711
13712 if (segment.container !== 'ts' && segment.container !== 'aac') {
13713 trackInfoFn(segment, {
13714 hasAudio: false,
13715 hasVideo: false
13716 });
13717 doneFn(null, segment, {});
13718 return;
13719 } // ts or aac
13720
13721
13722 transmuxAndNotify({
13723 segment: segment,
13724 bytes: bytes,
13725 trackInfoFn: trackInfoFn,
13726 timingInfoFn: timingInfoFn,
13727 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
13728 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
13729 id3Fn: id3Fn,
13730 captionsFn: captionsFn,
13731 isEndOfTimeline: isEndOfTimeline,
13732 endedTimelineFn: endedTimelineFn,
13733 dataFn: dataFn,
13734 doneFn: doneFn,
13735 onTransmuxerLog: onTransmuxerLog
13736 });
13737};
13738
13739var decrypt = function decrypt(_ref7, callback) {
13740 var id = _ref7.id,
13741 key = _ref7.key,
13742 encryptedBytes = _ref7.encryptedBytes,
13743 decryptionWorker = _ref7.decryptionWorker;
13744
13745 var decryptionHandler = function decryptionHandler(event) {
13746 if (event.data.source === id) {
13747 decryptionWorker.removeEventListener('message', decryptionHandler);
13748 var decrypted = event.data.decrypted;
13749 callback(new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength));
13750 }
13751 };
13752
13753 decryptionWorker.addEventListener('message', decryptionHandler);
13754 var keyBytes;
13755
13756 if (key.bytes.slice) {
13757 keyBytes = key.bytes.slice();
13758 } else {
13759 keyBytes = new Uint32Array(Array.prototype.slice.call(key.bytes));
13760 } // incrementally decrypt the bytes
13761
13762
13763 decryptionWorker.postMessage(createTransferableMessage({
13764 source: id,
13765 encrypted: encryptedBytes,
13766 key: keyBytes,
13767 iv: key.iv
13768 }), [encryptedBytes.buffer, keyBytes.buffer]);
13769};
13770/**
13771 * Decrypt the segment via the decryption web worker
13772 *
13773 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
13774 * routines
13775 * @param {Object} segment - a simplified copy of the segmentInfo object
13776 * from SegmentLoader
13777 * @param {Function} trackInfoFn - a callback that receives track info
13778 * @param {Function} timingInfoFn - a callback that receives timing info
13779 * @param {Function} videoSegmentTimingInfoFn
13780 * a callback that receives video timing info based on media times and
13781 * any adjustments made by the transmuxer
13782 * @param {Function} audioSegmentTimingInfoFn
13783 * a callback that receives audio timing info based on media times and
13784 * any adjustments made by the transmuxer
13785 * @param {boolean} isEndOfTimeline
13786 * true if this segment represents the last segment in a timeline
13787 * @param {Function} endedTimelineFn
13788 * a callback made when a timeline is ended, will only be called if
13789 * isEndOfTimeline is true
13790 * @param {Function} dataFn - a callback that is executed when segment bytes are available
13791 * and ready to use
13792 * @param {Function} doneFn - a callback that is executed after decryption has completed
13793 */
13794
13795
13796var decryptSegment = function decryptSegment(_ref8) {
13797 var decryptionWorker = _ref8.decryptionWorker,
13798 segment = _ref8.segment,
13799 trackInfoFn = _ref8.trackInfoFn,
13800 timingInfoFn = _ref8.timingInfoFn,
13801 videoSegmentTimingInfoFn = _ref8.videoSegmentTimingInfoFn,
13802 audioSegmentTimingInfoFn = _ref8.audioSegmentTimingInfoFn,
13803 id3Fn = _ref8.id3Fn,
13804 captionsFn = _ref8.captionsFn,
13805 isEndOfTimeline = _ref8.isEndOfTimeline,
13806 endedTimelineFn = _ref8.endedTimelineFn,
13807 dataFn = _ref8.dataFn,
13808 doneFn = _ref8.doneFn,
13809 onTransmuxerLog = _ref8.onTransmuxerLog;
13810 decrypt({
13811 id: segment.requestId,
13812 key: segment.key,
13813 encryptedBytes: segment.encryptedBytes,
13814 decryptionWorker: decryptionWorker
13815 }, function (decryptedBytes) {
13816 segment.bytes = decryptedBytes;
13817 handleSegmentBytes({
13818 segment: segment,
13819 bytes: segment.bytes,
13820 trackInfoFn: trackInfoFn,
13821 timingInfoFn: timingInfoFn,
13822 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
13823 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
13824 id3Fn: id3Fn,
13825 captionsFn: captionsFn,
13826 isEndOfTimeline: isEndOfTimeline,
13827 endedTimelineFn: endedTimelineFn,
13828 dataFn: dataFn,
13829 doneFn: doneFn,
13830 onTransmuxerLog: onTransmuxerLog
13831 });
13832 });
13833};
13834/**
13835 * This function waits for all XHRs to finish (with either success or failure)
13836 * before continuing processing via its callback. The function gathers errors
13837 * from each request into a single errors array so that the error status for
13838 * each request can be examined later.
13839 *
13840 * @param {Object} activeXhrs - an object that tracks all XHR requests
13841 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
13842 * routines
13843 * @param {Function} trackInfoFn - a callback that receives track info
13844 * @param {Function} timingInfoFn - a callback that receives timing info
13845 * @param {Function} videoSegmentTimingInfoFn
13846 * a callback that receives video timing info based on media times and
13847 * any adjustments made by the transmuxer
13848 * @param {Function} audioSegmentTimingInfoFn
13849 * a callback that receives audio timing info based on media times and
13850 * any adjustments made by the transmuxer
13851 * @param {Function} id3Fn - a callback that receives ID3 metadata
13852 * @param {Function} captionsFn - a callback that receives captions
13853 * @param {boolean} isEndOfTimeline
13854 * true if this segment represents the last segment in a timeline
13855 * @param {Function} endedTimelineFn
13856 * a callback made when a timeline is ended, will only be called if
13857 * isEndOfTimeline is true
13858 * @param {Function} dataFn - a callback that is executed when segment bytes are available
13859 * and ready to use
13860 * @param {Function} doneFn - a callback that is executed after all resources have been
13861 * downloaded and any decryption completed
13862 */
13863
13864
13865var waitForCompletion = function waitForCompletion(_ref9) {
13866 var activeXhrs = _ref9.activeXhrs,
13867 decryptionWorker = _ref9.decryptionWorker,
13868 trackInfoFn = _ref9.trackInfoFn,
13869 timingInfoFn = _ref9.timingInfoFn,
13870 videoSegmentTimingInfoFn = _ref9.videoSegmentTimingInfoFn,
13871 audioSegmentTimingInfoFn = _ref9.audioSegmentTimingInfoFn,
13872 id3Fn = _ref9.id3Fn,
13873 captionsFn = _ref9.captionsFn,
13874 isEndOfTimeline = _ref9.isEndOfTimeline,
13875 endedTimelineFn = _ref9.endedTimelineFn,
13876 dataFn = _ref9.dataFn,
13877 doneFn = _ref9.doneFn,
13878 onTransmuxerLog = _ref9.onTransmuxerLog;
13879 var count = 0;
13880 var didError = false;
13881 return function (error, segment) {
13882 if (didError) {
13883 return;
13884 }
13885
13886 if (error) {
13887 didError = true; // If there are errors, we have to abort any outstanding requests
13888
13889 abortAll(activeXhrs); // Even though the requests above are aborted, and in theory we could wait until we
13890 // handle the aborted events from those requests, there are some cases where we may
13891 // never get an aborted event. For instance, if the network connection is lost and
13892 // there were two requests, the first may have triggered an error immediately, while
13893 // the second request remains unsent. In that case, the aborted algorithm will not
13894 // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
13895 //
13896 // We also can't rely on the ready state of the XHR, since the request that
13897 // triggered the connection error may also show as a ready state of 0 (unsent).
13898 // Therefore, we have to finish this group of requests immediately after the first
13899 // seen error.
13900
13901 return doneFn(error, segment);
13902 }
13903
13904 count += 1;
13905
13906 if (count === activeXhrs.length) {
13907 var segmentFinish = function segmentFinish() {
13908 if (segment.encryptedBytes) {
13909 return decryptSegment({
13910 decryptionWorker: decryptionWorker,
13911 segment: segment,
13912 trackInfoFn: trackInfoFn,
13913 timingInfoFn: timingInfoFn,
13914 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
13915 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
13916 id3Fn: id3Fn,
13917 captionsFn: captionsFn,
13918 isEndOfTimeline: isEndOfTimeline,
13919 endedTimelineFn: endedTimelineFn,
13920 dataFn: dataFn,
13921 doneFn: doneFn,
13922 onTransmuxerLog: onTransmuxerLog
13923 });
13924 } // Otherwise, everything is ready; just continue
13925
13926
13927 handleSegmentBytes({
13928 segment: segment,
13929 bytes: segment.bytes,
13930 trackInfoFn: trackInfoFn,
13931 timingInfoFn: timingInfoFn,
13932 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
13933 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
13934 id3Fn: id3Fn,
13935 captionsFn: captionsFn,
13936 isEndOfTimeline: isEndOfTimeline,
13937 endedTimelineFn: endedTimelineFn,
13938 dataFn: dataFn,
13939 doneFn: doneFn,
13940 onTransmuxerLog: onTransmuxerLog
13941 });
13942 }; // Keep track of when *all* of the requests have completed
13943
13944
13945 segment.endOfAllRequests = Date.now();
13946
13947 if (segment.map && segment.map.encryptedBytes && !segment.map.bytes) {
13948 return decrypt({
13949 decryptionWorker: decryptionWorker,
13950 // add -init to the "id" to differentiate between segment
13951 // and init segment decryption, just in case they happen
13952 // at the same time at some point in the future.
13953 id: segment.requestId + '-init',
13954 encryptedBytes: segment.map.encryptedBytes,
13955 key: segment.map.key
13956 }, function (decryptedBytes) {
13957 segment.map.bytes = decryptedBytes;
13958 parseInitSegment(segment, function (parseError) {
13959 if (parseError) {
13960 abortAll(activeXhrs);
13961 return doneFn(parseError, segment);
13962 }
13963
13964 segmentFinish();
13965 });
13966 });
13967 }
13968
13969 segmentFinish();
13970 }
13971 };
13972};
13973/**
13974 * Calls the abort callback if any request within the batch was aborted. Will only call
13975 * the callback once per batch of requests, even if multiple were aborted.
13976 *
13977 * @param {Object} loadendState - state to check to see if the abort function was called
13978 * @param {Function} abortFn - callback to call for abort
13979 */
13980
13981
13982var handleLoadEnd = function handleLoadEnd(_ref10) {
13983 var loadendState = _ref10.loadendState,
13984 abortFn = _ref10.abortFn;
13985 return function (event) {
13986 var request = event.target;
13987
13988 if (request.aborted && abortFn && !loadendState.calledAbortFn) {
13989 abortFn();
13990 loadendState.calledAbortFn = true;
13991 }
13992 };
13993};
13994/**
13995 * Simple progress event callback handler that gathers some stats before
13996 * executing a provided callback with the `segment` object
13997 *
13998 * @param {Object} segment - a simplified copy of the segmentInfo object
13999 * from SegmentLoader
14000 * @param {Function} progressFn - a callback that is executed each time a progress event
14001 * is received
14002 * @param {Function} trackInfoFn - a callback that receives track info
14003 * @param {Function} timingInfoFn - a callback that receives timing info
14004 * @param {Function} videoSegmentTimingInfoFn
14005 * a callback that receives video timing info based on media times and
14006 * any adjustments made by the transmuxer
14007 * @param {Function} audioSegmentTimingInfoFn
14008 * a callback that receives audio timing info based on media times and
14009 * any adjustments made by the transmuxer
14010 * @param {boolean} isEndOfTimeline
14011 * true if this segment represents the last segment in a timeline
14012 * @param {Function} endedTimelineFn
14013 * a callback made when a timeline is ended, will only be called if
14014 * isEndOfTimeline is true
14015 * @param {Function} dataFn - a callback that is executed when segment bytes are available
14016 * and ready to use
14017 * @param {Event} event - the progress event object from XMLHttpRequest
14018 */
14019
14020
14021var handleProgress = function handleProgress(_ref11) {
14022 var segment = _ref11.segment,
14023 progressFn = _ref11.progressFn;
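// NOTE: the bare member expressions below are no-op artifacts of Babel's
// destructuring transform for options that handleProgress accepts but never
// uses; they have no runtime effect.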
14024 _ref11.trackInfoFn;
14025 _ref11.timingInfoFn;
14026 _ref11.videoSegmentTimingInfoFn;
14027 _ref11.audioSegmentTimingInfoFn;
14028 _ref11.id3Fn;
14029 _ref11.captionsFn;
14030 _ref11.isEndOfTimeline;
14031 _ref11.endedTimelineFn;
14032 _ref11.dataFn;
14033 return function (event) {
14034 var request = event.target;
14035
14036 if (request.aborted) {
14037 return;
14038 }
14039
14040 segment.stats = videojs.mergeOptions(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of data
14041
14042 if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
14043 segment.stats.firstBytesReceivedAt = Date.now();
14044 }
14045
14046 return progressFn(event, segment);
14047 };
14048};
14049/**
14050 * Loads all resources and does any processing necessary for a media segment
14051 *
14052 * Features:
14053 * decrypts the media-segment if it has a key uri and an iv
14054 * aborts *all* requests if *any* one request fails
14055 *
14056 * The segment object, at minimum, has the following format:
14057 * {
14058 * resolvedUri: String,
14059 * [transmuxer]: Object,
14060 * [byterange]: {
14061 * offset: Number,
14062 * length: Number
14063 * },
14064 * [key]: {
14065 * resolvedUri: String
14066 * [byterange]: {
14067 * offset: Number,
14068 * length: Number
14069 * },
14070 * iv: {
14071 * bytes: Uint32Array
14072 * }
14073 * },
14074 * [map]: {
14075 * resolvedUri: String,
14076 * [byterange]: {
14077 * offset: Number,
14078 * length: Number
14079 * },
14080 * [bytes]: Uint8Array
14081 * }
14082 * }
14083 * ...where [name] denotes optional properties
14084 *
14085 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
14086 * @param {Object} xhrOptions - the base options to provide to all xhr requests
14087 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
14088 * decryption routines
14089 * @param {Object} segment - a simplified copy of the segmentInfo object
14090 * from SegmentLoader
14091 * @param {Function} abortFn - a callback called (only once) if any piece of a request was
14092 * aborted
14093 * @param {Function} progressFn - a callback that receives progress events from the main
14094 * segment's xhr request
14095 * @param {Function} trackInfoFn - a callback that receives track info
14096 * @param {Function} timingInfoFn - a callback that receives timing info
14097 * @param {Function} videoSegmentTimingInfoFn
14098 * a callback that receives video timing info based on media times and
14099 * any adjustments made by the transmuxer
14100 * @param {Function} audioSegmentTimingInfoFn
14101 * a callback that receives audio timing info based on media times and
14102 * any adjustments made by the transmuxer
14103 * @param {Function} id3Fn - a callback that receives ID3 metadata
14104 * @param {Function} captionsFn - a callback that receives captions
14105 * @param {boolean} isEndOfTimeline
14106 * true if this segment represents the last segment in a timeline
14107 * @param {Function} endedTimelineFn
14108 * a callback made when a timeline is ended, will only be called if
14109 * isEndOfTimeline is true
14110 * @param {Function} dataFn - a callback that receives data from the main segment's xhr
14111 * request, transmuxed if needed
14112 * @param {Function} doneFn - a callback that is executed only once all requests have
14113 * succeeded or failed
14114 * @return {Function} a function that, when invoked, immediately aborts all
14115 * outstanding requests
14116 */
14117
14118
14119var mediaSegmentRequest = function mediaSegmentRequest(_ref12) {
14120 var xhr = _ref12.xhr,
14121 xhrOptions = _ref12.xhrOptions,
14122 decryptionWorker = _ref12.decryptionWorker,
14123 segment = _ref12.segment,
14124 abortFn = _ref12.abortFn,
14125 progressFn = _ref12.progressFn,
14126 trackInfoFn = _ref12.trackInfoFn,
14127 timingInfoFn = _ref12.timingInfoFn,
14128 videoSegmentTimingInfoFn = _ref12.videoSegmentTimingInfoFn,
14129 audioSegmentTimingInfoFn = _ref12.audioSegmentTimingInfoFn,
14130 id3Fn = _ref12.id3Fn,
14131 captionsFn = _ref12.captionsFn,
14132 isEndOfTimeline = _ref12.isEndOfTimeline,
14133 endedTimelineFn = _ref12.endedTimelineFn,
14134 dataFn = _ref12.dataFn,
14135 doneFn = _ref12.doneFn,
14136 onTransmuxerLog = _ref12.onTransmuxerLog;
14137 var activeXhrs = [];
14138 var finishProcessingFn = waitForCompletion({
14139 activeXhrs: activeXhrs,
14140 decryptionWorker: decryptionWorker,
14141 trackInfoFn: trackInfoFn,
14142 timingInfoFn: timingInfoFn,
14143 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
14144 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
14145 id3Fn: id3Fn,
14146 captionsFn: captionsFn,
14147 isEndOfTimeline: isEndOfTimeline,
14148 endedTimelineFn: endedTimelineFn,
14149 dataFn: dataFn,
14150 doneFn: doneFn,
14151 onTransmuxerLog: onTransmuxerLog
14152 }); // optionally, request the decryption key
14153
14154 if (segment.key && !segment.key.bytes) {
14155 var objects = [segment.key];
14156
14157 if (segment.map && !segment.map.bytes && segment.map.key && segment.map.key.resolvedUri === segment.key.resolvedUri) {
14158 objects.push(segment.map.key);
14159 }
14160
14161 var keyRequestOptions = videojs.mergeOptions(xhrOptions, {
14162 uri: segment.key.resolvedUri,
14163 responseType: 'arraybuffer'
14164 });
14165 var keyRequestCallback = handleKeyResponse(segment, objects, finishProcessingFn);
14166 var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
14167 activeXhrs.push(keyXhr);
14168 } // optionally, request the associated media init segment
14169
14170
14171 if (segment.map && !segment.map.bytes) {
14172 var differentMapKey = segment.map.key && (!segment.key || segment.key.resolvedUri !== segment.map.key.resolvedUri);
14173
14174 if (differentMapKey) {
14175 var mapKeyRequestOptions = videojs.mergeOptions(xhrOptions, {
14176 uri: segment.map.key.resolvedUri,
14177 responseType: 'arraybuffer'
14178 });
14179 var mapKeyRequestCallback = handleKeyResponse(segment, [segment.map.key], finishProcessingFn);
14180 var mapKeyXhr = xhr(mapKeyRequestOptions, mapKeyRequestCallback);
14181 activeXhrs.push(mapKeyXhr);
14182 }
14183
14184 var initSegmentOptions = videojs.mergeOptions(xhrOptions, {
14185 uri: segment.map.resolvedUri,
14186 responseType: 'arraybuffer',
14187 headers: segmentXhrHeaders(segment.map)
14188 });
14189 var initSegmentRequestCallback = handleInitSegmentResponse({
14190 segment: segment,
14191 finishProcessingFn: finishProcessingFn
14192 });
14193 var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
14194 activeXhrs.push(initSegmentXhr);
14195 }
14196
14197 var segmentRequestOptions = videojs.mergeOptions(xhrOptions, {
14198 uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,
14199 responseType: 'arraybuffer',
14200 headers: segmentXhrHeaders(segment)
14201 });
14202 var segmentRequestCallback = handleSegmentResponse({
14203 segment: segment,
14204 finishProcessingFn: finishProcessingFn,
14205 responseType: segmentRequestOptions.responseType
14206 });
14207 var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
14208 segmentXhr.addEventListener('progress', handleProgress({
14209 segment: segment,
14210 progressFn: progressFn,
14211 trackInfoFn: trackInfoFn,
14212 timingInfoFn: timingInfoFn,
14213 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
14214 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
14215 id3Fn: id3Fn,
14216 captionsFn: captionsFn,
14217 isEndOfTimeline: isEndOfTimeline,
14218 endedTimelineFn: endedTimelineFn,
14219 dataFn: dataFn
14220 }));
14221 activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but callbacks should not
14222 // fire multiple times, provide a shared state object
14223
14224 var loadendState = {};
14225 activeXhrs.forEach(function (activeXhr) {
14226 activeXhr.addEventListener('loadend', handleLoadEnd({
14227 loadendState: loadendState,
14228 abortFn: abortFn
14229 }));
14230 });
14231 return function () {
14232 return abortAll(activeXhrs);
14233 };
14234};
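// Usage sketch (editor's illustration, not executed by this bundle): `vhsXhr`
// (the wrapper created by xhr.js), `worker`, `simpleSegment` and `noop` are
// hypothetical stand-ins. mediaSegmentRequest fires the key/init/segment XHRs
// together and hands back a single abort function:
//
//   var noop = function () {};
//   var abortRequests = mediaSegmentRequest({
//     xhr: vhsXhr,
//     xhrOptions: { timeout: 45000 },
//     decryptionWorker: worker,
//     segment: simpleSegment,
//     abortFn: function () { /* at least one request was aborted */ },
//     progressFn: function (event, segment) { /* bytes are arriving */ },
//     trackInfoFn: noop, timingInfoFn: noop,
//     videoSegmentTimingInfoFn: noop, audioSegmentTimingInfoFn: noop,
//     id3Fn: noop, captionsFn: noop,
//     isEndOfTimeline: false, endedTimelineFn: noop,
//     dataFn: function (segment, data) { /* append to a source buffer */ },
//     doneFn: function (error, segment) { /* all requests settled */ }
//   });
//   // later, e.g. on a rendition switch:
//   abortRequests();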
14235
14236/**
14237 * @file - codecs.js - Handles tasks regarding codec strings, such as translating legacy
14238 * codec strings into standard ones, or parsing codec strings into objects that can be examined.
14239 */
14240var logFn$1 = logger('CodecUtils');
14241/**
14242 * Returns a set of codec strings parsed from the playlist or the default
14243 * codec strings if no codecs were specified in the playlist
14244 *
14245 * @param {Playlist} media the current media playlist
14246 * @return {Object} an object with the video and audio codecs
14247 */
14248
14249var getCodecs = function getCodecs(media) {
14250 // if the codecs were explicitly specified, use them instead of the
14251 // defaults
14252 var mediaAttributes = media.attributes || {};
14253
14254 if (mediaAttributes.CODECS) {
14255 return parseCodecs(mediaAttributes.CODECS);
14256 }
14257};
14258
14259var isMaat = function isMaat(master, media) {
14260 var mediaAttributes = media.attributes || {};
14261 return master && master.mediaGroups && master.mediaGroups.AUDIO && mediaAttributes.AUDIO && master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
14262};
14263var isMuxed = function isMuxed(master, media) {
14264 if (!isMaat(master, media)) {
14265 return true;
14266 }
14267
14268 var mediaAttributes = media.attributes || {};
14269 var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
14270
14271 for (var groupId in audioGroup) {
14272 // If an audio group has a URI (the case for HLS, as HLS will use external playlists),
14273 // or there are listed playlists (the case for DASH, as the manifest will have already
14274 // provided all of the details necessary to generate the audio playlist, as opposed to
14275 // HLS' externally requested playlists), then the content is demuxed.
14276 if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
14277 return true;
14278 }
14279 }
14280
14281 return false;
14282};
14283var unwrapCodecList = function unwrapCodecList(codecList) {
14284 var codecs = {};
14285 codecList.forEach(function (_ref) {
14286 var mediaType = _ref.mediaType,
14287 type = _ref.type,
14288 details = _ref.details;
14289 codecs[mediaType] = codecs[mediaType] || [];
14290 codecs[mediaType].push(translateLegacyCodec("" + type + details));
14291 });
14292 Object.keys(codecs).forEach(function (mediaType) {
14293 if (codecs[mediaType].length > 1) {
14294 logFn$1("multiple " + mediaType + " codecs found as attributes: " + codecs[mediaType].join(', ') + ". Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.");
14295 codecs[mediaType] = null;
14296 return;
14297 }
14298
14299 codecs[mediaType] = codecs[mediaType][0];
14300 });
14301 return codecs;
14302};
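// Worked example (editor's illustration): given parsed codec attributes in the
// {mediaType, type, details} form produced upstream, unwrapCodecList collapses
// them into one string per media type:
//
//   unwrapCodecList([
//     { mediaType: 'video', type: 'avc1', details: '.42001e' },
//     { mediaType: 'audio', type: 'mp4a', details: '.40.2' }
//   ]);
//   // -> { video: 'avc1.42001e', audio: 'mp4a.40.2' }
//
// If two codecs share a mediaType, that entry is set to null and a debug
// message is logged so mux.js can probe segments for the real codecs later.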
14303var codecCount = function codecCount(codecObj) {
14304 var count = 0;
14305
14306 if (codecObj.audio) {
14307 count++;
14308 }
14309
14310 if (codecObj.video) {
14311 count++;
14312 }
14313
14314 return count;
14315};
14316/**
14317 * Calculates the codec strings for a working configuration of
14318 * SourceBuffers to play variant streams in a master playlist. If
14319 * there is no possible working configuration, an empty object will be
14320 * returned.
14321 *
14322 * @param {Object} master the m3u8 object for the master playlist
14323 * @param {Object} media the m3u8 object for the variant playlist
14324 * @return {Object} the codec strings.
14325 *
14326 * @private
14327 */
14328
14329var codecsForPlaylist = function codecsForPlaylist(master, media) {
14330 var mediaAttributes = media.attributes || {};
14331 var codecInfo = unwrapCodecList(getCodecs(media) || []); // HLS with multiple-audio tracks must always get an audio codec.
14332 // Put another way, there is no way to have a video-only multiple-audio HLS!
14333
14334 if (isMaat(master, media) && !codecInfo.audio) {
14335 if (!isMuxed(master, media)) {
14336 // It is possible for codecs to be specified on the audio media group playlist but
14337 // not on the rendition playlist. This is mostly the case for DASH, where audio and
14338 // video are always separate (and separately specified).
14339 var defaultCodecs = unwrapCodecList(codecsFromDefault(master, mediaAttributes.AUDIO) || []);
14340
14341 if (defaultCodecs.audio) {
14342 codecInfo.audio = defaultCodecs.audio;
14343 }
14344 }
14345 }
14346
14347 return codecInfo;
14348};
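// Usage sketch (editor's illustration): for a typical rendition that declares
// CODECS, the attribute wins over any defaults:
//
//   codecsForPlaylist(master, {
//     attributes: { CODECS: 'avc1.42001e,mp4a.40.2' }
//   });
//   // -> { video: 'avc1.42001e', audio: 'mp4a.40.2' }
//
// Only when the master has alternate audio (isMaat) that is demuxed and the
// rendition omits an audio codec does codecsFromDefault supply the audio entry.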
14349
14350var logFn = logger('PlaylistSelector');
14351
14352var representationToString = function representationToString(representation) {
14353 if (!representation || !representation.playlist) {
14354 return;
14355 }
14356
14357 var playlist = representation.playlist;
14358 return JSON.stringify({
14359 id: playlist.id,
14360 bandwidth: representation.bandwidth,
14361 width: representation.width,
14362 height: representation.height,
14363 codecs: playlist.attributes && playlist.attributes.CODECS || ''
14364 });
14365}; // Utilities
14366
14367/**
14368 * Returns the CSS value for the specified property on an element
14369 * using `getComputedStyle`. Firefox has a long-standing issue where
14370 * getComputedStyle() may return null when running in an iframe with
14371 * `display: none`.
14372 *
14373 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
14374 * @param {HTMLElement} el the HTMLElement to work on
14375 * @param {string} property the property to get the style for
14376 */
14377
14378
14379var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
14380 if (!el) {
14381 return '';
14382 }
14383
14384 var result = window$1.getComputedStyle(el);
14385
14386 if (!result) {
14387 return '';
14388 }
14389
14390 return result[property];
14391};
14392/**
14393 * Reusable stable sort function
14394 *
14395 * @param {Playlists} array
14396 * @param {Function} sortFn Different comparators
14397 * @function stableSort
14398 */
14399
14400
14401var stableSort = function stableSort(array, sortFn) {
14402 var newArray = array.slice();
14403 array.sort(function (left, right) {
14404 var cmp = sortFn(left, right);
14405
14406 if (cmp === 0) {
14407 return newArray.indexOf(left) - newArray.indexOf(right);
14408 }
14409
14410 return cmp;
14411 });
14412};
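// Worked example (editor's illustration): unlike the engine-dependent
// Array.prototype.sort, stableSort keeps the original relative order of
// entries that compare equal, which matters when renditions tie on bandwidth:
//
//   var reps = [
//     { id: 'a', bandwidth: 100 },
//     { id: 'b', bandwidth: 100 },
//     { id: 'c', bandwidth: 50 }
//   ];
//   stableSort(reps, function (l, r) { return l.bandwidth - r.bandwidth; });
//   // -> c, a, b ('a' stays ahead of 'b' because they tied at 100)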
14413/**
14414 * A comparator function to sort two playlist objects by bandwidth.
14415 *
14416 * @param {Object} left a media playlist object
14417 * @param {Object} right a media playlist object
14418 * @return {number} Greater than zero if the bandwidth attribute of
14419 * left is greater than the corresponding attribute of right. Less
14420 * than zero if the bandwidth of right is greater than left and
14421 * exactly zero if the two are equal.
14422 */
14423
14424
14425var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
14426 var leftBandwidth;
14427 var rightBandwidth;
14428
14429 if (left.attributes.BANDWIDTH) {
14430 leftBandwidth = left.attributes.BANDWIDTH;
14431 }
14432
14433 leftBandwidth = leftBandwidth || window$1.Number.MAX_VALUE;
14434
14435 if (right.attributes.BANDWIDTH) {
14436 rightBandwidth = right.attributes.BANDWIDTH;
14437 }
14438
14439 rightBandwidth = rightBandwidth || window$1.Number.MAX_VALUE;
14440 return leftBandwidth - rightBandwidth;
14441};
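// Worked example (editor's illustration): a playlist without a BANDWIDTH
// attribute is treated as Number.MAX_VALUE, so it always sorts after playlists
// that declare one:
//
//   comparePlaylistBandwidth(
//     { attributes: { BANDWIDTH: 4000000 } },
//     { attributes: {} }
//   );
//   // -> a large negative number; the declared-bandwidth playlist sorts first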
14442/**
14443 * A comparator function to sort two playlist objects by resolution (width).
14444 *
14445 * @param {Object} left a media playlist object
14446 * @param {Object} right a media playlist object
14447 * @return {number} Greater than zero if the resolution.width attribute of
14448 * left is greater than the corresponding attribute of right. Less
14449 * than zero if the resolution.width of right is greater than left and
14450 * exactly zero if the two are equal.
14451 */
14452
14453var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
14454 var leftWidth;
14455 var rightWidth;
14456
14457 if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
14458 leftWidth = left.attributes.RESOLUTION.width;
14459 }
14460
14461 leftWidth = leftWidth || window$1.Number.MAX_VALUE;
14462
14463 if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
14464 rightWidth = right.attributes.RESOLUTION.width;
14465 }
14466
14467 rightWidth = rightWidth || window$1.Number.MAX_VALUE; // NOTE - Fall back to bandwidth sort as appropriate in cases where multiple renditions
14468 // have the same media dimensions/resolution
14469
14470 if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
14471 return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
14472 }
14473
14474 return leftWidth - rightWidth;
14475};
14476/**
14477 * Chooses the appropriate media playlist based on bandwidth and player size
14478 *
14479 * @param {Object} master
14480 * Object representation of the master manifest
14481 * @param {number} playerBandwidth
14482 * Current calculated bandwidth of the player
14483 * @param {number} playerWidth
14484 * Current width of the player element (should account for the device pixel ratio)
14485 * @param {number} playerHeight
14486 * Current height of the player element (should account for the device pixel ratio)
14487 * @param {boolean} limitRenditionByPlayerDimensions
14488 * True if the player width and height should be used during the selection, false otherwise
14489 * @param {Object} masterPlaylistController
14490 * the current masterPlaylistController object
14491 * @return {Playlist} the highest bitrate playlist less than the
14492 * currently detected bandwidth, accounting for some amount of
14493 * bandwidth variance
14494 */
14495
14496var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions, masterPlaylistController) {
14497 // If we end up getting called before `master` is available, exit early
14498 if (!master) {
14499 return;
14500 }
14501
14502 var options = {
14503 bandwidth: playerBandwidth,
14504 width: playerWidth,
14505 height: playerHeight,
14506 limitRenditionByPlayerDimensions: limitRenditionByPlayerDimensions
14507 };
14508 var playlists = master.playlists; // if playlist is audio only, select between currently active audio group playlists.
14509
14510 if (Playlist.isAudioOnly(master)) {
14511 playlists = masterPlaylistController.getAudioTrackPlaylists_(); // add audioOnly to options so that we log audioOnly: true
14512 // at the bottom of this function for debugging.
14513
14514 options.audioOnly = true;
14515 } // convert the playlists to an intermediary representation to make comparisons easier
14516
14517
14518 var sortedPlaylistReps = playlists.map(function (playlist) {
14519 var bandwidth;
14520 var width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
14521 var height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
14522 bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
14523 bandwidth = bandwidth || window$1.Number.MAX_VALUE;
14524 return {
14525 bandwidth: bandwidth,
14526 width: width,
14527 height: height,
14528 playlist: playlist
14529 };
14530 });
14531 stableSort(sortedPlaylistReps, function (left, right) {
14532 return left.bandwidth - right.bandwidth;
14533 }); // filter out any playlists that have been excluded due to
14534 // incompatible configurations
14535
14536 sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
14537 return !Playlist.isIncompatible(rep.playlist);
14538 }); // filter out any playlists that have been disabled manually through the representations
14539 // api or blacklisted temporarily due to playback errors.
14540
14541 var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
14542 return Playlist.isEnabled(rep.playlist);
14543 });
14544
14545 if (!enabledPlaylistReps.length) {
14546 // if there are no enabled playlists, then they have all been blacklisted or disabled
14547 // by the user through the representations api. In this case, ignore blacklisting and
14548 // fallback to what the user wants by using playlists the user has not disabled.
14549 enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
14550 return !Playlist.isDisabled(rep.playlist);
14551 });
14552 } // filter out any variant that has greater effective bitrate
14553 // than the current estimated bandwidth
14554
14555
14556 var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
14557 return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
14558 });
14559 var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth
14560 // and then take the very first element
14561
14562 var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
14563 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
14564 })[0]; // if we're not going to limit renditions by player size, make an early decision.
14565
14566 if (limitRenditionByPlayerDimensions === false) {
14567 var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
14568
14569 if (_chosenRep && _chosenRep.playlist) {
14570 var type = 'sortedPlaylistReps';
14571
14572 if (bandwidthBestRep) {
14573 type = 'bandwidthBestRep';
14574 }
14575
14576 if (enabledPlaylistReps[0]) {
14577 type = 'enabledPlaylistReps';
14578 }
14579
14580 logFn("choosing " + representationToString(_chosenRep) + " using " + type + " with options", options);
14581 return _chosenRep.playlist;
14582 }
14583
14584 logFn('could not choose a playlist with options', options);
14585 return null;
14586 } // filter out playlists without resolution information
14587
14588
14589 var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
14590 return rep.width && rep.height;
14591 }); // sort variants by resolution
14592
14593 stableSort(haveResolution, function (left, right) {
14594 return left.width - right.width;
14595 }); // if we have the exact resolution as the player, use it
14596
14597 var resolutionBestRepList = haveResolution.filter(function (rep) {
14598 return rep.width === playerWidth && rep.height === playerHeight;
14599 });
14600 highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that has the exact resolution
14601
14602 var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
14603 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
14604 })[0];
14605 var resolutionPlusOneList;
14606 var resolutionPlusOneSmallest;
14607 var resolutionPlusOneRep; // find the smallest variant that is larger than the player
14608 // if there is no match of exact resolution
14609
14610 if (!resolutionBestRep) {
14611 resolutionPlusOneList = haveResolution.filter(function (rep) {
14612 return rep.width > playerWidth || rep.height > playerHeight;
14613 }); // find all the variants that have the same smallest resolution
14614
14615 resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
14616 return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
14617 }); // ensure that we also pick the highest bandwidth variant that
14618 // is just-larger-than the video player
14619
14620 highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
14621 resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
14622 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
14623 })[0];
14624 }
14625
14626 var leastPixelDiffRep; // If this selector proves to be better than others,
14627 // resolutionPlusOneRep and resolutionBestRep and all
14628 // the code involving them should be removed.
14629
14630 if (masterPlaylistController.experimentalLeastPixelDiffSelector) {
14631 // find the variant that is closest to the player's pixel size
14632 var leastPixelDiffList = haveResolution.map(function (rep) {
14633 rep.pixelDiff = Math.abs(rep.width - playerWidth) + Math.abs(rep.height - playerHeight);
14634 return rep;
14635 }); // get the highest bandwidth, closest resolution playlist
14636
14637 stableSort(leastPixelDiffList, function (left, right) {
14638 // sort by highest bandwidth if pixelDiff is the same
14639 if (left.pixelDiff === right.pixelDiff) {
14640 return right.bandwidth - left.bandwidth;
14641 }
14642
14643 return left.pixelDiff - right.pixelDiff;
14644 });
14645 leastPixelDiffRep = leastPixelDiffList[0];
14646 } // fallback chain of variants
14647
14648
14649 var chosenRep = leastPixelDiffRep || resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
14650
14651 if (chosenRep && chosenRep.playlist) {
14652 var _type = 'sortedPlaylistReps';
14653
14654 if (leastPixelDiffRep) {
14655 _type = 'leastPixelDiffRep';
14656 } else if (resolutionPlusOneRep) {
14657 _type = 'resolutionPlusOneRep';
14658 } else if (resolutionBestRep) {
14659 _type = 'resolutionBestRep';
14660 } else if (bandwidthBestRep) {
14661 _type = 'bandwidthBestRep';
14662 } else if (enabledPlaylistReps[0]) {
14663 _type = 'enabledPlaylistReps';
14664 }
14665
14666 logFn("choosing " + representationToString(chosenRep) + " using " + _type + " with options", options);
14667 return chosenRep.playlist;
14668 }
14669
14670 logFn('could not choose a playlist with options', options);
14671 return null;
14672};
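// Worked example (editor's illustration) of the bandwidth filter above,
// assuming the library default Config.BANDWIDTH_VARIANCE of 1.2: with a
// measured playerBandwidth of 3,000,000, a 2,000,000 variant survives
// (2e6 * 1.2 = 2.4e6 < 3e6) while a 2,600,000 variant is filtered out
// (2.6e6 * 1.2 = 3.12e6 >= 3e6). The chosen rendition is then the highest
// surviving bandwidth, optionally narrowed further by player resolution.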
14673
14674/**
14675 * Chooses the appropriate media playlist based on the most recent
14676 * bandwidth estimate and the player size.
14677 *
14678 * Expects to be called within the context of an instance of VhsHandler
14679 *
14680 * @return {Playlist} the highest bitrate playlist less than the
14681 * currently detected bandwidth, accounting for some amount of
14682 * bandwidth variance
14683 */
14684
14685var lastBandwidthSelector = function lastBandwidthSelector() {
14686 var pixelRatio = this.useDevicePixelRatio ? window$1.devicePixelRatio || 1 : 1;
14687 return simpleSelector(this.playlists.master, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
14688};
14689/**
14690 * Chooses the appropriate media playlist based on an
14691 * exponential-weighted moving average of the bandwidth after
14692 * filtering for player size.
14693 *
14694 * Expects to be called within the context of an instance of VhsHandler
14695 *
14696 * @param {number} decay - a number between 0 and 1. Higher values of
14697 * this parameter will cause previous bandwidth estimates to lose
14698 * significance more quickly.
14699 * @return {Function} a function which can be invoked to create a new
14700 * playlist selector function.
14701 * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
14702 */
14703
14704var movingAverageBandwidthSelector = function movingAverageBandwidthSelector(decay) {
14705 var average = -1;
14706 var lastSystemBandwidth = -1;
14707
14708 if (decay < 0 || decay > 1) {
14709 throw new Error('Moving average bandwidth decay must be between 0 and 1.');
14710 }
14711
14712 return function () {
14713 var pixelRatio = this.useDevicePixelRatio ? window$1.devicePixelRatio || 1 : 1;
14714
14715 if (average < 0) {
14716 average = this.systemBandwidth;
14717 lastSystemBandwidth = this.systemBandwidth;
14718 } // stop the average value from decaying for every 250ms
14719 // when the systemBandwidth is constant
14720 // and
14721 // stop average from setting to a very low value when the
14722 // systemBandwidth becomes 0 in case of chunk cancellation
14723
14724
14725 if (this.systemBandwidth > 0 && this.systemBandwidth !== lastSystemBandwidth) {
14726 average = decay * this.systemBandwidth + (1 - decay) * average;
14727 lastSystemBandwidth = this.systemBandwidth;
14728 }
14729
14730 return simpleSelector(this.playlists.master, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
14731 };
14732};
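// Worked example (editor's illustration) of the exponential average: with
// decay = 0.5, a first measurement of 4,000,000 seeds the average, and a
// later measurement of 2,000,000 yields
//   average = 0.5 * 2000000 + (1 - 0.5) * 4000000 = 3000000
// so the selector reacts to the drop, but only half-way, smoothing out
// momentary dips in throughput.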
14733/**
14734 * Chooses the appropriate media playlist based on the potential to rebuffer
14735 *
14736 * @param {Object} settings
14737 * Object of information required to use this selector
14738 * @param {Object} settings.master
14739 * Object representation of the master manifest
14740 * @param {number} settings.currentTime
14741 * The current time of the player
14742 * @param {number} settings.bandwidth
14743 * Current measured bandwidth
14744 * @param {number} settings.duration
14745 * Duration of the media
14746 * @param {number} settings.segmentDuration
14747 * Segment duration to be used in round trip time calculations
14748 * @param {number} settings.timeUntilRebuffer
14749 * Time left in seconds until the player has to rebuffer
14750 * @param {number} settings.currentTimeline
14751 * The current timeline segments are being loaded from
14752 * @param {SyncController} settings.syncController
14753 * SyncController for determining if we have a sync point for a given playlist
14754 * @return {Object|null}
14755 * {Object} return.playlist
14756 * The highest bandwidth playlist with the least amount of rebuffering
14757 * {Number} return.rebufferingImpact
14758 * The amount of time in seconds switching to this playlist will rebuffer. A
14759 * negative value means that switching will cause zero rebuffering.
14760 */
14761
14762var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
14763 var master = settings.master,
14764 currentTime = settings.currentTime,
14765 bandwidth = settings.bandwidth,
14766 duration = settings.duration,
14767 segmentDuration = settings.segmentDuration,
14768 timeUntilRebuffer = settings.timeUntilRebuffer,
14769 currentTimeline = settings.currentTimeline,
14770 syncController = settings.syncController; // filter out any playlists that have been excluded due to
14771 // incompatible configurations
14772
14773 var compatiblePlaylists = master.playlists.filter(function (playlist) {
14774 return !Playlist.isIncompatible(playlist);
14775 }); // filter out any playlists that have been disabled manually through the representations
14776 // api or blacklisted temporarily due to playback errors.
14777
14778 var enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);
14779
14780 if (!enabledPlaylists.length) {
14781 // if there are no enabled playlists, then they have all been blacklisted or disabled
14782 // by the user through the representations api. In this case, ignore blacklisting and
14783 // fallback to what the user wants by using playlists the user has not disabled.
14784 enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
14785 return !Playlist.isDisabled(playlist);
14786 });
14787 }
14788
14789 var bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));
14790 var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
14791 var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime); // If there is no sync point for this playlist, switching to it will require a
14792 // sync request first. This will double the request time
14793
14794 var numRequests = syncPoint ? 1 : 2;
14795 var requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
14796 var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
14797 return {
14798 playlist: playlist,
14799 rebufferingImpact: rebufferingImpact
14800 };
14801 });
14802 var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
14803 return estimate.rebufferingImpact <= 0;
14804 }); // Sort by bandwidth DESC
14805
14806 stableSort(noRebufferingPlaylists, function (a, b) {
14807 return comparePlaylistBandwidth(b.playlist, a.playlist);
14808 });
14809
14810 if (noRebufferingPlaylists.length) {
14811 return noRebufferingPlaylists[0];
14812 }
14813
14814 stableSort(rebufferingEstimates, function (a, b) {
14815 return a.rebufferingImpact - b.rebufferingImpact;
14816 });
14817 return rebufferingEstimates[0] || null;
14818};
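// Worked example (editor's illustration) of rebufferingImpact: with a
// per-segment request time estimate of 2s and 5s of buffer left, a playlist
// that already has a sync point costs 2 * 1 - 5 = -3 (no rebuffering), while
// one that needs an extra sync request costs 2 * 2 - 5 = -1. Both are <= 0,
// so the higher-bandwidth of the two is returned.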
14819/**
14820 * Chooses the appropriate media playlist, which in this case is the lowest bitrate
14821 * one with video. If no renditions with video exist, return the lowest audio rendition.
14822 *
14823 * Expects to be called within the context of an instance of VhsHandler
14824 *
14825 * @return {Object|null}
14826 * {Object} return.playlist
14827 * The lowest bitrate playlist that contains a video codec. If no such rendition
14828 * exists pick the lowest audio rendition.
14829 */
14830
14831var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
14832 var _this = this;
14833
14834 // filter out any playlists that have been excluded due to
14835 // incompatible configurations or playback errors
14836 var playlists = this.playlists.master.playlists.filter(Playlist.isEnabled); // Sort ascending by bitrate
14837
14838 stableSort(playlists, function (a, b) {
14839 return comparePlaylistBandwidth(a, b);
14840 }); // Parse and assume that playlists with no video codec have no video
14841 // (this is not necessarily true, although it is generally true).
14842 //
14843 // If an entire manifest has no valid videos everything will get filtered
14844 // out.
14845
14846 var playlistsWithVideo = playlists.filter(function (playlist) {
14847 return !!codecsForPlaylist(_this.playlists.master, playlist).video;
14848 });
14849 return playlistsWithVideo[0] || null;
14850};
14851
14852/**
14853 * Combine all segments into a single Uint8Array
14854 *
14855 * @param {Object} segmentObj
14856 * @return {Uint8Array} concatenated bytes
14857 * @private
14858 */
14859var concatSegments = function concatSegments(segmentObj) {
14860 var offset = 0;
14861 var tempBuffer;
14862
14863 if (segmentObj.bytes) {
14864 tempBuffer = new Uint8Array(segmentObj.bytes); // combine the individual segments into one large typed-array
14865
14866 segmentObj.segments.forEach(function (segment) {
14867 tempBuffer.set(segment, offset);
14868 offset += segment.byteLength;
14869 });
14870 }
14871
14872 return tempBuffer;
14873};
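// Worked example (editor's illustration): `bytes` holds the total byte count
// and `segments` the individual chunks to be stitched together:
//
//   concatSegments({
//     bytes: 5,
//     segments: [new Uint8Array([1, 2, 3]), new Uint8Array([4, 5])]
//   });
//   // -> Uint8Array [1, 2, 3, 4, 5]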
14874
14875/**
14876 * @file text-tracks.js
14877 */
14878/**
14879 * Create captions text tracks on video.js if they do not exist
14880 *
14881 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
14882 * @param {Object} tech the video.js tech
14883 * @param {Object} captionStream the caption stream to create
14884 * @private
14885 */
14886
14887var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStream) {
14888 if (!inbandTextTracks[captionStream]) {
14889 tech.trigger({
14890 type: 'usage',
14891 name: 'vhs-608'
14892 });
14893 tech.trigger({
14894 type: 'usage',
14895 name: 'hls-608'
14896 });
14897 var instreamId = captionStream; // we need to translate SERVICEn for 708 to how mux.js currently labels them
14898
14899 if (/^cc708_/.test(captionStream)) {
14900 instreamId = 'SERVICE' + captionStream.split('_')[1];
14901 }
14902
14903 var track = tech.textTracks().getTrackById(instreamId);
14904
14905 if (track) {
14906 // Reuse an existing track with a CC# id because this was
14907 // very likely created by videojs-contrib-hls from information
14908 // in the m3u8 for us to use
14909 inbandTextTracks[captionStream] = track;
14910 } else {
14911 // This section gets called when we have caption services that aren't specified in the manifest.
14912 // Manifest level caption services are handled in media-groups.js under CLOSED-CAPTIONS.
14913 var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
14914 var label = captionStream;
14915 var language = captionStream;
14916 var def = false;
14917 var captionService = captionServices[instreamId];
14918
14919 if (captionService) {
14920 label = captionService.label;
14921 language = captionService.language;
14922 def = captionService.default;
14923 } // Otherwise, create a track with the default `CC#` label and
14924 // without a language
14925
14926
14927 inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
14928 kind: 'captions',
14929 id: instreamId,
14930 // TODO: investigate why this doesn't seem to turn the caption on by default
14931 default: def,
14932 label: label,
14933 language: language
14934 }, false).track;
14935 }
14936 }
14937};
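// Configuration sketch (editor's illustration): the captionServices lookup
// above reads from player options shaped roughly like the following, keyed by
// CC1-CC4 for 608 or SERVICE1-SERVICE63 for 708 streams:
//
//   videojs('player-id', {
//     html5: {
//       vhs: {
//         captionServices: {
//           CC1: { label: 'English', language: 'en', default: true }
//         }
//       }
//     }
//   });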
14938/**
14939 * Add caption text track data to a source handler given an array of captions
14940 *
14941 * @param {Object} options an object containing the following properties:
14942 * @param {Object} inbandTextTracks the inband text tracks
14943 * @param {number} timestampOffset the timestamp offset of the source buffer
14944 * @param {Array} captionArray an array of caption data
14945 * @private
14946 */
14947
14948var addCaptionData = function addCaptionData(_ref) {
14949 var inbandTextTracks = _ref.inbandTextTracks,
14950 captionArray = _ref.captionArray,
14951 timestampOffset = _ref.timestampOffset;
14952
14953 if (!captionArray) {
14954 return;
14955 }
14956
14957 var Cue = window$1.WebKitDataCue || window$1.VTTCue;
14958 captionArray.forEach(function (caption) {
14959 var track = caption.stream;
14960 inbandTextTracks[track].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
14961 });
14962};
14963/**
14964 * Define properties on a cue for backwards compatibility,
14965 * but warn the user that the way that they are using it
14966 * is deprecated and will be removed at a later date.
14967 *
14968 * @param {Cue} cue the cue to add the properties on
14969 * @private
14970 */
14971
14972var deprecateOldCue = function deprecateOldCue(cue) {
14973 Object.defineProperties(cue.frame, {
14974 id: {
14975 get: function get() {
14976 videojs.log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
14977 return cue.value.key;
14978 }
14979 },
14980 value: {
14981 get: function get() {
14982 videojs.log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
14983 return cue.value.data;
14984 }
14985 },
14986 privateData: {
14987 get: function get() {
14988 videojs.log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
14989 return cue.value.data;
14990 }
14991 }
14992 });
14993};
14994/**
14995 * Add metadata text track data to a source handler given an array of metadata
14996 *
14997 * @param {Object} options an object containing the following properties:
14998 * @param {Object} inbandTextTracks the inband text tracks
14999 * @param {Array} metadataArray an array of meta data
15000 * @param {number} timestampOffset the timestamp offset of the source buffer
15001 * @param {number} videoDuration the duration of the video
15002 * @private
15003 */
15004
15005
15006var addMetadata = function addMetadata(_ref2) {
15007 var inbandTextTracks = _ref2.inbandTextTracks,
15008 metadataArray = _ref2.metadataArray,
15009 timestampOffset = _ref2.timestampOffset,
15010 videoDuration = _ref2.videoDuration;
15011
15012 if (!metadataArray) {
15013 return;
15014 }
15015
15016 var Cue = window$1.WebKitDataCue || window$1.VTTCue;
15017 var metadataTrack = inbandTextTracks.metadataTrack_;
15018
15019 if (!metadataTrack) {
15020 return;
15021 }
15022
15023 metadataArray.forEach(function (metadata) {
15024 var time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,
15025 // ignore this bit of metadata.
15026 // This likely occurs when you have a non-timed ID3 tag like TIT2,
15027 // which is the "Title/Songname/Content description" frame
15028
15029 if (typeof time !== 'number' || window$1.isNaN(time) || time < 0 || !(time < Infinity)) {
15030 return;
15031 }
15032
15033 metadata.frames.forEach(function (frame) {
15034 var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
15035 cue.frame = frame;
15036 cue.value = frame;
15037 deprecateOldCue(cue);
15038 metadataTrack.addCue(cue);
15039 });
15040 });
15041
15042 if (!metadataTrack.cues || !metadataTrack.cues.length) {
15043 return;
15044 } // Update the metadata cues so that
15045 // the endTime of each cue is the startTime of the next cue
15046 // the endTime of last cue is the duration of the video
15047
15048
15049 var cues = metadataTrack.cues;
15050 var cuesArray = []; // Create a copy of the TextTrackCueList...
15051 // ...disregarding cues with a falsy value
15052
15053 for (var i = 0; i < cues.length; i++) {
15054 if (cues[i]) {
15055 cuesArray.push(cues[i]);
15056 }
15057 } // Group cues by their startTime value
15058
15059
15060 var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
15061 var timeSlot = obj[cue.startTime] || [];
15062 timeSlot.push(cue);
15063 obj[cue.startTime] = timeSlot;
15064 return obj;
15065 }, {}); // Sort startTimes by ascending order
15066
15067 var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
15068 return Number(a) - Number(b);
15069 }); // Map each cue group's endTime to the next group's startTime
15070
15071 sortedStartTimes.forEach(function (startTime, idx) {
15072 var cueGroup = cuesGroupedByStartTime[startTime];
15073 var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration; // Map each cue's endTime to the next group's startTime
15074
15075 cueGroup.forEach(function (cue) {
15076 cue.endTime = nextTime;
15077 });
15078 });
15079};
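// Worked example (editor's illustration) of the endTime chaining above: with
// cue groups starting at 0s, 5s and 10s and a videoDuration of 30s, the cues
// end up spanning [0, 5), [5, 10) and [10, 30) respectively, so each ID3 cue
// stays "active" until the next tag (or the end of the video) arrives.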
15080/**
15081 * Create metadata text track on video.js if it does not exist
15082 *
15083 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
15084 * @param {string} dispatchType the inband metadata track dispatch type
15085 * @param {Object} tech the video.js tech
15086 * @private
15087 */
15088
15089var createMetadataTrackIfNotExists = function createMetadataTrackIfNotExists(inbandTextTracks, dispatchType, tech) {
15090 if (inbandTextTracks.metadataTrack_) {
15091 return;
15092 }
15093
15094 inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
15095 kind: 'metadata',
15096 label: 'Timed Metadata'
15097 }, false).track;
15098 inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
15099};
15100/**
15101 * Remove cues from a track on video.js.
15102 *
15103 * @param {Double} start start of where we should remove the cue
15104 * @param {Double} end end of where we should remove the cue
15105 * @param {Object} track the text track to remove the cues from
15106 * @private
15107 */
15108
15109var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
15110 var i;
15111 var cue;
15112
15113 if (!track) {
15114 return;
15115 }
15116
15117 if (!track.cues) {
15118 return;
15119 }
15120
15121 i = track.cues.length;
15122
15123 while (i--) {
15124 cue = track.cues[i]; // Remove any cue within the provided start and end time
15125
15126 if (cue.startTime >= start && cue.endTime <= end) {
15127 track.removeCue(cue);
15128 }
15129 }
15130};
15131/**
15132 * Remove duplicate cues from a track on video.js (a cue is considered a
15133 * duplicate if it has the same time interval and text as another)
15134 *
15135 * @param {Object} track the text track to remove the duplicate cues from
15136 * @private
15137 */
15138
15139var removeDuplicateCuesFromTrack = function removeDuplicateCuesFromTrack(track) {
15140 var cues = track.cues;
15141
15142 if (!cues) {
15143 return;
15144 }
15145
15146 for (var i = 0; i < cues.length; i++) {
15147 var duplicates = [];
15148 var occurrences = 0;
15149
15150 for (var j = 0; j < cues.length; j++) {
15151 if (cues[i].startTime === cues[j].startTime && cues[i].endTime === cues[j].endTime && cues[i].text === cues[j].text) {
15152 occurrences++;
15153
15154 if (occurrences > 1) {
15155 duplicates.push(cues[j]);
15156 }
15157 }
15158 }
15159
15160 if (duplicates.length) {
15161 duplicates.forEach(function (dupe) {
15162 return track.removeCue(dupe);
15163 });
15164 }
15165 }
15166};
15167
15168/**
15169 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
15170 * front of current time.
15171 *
15172 * @param {Array} buffer
15173 * The current buffer of gop information
15174 * @param {number} currentTime
15175 * The current time
15176 * @param {Double} mapping
15177 * Offset to map display time to stream presentation time
15178 * @return {Array}
15179 * List of gops considered safe to append over
15180 */
15181
15182var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
15183 if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
15184 return [];
15185 } // pts value for current time + 3 seconds to give a bit more wiggle room
15186
15187
15188 var currentTimePts = Math.ceil((currentTime - mapping + 3) * ONE_SECOND_IN_TS);
15189 var i;
15190
15191 for (i = 0; i < buffer.length; i++) {
15192 if (buffer[i].pts > currentTimePts) {
15193 break;
15194 }
15195 }
15196
15197 return buffer.slice(i);
15198};
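// Worked example (editor's illustration), assuming mux.js's 90kHz clock
// (ONE_SECOND_IN_TS === 90000): with currentTime = 10 and mapping = 0 the
// cutoff is ceil((10 + 3) * 90000) = 1170000, and only gops with a pts
// strictly greater than that survive:
//
//   gopsSafeToAlignWith(
//     [{ pts: 900000 }, { pts: 1170000 }, { pts: 1260000 }],
//     10,
//     0
//   );
//   // -> [{ pts: 1260000 }]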
15199/**
15200 * Appends gop information (timing and byteLength) received by the transmuxer for the
15201 * gops appended in the last call to appendBuffer
15202 *
15203 * @param {Array} buffer
15204 * The current buffer of gop information
15205 * @param {Array} gops
15206 * List of new gop information
15207 * @param {boolean} replace
15208 * If true, replace the buffer with the new gop information. If false, append the
15209 * new gop information to the buffer in the right location of time.
15210 * @return {Array}
15211 * Updated list of gop information
15212 */
15213
15214var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
15215 if (!gops.length) {
15216 return buffer;
15217 }
15218
15219 if (replace) {
15220 // If we are in safe append mode, then completely overwrite the gop buffer
15221 // with the most recent appended data. This will make sure that when appending
15222 // future segments, we only try to align with gops that are both ahead of current
15223 // time and in the last segment appended.
15224 return gops.slice();
15225 }
15226
15227 var start = gops[0].pts;
15228 var i = 0;
15229
15230 for (i; i < buffer.length; i++) {
15231 if (buffer[i].pts >= start) {
15232 break;
15233 }
15234 }
15235
15236 return buffer.slice(0, i).concat(gops);
15237};
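// Worked example (editor's illustration): when not replacing, new gop info is
// spliced in at the first buffered gop whose pts is >= the new range's start,
// dropping anything the new data supersedes:
//
//   updateGopBuffer(
//     [{ pts: 90000 }, { pts: 180000 }, { pts: 270000 }],
//     [{ pts: 180000 }, { pts: 360000 }],
//     false
//   );
//   // -> [{ pts: 90000 }, { pts: 180000 }, { pts: 360000 }]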
15238/**
15239 * Removes gop information in buffer that overlaps with provided start and end
15240 *
15241 * @param {Array} buffer
15242 * The current buffer of gop information
15243 * @param {Double} start
15244 * position to start the remove at
15245 * @param {Double} end
15246 * position to end the remove at
15247 * @param {Double} mapping
15248 * Offset to map display time to stream presentation time
15249 */
15250
15251var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
15252 var startPts = Math.ceil((start - mapping) * ONE_SECOND_IN_TS);
15253 var endPts = Math.ceil((end - mapping) * ONE_SECOND_IN_TS);
15254 var updatedBuffer = buffer.slice();
15255 var i = buffer.length;
15256
15257 while (i--) {
15258 if (buffer[i].pts <= endPts) {
15259 break;
15260 }
15261 }
15262
15263 if (i === -1) {
15264 // no removal because end of remove range is before start of buffer
15265 return updatedBuffer;
15266 }
15267
15268 var j = i + 1;
15269
15270 while (j--) {
15271 if (buffer[j].pts <= startPts) {
15272 break;
15273 }
15274 } // clamp remove range start to 0 index
15275
15276
15277 j = Math.max(j, 0);
15278 updatedBuffer.splice(j, i - j + 1);
15279 return updatedBuffer;
15280};
15281
15282var shallowEqual = function shallowEqual(a, b) {
15283 // if both are undefined
15284 // or one or the other is undefined
15285 // they are not equal
15286 if (!a && !b || !a && b || a && !b) {
15287 return false;
15288 } // they are the same object and thus, equal
15289
15290
15291 if (a === b) {
15292 return true;
15293 } // sort keys so we can make sure they have
15294 // all the same keys later.
15295
15296
15297 var akeys = Object.keys(a).sort();
15298 var bkeys = Object.keys(b).sort(); // different number of keys, not equal
15299
15300 if (akeys.length !== bkeys.length) {
15301 return false;
15302 }
15303
15304 for (var i = 0; i < akeys.length; i++) {
15305 var key = akeys[i]; // different sorted keys, not equal
15306
15307 if (key !== bkeys[i]) {
15308 return false;
15309 } // different values, not equal
15310
15311
15312 if (a[key] !== b[key]) {
15313 return false;
15314 }
15315 }
15316
15317 return true;
15318};
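// Behavior notes (editor's illustration):
//
//   shallowEqual({ a: 1 }, { a: 1 });        // -> true
//   shallowEqual({ a: 1 }, { a: 1, b: 2 });  // -> false (extra key)
//   shallowEqual({ a: {} }, { a: {} });      // -> false (values compared by ===)
//   shallowEqual(undefined, undefined);      // -> false by design (see above)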
15319
15320// https://www.w3.org/TR/WebIDL-1/#quotaexceedederror
15321var QUOTA_EXCEEDED_ERR = 22;
15322
15323/**
15324 * The segment loader has no recourse except to fetch a segment in the
15325 * current playlist and use the internal timestamps in that segment to
15326 * generate a syncPoint. This function returns a good candidate index
15327 * for that process.
15328 *
15329 * @param {Array} segments - the segments array from a playlist.
15330 * @return {number} An index of a segment from the playlist to load
15331 */
15332
15333var getSyncSegmentCandidate = function getSyncSegmentCandidate(currentTimeline, segments, targetTime) {
15334 segments = segments || [];
15335 var timelineSegments = [];
15336 var time = 0;
15337
15338 for (var i = 0; i < segments.length; i++) {
15339 var segment = segments[i];
15340
15341 if (currentTimeline === segment.timeline) {
15342 timelineSegments.push(i);
15343 time += segment.duration;
15344
15345 if (time > targetTime) {
15346 return i;
15347 }
15348 }
15349 }
15350
15351 if (timelineSegments.length === 0) {
15352 return 0;
15353 } // default to the last timeline segment
15354
15355
15356 return timelineSegments[timelineSegments.length - 1];
15357}; // In the event of a quota exceeded error, keep at least one second of back buffer. This
15358// number was arbitrarily chosen and may be updated in the future, but seemed reasonable
15359// as a start to prevent any potential issues with removing content too close to the
15360// playhead.
15361
15362var MIN_BACK_BUFFER = 1; // in seconds
15363
15364var CHECK_BUFFER_DELAY = 500; // in ms
15365
15366var finite = function finite(num) {
15367 return typeof num === 'number' && isFinite(num);
15368}; // With most content hovering around 30fps, if a segment has a duration less than a half
15369// frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
15370// not accurately reflect the rest of the content.
15371
15372
15373var MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
15374var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, trackInfo) {
15375 // Although these checks should most likely cover non 'main' types, for now it narrows
15376 // the scope of our checks.
15377 if (loaderType !== 'main' || !startingMedia || !trackInfo) {
15378 return null;
15379 }
15380
15381 if (!trackInfo.hasAudio && !trackInfo.hasVideo) {
15382 return 'Neither audio nor video found in segment.';
15383 }
15384
15385 if (startingMedia.hasVideo && !trackInfo.hasVideo) {
15386 return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
15387 }
15388
15389 if (!startingMedia.hasVideo && trackInfo.hasVideo) {
15390 return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
15391 }
15392
15393 return null;
15394};
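
// Illustrative example (not part of the library): only the 'main' loader is checked:
//
//   illegalMediaSwitch('main',
//     { hasVideo: true, hasAudio: true },
//     { hasVideo: false, hasAudio: true });
//   // => 'Only audio found in segment when we expected video. ...'
//
//   illegalMediaSwitch('audio', { hasVideo: false, hasAudio: true },
//     { hasVideo: false, hasAudio: true }); // => null (non-main loaders are skipped)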
15395/**
15396 * Calculates a time value that is safe to remove from the back buffer without interrupting
15397 * playback.
15398 *
15399 * @param {TimeRange} seekable
15400 * The current seekable range
15401 * @param {number} currentTime
15402 * The current time of the player
15403 * @param {number} targetDuration
15404 * The target duration of the current playlist
15405 * @return {number}
15406 * Time that is safe to remove from the back buffer without interrupting playback
15407 */
15408
15409var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable, currentTime, targetDuration) {
15410 // 30 seconds before the playhead provides a safe default for trimming.
15411 //
15412 // Choosing a reasonable default is particularly important for high bitrate content and
15413 // VOD videos/live streams with large windows, as the buffer may end up overfilled and
15414 // throw an APPEND_BUFFER_ERR.
15415 var trimTime = currentTime - Config.BACK_BUFFER_LENGTH;
15416
15417 if (seekable.length) {
15418 // Some live playlists may have a shorter window of content than the full allowed back
15419 // buffer. For these playlists, don't save content that's no longer within the window.
15420 trimTime = Math.max(trimTime, seekable.start(0));
15421 } // Don't remove within target duration of the current time to avoid the possibility of
15422 // removing the GOP currently being played, as removing it can cause playback stalls.
15423
15424
15425 var maxTrimTime = currentTime - targetDuration;
15426 return Math.min(maxTrimTime, trimTime);
15427};
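
// Worked example (assuming the default Config.BACK_BUFFER_LENGTH of 30 seconds):
// with currentTime = 100, seekable starting at 80 and a targetDuration of 10,
// trimTime = max(100 - 30, 80) = 80 and maxTrimTime = 100 - 10 = 90, so
// safeBackBufferTrimTime(seekable, 100, 10) returns min(90, 80) = 80.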
15428var segmentInfoString = function segmentInfoString(segmentInfo) {
15429 var startOfSegment = segmentInfo.startOfSegment,
15430 duration = segmentInfo.duration,
15431 segment = segmentInfo.segment,
15432 part = segmentInfo.part,
15433 _segmentInfo$playlist = segmentInfo.playlist,
15434 seq = _segmentInfo$playlist.mediaSequence,
15435 id = _segmentInfo$playlist.id,
15436 _segmentInfo$playlist2 = _segmentInfo$playlist.segments,
15437 segments = _segmentInfo$playlist2 === void 0 ? [] : _segmentInfo$playlist2,
15438 index = segmentInfo.mediaIndex,
15439 partIndex = segmentInfo.partIndex,
15440 timeline = segmentInfo.timeline;
15441 var segmentLen = segments.length - 1;
15442 var selection = 'mediaIndex/partIndex increment';
15443
15444 if (segmentInfo.getMediaInfoForTime) {
15445 selection = "getMediaInfoForTime (" + segmentInfo.getMediaInfoForTime + ")";
15446 } else if (segmentInfo.isSyncRequest) {
15447 selection = 'getSyncSegmentCandidate (isSyncRequest)';
15448 }
15449
15450 if (segmentInfo.independent) {
15451 selection += " with independent " + segmentInfo.independent;
15452 }
15453
15454 var hasPartIndex = typeof partIndex === 'number';
15455 var name = segmentInfo.segment.uri ? 'segment' : 'pre-segment';
15456 var zeroBasedPartCount = hasPartIndex ? getKnownPartCount({
15457 preloadSegment: segment
15458 }) - 1 : 0;
15459 return name + " [" + (seq + index) + "/" + (seq + segmentLen) + "]" + (hasPartIndex ? " part [" + partIndex + "/" + zeroBasedPartCount + "]" : '') + (" segment start/end [" + segment.start + " => " + segment.end + "]") + (hasPartIndex ? " part start/end [" + part.start + " => " + part.end + "]" : '') + (" startOfSegment [" + startOfSegment + "]") + (" duration [" + duration + "]") + (" timeline [" + timeline + "]") + (" selected by [" + selection + "]") + (" playlist [" + id + "]");
15460};
15461
15462var timingInfoPropertyForMedia = function timingInfoPropertyForMedia(mediaType) {
15463 return mediaType + "TimingInfo";
15464};
15465/**
15466 * Returns the timestamp offset to use for the segment.
15467 *
15468 * @param {number} segmentTimeline
15469 * The timeline of the segment
15470 * @param {number} currentTimeline
15471 * The timeline currently being followed by the loader
15472 * @param {number} startOfSegment
15473 * The estimated segment start
15474 * @param {TimeRange[]} buffered
15475 * The loader's buffer
15476 * @param {boolean} overrideCheck
 * If true, skip the check for whether the timestamp offset should be changed and
 * compute a new value directly
15479 *
15480 * @return {number|null}
15481 * Either a number representing a new timestamp offset, or null if the segment is
15482 * part of the same timeline
15483 */
15484
15485
15486var timestampOffsetForSegment = function timestampOffsetForSegment(_ref) {
15487 var segmentTimeline = _ref.segmentTimeline,
15488 currentTimeline = _ref.currentTimeline,
15489 startOfSegment = _ref.startOfSegment,
15490 buffered = _ref.buffered,
15491 overrideCheck = _ref.overrideCheck;
15492
15493 // Check to see if we are crossing a discontinuity to see if we need to set the
15494 // timestamp offset on the transmuxer and source buffer.
15495 //
15496 // Previously, we changed the timestampOffset if the start of this segment was less than
15497 // the currently set timestampOffset, but this isn't desirable as it can produce bad
15498 // behavior, especially around long running live streams.
15499 if (!overrideCheck && segmentTimeline === currentTimeline) {
15500 return null;
15501 } // When changing renditions, it's possible to request a segment on an older timeline. For
15502 // instance, given two renditions with the following:
15503 //
15504 // #EXTINF:10
15505 // segment1
15506 // #EXT-X-DISCONTINUITY
15507 // #EXTINF:10
15508 // segment2
15509 // #EXTINF:10
15510 // segment3
15511 //
15512 // And the current player state:
15513 //
15514 // current time: 8
15515 // buffer: 0 => 20
15516 //
15517 // The next segment on the current rendition would be segment3, filling the buffer from
15518 // 20s onwards. However, if a rendition switch happens after segment2 was requested,
15519 // then the next segment to be requested will be segment1 from the new rendition in
15520 // order to fill time 8 and onwards. Using the buffered end would result in repeated
15521 // content (since it would position segment1 of the new rendition starting at 20s). This
15522 // case can be identified when the new segment's timeline is a prior value. Instead of
15523 // using the buffered end, the startOfSegment can be used, which, hopefully, will be
15524 // more accurate to the actual start time of the segment.
15525
15526
15527 if (segmentTimeline < currentTimeline) {
15528 return startOfSegment;
15529 } // segmentInfo.startOfSegment used to be used as the timestamp offset, however, that
15530 // value uses the end of the last segment if it is available. While this value
15531 // should often be correct, it's better to rely on the buffered end, as the new
15532 // content post discontinuity should line up with the buffered end as if it were
15533 // time 0 for the new content.
15534
15535
15536 return buffered.length ? buffered.end(buffered.length - 1) : startOfSegment;
15537};
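
// Illustrative examples (not part of the library), following the rules above:
//
//   // same timeline and no override: keep the current offset
//   timestampOffsetForSegment({ segmentTimeline: 0, currentTimeline: 0,
//     startOfSegment: 10, buffered: videojs.createTimeRanges([[0, 10]]),
//     overrideCheck: false }); // => null
//
//   // crossing forward into a new timeline: anchor to the buffered end
//   timestampOffsetForSegment({ segmentTimeline: 1, currentTimeline: 0,
//     startOfSegment: 20, buffered: videojs.createTimeRanges([[0, 20]]),
//     overrideCheck: false }); // => 20
//
//   // rendition switch back to an older timeline: use the estimated segment start
//   timestampOffsetForSegment({ segmentTimeline: 0, currentTimeline: 1,
//     startOfSegment: 8, buffered: videojs.createTimeRanges([[0, 20]]),
//     overrideCheck: false }); // => 8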
15538/**
15539 * Returns whether or not the loader should wait for a timeline change from the timeline
15540 * change controller before processing the segment.
15541 *
15542 * Primary timing in VHS goes by video. This is different from most media players, as
15543 * audio is more often used as the primary timing source. For the foreseeable future, VHS
15544 * will continue to use video as the primary timing source, due to the current logic and
15545 * expectations built around it.
 *
15547 * Since the timing follows video, in order to maintain sync, the video loader is
15548 * responsible for setting both audio and video source buffer timestamp offsets.
15549 *
15550 * Setting different values for audio and video source buffers could lead to
15551 * desyncing. The following examples demonstrate some of the situations where this
15552 * distinction is important. Note that all of these cases involve demuxed content. When
15553 * content is muxed, the audio and video are packaged together, therefore syncing
15554 * separate media playlists is not an issue.
15555 *
15556 * CASE 1: Audio prepares to load a new timeline before video:
15557 *
15558 * Timeline: 0 1
15559 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15560 * Audio Loader: ^
15561 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15562 * Video Loader ^
15563 *
15564 * In the above example, the audio loader is preparing to load the 6th segment, the first
15565 * after a discontinuity, while the video loader is still loading the 5th segment, before
15566 * the discontinuity.
15567 *
15568 * If the audio loader goes ahead and loads and appends the 6th segment before the video
15569 * loader crosses the discontinuity, then when appended, the 6th audio segment will use
15570 * the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
15571 * the audio loader must provide the audioAppendStart value to trim the content in the
15572 * transmuxer, and that value relies on the audio timestamp offset. Since the audio
15573 * timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
15574 * segment until that value is provided.
15575 *
15576 * CASE 2: Video prepares to load a new timeline before audio:
15577 *
15578 * Timeline: 0 1
15579 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15580 * Audio Loader: ^
15581 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15582 * Video Loader ^
15583 *
15584 * In the above example, the video loader is preparing to load the 6th segment, the first
15585 * after a discontinuity, while the audio loader is still loading the 5th segment, before
15586 * the discontinuity.
15587 *
15588 * If the video loader goes ahead and loads and appends the 6th segment, then once the
15589 * segment is loaded and processed, both the video and audio timestamp offsets will be
15590 * set, since video is used as the primary timing source. This is to ensure content lines
15591 * up appropriately, as any modifications to the video timing are reflected by audio when
15592 * the video loader sets the audio and video timestamp offsets to the same value. However,
15593 * setting the timestamp offset for audio before audio has had a chance to change
15594 * timelines will likely lead to desyncing, as the audio loader will append segment 5 with
15595 * a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
15596 *
15597 * CASE 3: When seeking, audio prepares to load a new timeline before video
15598 *
15599 * Timeline: 0 1
15600 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15601 * Audio Loader: ^
15602 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15603 * Video Loader ^
15604 *
15605 * In the above example, both audio and video loaders are loading segments from timeline
15606 * 0, but imagine that the seek originated from timeline 1.
15607 *
15608 * When seeking to a new timeline, the timestamp offset will be set based on the expected
15609 * segment start of the loaded video segment. In order to maintain sync, the audio loader
15610 * must wait for the video loader to load its segment and update both the audio and video
15611 * timestamp offsets before it may load and append its own segment. This is the case
15612 * whether the seek results in a mismatched segment request (e.g., the audio loader
15613 * chooses to load segment 3 and the video loader chooses to load segment 4) or the
15614 * loaders choose to load the same segment index from each playlist, as the segments may
15615 * not be aligned perfectly, even for matching segment indexes.
15616 *
15617 * @param {Object} timelinechangeController
15618 * @param {number} currentTimeline
15619 * The timeline currently being followed by the loader
15620 * @param {number} segmentTimeline
15621 * The timeline of the segment being loaded
15622 * @param {('main'|'audio')} loaderType
15623 * The loader type
15624 * @param {boolean} audioDisabled
15625 * Whether the audio is disabled for the loader. This should only be true when the
15626 * loader may have muxed audio in its segment, but should not append it, e.g., for
15627 * the main loader when an alternate audio playlist is active.
15628 *
15629 * @return {boolean}
15630 * Whether the loader should wait for a timeline change from the timeline change
15631 * controller before processing the segment
15632 */
15633
15634var shouldWaitForTimelineChange = function shouldWaitForTimelineChange(_ref2) {
15635 var timelineChangeController = _ref2.timelineChangeController,
15636 currentTimeline = _ref2.currentTimeline,
15637 segmentTimeline = _ref2.segmentTimeline,
15638 loaderType = _ref2.loaderType,
15639 audioDisabled = _ref2.audioDisabled;
15640
15641 if (currentTimeline === segmentTimeline) {
15642 return false;
15643 }
15644
15645 if (loaderType === 'audio') {
15646 var lastMainTimelineChange = timelineChangeController.lastTimelineChange({
15647 type: 'main'
15648 }); // Audio loader should wait if:
15649 //
15650 // * main hasn't had a timeline change yet (thus has not loaded its first segment)
15651 // * main hasn't yet changed to the timeline audio is looking to load
15652
15653 return !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;
15654 } // The main loader only needs to wait for timeline changes if there's demuxed audio.
15655 // Otherwise, there's nothing to wait for, since audio would be muxed into the main
15656 // loader's segments (or the content is audio/video only and handled by the main
15657 // loader).
15658
15659
15660 if (loaderType === 'main' && audioDisabled) {
15661 var pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
15662 type: 'audio'
15663 }); // Main loader should wait for the audio loader if audio is not pending a timeline
15664 // change to the current timeline.
15665 //
15666 // Since the main loader is responsible for setting the timestamp offset for both
15667 // audio and video, the main loader must wait for audio to be about to change to its
15668 // timeline before setting the offset, otherwise, if audio is behind in loading,
15669 // segments from the previous timeline would be adjusted by the new timestamp offset.
15670 //
15671 // This requirement means that video will not cross a timeline until the audio is
15672 // about to cross to it, so that way audio and video will always cross the timeline
15673 // together.
15674 //
15675 // In addition to normal timeline changes, these rules also apply to the start of a
15676 // stream (going from a non-existent timeline, -1, to timeline 0). It's important
15677 // that these rules apply to the first timeline change because if they did not, it's
15678 // possible that the main loader will cross two timelines before the audio loader has
15679 // crossed one. Logic may be implemented to handle the startup as a special case, but
15680 // it's easier to simply treat all timeline changes the same.
15681
15682 if (pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline) {
15683 return false;
15684 }
15685
15686 return true;
15687 }
15688
15689 return false;
15690};
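
// Illustrative examples (not part of the library; `tcc` stands in for a
// TimelineChangeController instance):
//
//   // audio waits until main has already changed to the segment's timeline
//   shouldWaitForTimelineChange({ timelineChangeController: tcc,
//     currentTimeline: 0, segmentTimeline: 1, loaderType: 'audio' });
//   // => true unless tcc.lastTimelineChange({ type: 'main' }).to === 1
//
//   // main with demuxed (disabled) audio waits for audio's pending change to line up
//   shouldWaitForTimelineChange({ timelineChangeController: tcc,
//     currentTimeline: 0, segmentTimeline: 1, loaderType: 'main', audioDisabled: true });
//   // => false only when tcc.pendingTimelineChange({ type: 'audio' }).to === 1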
15691var mediaDuration = function mediaDuration(timingInfos) {
15692 var maxDuration = 0;
15693 ['video', 'audio'].forEach(function (type) {
15694 var typeTimingInfo = timingInfos[type + "TimingInfo"];
15695
15696 if (!typeTimingInfo) {
15697 return;
15698 }
15699
15700 var start = typeTimingInfo.start,
15701 end = typeTimingInfo.end;
15702 var duration;
15703
15704 if (typeof start === 'bigint' || typeof end === 'bigint') {
15705 duration = window$1.BigInt(end) - window$1.BigInt(start);
15706 } else if (typeof start === 'number' && typeof end === 'number') {
15707 duration = end - start;
15708 }
15709
15710 if (typeof duration !== 'undefined' && duration > maxDuration) {
15711 maxDuration = duration;
15712 }
15713 }); // convert back to a number if it is lower than MAX_SAFE_INTEGER
15714 // as we only need BigInt when we are above that.
15715
15716 if (typeof maxDuration === 'bigint' && maxDuration < Number.MAX_SAFE_INTEGER) {
15717 maxDuration = Number(maxDuration);
15718 }
15719
15720 return maxDuration;
15721};
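
// Illustrative example (not part of the library): the longer of the audio and video
// durations wins, and BigInt math is only used when either endpoint is a BigInt:
//
//   mediaDuration({
//     audioTimingInfo: { start: 0, end: 9.9 },
//     videoTimingInfo: { start: 0, end: 10 }
//   }); // => 10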
15722var segmentTooLong = function segmentTooLong(_ref3) {
15723 var segmentDuration = _ref3.segmentDuration,
15724 maxDuration = _ref3.maxDuration;
15725
15726 // 0 duration segments are most likely due to metadata only segments or a lack of
15727 // information.
15728 if (!segmentDuration) {
15729 return false;
15730 } // For HLS:
15731 //
15732 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1
15733 // The EXTINF duration of each Media Segment in the Playlist
15734 // file, when rounded to the nearest integer, MUST be less than or equal
15735 // to the target duration; longer segments can trigger playback stalls
15736 // or other errors.
15737 //
15738 // For DASH, the mpd-parser uses the largest reported segment duration as the target
15739 // duration. Although that reported duration is occasionally approximate (i.e., not
15740 // exact), a strict check may report that a segment is too long more often in DASH.
15741
15742
15743 return Math.round(segmentDuration) > maxDuration + TIME_FUDGE_FACTOR;
15744};
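
// Illustrative example (not part of the library): the duration is rounded first, so
// only segments that round above maxDuration (plus the fudge factor) are flagged:
//
//   segmentTooLong({ segmentDuration: 10.4, maxDuration: 10 }); // => false (rounds to 10)
//   segmentTooLong({ segmentDuration: 11, maxDuration: 10 });   // => true
//   segmentTooLong({ segmentDuration: 0, maxDuration: 10 });    // => false (no info)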
15745var getTroublesomeSegmentDurationMessage = function getTroublesomeSegmentDurationMessage(segmentInfo, sourceType) {
15746 // Right now we aren't following DASH's timing model exactly, so only perform
15747 // this check for HLS content.
15748 if (sourceType !== 'hls') {
15749 return null;
15750 }
15751
15752 var segmentDuration = mediaDuration({
15753 audioTimingInfo: segmentInfo.audioTimingInfo,
15754 videoTimingInfo: segmentInfo.videoTimingInfo
15755 }); // Don't report if we lack information.
15756 //
15757 // If the segment has a duration of 0 it is either a lack of information or a
15758 // metadata only segment and shouldn't be reported here.
15759
15760 if (!segmentDuration) {
15761 return null;
15762 }
15763
15764 var targetDuration = segmentInfo.playlist.targetDuration;
15765 var isSegmentWayTooLong = segmentTooLong({
15766 segmentDuration: segmentDuration,
15767 maxDuration: targetDuration * 2
15768 });
15769 var isSegmentSlightlyTooLong = segmentTooLong({
15770 segmentDuration: segmentDuration,
15771 maxDuration: targetDuration
15772 });
15773 var segmentTooLongMessage = "Segment with index " + segmentInfo.mediaIndex + " " + ("from playlist " + segmentInfo.playlist.id + " ") + ("has a duration of " + segmentDuration + " ") + ("when the reported duration is " + segmentInfo.duration + " ") + ("and the target duration is " + targetDuration + ". ") + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1';
15774
15775 if (isSegmentWayTooLong || isSegmentSlightlyTooLong) {
15776 return {
15777 severity: isSegmentWayTooLong ? 'warn' : 'info',
15778 message: segmentTooLongMessage
15779 };
15780 }
15781
15782 return null;
15783};
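
// Illustrative behavior (not part of the library): with a targetDuration of 10, a
// 12 second segment produces an 'info' severity message, a 21 second segment (more
// than double the target) produces 'warn', and non-HLS sources always return null.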
15784/**
15785 * An object that manages segment loading and appending.
15786 *
15787 * @class SegmentLoader
15788 * @param {Object} options required and optional options
15789 * @extends videojs.EventTarget
15790 */
15791
15792var SegmentLoader = /*#__PURE__*/function (_videojs$EventTarget) {
15793 _inheritsLoose(SegmentLoader, _videojs$EventTarget);
15794
15795 function SegmentLoader(settings, options) {
15796 var _this;
15797
15798 _this = _videojs$EventTarget.call(this) || this; // check pre-conditions
15799
15800 if (!settings) {
15801 throw new TypeError('Initialization settings are required');
15802 }
15803
15804 if (typeof settings.currentTime !== 'function') {
15805 throw new TypeError('No currentTime getter specified');
15806 }
15807
15808 if (!settings.mediaSource) {
15809 throw new TypeError('No MediaSource specified');
15810 } // public properties
15811
15812
15813 _this.bandwidth = settings.bandwidth;
15814 _this.throughput = {
15815 rate: 0,
15816 count: 0
15817 };
15818 _this.roundTrip = NaN;
15819
15820 _this.resetStats_();
15821
15822 _this.mediaIndex = null;
15823 _this.partIndex = null; // private settings
15824
15825 _this.hasPlayed_ = settings.hasPlayed;
15826 _this.currentTime_ = settings.currentTime;
15827 _this.seekable_ = settings.seekable;
15828 _this.seeking_ = settings.seeking;
15829 _this.duration_ = settings.duration;
15830 _this.mediaSource_ = settings.mediaSource;
15831 _this.vhs_ = settings.vhs;
15832 _this.loaderType_ = settings.loaderType;
15833 _this.currentMediaInfo_ = void 0;
15834 _this.startingMediaInfo_ = void 0;
15835 _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
15836 _this.goalBufferLength_ = settings.goalBufferLength;
15837 _this.sourceType_ = settings.sourceType;
15838 _this.sourceUpdater_ = settings.sourceUpdater;
15839 _this.inbandTextTracks_ = settings.inbandTextTracks;
15840 _this.state_ = 'INIT';
15841 _this.timelineChangeController_ = settings.timelineChangeController;
15842 _this.shouldSaveSegmentTimingInfo_ = true;
15843 _this.parse708captions_ = settings.parse708captions;
15844 _this.useDtsForTimestampOffset_ = settings.useDtsForTimestampOffset;
15845 _this.captionServices_ = settings.captionServices;
15846 _this.experimentalExactManifestTimings = settings.experimentalExactManifestTimings; // private instance variables
15847
15848 _this.checkBufferTimeout_ = null;
15849 _this.error_ = void 0;
15850 _this.currentTimeline_ = -1;
15851 _this.pendingSegment_ = null;
15852 _this.xhrOptions_ = null;
15853 _this.pendingSegments_ = [];
15854 _this.audioDisabled_ = false;
15855 _this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controller
15856
15857 _this.gopBuffer_ = [];
15858 _this.timeMapping_ = 0;
15859 _this.safeAppend_ = videojs.browser.IE_VERSION >= 11;
15860 _this.appendInitSegment_ = {
15861 audio: true,
15862 video: true
15863 };
15864 _this.playlistOfLastInitSegment_ = {
15865 audio: null,
15866 video: null
15867 };
15868 _this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough
15869 // information yet to start the loading process (e.g., if the audio loader wants to
15870 // load a segment from the next timeline but the main loader hasn't yet crossed that
15871 // timeline), then the load call will be added to the queue until it is ready to be
15872 // processed.
15873
15874 _this.loadQueue_ = [];
15875 _this.metadataQueue_ = {
15876 id3: [],
15877 caption: []
15878 };
15879 _this.waitingOnRemove_ = false;
15880 _this.quotaExceededErrorRetryTimeout_ = null; // Fragmented mp4 playback
15881
15882 _this.activeInitSegmentId_ = null;
15883 _this.initSegments_ = {}; // HLSe playback
15884
15885 _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
15886 _this.keyCache_ = {};
15887 _this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings
15888 // between a time in the display time and a segment index within
15889 // a playlist
15890
15891 _this.syncController_ = settings.syncController;
15892 _this.syncPoint_ = {
15893 segmentIndex: 0,
15894 time: 0
15895 };
15896 _this.transmuxer_ = _this.createTransmuxer_();
15897
15898 _this.triggerSyncInfoUpdate_ = function () {
15899 return _this.trigger('syncinfoupdate');
15900 };
15901
15902 _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);
15903
15904 _this.mediaSource_.addEventListener('sourceopen', function () {
15905 if (!_this.isEndOfStream_()) {
15906 _this.ended_ = false;
15907 }
15908 }); // ...for determining the fetch location
15909
15910
15911 _this.fetchAtBuffer_ = false;
15912 _this.logger_ = logger("SegmentLoader[" + _this.loaderType_ + "]");
15913 Object.defineProperty(_assertThisInitialized(_this), 'state', {
15914 get: function get() {
15915 return this.state_;
15916 },
15917 set: function set(newState) {
15918 if (newState !== this.state_) {
15919 this.logger_(this.state_ + " -> " + newState);
15920 this.state_ = newState;
15921 this.trigger('statechange');
15922 }
15923 }
15924 });
15925
15926 _this.sourceUpdater_.on('ready', function () {
15927 if (_this.hasEnoughInfoToAppend_()) {
15928 _this.processCallQueue_();
15929 }
15930 }); // Only the main loader needs to listen for pending timeline changes, as the main
15931 // loader should wait for audio to be ready to change its timeline so that both main
15932 // and audio timelines change together. For more details, see the
15933 // shouldWaitForTimelineChange function.
15934
15935
15936 if (_this.loaderType_ === 'main') {
15937 _this.timelineChangeController_.on('pendingtimelinechange', function () {
15938 if (_this.hasEnoughInfoToAppend_()) {
15939 _this.processCallQueue_();
15940 }
15941 });
15942 } // The main loader only listens on pending timeline changes, but the audio loader,
15943 // since its loads follow main, needs to listen on timeline changes. For more details,
15944 // see the shouldWaitForTimelineChange function.
15945
15946
15947 if (_this.loaderType_ === 'audio') {
15948 _this.timelineChangeController_.on('timelinechange', function () {
15949 if (_this.hasEnoughInfoToLoad_()) {
15950 _this.processLoadQueue_();
15951 }
15952
15953 if (_this.hasEnoughInfoToAppend_()) {
15954 _this.processCallQueue_();
15955 }
15956 });
15957 }
15958
15959 return _this;
15960 }
15961
15962 var _proto = SegmentLoader.prototype;
15963
15964 _proto.createTransmuxer_ = function createTransmuxer_() {
15965 return segmentTransmuxer.createTransmuxer({
15966 remux: false,
15967 alignGopsAtEnd: this.safeAppend_,
15968 keepOriginalTimestamps: true,
15969 parse708captions: this.parse708captions_,
15970 captionServices: this.captionServices_
15971 });
15972 }
15973 /**
15974 * reset all of our media stats
15975 *
15976 * @private
15977 */
15978 ;
15979
15980 _proto.resetStats_ = function resetStats_() {
15981 this.mediaBytesTransferred = 0;
15982 this.mediaRequests = 0;
15983 this.mediaRequestsAborted = 0;
15984 this.mediaRequestsTimedout = 0;
15985 this.mediaRequestsErrored = 0;
15986 this.mediaTransferDuration = 0;
15987 this.mediaSecondsLoaded = 0;
15988 this.mediaAppends = 0;
15989 }
15990 /**
15991 * dispose of the SegmentLoader and reset to the default state
15992 */
15993 ;
15994
15995 _proto.dispose = function dispose() {
15996 this.trigger('dispose');
15997 this.state = 'DISPOSED';
15998 this.pause();
15999 this.abort_();
16000
16001 if (this.transmuxer_) {
16002 this.transmuxer_.terminate();
16003 }
16004
16005 this.resetStats_();
16006
16007 if (this.checkBufferTimeout_) {
16008 window$1.clearTimeout(this.checkBufferTimeout_);
16009 }
16010
16011 if (this.syncController_ && this.triggerSyncInfoUpdate_) {
16012 this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
16013 }
16014
16015 this.off();
16016 };
16017
16018 _proto.setAudio = function setAudio(enable) {
16019 this.audioDisabled_ = !enable;
16020
16021 if (enable) {
16022 this.appendInitSegment_.audio = true;
16023 } else {
16024 // remove current track audio if it gets disabled
16025 this.sourceUpdater_.removeAudio(0, this.duration_());
16026 }
16027 }
16028 /**
 * abort anything that is currently going on with the SegmentLoader
16030 * and reset to a default state
16031 */
16032 ;
16033
16034 _proto.abort = function abort() {
16035 if (this.state !== 'WAITING') {
16036 if (this.pendingSegment_) {
16037 this.pendingSegment_ = null;
16038 }
16039
16040 return;
16041 }
16042
16043 this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY
16044 // since we are no longer "waiting" on any requests. XHR callback is not always run
16045 // when the request is aborted. This will prevent the loader from being stuck in the
16046 // WAITING state indefinitely.
16047
16048 this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the
16049 // next segment
16050
16051 if (!this.paused()) {
16052 this.monitorBuffer_();
16053 }
16054 }
16055 /**
 * abort all pending xhr requests and null any pending segments
16057 *
16058 * @private
16059 */
16060 ;
16061
16062 _proto.abort_ = function abort_() {
16063 if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
16064 this.pendingSegment_.abortRequests();
16065 } // clear out the segment being processed
16066
16067
16068 this.pendingSegment_ = null;
16069 this.callQueue_ = [];
16070 this.loadQueue_ = [];
16071 this.metadataQueue_.id3 = [];
16072 this.metadataQueue_.caption = [];
16073 this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
16074 this.waitingOnRemove_ = false;
16075 window$1.clearTimeout(this.quotaExceededErrorRetryTimeout_);
16076 this.quotaExceededErrorRetryTimeout_ = null;
16077 };
16078
16079 _proto.checkForAbort_ = function checkForAbort_(requestId) {
16080 // If the state is APPENDING, then aborts will not modify the state, meaning the first
16081 // callback that happens should reset the state to READY so that loading can continue.
16082 if (this.state === 'APPENDING' && !this.pendingSegment_) {
16083 this.state = 'READY';
16084 return true;
16085 }
16086
16087 if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
16088 return true;
16089 }
16090
16091 return false;
16092 }
16093 /**
 * set an error on the segment loader and null out any pending segments
16095 *
16096 * @param {Error} error the error to set on the SegmentLoader
16097 * @return {Error} the error that was set or that is currently set
16098 */
16099 ;
16100
16101 _proto.error = function error(_error) {
16102 if (typeof _error !== 'undefined') {
16103 this.logger_('error occurred:', _error);
16104 this.error_ = _error;
16105 }
16106
16107 this.pendingSegment_ = null;
16108 return this.error_;
16109 };
16110
16111 _proto.endOfStream = function endOfStream() {
16112 this.ended_ = true;
16113
16114 if (this.transmuxer_) {
16115 // need to clear out any cached data to prepare for the new segment
16116 segmentTransmuxer.reset(this.transmuxer_);
16117 }
16118
16119 this.gopBuffer_.length = 0;
16120 this.pause();
16121 this.trigger('ended');
16122 }
16123 /**
16124 * Indicates which time ranges are buffered
16125 *
16126 * @return {TimeRange}
16127 * TimeRange object representing the current buffered ranges
16128 */
16129 ;
16130
16131 _proto.buffered_ = function buffered_() {
16132 var trackInfo = this.getMediaInfo_();
16133
16134 if (!this.sourceUpdater_ || !trackInfo) {
16135 return videojs.createTimeRanges();
16136 }
16137
16138 if (this.loaderType_ === 'main') {
16139 var hasAudio = trackInfo.hasAudio,
16140 hasVideo = trackInfo.hasVideo,
16141 isMuxed = trackInfo.isMuxed;
16142
16143 if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
16144 return this.sourceUpdater_.buffered();
16145 }
16146
16147 if (hasVideo) {
16148 return this.sourceUpdater_.videoBuffered();
16149 }
16150 } // One case that can be ignored for now is audio only with alt audio,
16151 // as we don't yet have proper support for that.
16152
16153
16154 return this.sourceUpdater_.audioBuffered();
16155 }
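  // Illustrative (not part of the library): on the main loader, demuxed content with
  // both audio and video enabled reports sourceUpdater_.buffered() (the combined
  // audio/video view); video-only content reports videoBuffered(), and everything
  // else falls through to audioBuffered().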
16156 /**
 * Gets or sets the init segment for the provided map
16158 *
16159 * @param {Object} map
16160 * The map object representing the init segment to get or set
16161 * @param {boolean=} set
16162 * If true, the init segment for the provided map should be saved
16163 * @return {Object}
16164 * map object for desired init segment
16165 */
16166 ;
16167
16168 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
16169 if (set === void 0) {
16170 set = false;
16171 }
16172
16173 if (!map) {
16174 return null;
16175 }
16176
16177 var id = initSegmentId(map);
16178 var storedMap = this.initSegments_[id];
16179
16180 if (set && !storedMap && map.bytes) {
16181 this.initSegments_[id] = storedMap = {
16182 resolvedUri: map.resolvedUri,
16183 byterange: map.byterange,
16184 bytes: map.bytes,
16185 tracks: map.tracks,
16186 timescales: map.timescales
16187 };
16188 }
16189
16190 return storedMap || map;
16191 }
16192 /**
 * Gets or sets the encryption key for the provided key object
16194 *
16195 * @param {Object} key
16196 * The key object representing the key to get or set
16197 * @param {boolean=} set
16198 * If true, the key for the provided key should be saved
16199 * @return {Object}
16200 * Key object for desired key
16201 */
16202 ;
16203
16204 _proto.segmentKey = function segmentKey(key, set) {
16205 if (set === void 0) {
16206 set = false;
16207 }
16208
16209 if (!key) {
16210 return null;
16211 }
16212
16213 var id = segmentKeyId(key);
16214 var storedKey = this.keyCache_[id]; // TODO: We should use the HTTP Expires header to invalidate our cache per
16215 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
16216
16217 if (this.cacheEncryptionKeys_ && set && !storedKey && key.bytes) {
16218 this.keyCache_[id] = storedKey = {
16219 resolvedUri: key.resolvedUri,
16220 bytes: key.bytes
16221 };
16222 }
16223
16224 var result = {
16225 resolvedUri: (storedKey || key).resolvedUri
16226 };
16227
16228 if (storedKey) {
16229 result.bytes = storedKey.bytes;
16230 }
16231
16232 return result;
16233 }
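  // Illustrative usage (not part of the library; the URI and bytes are hypothetical):
  // with cacheEncryptionKeys_ enabled, the first call that includes key.bytes caches
  // them, and later lookups for the same key return the cached bytes:
  //
  //   loader.segmentKey({ resolvedUri: 'https://example.com/key.php', bytes: keyBytes }, true);
  //   loader.segmentKey({ resolvedUri: 'https://example.com/key.php' });
  //   // => { resolvedUri: 'https://example.com/key.php', bytes: keyBytes }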
16234 /**
16235 * Returns true if all configuration required for loading is present, otherwise false.
16236 *
 * @return {boolean} True if all the configuration is ready for loading
16238 * @private
16239 */
16240 ;
16241
16242 _proto.couldBeginLoading_ = function couldBeginLoading_() {
16243 return this.playlist_ && !this.paused();
16244 }
16245 /**
16246 * load a playlist and start to fill the buffer
16247 */
16248 ;
16249
16250 _proto.load = function load() {
16251 // un-pause
16252 this.monitorBuffer_(); // if we don't have a playlist yet, keep waiting for one to be
16253 // specified
16254
16255 if (!this.playlist_) {
16256 return;
16257 } // if all the configuration is ready, initialize and begin loading
16258
16259
16260 if (this.state === 'INIT' && this.couldBeginLoading_()) {
16261 return this.init_();
16262 } // if we're in the middle of processing a segment already, don't
16263 // kick off an additional segment request
16264
16265
16266 if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
16267 return;
16268 }
16269
16270 this.state = 'READY';
16271 }
16272 /**
16273 * Once all the starting parameters have been specified, begin
16274 * operation. This method should only be invoked from the INIT
16275 * state.
16276 *
16277 * @private
16278 */
16279 ;
16280
16281 _proto.init_ = function init_() {
16282 this.state = 'READY'; // if this is the audio segment loader, and it hasn't been inited before, then any old
16283 // audio data from the muxed content should be removed
16284
16285 this.resetEverything();
16286 return this.monitorBuffer_();
16287 }
16288 /**
16289 * set a playlist on the segment loader
16290 *
16291 * @param {PlaylistLoader} media the playlist to set on the segment loader
16292 */
16293 ;
16294
16295 _proto.playlist = function playlist(newPlaylist, options) {
16296 if (options === void 0) {
16297 options = {};
16298 }
16299
16300 if (!newPlaylist) {
16301 return;
16302 }
16303
16304 var oldPlaylist = this.playlist_;
16305 var segmentInfo = this.pendingSegment_;
16306 this.playlist_ = newPlaylist;
16307 this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist
16308 // is always our zero-time so force a sync update each time the playlist
16309 // is refreshed from the server
16310 //
16311 // Use the INIT state to determine if playback has started, as the playlist sync info
16312 // should be fixed once requests begin (as sync points are generated based on sync
16313 // info), but not before then.
16314
16315 if (this.state === 'INIT') {
16316 newPlaylist.syncInfo = {
16317 mediaSequence: newPlaylist.mediaSequence,
16318 time: 0
16319 }; // Setting the date time mapping means mapping the program date time (if available)
16320 // to time 0 on the player's timeline. The playlist's syncInfo serves a similar
16321 // purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
16322 // be updated as the playlist is refreshed before the loader starts loading, the
16323 // program date time mapping needs to be updated as well.
16324 //
16325 // This mapping is only done for the main loader because a program date time should
16326 // map equivalently between playlists.
16327
16328 if (this.loaderType_ === 'main') {
16329 this.syncController_.setDateTimeMappingForStart(newPlaylist);
16330 }
16331 }
16332
16333 var oldId = null;
16334
16335 if (oldPlaylist) {
16336 if (oldPlaylist.id) {
16337 oldId = oldPlaylist.id;
16338 } else if (oldPlaylist.uri) {
16339 oldId = oldPlaylist.uri;
16340 }
16341 }
16342
16343 this.logger_("playlist update [" + oldId + " => " + (newPlaylist.id || newPlaylist.uri) + "]"); // in VOD, this is always a rendition switch (or we updated our syncInfo above)
16344 // in LIVE, we always want to update with new playlists (including refreshes)
16345
16346 this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start
16347 // buffering now
16348
16349 if (this.state === 'INIT' && this.couldBeginLoading_()) {
16350 return this.init_();
16351 }
16352
16353 if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
16354 if (this.mediaIndex !== null) {
16355 // we must reset/resync the segment loader when we switch renditions and
16356 // the segment loader is already synced to the previous rendition
16357 // on playlist changes we want it to be possible to fetch
16358 // at the buffer for vod but not for live. So we use resetLoader
16359 // for live and resyncLoader for vod. We want this because
16360 // if a playlist uses independent and non-independent segments/parts the
16361 // buffer may not accurately reflect the next segment that we should try
16362 // downloading.
16363 if (!newPlaylist.endList) {
16364 this.resetLoader();
16365 } else {
16366 this.resyncLoader();
16367 }
16368 }
16369
16370 this.currentMediaInfo_ = void 0;
16371 this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being defined
16372
16373 return;
16374 } // we reloaded the same playlist so we are in a live scenario
16375 // and we will likely need to adjust the mediaIndex
16376
16377
16378 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
16379 this.logger_("live window shift [" + mediaSequenceDiff + "]"); // update the mediaIndex on the SegmentLoader
16380 // this is important because we can abort a request and this value must be
16381 // equal to the last appended mediaIndex
16382
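    // Illustrative: if the old playlist had mediaSequence 10 and the refresh has
    // mediaSequence 12, mediaSequenceDiff is 2, so a mediaIndex of 5 becomes 3 and
    // still points at the same physical segment; an index that goes negative means
    // the segment fell out of the live window and the indices are reset below.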
16383 if (this.mediaIndex !== null) {
16384 this.mediaIndex -= mediaSequenceDiff; // this can happen if we are going to load the first segment, but get a playlist
16385 // update during that. mediaIndex would go from 0 to -1 if mediaSequence in the
16386 // new playlist was incremented by 1.
16387
16388 if (this.mediaIndex < 0) {
16389 this.mediaIndex = null;
16390 this.partIndex = null;
16391 } else {
16392 var segment = this.playlist_.segments[this.mediaIndex]; // partIndex should remain the same for the same segment
16393 // unless parts fell off of the playlist for this segment.
16394 // In that case we need to reset partIndex and resync
16395
16396 if (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {
16397 var mediaIndex = this.mediaIndex;
16398 this.logger_("currently processing part (index " + this.partIndex + ") no longer exists.");
16399 this.resetLoader(); // We want to throw away the partIndex and the data associated with it,
16400 // as the part was dropped from our current playlists segment.
16401 // The mediaIndex will still be valid so keep that around.
16402
16403 this.mediaIndex = mediaIndex;
16404 }
16405 }
16406 } // update the mediaIndex on the SegmentInfo object
16407 // this is important because we will update this.mediaIndex with this value
16408 // in `handleAppendsDone_` after the segment has been successfully appended
16409
16410
16411 if (segmentInfo) {
16412 segmentInfo.mediaIndex -= mediaSequenceDiff;
16413
16414 if (segmentInfo.mediaIndex < 0) {
16415 segmentInfo.mediaIndex = null;
16416 segmentInfo.partIndex = null;
16417 } else {
16418 // we need to update the referenced segment so that timing information is
16419 // saved for the new playlist's segment, however, if the segment fell off the
16420 // playlist, we can leave the old reference and just lose the timing info
16421 if (segmentInfo.mediaIndex >= 0) {
16422 segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
16423 }
16424
16425 if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {
16426 segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];
16427 }
16428 }
16429 }
16430
16431 this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
16432 }
16433 /**
16434 * Prevent the loader from fetching additional segments. If there
16435 * is a segment request outstanding, it will finish processing
16436 * before the loader halts. A segment loader can be unpaused by
16437 * calling load().
16438 */
16439 ;
16440
16441 _proto.pause = function pause() {
16442 if (this.checkBufferTimeout_) {
16443 window$1.clearTimeout(this.checkBufferTimeout_);
16444 this.checkBufferTimeout_ = null;
16445 }
16446 }
16447 /**
16448 * Returns whether the segment loader is fetching additional
16449 * segments when given the opportunity. This property can be
16450 * modified through calls to pause() and load().
16451 */
16452 ;
16453
16454 _proto.paused = function paused() {
16455 return this.checkBufferTimeout_ === null;
16456 }
16457 /**
16458 * Delete all the buffered data and reset the SegmentLoader
16459 *
16460 * @param {Function} [done] an optional callback to be executed when the remove
16461 * operation is complete
16462 */
16463 ;
16464
16465 _proto.resetEverything = function resetEverything(done) {
16466 this.ended_ = false;
16467 this.appendInitSegment_ = {
16468 audio: true,
16469 video: true
16470 };
16471 this.resetLoader(); // remove from 0, the earliest point, to Infinity, to signify removal of everything.
16472 // VTT Segment Loader doesn't need to do anything but in the regular SegmentLoader,
16473 // we then clamp the value to duration if necessary.
16474
16475 this.remove(0, Infinity, done); // clears fmp4 captions
16476
16477 if (this.transmuxer_) {
16478 this.transmuxer_.postMessage({
16479 action: 'clearAllMp4Captions'
16480 }); // reset the cache in the transmuxer
16481
16482 this.transmuxer_.postMessage({
16483 action: 'reset'
16484 });
16485 }
16486 }
16487 /**
16488 * Force the SegmentLoader to resync and start loading around the currentTime instead
16489 * of starting at the end of the buffer
16490 *
16491 * Useful for fast quality changes
16492 */
16493 ;
16494
16495 _proto.resetLoader = function resetLoader() {
16496 this.fetchAtBuffer_ = false;
16497 this.resyncLoader();
16498 }
16499 /**
16500 * Force the SegmentLoader to restart synchronization and make a conservative guess
16501 * before returning to the simple walk-forward method
16502 */
16503 ;
16504
16505 _proto.resyncLoader = function resyncLoader() {
16506 if (this.transmuxer_) {
16507 // need to clear out any cached data to prepare for the new segment
16508 segmentTransmuxer.reset(this.transmuxer_);
16509 }
16510
16511 this.mediaIndex = null;
16512 this.partIndex = null;
16513 this.syncPoint_ = null;
16514 this.isPendingTimestampOffset_ = false;
16515 this.callQueue_ = [];
16516 this.loadQueue_ = [];
16517 this.metadataQueue_.id3 = [];
16518 this.metadataQueue_.caption = [];
16519 this.abort();
16520
16521 if (this.transmuxer_) {
16522 this.transmuxer_.postMessage({
16523 action: 'clearParsedMp4Captions'
16524 });
16525 }
16526 }
16527 /**
16528 * Remove any data in the source buffer between start and end times
16529 *
16530 * @param {number} start - the start time of the region to remove from the buffer
16531 * @param {number} end - the end time of the region to remove from the buffer
 * @param {Function} [done] - an optional callback to be executed when the remove
 * operation is complete
 * @param {boolean} [force] - force all remove operations to happen
16535 */
16536 ;
16537
16538 _proto.remove = function remove(start, end, done, force) {
16539 if (done === void 0) {
16540 done = function done() {};
16541 }
16542
16543 if (force === void 0) {
16544 force = false;
16545 }
16546
16547 // clamp end to duration if we need to remove everything.
16548 // This is due to a browser bug that causes issues if we remove to Infinity.
16549 // videojs/videojs-contrib-hls#1225
16550 if (end === Infinity) {
16551 end = this.duration_();
16552 } // skip removes that would throw an error
16553 // commonly happens during a rendition switch at the start of a video
16554 // from start 0 to end 0
16555
16556
16557 if (end <= start) {
      this.logger_("skipping remove because end " + end + " is <= start " + start);
16559 return;
16560 }
16561
16562 if (!this.sourceUpdater_ || !this.getMediaInfo_()) {
16563 this.logger_('skipping remove because no source updater or starting media info'); // nothing to remove if we haven't processed any media
16564
16565 return;
16566 } // set it to one to complete this function's removes
16567
16568
16569 var removesRemaining = 1;
16570
16571 var removeFinished = function removeFinished() {
16572 removesRemaining--;
16573
16574 if (removesRemaining === 0) {
16575 done();
16576 }
16577 };
16578
16579 if (force || !this.audioDisabled_) {
16580 removesRemaining++;
16581 this.sourceUpdater_.removeAudio(start, end, removeFinished);
16582 } // While it would be better to only remove video if the main loader has video, this
16583 // should be safe with audio only as removeVideo will call back even if there's no
16584 // video buffer.
16585 //
16586 // In theory we can check to see if there's video before calling the remove, but in
16587 // the event that we're switching between renditions and from video to audio only
16588 // (when we add support for that), we may need to clear the video contents despite
16589 // what the new media will contain.
16590
16591
16592 if (force || this.loaderType_ === 'main') {
16593 this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
16594 removesRemaining++;
16595 this.sourceUpdater_.removeVideo(start, end, removeFinished);
16596 } // remove any captions and ID3 tags
16597
16598
16599 for (var track in this.inbandTextTracks_) {
16600 removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
16601 }
16602
16603 removeCuesFromTrack(start, end, this.segmentMetadataTrack_); // finished this function's removes
16604
16605 removeFinished();
16606 }
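  // Illustrative usage (not part of the library): loader.remove(0, Infinity) clamps
  // the end to duration_(), removes audio (unless disabled) and video (for the main
  // loader), clears matching cues, and calls the done callback only after every
  // outstanding source buffer remove has finished.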
16607 /**
16608 * (re-)schedule monitorBufferTick_ to run as soon as possible
16609 *
16610 * @private
16611 */
16612 ;
16613
16614 _proto.monitorBuffer_ = function monitorBuffer_() {
16615 if (this.checkBufferTimeout_) {
16616 window$1.clearTimeout(this.checkBufferTimeout_);
16617 }
16618
16619 this.checkBufferTimeout_ = window$1.setTimeout(this.monitorBufferTick_.bind(this), 1);
16620 }
16621 /**
16622 * As long as the SegmentLoader is in the READY state, periodically
16623 * invoke fillBuffer_().
16624 *
16625 * @private
16626 */
16627 ;
16628
16629 _proto.monitorBufferTick_ = function monitorBufferTick_() {
16630 if (this.state === 'READY') {
16631 this.fillBuffer_();
16632 }
16633
16634 if (this.checkBufferTimeout_) {
16635 window$1.clearTimeout(this.checkBufferTimeout_);
16636 }
16637
16638 this.checkBufferTimeout_ = window$1.setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
16639 }
16640 /**
 * fill the buffer with segments unless the sourceBuffers are
16642 * currently updating
16643 *
16644 * Note: this function should only ever be called by monitorBuffer_
16645 * and never directly
16646 *
16647 * @private
16648 */
16649 ;
16650
16651 _proto.fillBuffer_ = function fillBuffer_() {
16652 // TODO since the source buffer maintains a queue, and we shouldn't call this function
16653 // except when we're ready for the next segment, this check can most likely be removed
16654 if (this.sourceUpdater_.updating()) {
16655 return;
16656 } // see if we need to begin loading immediately
16657
16658
16659 var segmentInfo = this.chooseNextRequest_();
16660
16661 if (!segmentInfo) {
16662 return;
16663 }
16664
16665 if (typeof segmentInfo.timestampOffset === 'number') {
16666 this.isPendingTimestampOffset_ = false;
16667 this.timelineChangeController_.pendingTimelineChange({
16668 type: this.loaderType_,
16669 from: this.currentTimeline_,
16670 to: segmentInfo.timeline
16671 });
16672 }
16673
16674 this.loadSegment_(segmentInfo);
16675 }
16676 /**
16677 * Determines if we should call endOfStream on the media source based
 * on the state of the buffer or if the appended segment was the final
16679 * segment in the playlist.
16680 *
16681 * @param {number} [mediaIndex] the media index of segment we last appended
16682 * @param {Object} [playlist] a media playlist object
16683 * @return {boolean} do we need to call endOfStream on the MediaSource
16684 */
16685 ;
16686
16687 _proto.isEndOfStream_ = function isEndOfStream_(mediaIndex, playlist, partIndex) {
16688 if (mediaIndex === void 0) {
16689 mediaIndex = this.mediaIndex;
16690 }
16691
16692 if (playlist === void 0) {
16693 playlist = this.playlist_;
16694 }
16695
16696 if (partIndex === void 0) {
16697 partIndex = this.partIndex;
16698 }
16699
16700 if (!playlist || !this.mediaSource_) {
16701 return false;
16702 }
16703
16704 var segment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex]; // mediaIndex is zero based but length is 1 based
16705
16706 var appendedLastSegment = mediaIndex + 1 === playlist.segments.length; // true if there are no parts, or this is the last part.
16707
16708 var appendedLastPart = !segment || !segment.parts || partIndex + 1 === segment.parts.length; // if we've buffered to the end of the video, we need to call endOfStream
16709 // so that MediaSources can trigger the `ended` event when it runs out of
16710 // buffered data instead of waiting for me
16711
16712 return playlist.endList && this.mediaSource_.readyState === 'open' && appendedLastSegment && appendedLastPart;
16713 }
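  // Illustrative (not part of the library): for a 5 segment VOD playlist
  // (endList: true) whose segments have no parts, isEndOfStream_(4, playlist, null)
  // is true once the media source is 'open'; a live playlist (endList: false)
  // always returns false.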
16714 /**
16715 * Determines what request should be made given current segment loader state.
16716 *
16717 * @return {Object} a request object that describes the segment/part to load
16718 */
16719 ;
16720
16721 _proto.chooseNextRequest_ = function chooseNextRequest_() {
16722 var buffered = this.buffered_();
16723 var bufferedEnd = lastBufferedEnd(buffered) || 0;
16724 var bufferedTime = timeAheadOf(buffered, this.currentTime_());
16725 var preloaded = !this.hasPlayed_() && bufferedTime >= 1;
16726 var haveEnoughBuffer = bufferedTime >= this.goalBufferLength_();
16727 var segments = this.playlist_.segments; // return no segment if:
16728 // 1. we don't have segments
16729 // 2. The video has not yet played and we already downloaded a segment
16730 // 3. we already have enough buffered time
16731
16732 if (!segments.length || preloaded || haveEnoughBuffer) {
16733 return null;
16734 }
16735
16736 this.syncPoint_ = this.syncPoint_ || this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
16737 var next = {
16738 partIndex: null,
16739 mediaIndex: null,
16740 startOfSegment: null,
16741 playlist: this.playlist_,
16742 isSyncRequest: Boolean(!this.syncPoint_)
16743 };
16744
16745 if (next.isSyncRequest) {
16746 next.mediaIndex = getSyncSegmentCandidate(this.currentTimeline_, segments, bufferedEnd);
16747 } else if (this.mediaIndex !== null) {
16748 var segment = segments[this.mediaIndex];
16749 var partIndex = typeof this.partIndex === 'number' ? this.partIndex : -1;
16750 next.startOfSegment = segment.end ? segment.end : bufferedEnd;
16751
16752 if (segment.parts && segment.parts[partIndex + 1]) {
16753 next.mediaIndex = this.mediaIndex;
16754 next.partIndex = partIndex + 1;
16755 } else {
16756 next.mediaIndex = this.mediaIndex + 1;
16757 }
16758 } else {
16759 // Find the segment containing the end of the buffer or current time.
16760 var _Playlist$getMediaInf = Playlist.getMediaInfoForTime({
16761 experimentalExactManifestTimings: this.experimentalExactManifestTimings,
16762 playlist: this.playlist_,
16763 currentTime: this.fetchAtBuffer_ ? bufferedEnd : this.currentTime_(),
16764 startingPartIndex: this.syncPoint_.partIndex,
16765 startingSegmentIndex: this.syncPoint_.segmentIndex,
16766 startTime: this.syncPoint_.time
16767 }),
16768 segmentIndex = _Playlist$getMediaInf.segmentIndex,
16769 startTime = _Playlist$getMediaInf.startTime,
16770 _partIndex = _Playlist$getMediaInf.partIndex;
16771
16772 next.getMediaInfoForTime = this.fetchAtBuffer_ ? "bufferedEnd " + bufferedEnd : "currentTime " + this.currentTime_();
16773 next.mediaIndex = segmentIndex;
16774 next.startOfSegment = startTime;
16775 next.partIndex = _partIndex;
16776 }
16777
16778 var nextSegment = segments[next.mediaIndex];
16779 var nextPart = nextSegment && typeof next.partIndex === 'number' && nextSegment.parts && nextSegment.parts[next.partIndex]; // if the next segment index is invalid or
16780 // the next partIndex is invalid do not choose a next segment.
16781
16782 if (!nextSegment || typeof next.partIndex === 'number' && !nextPart) {
16783 return null;
16784 } // if the next segment has parts, and we don't have a partIndex.
16785 // Set partIndex to 0
16786
16787
16788 if (typeof next.partIndex !== 'number' && nextSegment.parts) {
16789 next.partIndex = 0;
16790 nextPart = nextSegment.parts[0];
16791 } // if we have no buffered data then we need to make sure
16792 // that the next part we append is "independent" if possible.
16793 // So we check if the previous part is independent, and request
16794 // it if it is.
16795
16796
16797 if (!bufferedTime && nextPart && !nextPart.independent) {
16798 if (next.partIndex === 0) {
16799 var lastSegment = segments[next.mediaIndex - 1];
16800 var lastSegmentLastPart = lastSegment.parts && lastSegment.parts.length && lastSegment.parts[lastSegment.parts.length - 1];
16801
16802 if (lastSegmentLastPart && lastSegmentLastPart.independent) {
16803 next.mediaIndex -= 1;
16804 next.partIndex = lastSegment.parts.length - 1;
16805 next.independent = 'previous segment';
16806 }
16807 } else if (nextSegment.parts[next.partIndex - 1].independent) {
16808 next.partIndex -= 1;
16809 next.independent = 'previous part';
16810 }
16811 }
16812
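    // Illustrative: with nothing buffered and a chosen part that is not independent,
    // the request steps back to the closest independent part, e.g., from part 1 to
    // part 0 of the same segment ('previous part') when part 0 is independent, or
    // from part 0 to the last independent part of the prior segment
    // ('previous segment').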
16813 var ended = this.mediaSource_ && this.mediaSource_.readyState === 'ended'; // do not choose a next segment if all of the following:
16814 // 1. this is the last segment in the playlist
16815 // 2. end of stream has been called on the media source already
16816 // 3. the player is not seeking
16817
16818 if (next.mediaIndex >= segments.length - 1 && ended && !this.seeking_()) {
16819 return null;
16820 }
16821
16822 return this.generateSegmentInfo_(next);
16823 };
16824
16825 _proto.generateSegmentInfo_ = function generateSegmentInfo_(options) {
16826 var independent = options.independent,
16827 playlist = options.playlist,
16828 mediaIndex = options.mediaIndex,
16829 startOfSegment = options.startOfSegment,
16830 isSyncRequest = options.isSyncRequest,
16831 partIndex = options.partIndex,
16832 forceTimestampOffset = options.forceTimestampOffset,
16833 getMediaInfoForTime = options.getMediaInfoForTime;
16834 var segment = playlist.segments[mediaIndex];
16835 var part = typeof partIndex === 'number' && segment.parts[partIndex];
16836 var segmentInfo = {
16837 requestId: 'segment-loader-' + Math.random(),
16838 // resolve the segment URL relative to the playlist
16839 uri: part && part.resolvedUri || segment.resolvedUri,
16840 // the segment's mediaIndex at the time it was requested
16841 mediaIndex: mediaIndex,
16842 partIndex: part ? partIndex : null,
16843 // whether or not to update the SegmentLoader's state with this
16844 // segment's mediaIndex
16845 isSyncRequest: isSyncRequest,
16846 startOfSegment: startOfSegment,
16847 // the segment's playlist
16848 playlist: playlist,
16849 // unencrypted bytes of the segment
16850 bytes: null,
16851 // when a key is defined for this segment, the encrypted bytes
16852 encryptedBytes: null,
16853 // The target timestampOffset for this segment when we append it
16854 // to the source buffer
16855 timestampOffset: null,
16856 // The timeline that the segment is in
16857 timeline: segment.timeline,
16858 // The expected duration of the segment in seconds
16859 duration: part && part.duration || segment.duration,
16860 // retain the segment in case the playlist updates while doing an async process
16861 segment: segment,
16862 part: part,
16863 byteLength: 0,
16864 transmuxer: this.transmuxer_,
16865 // type of getMediaInfoForTime that was used to get this segment
16866 getMediaInfoForTime: getMediaInfoForTime,
16867 independent: independent
16868 };
16869 var overrideCheck = typeof forceTimestampOffset !== 'undefined' ? forceTimestampOffset : this.isPendingTimestampOffset_;
16870 segmentInfo.timestampOffset = this.timestampOffsetForSegment_({
16871 segmentTimeline: segment.timeline,
16872 currentTimeline: this.currentTimeline_,
16873 startOfSegment: startOfSegment,
16874 buffered: this.buffered_(),
16875 overrideCheck: overrideCheck
16876 });
16877 var audioBufferedEnd = lastBufferedEnd(this.sourceUpdater_.audioBuffered());
16878
16879 if (typeof audioBufferedEnd === 'number') {
16880 // since the transmuxer is using the actual timing values, but the buffer is
16881 // adjusted by the timestamp offset, we must adjust the value here
16882 segmentInfo.audioAppendStart = audioBufferedEnd - this.sourceUpdater_.audioTimestampOffset();
16883 }
16884
16885 if (this.sourceUpdater_.videoBuffered().length) {
16886 segmentInfo.gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_, // since the transmuxer is using the actual timing values, but the time is
16887 // adjusted by the timestamp offset, we must adjust the value here
16888 this.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);
16889 }
16890
16891 return segmentInfo;
16892 } // get the timestampoffset for a segment,
16893 // added so that vtt segment loader can override and prevent
16894 // adding timestamp offsets.
16895 ;
16896
16897 _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_(options) {
16898 return timestampOffsetForSegment(options);
16899 }
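// The one-line indirection above exists purely as an override hook. A
// hypothetical subclass that must never shift timestamps could neutralize it
// like so (sketch only; the actual VTT loader override may differ):
//
//   VTTSegmentLoader.prototype.timestampOffsetForSegment_ = function () {
//     return null; // a null offset is ignored by the timestamp-offset updates below
//   };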
16900 /**
16901 * Determines if the network has enough bandwidth to complete the current segment
16902 * request in a timely manner. If not, the request will be aborted early and bandwidth
16903 * updated to trigger a playlist switch.
16904 *
16905 * @param {Object} stats
16906 * Object containing stats about the request timing and size
16907 * @private
16908 */
16909 ;
16910
16911 _proto.earlyAbortWhenNeeded_ = function earlyAbortWhenNeeded_(stats) {
16912 if (this.vhs_.tech_.paused() || // Don't abort if the current playlist is on the lowestEnabledRendition
16913 // TODO: Replace using timeout with a boolean indicating whether this playlist is
16914 // the lowestEnabledRendition.
16915 !this.xhrOptions_.timeout || // Don't abort if we have no bandwidth information to estimate segment sizes
16916 !this.playlist_.attributes.BANDWIDTH) {
16917 return;
16918 } // Wait at least 1 second since the first byte of data has been received before
16919 // using the calculated bandwidth from the progress event to allow the bitrate
16920 // to stabilize
16921
16922
16923 if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
16924 return;
16925 }
16926
16927 var currentTime = this.currentTime_();
16928 var measuredBandwidth = stats.bandwidth;
16929 var segmentDuration = this.pendingSegment_.duration;
16930 var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
16931 // if we are only left with less than 1 second when the request completes.
16932 // A negative timeUntilRebuffer indicates we are already rebuffering
16933
16934 var timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download
16935 // is larger than the estimated time until the player runs out of forward buffer
16936
16937 if (requestTimeRemaining <= timeUntilRebuffer$1) {
16938 return;
16939 }
16940
16941 var switchCandidate = minRebufferMaxBandwidthSelector({
16942 master: this.vhs_.playlists.master,
16943 currentTime: currentTime,
16944 bandwidth: measuredBandwidth,
16945 duration: this.duration_(),
16946 segmentDuration: segmentDuration,
16947 timeUntilRebuffer: timeUntilRebuffer$1,
16948 currentTimeline: this.currentTimeline_,
16949 syncController: this.syncController_
16950 });
16951
16952 if (!switchCandidate) {
16953 return;
16954 }
16955
16956 var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;
16957 var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
16958 var minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the
16959 // potential round trip time of the new request so that we are not too aggressive
16960 // with switching to a playlist that might save us a fraction of a second.
16961
16962 if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {
16963 minimumTimeSaving = 1;
16964 }
16965
16966 if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
16967 return;
16968 } // set the bandwidth to that of the desired playlist being sure to scale by
16969 // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
16970 // don't trigger a bandwidthupdate as the bandwidth is artificial
16971
16972
16973 this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
16974 this.trigger('earlyabort');
16975 };
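// Worked example of the early-abort math above, with illustrative numbers and
// assuming Playlist.estimateSegmentRequestTime scales the playlist's
// BANDWIDTH attribute by the segment duration. For a 6s segment on a
// 4,000,000 bps rendition, with 500,000 bytes received and a measured
// bandwidth of 2,000,000 bps:
//
//   totalBits            = 6 * 4,000,000            = 24,000,000
//   remainingBits        = 24,000,000 - 500,000 * 8 = 20,000,000
//   requestTimeRemaining = 20,000,000 / 2,000,000   = 10s
//
// With 4s of forward buffer at playbackRate 1, timeUntilRebuffer is
// 4 - 1 = 3s; since 10 > 3 a switch candidate is considered, and the switch
// only happens if it saves at least minimumTimeSaving (0.5s, or 1s when
// already within TIME_FUDGE_FACTOR of rebuffering).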
16976
16977 _proto.handleAbort_ = function handleAbort_(segmentInfo) {
16978 this.logger_("Aborting " + segmentInfoString(segmentInfo));
16979 this.mediaRequestsAborted += 1;
16980 }
16981 /**
16982 * XHR `progress` event handler
16983 *
16984 * @param {Event} event
16985 * The XHR `progress` event
16986 * @param {Object} simpleSegment
16987 * A simplified segment object copy
16988 * @private
16989 */
16990 ;
16991
16992 _proto.handleProgress_ = function handleProgress_(event, simpleSegment) {
16993 this.earlyAbortWhenNeeded_(simpleSegment.stats);
16994
16995 if (this.checkForAbort_(simpleSegment.requestId)) {
16996 return;
16997 }
16998
16999 this.trigger('progress');
17000 };
17001
17002 _proto.handleTrackInfo_ = function handleTrackInfo_(simpleSegment, trackInfo) {
17003 this.earlyAbortWhenNeeded_(simpleSegment.stats);
17004
17005 if (this.checkForAbort_(simpleSegment.requestId)) {
17006 return;
17007 }
17008
17009 if (this.checkForIllegalMediaSwitch(trackInfo)) {
17010 return;
17011 }
17012
17013 trackInfo = trackInfo || {}; // When we have track info, determine what media types this loader is dealing with.
17014 // Guard against cases where we're not getting track info at all until we are
17015 // certain that all streams will provide it.
17016
17017 if (!shallowEqual(this.currentMediaInfo_, trackInfo)) {
17018 this.appendInitSegment_ = {
17019 audio: true,
17020 video: true
17021 };
17022 this.startingMediaInfo_ = trackInfo;
17023 this.currentMediaInfo_ = trackInfo;
17024 this.logger_('trackinfo update', trackInfo);
17025 this.trigger('trackinfo');
17026 } // trackinfo may cause an abort if the trackinfo
17027 // causes a codec change to an unsupported codec.
17028
17029
17030 if (this.checkForAbort_(simpleSegment.requestId)) {
17031 return;
17032 } // set trackinfo on the pending segment so that
17033 // it can append.
17034
17035
17036 this.pendingSegment_.trackInfo = trackInfo; // check if any calls were waiting on the track info
17037
17038 if (this.hasEnoughInfoToAppend_()) {
17039 this.processCallQueue_();
17040 }
17041 };
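// For reference, trackInfo here is the shallow object produced by probing the
// segment, shaped roughly like:
//
//   { hasAudio: true, hasVideo: true, isMuxed: false }
//
// Any change in that shape (e.g. audio disappearing mid-stream) fails the
// shallowEqual check above and forces init segments for both types to be
// re-appended.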
17042
17043 _proto.handleTimingInfo_ = function handleTimingInfo_(simpleSegment, mediaType, timeType, time) {
17044 this.earlyAbortWhenNeeded_(simpleSegment.stats);
17045
17046 if (this.checkForAbort_(simpleSegment.requestId)) {
17047 return;
17048 }
17049
17050 var segmentInfo = this.pendingSegment_;
17051 var timingInfoProperty = timingInfoPropertyForMedia(mediaType);
17052 segmentInfo[timingInfoProperty] = segmentInfo[timingInfoProperty] || {};
17053 segmentInfo[timingInfoProperty][timeType] = time;
17054 this.logger_("timinginfo: " + mediaType + " - " + timeType + " - " + time); // check if any calls were waiting on the timing info
17055
17056 if (this.hasEnoughInfoToAppend_()) {
17057 this.processCallQueue_();
17058 }
17059 };
17060
17061 _proto.handleCaptions_ = function handleCaptions_(simpleSegment, captionData) {
17062 var _this2 = this;
17063
17064 this.earlyAbortWhenNeeded_(simpleSegment.stats);
17065
17066 if (this.checkForAbort_(simpleSegment.requestId)) {
17067 return;
17068 } // This could only happen with fmp4 segments, but
17069 // even then it should not happen in general
17070
17071
17072 if (captionData.length === 0) {
17073 this.logger_('SegmentLoader received no captions from a caption event');
17074 return;
17075 }
17076
17077 var segmentInfo = this.pendingSegment_; // Wait until we have some video data so that caption timing
17078 // can be adjusted by the timestamp offset
17079
17080 if (!segmentInfo.hasAppendedData_) {
17081 this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));
17082 return;
17083 }
17084
17085 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();
17086 var captionTracks = {}; // get total start/end and captions for each track/stream
17087
17088 captionData.forEach(function (caption) {
17089 // caption.stream is actually a track name...
17090 // set to the existing values in tracks or default values
17091 captionTracks[caption.stream] = captionTracks[caption.stream] || {
17092 // Infinity, as any other value will be less than this
17093 startTime: Infinity,
17094 captions: [],
17095 // 0, as any other value will be more than this
17096 endTime: 0
17097 };
17098 var captionTrack = captionTracks[caption.stream];
17099 captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);
17100 captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);
17101 captionTrack.captions.push(caption);
17102 });
17103 Object.keys(captionTracks).forEach(function (trackName) {
17104 var _captionTracks$trackN = captionTracks[trackName],
17105 startTime = _captionTracks$trackN.startTime,
17106 endTime = _captionTracks$trackN.endTime,
17107 captions = _captionTracks$trackN.captions;
17108 var inbandTextTracks = _this2.inbandTextTracks_;
17109
17110 _this2.logger_("adding cues from " + startTime + " -> " + endTime + " for " + trackName);
17111
17112 createCaptionsTrackIfNotExists(inbandTextTracks, _this2.vhs_.tech_, trackName); // clear out any cues that start and end at the same time period for the same track.
17113 // We do this because a rendition change that also changes the timescale for captions
17114 // will result in captions being re-parsed for certain segments. If we add them again
17115 // without clearing we will have two of the same captions visible.
17116
17117 removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);
17118 addCaptionData({
17119 captionArray: captions,
17120 inbandTextTracks: inbandTextTracks,
17121 timestampOffset: timestampOffset
17122 });
17123 }); // Reset stored captions since we added parsed
17124 // captions to a text track at this point
17125
17126 if (this.transmuxer_) {
17127 this.transmuxer_.postMessage({
17128 action: 'clearParsedMp4Captions'
17129 });
17130 }
17131 };
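// Illustrative sketch of the per-track fold above. Given timestampOffset = 10
// and caption cues parsed from the segment:
//
//   [{ stream: 'CC1', startTime: 2, endTime: 4 /* , ... */ },
//    { stream: 'CC1', startTime: 5, endTime: 6 /* , ... */ }]
//
// captionTracks.CC1 becomes { startTime: 12, endTime: 16, captions: [...] }.
// Any cues already occupying 12 -> 16 on the CC1 track are removed before the
// re-parsed cues are added, which is what prevents duplicate captions when a
// rendition change causes the same segments to be re-parsed.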
17132
17133 _proto.handleId3_ = function handleId3_(simpleSegment, id3Frames, dispatchType) {
17134 this.earlyAbortWhenNeeded_(simpleSegment.stats);
17135
17136 if (this.checkForAbort_(simpleSegment.requestId)) {
17137 return;
17138 }
17139
17140 var segmentInfo = this.pendingSegment_; // we need to have appended data in order for the timestamp offset to be set
17141
17142 if (!segmentInfo.hasAppendedData_) {
17143 this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));
17144 return;
17145 }
17146
17147 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset(); // There's potentially an issue where we could double add metadata if there's a muxed
17148 // audio/video source with a metadata track, and an alt audio with a metadata track.
17149 // However, this probably won't happen, and if it does it can be handled then.
17150
17151 createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.vhs_.tech_);
17152 addMetadata({
17153 inbandTextTracks: this.inbandTextTracks_,
17154 metadataArray: id3Frames,
17155 timestampOffset: timestampOffset,
17156 videoDuration: this.duration_()
17157 });
17158 };
17159
17160 _proto.processMetadataQueue_ = function processMetadataQueue_() {
17161 this.metadataQueue_.id3.forEach(function (fn) {
17162 return fn();
17163 });
17164 this.metadataQueue_.caption.forEach(function (fn) {
17165 return fn();
17166 });
17167 this.metadataQueue_.id3 = [];
17168 this.metadataQueue_.caption = [];
17169 };
17170
17171 _proto.processCallQueue_ = function processCallQueue_() {
17172 var callQueue = this.callQueue_; // Clear out the queue before the queued functions are run, since some of the
17173 // functions may check the length of the call queue and default to pushing themselves
17174 // back onto the queue.
17175
17176 this.callQueue_ = [];
17177 callQueue.forEach(function (fun) {
17178 return fun();
17179 });
17180 };
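// Note on the swap-before-run pattern above: a queued function may call back
// into a handler (e.g. handleData_) that still lacks enough info and pushes
// itself onto this.callQueue_ again. Because the loop iterates a local
// snapshot while new pushes land in the fresh array, the re-queued entry
// simply waits for the next processCallQueue_ instead of re-running (or
// looping) within the same pass.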
17181
17182 _proto.processLoadQueue_ = function processLoadQueue_() {
17183 var loadQueue = this.loadQueue_; // Clear out the queue before the queued functions are run, since some of the
17184 // functions may check the length of the load queue and default to pushing themselves
17185 // back onto the queue.
17186
17187 this.loadQueue_ = [];
17188 loadQueue.forEach(function (fun) {
17189 return fun();
17190 });
17191 }
17192 /**
17193 * Determines whether the loader has enough info to load the next segment.
17194 *
17195 * @return {boolean}
17196 * Whether or not the loader has enough info to load the next segment
17197 */
17198 ;
17199
17200 _proto.hasEnoughInfoToLoad_ = function hasEnoughInfoToLoad_() {
17201 // Since primary timing goes by video, only the audio loader potentially needs to wait
17202 // to load.
17203 if (this.loaderType_ !== 'audio') {
17204 return true;
17205 }
17206
17207 var segmentInfo = this.pendingSegment_; // A fill buffer must have already run to establish a pending segment before there's
17208 // enough info to load.
17209
17210 if (!segmentInfo) {
17211 return false;
17212 } // The first segment can and should be loaded immediately so that source buffers are
17213 // created together (before appending). Source buffer creation uses the presence of
17214 // audio and video data to determine whether to create audio/video source buffers, and
17215 // uses processed (transmuxed or parsed) media to determine the types required.
17216
17217
17218 if (!this.getCurrentMediaInfo_()) {
17219 return true;
17220 }
17221
17222 if ( // Technically, instead of waiting to load a segment on timeline changes, a segment
17223 // can be requested and downloaded and only wait before it is transmuxed or parsed.
17224 // But in practice, there are a few reasons why it is better to wait until a loader
17225 // is ready to append that segment before requesting and downloading:
17226 //
17227 // 1. Because audio and main loaders cross discontinuities together, if this loader
17228 // is waiting for the other to catch up, then instead of requesting another
17229 // segment and using up more bandwidth, by not yet loading, more bandwidth is
17230 // allotted to the loader currently behind.
17231 // 2. media-segment-request doesn't have to have logic to consider whether a segment
17232 // is ready to be processed or not, isolating the queueing behavior to the loader.
17233 // 3. The audio loader bases some of its segment properties on timing information
17234 // provided by the main loader, meaning that, if the logic for waiting on
17235 // processing was in media-segment-request, then it would also need to know how
17236 // to re-generate the segment information after the main loader caught up.
17237 shouldWaitForTimelineChange({
17238 timelineChangeController: this.timelineChangeController_,
17239 currentTimeline: this.currentTimeline_,
17240 segmentTimeline: segmentInfo.timeline,
17241 loaderType: this.loaderType_,
17242 audioDisabled: this.audioDisabled_
17243 })) {
17244 return false;
17245 }
17246
17247 return true;
17248 };
17249
17250 _proto.getCurrentMediaInfo_ = function getCurrentMediaInfo_(segmentInfo) {
17251 if (segmentInfo === void 0) {
17252 segmentInfo = this.pendingSegment_;
17253 }
17254
17255 return segmentInfo && segmentInfo.trackInfo || this.currentMediaInfo_;
17256 };
17257
17258 _proto.getMediaInfo_ = function getMediaInfo_(segmentInfo) {
17259 if (segmentInfo === void 0) {
17260 segmentInfo = this.pendingSegment_;
17261 }
17262
17263 return this.getCurrentMediaInfo_(segmentInfo) || this.startingMediaInfo_;
17264 };
17265
17266 _proto.hasEnoughInfoToAppend_ = function hasEnoughInfoToAppend_() {
17267 if (!this.sourceUpdater_.ready()) {
17268 return false;
17269 } // If content needs to be removed or the loader is waiting on an append reattempt,
17270 // then no additional content should be appended until the prior append is resolved.
17271
17272
17273 if (this.waitingOnRemove_ || this.quotaExceededErrorRetryTimeout_) {
17274 return false;
17275 }
17276
17277 var segmentInfo = this.pendingSegment_;
17278 var trackInfo = this.getCurrentMediaInfo_(); // no segment to append any data for or
17279 // we do not have information on this specific
17280 // segment yet
17281
17282 if (!segmentInfo || !trackInfo) {
17283 return false;
17284 }
17285
17286 var hasAudio = trackInfo.hasAudio,
17287 hasVideo = trackInfo.hasVideo,
17288 isMuxed = trackInfo.isMuxed;
17289
17290 if (hasVideo && !segmentInfo.videoTimingInfo) {
17291 return false;
17292 } // muxed content only relies on video timing information for now.
17293
17294
17295 if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {
17296 return false;
17297 }
17298
17299 if (shouldWaitForTimelineChange({
17300 timelineChangeController: this.timelineChangeController_,
17301 currentTimeline: this.currentTimeline_,
17302 segmentTimeline: segmentInfo.timeline,
17303 loaderType: this.loaderType_,
17304 audioDisabled: this.audioDisabled_
17305 })) {
17306 return false;
17307 }
17308
17309 return true;
17310 };
17311
17312 _proto.handleData_ = function handleData_(simpleSegment, result) {
17313 this.earlyAbortWhenNeeded_(simpleSegment.stats);
17314
17315 if (this.checkForAbort_(simpleSegment.requestId)) {
17316 return;
17317 } // If there's anything in the call queue, then this data came later and should be
17318 // executed after the calls currently queued.
17319
17320
17321 if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {
17322 this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));
17323 return;
17324 }
17325
17326 var segmentInfo = this.pendingSegment_; // update the time mapping so we can translate from display time to media time
17327
17328 this.setTimeMapping_(segmentInfo.timeline); // for tracking overall stats
17329
17330 this.updateMediaSecondsLoaded_(segmentInfo.part || segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort
17331 // logic may change behavior depending on the state, and changing state too early may
17332 // inflate our estimates of bandwidth. In the future this should be re-examined to
17333 // note more granular states.
17334 // don't process and append data if the mediaSource is closed
17335
17336 if (this.mediaSource_.readyState === 'closed') {
17337 return;
17338 } // if this request included an initialization segment, save that data
17339 // to the initSegment cache
17340
17341
17342 if (simpleSegment.map) {
17343 simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // move over init segment properties to media request
17344
17345 segmentInfo.segment.map = simpleSegment.map;
17346 } // if this request included a segment key, save that data in the cache
17347
17348
17349 if (simpleSegment.key) {
17350 this.segmentKey(simpleSegment.key, true);
17351 }
17352
17353 segmentInfo.isFmp4 = simpleSegment.isFmp4;
17354 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
17355
17356 if (segmentInfo.isFmp4) {
17357 this.trigger('fmp4');
17358 segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;
17359 } else {
17360 var trackInfo = this.getCurrentMediaInfo_();
17361 var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
17362 var firstVideoFrameTimeForData;
17363
17364 if (useVideoTimingInfo) {
17365 firstVideoFrameTimeForData = segmentInfo.videoTimingInfo.start;
17366 } // Segment loader knows more about segment timing than the transmuxer (in certain
17367 // aspects), so make any changes required for a more accurate start time.
17368 // Don't set the end time yet, as the segment may not be finished processing.
17369
17370
17371 segmentInfo.timingInfo.start = this.trueSegmentStart_({
17372 currentStart: segmentInfo.timingInfo.start,
17373 playlist: segmentInfo.playlist,
17374 mediaIndex: segmentInfo.mediaIndex,
17375 currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),
17376 useVideoTimingInfo: useVideoTimingInfo,
17377 firstVideoFrameTimeForData: firstVideoFrameTimeForData,
17378 videoTimingInfo: segmentInfo.videoTimingInfo,
17379 audioTimingInfo: segmentInfo.audioTimingInfo
17380 });
17381 } // Init segments for audio and video only need to be appended in certain cases. Now
17382 // that data is about to be appended, we can check the final cases to determine
17383 // whether we should append an init segment.
17384
17385
17386 this.updateAppendInitSegmentStatus(segmentInfo, result.type); // Timestamp offset should be updated once we get new data and have its timing info,
17387 // as we use the start of the segment to offset the best guess (playlist provided)
17388 // timestamp offset.
17389
17390 this.updateSourceBufferTimestampOffset_(segmentInfo); // if this is a sync request we need to determine whether it should
17391 // be appended or not.
17392
17393 if (segmentInfo.isSyncRequest) {
17394 // first save/update our timing info for this segment.
17395 // this is what allows us to choose an accurate segment
17396 // and the main reason we make a sync request.
17397 this.updateTimingInfoEnd_(segmentInfo);
17398 this.syncController_.saveSegmentTimingInfo({
17399 segmentInfo: segmentInfo,
17400 shouldSaveTimelineMapping: this.loaderType_ === 'main'
17401 });
17402 var next = this.chooseNextRequest_(); // If the sync request isn't the segment that would be requested next
17403 // after taking into account its timing info, do not append it.
17404
17405 if (next.mediaIndex !== segmentInfo.mediaIndex || next.partIndex !== segmentInfo.partIndex) {
17406 this.logger_('sync segment was incorrect, not appending');
17407 return;
17408 } // otherwise append it like any other segment as our guess was correct.
17409
17410
17411 this.logger_('sync segment was correct, appending');
17412 } // Save some state so that in the future anything waiting on first append (and/or
17413 // timestamp offset(s)) can process immediately. While the extra state isn't optimal,
17414 // we need some notion of whether the timestamp offset or other relevant information
17415 // has had a chance to be set.
17416
17417
17418 segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.
17419
17420 this.processMetadataQueue_();
17421 this.appendData_(segmentInfo, result);
17422 };
17423
17424 _proto.updateAppendInitSegmentStatus = function updateAppendInitSegmentStatus(segmentInfo, type) {
17425 // alt audio doesn't manage timestamp offset
17426 if (this.loaderType_ === 'main' && typeof segmentInfo.timestampOffset === 'number' && // in the case that we're handling partial data, we don't want to append an init
17427 // segment for each chunk
17428 !segmentInfo.changedTimestampOffset) {
17429 // if the timestamp offset changed, the timeline may have changed, so we have to re-
17430 // append init segments
17431 this.appendInitSegment_ = {
17432 audio: true,
17433 video: true
17434 };
17435 }
17436
17437 if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {
17438 // make sure we append init segment on playlist changes, in case the media config
17439 // changed
17440 this.appendInitSegment_[type] = true;
17441 }
17442 };
17443
17444 _proto.getInitSegmentAndUpdateState_ = function getInitSegmentAndUpdateState_(_ref4) {
17445 var type = _ref4.type,
17446 initSegment = _ref4.initSegment,
17447 map = _ref4.map,
17448 playlist = _ref4.playlist;
17449
17450 // "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section
17451 // (Section 3) required to parse the applicable Media Segments. It applies to every
17452 // Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag
17453 // or until the end of the playlist."
17454 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5
17455 if (map) {
17456 var id = initSegmentId(map);
17457
17458 if (this.activeInitSegmentId_ === id) {
17459 // don't need to re-append the init segment if the ID matches
17460 return null;
17461 } // a map-specified init segment takes priority over any transmuxed (or otherwise
17462 // obtained) init segment
17463 //
17464 // this also caches the init segment for later use
17465
17466
17467 initSegment = this.initSegmentForMap(map, true).bytes;
17468 this.activeInitSegmentId_ = id;
17469 } // We used to always prepend init segments for video, however, that shouldn't be
17470 // necessary. Instead, we should only append on changes, similar to what we've always
17471 // done for audio. This is more important (though may not be that important) for
17472 // frame-by-frame appending for LHLS, simply because of the increased quantity of
17473 // appends.
17474
17475
17476 if (initSegment && this.appendInitSegment_[type]) {
17477 // Make sure we track the playlist that we last used for the init segment, so that
17478 // we can re-append the init segment in the event that we get data from a new
17479 // playlist. Discontinuities and track changes are handled in other sections.
17480 this.playlistOfLastInitSegment_[type] = playlist; // Disable future init segment appends for this type until a change is necessary.
17481
17482 this.appendInitSegment_[type] = false; // we need to clear out the fmp4 active init segment id, since
17483 // we are appending the muxer init segment
17484
17485 this.activeInitSegmentId_ = null;
17486 return initSegment;
17487 }
17488
17489 return null;
17490 };
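// Sketch of the map-based caching above across three hypothetical appends
// (assuming appendInitSegment_[type] is true throughout):
//
//   append 1: map A -> id 'A' is new, appends A's bytes, activeInitSegmentId_ = 'A'
//   append 2: map A -> id 'A' matches, returns null (no redundant append)
//   append 3: map B -> id 'B' differs, appends B's bytes, activeInitSegmentId_ = 'B'
//
// Transmuxer-generated init segments have no map, so they are gated only by
// appendInitSegment_[type], which is flipped back to true on playlist or
// timestamp-offset changes.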
17491
17492 _proto.handleQuotaExceededError_ = function handleQuotaExceededError_(_ref5, error) {
17493 var _this3 = this;
17494
17495 var segmentInfo = _ref5.segmentInfo,
17496 type = _ref5.type,
17497 bytes = _ref5.bytes;
17498 var audioBuffered = this.sourceUpdater_.audioBuffered();
17499 var videoBuffered = this.sourceUpdater_.videoBuffered(); // For now we're ignoring any notion of gaps in the buffer, but they, in theory,
17500 // should be cleared out during the buffer removals. However, log in case it helps
17501 // debug.
17502
17503 if (audioBuffered.length > 1) {
17504 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the audio buffer: ' + timeRangesToArray(audioBuffered).join(', '));
17505 }
17506
17507 if (videoBuffered.length > 1) {
17508 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the video buffer: ' + timeRangesToArray(videoBuffered).join(', '));
17509 }
17510
17511 var audioBufferStart = audioBuffered.length ? audioBuffered.start(0) : 0;
17512 var audioBufferEnd = audioBuffered.length ? audioBuffered.end(audioBuffered.length - 1) : 0;
17513 var videoBufferStart = videoBuffered.length ? videoBuffered.start(0) : 0;
17514 var videoBufferEnd = videoBuffered.length ? videoBuffered.end(videoBuffered.length - 1) : 0;
17515
17516 if (audioBufferEnd - audioBufferStart <= MIN_BACK_BUFFER && videoBufferEnd - videoBufferStart <= MIN_BACK_BUFFER) {
17517 // Can't remove enough buffer to make room for new segment (or the browser doesn't
17518 // allow for appends of segments this size). In the future, it may be possible to
17519 // split up the segment and append in pieces, but for now, error out this playlist
17520 // in an attempt to switch to a more manageable rendition.
17521 this.logger_('On QUOTA_EXCEEDED_ERR, single segment too large to append to ' + 'buffer, triggering an error. ' + ("Appended byte length: " + bytes.byteLength + ", ") + ("audio buffer: " + timeRangesToArray(audioBuffered).join(', ') + ", ") + ("video buffer: " + timeRangesToArray(videoBuffered).join(', ') + ", "));
17522 this.error({
17523 message: 'Quota exceeded error with append of a single segment of content',
17524 excludeUntil: Infinity
17525 });
17526 this.trigger('error');
17527 return;
17528 } // To try to resolve the quota exceeded error, clear back buffer and retry. This means
17529 // that the segment-loader should block on future events until this one is handled, so
17530 // that it doesn't keep moving onto further segments. Adding the call to the call
17531 // queue will prevent further appends until waitingOnRemove_ and
17532 // quotaExceededErrorRetryTimeout_ are cleared.
17533 //
17534 // Note that this will only block the current loader. In the case of demuxed content,
17535 // the other load may keep filling as fast as possible. In practice, this should be
17536 // OK, as it is a rare case when either audio has a high enough bitrate to fill up a
17537 // source buffer, or video fills without enough room for audio to append (and without
17538 // the availability of clearing out seconds of back buffer to make room for audio).
17539 // But it might still be good to handle this case in the future as a TODO.
17540
17541
17542 this.waitingOnRemove_ = true;
17543 this.callQueue_.push(this.appendToSourceBuffer_.bind(this, {
17544 segmentInfo: segmentInfo,
17545 type: type,
17546 bytes: bytes
17547 }));
17548 var currentTime = this.currentTime_(); // Try to remove as much audio and video as possible to make room for new content
17549 // before retrying.
17550
17551 var timeToRemoveUntil = currentTime - MIN_BACK_BUFFER;
17552 this.logger_("On QUOTA_EXCEEDED_ERR, removing audio/video from 0 to " + timeToRemoveUntil);
17553 this.remove(0, timeToRemoveUntil, function () {
17554 _this3.logger_("On QUOTA_EXCEEDED_ERR, retrying append in " + MIN_BACK_BUFFER + "s");
17555
17556 _this3.waitingOnRemove_ = false; // wait the length of time allotted in the back buffer to prevent wasted
17557 // attempts (since we can't clear less than the minimum)
17558
17559 _this3.quotaExceededErrorRetryTimeout_ = window$1.setTimeout(function () {
17560 _this3.logger_('On QUOTA_EXCEEDED_ERR, re-processing call queue');
17561
17562 _this3.quotaExceededErrorRetryTimeout_ = null;
17563
17564 _this3.processCallQueue_();
17565 }, MIN_BACK_BUFFER * 1000);
17566 }, true);
17567 };
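// Worked example of the recovery path above, with illustrative numbers and
// assuming MIN_BACK_BUFFER is 30. At currentTime = 95s:
//
//   timeToRemoveUntil = 95 - 30 = 65  -> remove buffered media in [0, 65)
//
// The failed append is parked on callQueue_, waitingOnRemove_ blocks further
// appends, and after the removal completes a 30s timer must elapse before
// processCallQueue_ retries, since an earlier retry could not clear any more
// back buffer than this attempt already did.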
17568
17569 _proto.handleAppendError_ = function handleAppendError_(_ref6, error) {
17570 var segmentInfo = _ref6.segmentInfo,
17571 type = _ref6.type,
17572 bytes = _ref6.bytes;
17573
17574 // if there's no error, nothing to do
17575 if (!error) {
17576 return;
17577 }
17578
17579 if (error.code === QUOTA_EXCEEDED_ERR) {
17580 this.handleQuotaExceededError_({
17581 segmentInfo: segmentInfo,
17582 type: type,
17583 bytes: bytes
17584 }); // A quota exceeded error should be recoverable with a future re-append, so no need
17585 // to trigger an append error.
17586
17587 return;
17588 }
17589
17590 this.logger_('Received non QUOTA_EXCEEDED_ERR on append', error);
17591 this.error(type + " append of " + bytes.length + "b failed for segment " + ("#" + segmentInfo.mediaIndex + " in playlist " + segmentInfo.playlist.id)); // If an append errors, we often can't recover.
17592 // (see https://w3c.github.io/media-source/#sourcebuffer-append-error).
17593 //
17594 // Trigger a special error so that it can be handled separately from normal,
17595 // recoverable errors.
17596
17597 this.trigger('appenderror');
17598 };
17599
17600 _proto.appendToSourceBuffer_ = function appendToSourceBuffer_(_ref7) {
17601 var segmentInfo = _ref7.segmentInfo,
17602 type = _ref7.type,
17603 initSegment = _ref7.initSegment,
17604 data = _ref7.data,
17605 bytes = _ref7.bytes;
17606
17607 // If this is a re-append, bytes were already created and don't need to be recreated
17608 if (!bytes) {
17609 var segments = [data];
17610 var byteLength = data.byteLength;
17611
17612 if (initSegment) {
17613 // if the media initialization segment is changing, append it before the content
17614 // segment
17615 segments.unshift(initSegment);
17616 byteLength += initSegment.byteLength;
17617 } // Technically we should be OK appending the init segment separately, however, we
17618 // haven't yet tested that, and prepending is how we have always done things.
17619
17620
17621 bytes = concatSegments({
17622 bytes: byteLength,
17623 segments: segments
17624 });
17625 }
17626
17627 this.sourceUpdater_.appendBuffer({
17628 segmentInfo: segmentInfo,
17629 type: type,
17630 bytes: bytes
17631 }, this.handleAppendError_.bind(this, {
17632 segmentInfo: segmentInfo,
17633 type: type,
17634 bytes: bytes
17635 }));
17636 };
17637
17638 _proto.handleSegmentTimingInfo_ = function handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {
17639 if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
17640 return;
17641 }
17642
17643 var segment = this.pendingSegment_.segment;
17644 var timingInfoProperty = type + "TimingInfo";
17645
17646 if (!segment[timingInfoProperty]) {
17647 segment[timingInfoProperty] = {};
17648 }
17649
17650 segment[timingInfoProperty].transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;
17651 segment[timingInfoProperty].transmuxedPresentationStart = segmentTimingInfo.start.presentation;
17652 segment[timingInfoProperty].transmuxedDecodeStart = segmentTimingInfo.start.decode;
17653 segment[timingInfoProperty].transmuxedPresentationEnd = segmentTimingInfo.end.presentation;
17654 segment[timingInfoProperty].transmuxedDecodeEnd = segmentTimingInfo.end.decode; // mainly used as a reference for debugging
17655
17656 segment[timingInfoProperty].baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;
17657 };
17658
17659 _proto.appendData_ = function appendData_(segmentInfo, result) {
17660 var type = result.type,
17661 data = result.data;
17662
17663 if (!data || !data.byteLength) {
17664 return;
17665 }
17666
17667 if (type === 'audio' && this.audioDisabled_) {
17668 return;
17669 }
17670
17671 var initSegment = this.getInitSegmentAndUpdateState_({
17672 type: type,
17673 initSegment: result.initSegment,
17674 playlist: segmentInfo.playlist,
17675 map: segmentInfo.isFmp4 ? segmentInfo.segment.map : null
17676 });
17677 this.appendToSourceBuffer_({
17678 segmentInfo: segmentInfo,
17679 type: type,
17680 initSegment: initSegment,
17681 data: data
17682 });
17683 }
17684 /**
17685 * load a specific segment from a request into the buffer
17686 *
17687 * @private
17688 */
17689 ;
17690
17691 _proto.loadSegment_ = function loadSegment_(segmentInfo) {
17692 var _this4 = this;
17693
17694 this.state = 'WAITING';
17695 this.pendingSegment_ = segmentInfo;
17696 this.trimBackBuffer_(segmentInfo);
17697
17698 if (typeof segmentInfo.timestampOffset === 'number') {
17699 if (this.transmuxer_) {
17700 this.transmuxer_.postMessage({
17701 action: 'clearAllMp4Captions'
17702 });
17703 }
17704 }
17705
17706 if (!this.hasEnoughInfoToLoad_()) {
17707 this.loadQueue_.push(function () {
17708 // regenerate the audioAppendStart, timestampOffset, etc as they
17709 // may have changed since this function was added to the queue.
17710 var options = _extends({}, segmentInfo, {
17711 forceTimestampOffset: true
17712 });
17713
17714 _extends(segmentInfo, _this4.generateSegmentInfo_(options));
17715
17716 _this4.isPendingTimestampOffset_ = false;
17717
17718 _this4.updateTransmuxerAndRequestSegment_(segmentInfo);
17719 });
17720 return;
17721 }
17722
17723 this.updateTransmuxerAndRequestSegment_(segmentInfo);
17724 };
17725
17726 _proto.updateTransmuxerAndRequestSegment_ = function updateTransmuxerAndRequestSegment_(segmentInfo) {
17727 var _this5 = this;
17728
17729 // We'll update the source buffer's timestamp offset once we have transmuxed data, but
17730 // the transmuxer still needs to be updated before then.
17731 //
17732 // Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp
17733 // offset must be passed to the transmuxer for stream correcting adjustments.
17734 if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {
17735 this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was cleared
17736
17737 segmentInfo.gopsToAlignWith = [];
17738 this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start fresh
17739
17740 this.transmuxer_.postMessage({
17741 action: 'reset'
17742 });
17743 this.transmuxer_.postMessage({
17744 action: 'setTimestampOffset',
17745 timestampOffset: segmentInfo.timestampOffset
17746 });
17747 }
17748
17749 var simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);
17750 var isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex);
17751 var isWalkingForward = this.mediaIndex !== null;
17752 var isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && // currentTimeline starts at -1, so we shouldn't end the timeline switching to 0,
17753 // the first timeline
17754 segmentInfo.timeline > 0;
17755 var isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;
17756 this.logger_("Requesting " + segmentInfoString(segmentInfo)); // If there's an init segment associated with this segment, but it is not cached (identified by a lack of bytes),
17757 // then this init segment has never been seen before and should be appended.
17758 //
17759 // At this point the content type (audio/video or both) is not yet known, but it should be safe to set
17760 // both to true and leave the decision of whether to append the init segment to append time.
17761
17762 if (simpleSegment.map && !simpleSegment.map.bytes) {
17763 this.logger_('going to request init segment.');
17764 this.appendInitSegment_ = {
17765 video: true,
17766 audio: true
17767 };
17768 }
17769
17770 segmentInfo.abortRequests = mediaSegmentRequest({
17771 xhr: this.vhs_.xhr,
17772 xhrOptions: this.xhrOptions_,
17773 decryptionWorker: this.decrypter_,
17774 segment: simpleSegment,
17775 abortFn: this.handleAbort_.bind(this, segmentInfo),
17776 progressFn: this.handleProgress_.bind(this),
17777 trackInfoFn: this.handleTrackInfo_.bind(this),
17778 timingInfoFn: this.handleTimingInfo_.bind(this),
17779 videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),
17780 audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),
17781 captionsFn: this.handleCaptions_.bind(this),
17782 isEndOfTimeline: isEndOfTimeline,
17783 endedTimelineFn: function endedTimelineFn() {
17784 _this5.logger_('received endedtimeline callback');
17785 },
17786 id3Fn: this.handleId3_.bind(this),
17787 dataFn: this.handleData_.bind(this),
17788 doneFn: this.segmentRequestFinished_.bind(this),
17789 onTransmuxerLog: function onTransmuxerLog(_ref8) {
17790 var message = _ref8.message,
17791 level = _ref8.level,
17792 stream = _ref8.stream;
17793
17794 _this5.logger_(segmentInfoString(segmentInfo) + " logged from transmuxer stream " + stream + " as a " + level + ": " + message);
17795 }
17796 });
17797 }
17798 /**
17799 * trim the back buffer so that we don't have too much data
17800 * in the source buffer
17801 *
17802 * @private
17803 *
17804 * @param {Object} segmentInfo - the current segment
17805 */
17806 ;
17807
17808 _proto.trimBackBuffer_ = function trimBackBuffer_(segmentInfo) {
17809 var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10); // Chrome has a hard limit of 150MB of
17810 // buffer and a very conservative "garbage collector"
17811 // We manually clear out the old buffer to ensure
17812 // we don't trigger the QuotaExceeded error
17813 // on the source buffer during subsequent appends
17814
17815 if (removeToTime > 0) {
17816 this.remove(0, removeToTime);
17817 }
17818 }
17819 /**
17820 * create a simplified copy of the segment object with just the
17821 * information necessary to perform the XHR and decryption
17822 *
17823 * @private
17824 *
17825 * @param {Object} segmentInfo - the current segment
17826 * @return {Object} a simplified segment object copy
17827 */
17828 ;
17829
17830 _proto.createSimplifiedSegmentObj_ = function createSimplifiedSegmentObj_(segmentInfo) {
17831 var segment = segmentInfo.segment;
17832 var part = segmentInfo.part;
17833 var simpleSegment = {
17834 resolvedUri: part ? part.resolvedUri : segment.resolvedUri,
17835 byterange: part ? part.byterange : segment.byterange,
17836 requestId: segmentInfo.requestId,
17837 transmuxer: segmentInfo.transmuxer,
17838 audioAppendStart: segmentInfo.audioAppendStart,
17839 gopsToAlignWith: segmentInfo.gopsToAlignWith,
17840 part: segmentInfo.part
17841 };
17842 var previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];
17843
17844 if (previousSegment && previousSegment.timeline === segment.timeline) {
17845 // The baseStartTime of a segment is used to handle rollover when probing the TS
17846 // segment to retrieve timing information. Since the probe only looks at the media's
17847 // times (e.g., PTS and DTS values of the segment), and doesn't consider the
17848 // player's time (e.g., player.currentTime()), baseStartTime should reflect the
17849 // media time as well. transmuxedDecodeEnd represents the end time of a segment, in
17850 // seconds of media time, so should be used here. The previous segment is used since
17851 // the end of the previous segment should represent the beginning of the current
17852 // segment, so long as they are on the same timeline.
17853 if (previousSegment.videoTimingInfo) {
17854 simpleSegment.baseStartTime = previousSegment.videoTimingInfo.transmuxedDecodeEnd;
17855 } else if (previousSegment.audioTimingInfo) {
17856 simpleSegment.baseStartTime = previousSegment.audioTimingInfo.transmuxedDecodeEnd;
17857 }
17858 }
17859
17860 if (segment.key) {
17861 // if the media sequence is greater than 2^32, the IV will be incorrect
17862 // assuming 10s segments, that would be about 1300 years
17863 var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
17864 simpleSegment.key = this.segmentKey(segment.key);
17865 simpleSegment.key.iv = iv;
17866 }
17867
17868 if (segment.map) {
17869 simpleSegment.map = this.initSegmentForMap(segment.map);
17870 }
17871
17872 return simpleSegment;
17873 };
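// Worked example of the default IV above. Per the HLS spec (RFC 8216), when
// an EXT-X-KEY provides no IV, the segment's media sequence number is used as
// a big-endian 128-bit value. With playlist.mediaSequence = 100 and
// mediaIndex = 3, segment number 103 gives:
//
//   new Uint32Array([0, 0, 0, 103])
//
// which the decrypter consumes as the AES-128-CBC initialization vector.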
17874
17875 _proto.saveTransferStats_ = function saveTransferStats_(stats) {
17876 // every request counts as a media request even if it has been aborted
17877 // or canceled due to a timeout
17878 this.mediaRequests += 1;
17879
17880 if (stats) {
17881 this.mediaBytesTransferred += stats.bytesReceived;
17882 this.mediaTransferDuration += stats.roundTripTime;
17883 }
17884 };
17885
17886 _proto.saveBandwidthRelatedStats_ = function saveBandwidthRelatedStats_(duration, stats) {
17887 // byteLength will be used for throughput, and should be based on bytes received,
17888 // which we only know at the end of the request and should reflect total bytes
17889 // downloaded rather than just bytes processed from components of the segment
17890 this.pendingSegment_.byteLength = stats.bytesReceived;
17891
17892 if (duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
17893 this.logger_("Ignoring segment's bandwidth because its duration of " + duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
17894 return;
17895 }
17896
17897 this.bandwidth = stats.bandwidth;
17898 this.roundTrip = stats.roundTripTime;
17899 };
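// Note on the duration guard above: throughput measured over a tiny segment
// (e.g. a sub-second LL-HLS part fetched in a single round trip) is
// statistically meaningless, and a single such sample could swing
// this.bandwidth, and with it ABR rendition selection, by orders of
// magnitude. Stats are therefore only recorded for segments at least
// MIN_SEGMENT_DURATION_TO_SAVE_STATS long.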
17900
17901 _proto.handleTimeout_ = function handleTimeout_() {
17902 // although the VTT segment loader bandwidth isn't really used, it's good to
17903 // maintain functionality between segment loaders
17904 this.mediaRequestsTimedout += 1;
17905 this.bandwidth = 1;
17906 this.roundTrip = NaN;
17907 this.trigger('bandwidthupdate');
17908 }
17909 /**
17910 * Handle the callback from the segmentRequest function and set the
17911 * associated SegmentLoader state and errors if necessary
17912 *
17913 * @private
17914 */
17915 ;
17916
17917 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
17918 // TODO handle special cases, e.g., muxed audio/video but only audio in the segment
17919 // check the call queue directly since this function doesn't need to deal with any
17920 // data, and can continue even if the source buffers are not set up and we didn't get
17921 // any data from the segment
17922 if (this.callQueue_.length) {
17923 this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
17924 return;
17925 }
17926
17927 this.saveTransferStats_(simpleSegment.stats); // The request was aborted and the SegmentLoader has already been reset
17928
17929 if (!this.pendingSegment_) {
17930 return;
17931 } // the request was aborted and the SegmentLoader has already started
17932 // another request. this can happen when the timeout for an aborted
17933 // request triggers due to a limitation in the XHR library
17934 // do not count this as any sort of request or we risk double-counting
17935
17936
17937 if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
17938 return;
17939 } // an error occurred from the active pendingSegment_ so reset everything
17940
17941
17942 if (error) {
17943 this.pendingSegment_ = null;
17944 this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be done
17945
17946 if (error.code === REQUEST_ERRORS.ABORTED) {
17947 return;
17948 }
17949
17950 this.pause(); // the error is really just that at least one of the requests timed-out
17951 // set the bandwidth to a very low value and trigger an ABR switch to
17952 // take emergency action
17953
17954 if (error.code === REQUEST_ERRORS.TIMEOUT) {
17955 this.handleTimeout_();
17956 return;
17957 } // if control-flow has arrived here, then the error is real
17958 // emit an error event to blacklist the current playlist
17959
17960
17961 this.mediaRequestsErrored += 1;
17962 this.error(error);
17963 this.trigger('error');
17964 return;
17965 }
17966
17967 var segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request
17968 // generated for ABR purposes
17969
17970 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
17971 segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
17972
17973 if (result.gopInfo) {
17974 this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);
17975 } // Although we may have already started appending on progress, we shouldn't switch the
17976 // state away from loading until we are officially done loading the segment data.
17977
17978
17979 this.state = 'APPENDING'; // used for testing
17980
17981 this.trigger('appending');
17982 this.waitForAppendsToComplete_(segmentInfo);
17983 };
17984
17985 _proto.setTimeMapping_ = function setTimeMapping_(timeline) {
17986 var timelineMapping = this.syncController_.mappingForTimeline(timeline);
17987
17988 if (timelineMapping !== null) {
17989 this.timeMapping_ = timelineMapping;
17990 }
17991 };
17992
17993 _proto.updateMediaSecondsLoaded_ = function updateMediaSecondsLoaded_(segment) {
17994 if (typeof segment.start === 'number' && typeof segment.end === 'number') {
17995 this.mediaSecondsLoaded += segment.end - segment.start;
17996 } else {
17997 this.mediaSecondsLoaded += segment.duration;
17998 }
17999 };
18000
18001 _proto.shouldUpdateTransmuxerTimestampOffset_ = function shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {
18002 if (timestampOffset === null) {
18003 return false;
18004 } // note that we're potentially using the same timestamp offset for both video and
18005 // audio
18006
18007
18008 if (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
18009 return true;
18010 }
18011
18012 if (!this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
18013 return true;
18014 }
18015
18016 return false;
18017 };
18018
18019 _proto.trueSegmentStart_ = function trueSegmentStart_(_ref9) {
18020 var currentStart = _ref9.currentStart,
18021 playlist = _ref9.playlist,
18022 mediaIndex = _ref9.mediaIndex,
18023 firstVideoFrameTimeForData = _ref9.firstVideoFrameTimeForData,
18024 currentVideoTimestampOffset = _ref9.currentVideoTimestampOffset,
18025 useVideoTimingInfo = _ref9.useVideoTimingInfo,
18026 videoTimingInfo = _ref9.videoTimingInfo,
18027 audioTimingInfo = _ref9.audioTimingInfo;
18028
18029 if (typeof currentStart !== 'undefined') {
18030 // if start was set once, keep using it
18031 return currentStart;
18032 }
18033
18034 if (!useVideoTimingInfo) {
18035 return audioTimingInfo.start;
18036 }
18037
18038 var previousSegment = playlist.segments[mediaIndex - 1]; // The start of a segment should be the start of the first full frame contained
18039 // within that segment. Since the transmuxer maintains a cache of incomplete data
18040 // and/or the last frame seen, the start time may reflect a frame that starts
18041 // in the previous segment. Check for that case and ensure the start time is
18042 // accurate for the segment.
18043
18044 if (mediaIndex === 0 || !previousSegment || typeof previousSegment.start === 'undefined' || previousSegment.end !== firstVideoFrameTimeForData + currentVideoTimestampOffset) {
18045 return firstVideoFrameTimeForData;
18046 }
18047
18048 return videoTimingInfo.start;
18049 };
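// Illustrative numbers for the boundary check above, assuming a 0 video
// timestamp offset. If segment 3 ended at previousSegment.end = 12.00 and the
// transmuxer reports the first frame of segment 4 at 11.96, the times do not
// line up (12.00 !== 11.96 + 0), so 11.96 is trusted as a frame genuinely
// starting in this segment. When the boundary matches exactly, the first
// frame is taken to be spillover from the transmuxer's cached
// previous-segment data, and videoTimingInfo.start is used instead.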
18050
18051 _proto.waitForAppendsToComplete_ = function waitForAppendsToComplete_(segmentInfo) {
18052 var trackInfo = this.getCurrentMediaInfo_(segmentInfo);
18053
18054 if (!trackInfo) {
18055 this.error({
18056 message: 'No starting media returned, likely due to an unsupported media format.',
18057 blacklistDuration: Infinity
18058 });
18059 this.trigger('error');
18060 return;
18061 } // Although transmuxing is done, appends may not yet be finished. Throw a marker
18062 // on each queue this loader is responsible for to ensure that the appends are
18063 // complete.
18064
18065
18066 var hasAudio = trackInfo.hasAudio,
18067 hasVideo = trackInfo.hasVideo,
18068 isMuxed = trackInfo.isMuxed;
18069 var waitForVideo = this.loaderType_ === 'main' && hasVideo;
18070 var waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;
18071 segmentInfo.waitingOnAppends = 0; // segments with no data
18072
18073 if (!segmentInfo.hasAppendedData_) {
18074 if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {
18075 // When there's no audio or video data in the segment, there's no audio or video
18076 // timing information.
18077 //
18078 // If there's no audio or video timing information, then the timestamp offset
18079 // can't be adjusted to the appropriate value for the transmuxer and source
18080 // buffers.
18081 //
18082 // Therefore, the next segment should be used to set the timestamp offset.
18083 this.isPendingTimestampOffset_ = true;
18084 } // override settings for metadata only segments
18085
18086
18087 segmentInfo.timingInfo = {
18088 start: 0
18089 };
18090 segmentInfo.waitingOnAppends++;
18091
18092 if (!this.isPendingTimestampOffset_) {
18093 // update the timestampoffset
18094 this.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have
18095 // no video/audio data.
18096
18097 this.processMetadataQueue_();
18098 } // append is "done" instantly with no data.
18099
18100
18101 this.checkAppendsDone_(segmentInfo);
18102 return;
18103 } // Since source updater could call back synchronously, do the increments first.
18104
18105
18106 if (waitForVideo) {
18107 segmentInfo.waitingOnAppends++;
18108 }
18109
18110 if (waitForAudio) {
18111 segmentInfo.waitingOnAppends++;
18112 }
18113
18114 if (waitForVideo) {
18115 this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
18116 }
18117
18118 if (waitForAudio) {
18119 this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
18120 }
18121 };
18122
18123 _proto.checkAppendsDone_ = function checkAppendsDone_(segmentInfo) {
18124 if (this.checkForAbort_(segmentInfo.requestId)) {
18125 return;
18126 }
18127
18128 segmentInfo.waitingOnAppends--;
18129
18130 if (segmentInfo.waitingOnAppends === 0) {
18131 this.handleAppendsDone_();
18132 }
18133 };
18134
18135 _proto.checkForIllegalMediaSwitch = function checkForIllegalMediaSwitch(trackInfo) {
18136 var illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.getCurrentMediaInfo_(), trackInfo);
18137
18138 if (illegalMediaSwitchError) {
18139 this.error({
18140 message: illegalMediaSwitchError,
18141 blacklistDuration: Infinity
18142 });
18143 this.trigger('error');
18144 return true;
18145 }
18146
18147 return false;
18148 };
18149
18150 _proto.updateSourceBufferTimestampOffset_ = function updateSourceBufferTimestampOffset_(segmentInfo) {
18151 if (segmentInfo.timestampOffset === null || // we don't yet have the start for whatever media type (video or audio) has
18152 // priority, timing-wise, so we must wait
18153 typeof segmentInfo.timingInfo.start !== 'number' || // already updated the timestamp offset for this segment
18154 segmentInfo.changedTimestampOffset || // the alt audio loader should not be responsible for setting the timestamp offset
18155 this.loaderType_ !== 'main') {
18156 return;
18157 }
18158
18159 var didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that
18160 // the timing info here comes from video. In the event that the audio is longer than
18161 // the video, this will trim the start of the audio.
18162 // This also trims any offset from 0 at the beginning of the media
18163
18164 segmentInfo.timestampOffset -= this.getSegmentStartTimeForTimestampOffsetCalculation_({
18165 videoTimingInfo: segmentInfo.segment.videoTimingInfo,
18166 audioTimingInfo: segmentInfo.segment.audioTimingInfo,
18167 timingInfo: segmentInfo.timingInfo
18168 }); // In the event that there are part segment downloads, each will try to update the
18169 // timestamp offset. Retaining this bit of state prevents us from updating in the
18170 // future (within the same segment), however, there may be a better way to handle it.
18171
18172 segmentInfo.changedTimestampOffset = true;
18173
18174 if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
18175 this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);
18176 didChange = true;
18177 }
18178
18179 if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
18180 this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);
18181 didChange = true;
18182 }
18183
18184 if (didChange) {
18185 this.trigger('timestampoffset');
18186 }
18187 };
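// Worked example of the offset adjustment above (illustrative numbers). A
// segment is estimated to start at display time 100 (the initial
// timestampOffset guess derived from the playlist), but its transmuxed media
// actually starts at media time 10:
//
//   segmentInfo.timestampOffset = 100 - 10 = 90
//
// so appended media at time 10 lands at 10 + 90 = 100 on the display
// timeline. Audio and video offsets are then set together so that demuxed
// streams stay aligned.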
18188
18189 _proto.getSegmentStartTimeForTimestampOffsetCalculation_ = function getSegmentStartTimeForTimestampOffsetCalculation_(_ref10) {
18190 var videoTimingInfo = _ref10.videoTimingInfo,
18191 audioTimingInfo = _ref10.audioTimingInfo,
18192 timingInfo = _ref10.timingInfo;
18193
18194 if (!this.useDtsForTimestampOffset_) {
18195 return timingInfo.start;
18196 }
18197
18198 if (videoTimingInfo && typeof videoTimingInfo.transmuxedDecodeStart === 'number') {
18199 return videoTimingInfo.transmuxedDecodeStart;
18200 } // handle audio only
18201
18202
18203 if (audioTimingInfo && typeof audioTimingInfo.transmuxedDecodeStart === 'number') {
18204 return audioTimingInfo.transmuxedDecodeStart;
18205 } // handle content not transmuxed (e.g., MP4)
18206
18207
18208 return timingInfo.start;
18209 };
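
// Editor's sketch (hypothetical numbers, not part of the bundle): how the two methods
// above combine. With useDtsForTimestampOffset_ unset the presentation start is used;
// with it set, the transmuxed decode start wins when available:
//
//   // segmentInfo.timestampOffset starts at 0 (where the segment should land on the
//   // player timeline); the segment's own first timestamp is 10:
//   //   PTS mode: 0 - timingInfo.start (10)                        -> offset -10
//   //   DTS mode: 0 - videoTimingInfo.transmuxedDecodeStart (9.96) -> offset -9.96
//
// Either way, changedTimestampOffset is then set so later parts of the same segment
// do not re-apply the offset.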
18210
18211 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_(segmentInfo) {
18212 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
18213 var trackInfo = this.getMediaInfo_();
18214 var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
18215 var prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;
18216
18217 if (!prioritizedTimingInfo) {
18218 return;
18219 }
18220
18221 segmentInfo.timingInfo.end = typeof prioritizedTimingInfo.end === 'number' ? // End time may not exist in a case where we aren't parsing the full segment (one
18222 // current example is the case of fmp4), so use the rough duration to calculate an
18223 // end time.
18224 prioritizedTimingInfo.end : prioritizedTimingInfo.start + segmentInfo.duration;
18225 }
18226 /**
18227 * callback to run when appendBuffer is finished. detects if we are
18228 * in a good state to do things with the data we got, or if we need
18229 * to wait for more
18230 *
18231 * @private
18232 */
18233 ;
18234
18235 _proto.handleAppendsDone_ = function handleAppendsDone_() {
18236 // appendsdone can cause an abort
18237 if (this.pendingSegment_) {
18238 this.trigger('appendsdone');
18239 }
18240
18241 if (!this.pendingSegment_) {
18242 this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in
18243 // all appending cases?
18244
18245 if (!this.paused()) {
18246 this.monitorBuffer_();
18247 }
18248
18249 return;
18250 }
18251
18252 var segmentInfo = this.pendingSegment_; // Now that the end of the segment has been reached, we can set the end time. It's
18253 // best to wait until all appends are done so we're sure that the primary media is
18254 // finished (and we have its end time).
18255
18256 this.updateTimingInfoEnd_(segmentInfo);
18257
18258 if (this.shouldSaveSegmentTimingInfo_) {
18259 // Timeline mappings should only be saved for the main loader. This is for multiple
18260 // reasons:
18261 //
18262 // 1) Only one mapping is saved per timeline, meaning that if both the audio loader
18263 // and the main loader try to save the timeline mapping, whichever comes later
18264 // will overwrite the first. In theory this is OK, as the mappings should be the
18265 // same, however, it breaks for (2)
18266 // 2) In the event of a live stream, the initial live point will make for a somewhat
18267 // arbitrary mapping. If audio and video streams are not perfectly in-sync, then
18268 // the mapping will be off for one of the streams, dependent on which one was
18269 // first saved (see (1)).
18270 // 3) Primary timing goes by video in VHS, so the mapping should be video.
18271 //
18272 // Since the audio loader will wait for the main loader to load the first segment,
18273 // the main loader will save the first timeline mapping, and ensure that there won't
18274 // be a case where audio loads two segments without saving a mapping (thus leading
18275 // to missing segment timing info).
18276 this.syncController_.saveSegmentTimingInfo({
18277 segmentInfo: segmentInfo,
18278 shouldSaveTimelineMapping: this.loaderType_ === 'main'
18279 });
18280 }
18281
18282 var segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);
18283
18284 if (segmentDurationMessage) {
18285 if (segmentDurationMessage.severity === 'warn') {
18286 videojs.log.warn(segmentDurationMessage.message);
18287 } else {
18288 this.logger_(segmentDurationMessage.message);
18289 }
18290 }
18291
18292 this.recordThroughput_(segmentInfo);
18293 this.pendingSegment_ = null;
18294 this.state = 'READY';
18295
18296 if (segmentInfo.isSyncRequest) {
18297 this.trigger('syncinfoupdate'); // if the sync request was not appended
18298 // then it was not the correct segment.
18299 // throw it away and use the data it gave us
18300 // to get the correct one.
18301
18302 if (!segmentInfo.hasAppendedData_) {
18303 this.logger_("Throwing away un-appended sync request " + segmentInfoString(segmentInfo));
18304 return;
18305 }
18306 }
18307
18308 this.logger_("Appended " + segmentInfoString(segmentInfo));
18309 this.addSegmentMetadataCue_(segmentInfo);
18310 this.fetchAtBuffer_ = true;
18311
18312 if (this.currentTimeline_ !== segmentInfo.timeline) {
18313 this.timelineChangeController_.lastTimelineChange({
18314 type: this.loaderType_,
18315 from: this.currentTimeline_,
18316 to: segmentInfo.timeline
18317 }); // If audio is not disabled, the main segment loader is responsible for updating
18318 // the audio timeline as well. If the content is video only, this won't have any
18319 // impact.
18320
18321 if (this.loaderType_ === 'main' && !this.audioDisabled_) {
18322 this.timelineChangeController_.lastTimelineChange({
18323 type: 'audio',
18324 from: this.currentTimeline_,
18325 to: segmentInfo.timeline
18326 });
18327 }
18328 }
18329
18330 this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before
18331 // the following conditional otherwise it may consider this a bad "guess"
18332 // and attempt to resync when the post-update seekable window and live
18333 // point would mean that this was the perfect segment to fetch
18334
18335 this.trigger('syncinfoupdate');
18336 var segment = segmentInfo.segment;
18337 var part = segmentInfo.part;
18338 var badSegmentGuess = segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3;
18339 var badPartGuess = part && part.end && this.currentTime_() - part.end > segmentInfo.playlist.partTargetDuration * 3; // If we previously appended a segment/part that ends more than 3 part/targetDurations before
18340 // the currentTime_, then our conservative guess was off.
18341 // In that case, reset the loader state so that we try to use any information gained
18342 // from the previous request to create a new, more accurate, sync-point.
18343
18344 if (badSegmentGuess || badPartGuess) {
18345 this.logger_("bad " + (badSegmentGuess ? 'segment' : 'part') + " " + segmentInfoString(segmentInfo));
18346 this.resetEverything();
18347 return;
18348 }
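
// Editor's sketch (hypothetical numbers): with a 6s targetDuration, a previously
// appended segment ending at 30s, and currentTime_ at 50s, we get
// 50 - 30 = 20 > 6 * 3 = 18, so the guess is considered bad and the loader resets.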
18349
18350 var isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we have enough time to get a sync segment
18351 // and conservatively guess
18352
18353 if (isWalkingForward) {
18354 this.trigger('bandwidthupdate');
18355 }
18356
18357 this.trigger('progress');
18358 this.mediaIndex = segmentInfo.mediaIndex;
18359 this.partIndex = segmentInfo.partIndex; // any time an update finishes and the last segment is in the
18360 // buffer, end the stream. this ensures the "ended" event will
18361 // fire if playback reaches that point.
18362
18363 if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex)) {
18364 this.endOfStream();
18365 } // used for testing
18366
18367
18368 this.trigger('appended');
18369
18370 if (segmentInfo.hasAppendedData_) {
18371 this.mediaAppends++;
18372 }
18373
18374 if (!this.paused()) {
18375 this.monitorBuffer_();
18376 }
18377 }
18378 /**
18379 * Records the current throughput of the decrypt, transmux, and append
18380 * portion of the segment pipeline. `throughput.rate` is the cumulative
18381 * moving average of the throughput. `throughput.count` is the number of
18382 * data points in the average.
18383 *
18384 * @private
18385 * @param {Object} segmentInfo the object returned by loadSegment
18386 */
18387 ;
18388
18389 _proto.recordThroughput_ = function recordThroughput_(segmentInfo) {
18390 if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
18391 this.logger_("Ignoring segment's throughput because its duration of " + segmentInfo.duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
18392 return;
18393 }
18394
18395 var rate = this.throughput.rate; // Add one to the time to ensure that we don't accidentally attempt to divide
18396 // by zero in the case where the throughput is ridiculously high
18397
18398 var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1; // Multiply by 8000 to convert from bytes/millisecond to bits/second
18399
18400 var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000); // This is just a cumulative moving average calculation:
18401 // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
18402
18403 this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
18404 }
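
// Editor's sketch (standalone, not part of the bundle): the cumulative moving average
// above in isolation. Each sample nudges the average by its distance from the current
// average, divided by the new sample count:
//
//   var throughput = { rate: 0, count: 0 };
//   var record = function (sampleBitsPerSecond) {
//     throughput.rate += (sampleBitsPerSecond - throughput.rate) / ++throughput.count;
//   };
//   record(4e6); // rate 4,000,000, count 1
//   record(2e6); // rate 3,000,000, count 2
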
18405 /**
18406 * Adds a cue to the segment-metadata track with some metadata information about the
18407 * segment
18408 *
18409 * @private
18410 * @param {Object} segmentInfo
18411 * the object returned by loadSegment
18412 * @method addSegmentMetadataCue_
18413 */
18414 ;
18415
18416 _proto.addSegmentMetadataCue_ = function addSegmentMetadataCue_(segmentInfo) {
18417 if (!this.segmentMetadataTrack_) {
18418 return;
18419 }
18420
18421 var segment = segmentInfo.segment;
18422 var start = segment.start;
18423 var end = segment.end; // Do not try adding the cue if the start and end times are invalid.
18424
18425 if (!finite(start) || !finite(end)) {
18426 return;
18427 }
18428
18429 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
18430 var Cue = window$1.WebKitDataCue || window$1.VTTCue;
18431 var value = {
18432 custom: segment.custom,
18433 dateTimeObject: segment.dateTimeObject,
18434 dateTimeString: segment.dateTimeString,
18435 bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
18436 resolution: segmentInfo.playlist.attributes.RESOLUTION,
18437 codecs: segmentInfo.playlist.attributes.CODECS,
18438 byteLength: segmentInfo.byteLength,
18439 uri: segmentInfo.uri,
18440 timeline: segmentInfo.timeline,
18441 playlist: segmentInfo.playlist.id,
18442 start: start,
18443 end: end
18444 };
18445 var data = JSON.stringify(value);
18446 var cue = new Cue(start, end, data); // Attach the metadata to the value property of the cue to keep consistency between
18447 // the differences of WebKitDataCue in safari and VTTCue in other browsers
18448
18449 cue.value = value;
18450 this.segmentMetadataTrack_.addCue(cue);
18451 };
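
// Editor's sketch (player-level usage, assuming the 'segment-metadata' text track
// that VHS creates elsewhere): the cues added above can be read back to inspect
// which rendition produced the currently playing segment:
//
//   var tracks = player.textTracks();
//   for (var i = 0; i < tracks.length; i++) {
//     if (tracks[i].label === 'segment-metadata') {
//       var activeCue = tracks[i].activeCues && tracks[i].activeCues[0];
//       if (activeCue) {
//         console.log(activeCue.value.bandwidth, activeCue.value.resolution);
//       }
//     }
//   }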
18452
18453 return SegmentLoader;
18454}(videojs.EventTarget);
18455
18456function noop() {}
18457
18458var toTitleCase = function toTitleCase(string) {
18459 if (typeof string !== 'string') {
18460 return string;
18461 }
18462
18463 return string.replace(/./, function (w) {
18464 return w.toUpperCase();
18465 });
18466};
18467
18468var bufferTypes = ['video', 'audio'];
18469
18470var _updating = function updating(type, sourceUpdater) {
18471 var sourceBuffer = sourceUpdater[type + "Buffer"];
18472 return sourceBuffer && sourceBuffer.updating || sourceUpdater.queuePending[type];
18473};
18474
18475var nextQueueIndexOfType = function nextQueueIndexOfType(type, queue) {
18476 for (var i = 0; i < queue.length; i++) {
18477 var queueEntry = queue[i];
18478
18479 if (queueEntry.type === 'mediaSource') {
18480 // If the next entry is a media source entry (uses multiple source buffers), block
18481 // processing to allow it to go through first.
18482 return null;
18483 }
18484
18485 if (queueEntry.type === type) {
18486 return i;
18487 }
18488 }
18489
18490 return null;
18491};
18492
18493var shiftQueue = function shiftQueue(type, sourceUpdater) {
18494 if (sourceUpdater.queue.length === 0) {
18495 return;
18496 }
18497
18498 var queueIndex = 0;
18499 var queueEntry = sourceUpdater.queue[queueIndex];
18500
18501 if (queueEntry.type === 'mediaSource') {
18502 if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
18503 sourceUpdater.queue.shift();
18504 queueEntry.action(sourceUpdater);
18505
18506 if (queueEntry.doneFn) {
18507 queueEntry.doneFn();
18508 } // Only specific source buffer actions must wait for async updateend events. Media
18509 // Source actions process synchronously. Therefore, both audio and video source
18510 // buffers are now clear to process the next queue entries.
18511
18512
18513 shiftQueue('audio', sourceUpdater);
18514 shiftQueue('video', sourceUpdater);
18515 } // Media Source actions require both source buffers, so if the media source action
18516 // couldn't process yet (because one or both source buffers are busy), block other
18517 // queue actions until both are available and the media source action can process.
18518
18519
18520 return;
18521 }
18522
18523 if (type === 'mediaSource') {
18524 // If the queue was shifted by a media source action (this happens when pushing a
18525 // media source action onto the queue), then it wasn't from an updateend event from an
18526 // audio or video source buffer, so there's no change from previous state, and no
18527 // processing should be done.
18528 return;
18529 } // Media source queue entries don't need to consider whether the source updater is
18530 // started (i.e., source buffers are created) as they don't need the source buffers, but
18531 // source buffer queue entries do.
18532
18533
18534 if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || _updating(type, sourceUpdater)) {
18535 return;
18536 }
18537
18538 if (queueEntry.type !== type) {
18539 queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);
18540
18541 if (queueIndex === null) {
18542 // Either there's no queue entry that uses this source buffer type in the queue, or
18543 // there's a media source queue entry before the next entry of this type, in which
18544 // case wait for that action to process first.
18545 return;
18546 }
18547
18548 queueEntry = sourceUpdater.queue[queueIndex];
18549 }
18550
18551 sourceUpdater.queue.splice(queueIndex, 1); // Keep a record that this source buffer type is in use.
18552 //
18553 // The queue pending operation must be set before the action is performed in the event
18554 // that the action results in a synchronous event that is acted upon. For instance, if
18555 // an exception is thrown that can be handled, it's possible that new actions will be
18556 // appended to an empty queue and immediately executed, but would not have the correct
18557 // pending information if this property was set after the action was performed.
18558
18559 sourceUpdater.queuePending[type] = queueEntry;
18560 queueEntry.action(type, sourceUpdater);
18561
18562 if (!queueEntry.doneFn) {
18563 // synchronous operation, process next entry
18564 sourceUpdater.queuePending[type] = null;
18565 shiftQueue(type, sourceUpdater);
18566 return;
18567 }
18568};
18569
18570var cleanupBuffer = function cleanupBuffer(type, sourceUpdater) {
18571 var buffer = sourceUpdater[type + "Buffer"];
18572 var titleType = toTitleCase(type);
18573
18574 if (!buffer) {
18575 return;
18576 }
18577
18578 buffer.removeEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
18579 buffer.removeEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
18580 sourceUpdater.codecs[type] = null;
18581 sourceUpdater[type + "Buffer"] = null;
18582};
18583
18584var inSourceBuffers = function inSourceBuffers(mediaSource, sourceBuffer) {
18585 return mediaSource && sourceBuffer && Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
18586};
18587
18588var actions = {
18589 appendBuffer: function appendBuffer(bytes, segmentInfo, onError) {
18590 return function (type, sourceUpdater) {
18591 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
18592 // or the media source does not contain this source buffer.
18593
18594 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18595 return;
18596 }
18597
18598 sourceUpdater.logger_("Appending segment " + segmentInfo.mediaIndex + "'s " + bytes.length + " bytes to " + type + "Buffer");
18599
18600 try {
18601 sourceBuffer.appendBuffer(bytes);
18602 } catch (e) {
18603 sourceUpdater.logger_("Error with code " + e.code + " " + (e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') + ("when appending segment " + segmentInfo.mediaIndex + " to " + type + "Buffer"));
18604 sourceUpdater.queuePending[type] = null;
18605 onError(e);
18606 }
18607 };
18608 },
18609 remove: function remove(start, end) {
18610 return function (type, sourceUpdater) {
18611 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
18612 // or the media source does not contain this source buffer.
18613
18614 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18615 return;
18616 }
18617
18618 sourceUpdater.logger_("Removing " + start + " to " + end + " from " + type + "Buffer");
18619
18620 try {
18621 sourceBuffer.remove(start, end);
18622 } catch (e) {
18623 sourceUpdater.logger_("Remove " + start + " to " + end + " from " + type + "Buffer failed");
18624 }
18625 };
18626 },
18627 timestampOffset: function timestampOffset(offset) {
18628 return function (type, sourceUpdater) {
18629 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
18630 // or the media source does not contain this source buffer.
18631
18632 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18633 return;
18634 }
18635
18636      sourceUpdater.logger_("Setting " + type + "Buffer timestampOffset to " + offset);
18637 sourceBuffer.timestampOffset = offset;
18638 };
18639 },
18640 callback: function callback(_callback) {
18641 return function (type, sourceUpdater) {
18642 _callback();
18643 };
18644 },
18645 endOfStream: function endOfStream(error) {
18646 return function (sourceUpdater) {
18647 if (sourceUpdater.mediaSource.readyState !== 'open') {
18648 return;
18649 }
18650
18651 sourceUpdater.logger_("Calling mediaSource endOfStream(" + (error || '') + ")");
18652
18653 try {
18654 sourceUpdater.mediaSource.endOfStream(error);
18655 } catch (e) {
18656 videojs.log.warn('Failed to call media source endOfStream', e);
18657 }
18658 };
18659 },
18660 duration: function duration(_duration) {
18661 return function (sourceUpdater) {
18662 sourceUpdater.logger_("Setting mediaSource duration to " + _duration);
18663
18664 try {
18665 sourceUpdater.mediaSource.duration = _duration;
18666 } catch (e) {
18667 videojs.log.warn('Failed to set media source duration', e);
18668 }
18669 };
18670 },
18671 abort: function abort() {
18672 return function (type, sourceUpdater) {
18673 if (sourceUpdater.mediaSource.readyState !== 'open') {
18674 return;
18675 }
18676
18677 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
18678 // or the media source does not contain this source buffer.
18679
18680 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18681 return;
18682 }
18683
18684 sourceUpdater.logger_("calling abort on " + type + "Buffer");
18685
18686 try {
18687 sourceBuffer.abort();
18688 } catch (e) {
18689 videojs.log.warn("Failed to abort on " + type + "Buffer", e);
18690 }
18691 };
18692 },
18693 addSourceBuffer: function addSourceBuffer(type, codec) {
18694 return function (sourceUpdater) {
18695 var titleType = toTitleCase(type);
18696 var mime = getMimeForCodec(codec);
18697 sourceUpdater.logger_("Adding " + type + "Buffer with codec " + codec + " to mediaSource");
18698 var sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
18699 sourceBuffer.addEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
18700 sourceBuffer.addEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
18701 sourceUpdater.codecs[type] = codec;
18702 sourceUpdater[type + "Buffer"] = sourceBuffer;
18703 };
18704 },
18705 removeSourceBuffer: function removeSourceBuffer(type) {
18706 return function (sourceUpdater) {
18707 var sourceBuffer = sourceUpdater[type + "Buffer"];
18708 cleanupBuffer(type, sourceUpdater); // can't do anything if the media source / source buffer is null
18709 // or the media source does not contain this source buffer.
18710
18711 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18712 return;
18713 }
18714
18715 sourceUpdater.logger_("Removing " + type + "Buffer with codec " + sourceUpdater.codecs[type] + " from mediaSource");
18716
18717 try {
18718 sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
18719 } catch (e) {
18720 videojs.log.warn("Failed to removeSourceBuffer " + type + "Buffer", e);
18721 }
18722 };
18723 },
18724 changeType: function changeType(codec) {
18725 return function (type, sourceUpdater) {
18726 var sourceBuffer = sourceUpdater[type + "Buffer"];
18727 var mime = getMimeForCodec(codec); // can't do anything if the media source / source buffer is null
18728 // or the media source does not contain this source buffer.
18729
18730 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18731 return;
18732 } // do not update codec if we don't need to.
18733
18734
18735 if (sourceUpdater.codecs[type] === codec) {
18736 return;
18737 }
18738
18739 sourceUpdater.logger_("changing " + type + "Buffer codec from " + sourceUpdater.codecs[type] + " to " + codec);
18740 sourceBuffer.changeType(mime);
18741 sourceUpdater.codecs[type] = codec;
18742 };
18743 }
18744};
18745
18746var pushQueue = function pushQueue(_ref) {
18747 var type = _ref.type,
18748 sourceUpdater = _ref.sourceUpdater,
18749 action = _ref.action,
18750 doneFn = _ref.doneFn,
18751 name = _ref.name;
18752 sourceUpdater.queue.push({
18753 type: type,
18754 action: action,
18755 doneFn: doneFn,
18756 name: name
18757 });
18758 shiftQueue(type, sourceUpdater);
18759};
18760
18761var onUpdateend = function onUpdateend(type, sourceUpdater) {
18762 return function (e) {
18763    // Although there should, in theory, be a pending action for any updateend received,
18764 // there are some actions that may trigger updateend events without set definitions in
18765 // the w3c spec. For instance, setting the duration on the media source may trigger
18766 // updateend events on source buffers. This does not appear to be in the spec. As such,
18767 // if we encounter an updateend without a corresponding pending action from our queue
18768 // for that source buffer type, process the next action.
18769 if (sourceUpdater.queuePending[type]) {
18770 var doneFn = sourceUpdater.queuePending[type].doneFn;
18771 sourceUpdater.queuePending[type] = null;
18772
18773 if (doneFn) {
18774 // if there's an error, report it
18775 doneFn(sourceUpdater[type + "Error_"]);
18776 }
18777 }
18778
18779 shiftQueue(type, sourceUpdater);
18780 };
18781};
18782/**
18783 * A queue of callbacks to be serialized and applied when a
18784 * MediaSource and its associated SourceBuffers are not in the
18785 * updating state. It is used by the segment loader to update the
18786 * underlying SourceBuffers when new data is loaded, for instance.
18787 *
18788 * @class SourceUpdater
18789 * @param {MediaSource} mediaSource the MediaSource to create the SourceBuffer from
18790 * @param {string} mimeType the desired MIME type of the underlying SourceBuffer
18791 */
18792
18793
18794var SourceUpdater = /*#__PURE__*/function (_videojs$EventTarget) {
18795 _inheritsLoose(SourceUpdater, _videojs$EventTarget);
18796
18797 function SourceUpdater(mediaSource) {
18798 var _this;
18799
18800 _this = _videojs$EventTarget.call(this) || this;
18801 _this.mediaSource = mediaSource;
18802
18803 _this.sourceopenListener_ = function () {
18804 return shiftQueue('mediaSource', _assertThisInitialized(_this));
18805 };
18806
18807 _this.mediaSource.addEventListener('sourceopen', _this.sourceopenListener_);
18808
18809 _this.logger_ = logger('SourceUpdater'); // initial timestamp offset is 0
18810
18811 _this.audioTimestampOffset_ = 0;
18812 _this.videoTimestampOffset_ = 0;
18813 _this.queue = [];
18814 _this.queuePending = {
18815 audio: null,
18816 video: null
18817 };
18818 _this.delayedAudioAppendQueue_ = [];
18819 _this.videoAppendQueued_ = false;
18820 _this.codecs = {};
18821 _this.onVideoUpdateEnd_ = onUpdateend('video', _assertThisInitialized(_this));
18822 _this.onAudioUpdateEnd_ = onUpdateend('audio', _assertThisInitialized(_this));
18823
18824 _this.onVideoError_ = function (e) {
18825 // used for debugging
18826 _this.videoError_ = e;
18827 };
18828
18829 _this.onAudioError_ = function (e) {
18830 // used for debugging
18831 _this.audioError_ = e;
18832 };
18833
18834 _this.createdSourceBuffers_ = false;
18835 _this.initializedEme_ = false;
18836 _this.triggeredReady_ = false;
18837 return _this;
18838 }
18839
18840 var _proto = SourceUpdater.prototype;
18841
18842 _proto.initializedEme = function initializedEme() {
18843 this.initializedEme_ = true;
18844 this.triggerReady();
18845 };
18846
18847 _proto.hasCreatedSourceBuffers = function hasCreatedSourceBuffers() {
18848 // if false, likely waiting on one of the segment loaders to get enough data to create
18849 // source buffers
18850 return this.createdSourceBuffers_;
18851 };
18852
18853 _proto.hasInitializedAnyEme = function hasInitializedAnyEme() {
18854 return this.initializedEme_;
18855 };
18856
18857 _proto.ready = function ready() {
18858 return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
18859 };
18860
18861 _proto.createSourceBuffers = function createSourceBuffers(codecs) {
18862 if (this.hasCreatedSourceBuffers()) {
18863 // already created them before
18864 return;
18865    } // the initial addOrChangeSourceBuffers will always be
18866 // two add buffers.
18867
18868
18869 this.addOrChangeSourceBuffers(codecs);
18870 this.createdSourceBuffers_ = true;
18871 this.trigger('createdsourcebuffers');
18872 this.triggerReady();
18873 };
18874
18875 _proto.triggerReady = function triggerReady() {
18876 // only allow ready to be triggered once, this prevents the case
18877 // where:
18878 // 1. we trigger createdsourcebuffers
18879    // 2. IE 11 synchronously initializes eme
18880 // 3. the synchronous initialization causes us to trigger ready
18881 // 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
18882 if (this.ready() && !this.triggeredReady_) {
18883 this.triggeredReady_ = true;
18884 this.trigger('ready');
18885 }
18886 }
18887 /**
18888 * Add a type of source buffer to the media source.
18889 *
18890 * @param {string} type
18891 * The type of source buffer to add.
18892 *
18893 * @param {string} codec
18894 * The codec to add the source buffer with.
18895 */
18896 ;
18897
18898 _proto.addSourceBuffer = function addSourceBuffer(type, codec) {
18899 pushQueue({
18900 type: 'mediaSource',
18901 sourceUpdater: this,
18902 action: actions.addSourceBuffer(type, codec),
18903 name: 'addSourceBuffer'
18904 });
18905 }
18906 /**
18907 * call abort on a source buffer.
18908 *
18909 * @param {string} type
18910 * The type of source buffer to call abort on.
18911 */
18912 ;
18913
18914 _proto.abort = function abort(type) {
18915 pushQueue({
18916 type: type,
18917 sourceUpdater: this,
18918 action: actions.abort(type),
18919 name: 'abort'
18920 });
18921 }
18922 /**
18923 * Call removeSourceBuffer and remove a specific type
18924 * of source buffer on the mediaSource.
18925 *
18926 * @param {string} type
18927 * The type of source buffer to remove.
18928 */
18929 ;
18930
18931 _proto.removeSourceBuffer = function removeSourceBuffer(type) {
18932 if (!this.canRemoveSourceBuffer()) {
18933 videojs.log.error('removeSourceBuffer is not supported!');
18934 return;
18935 }
18936
18937 pushQueue({
18938 type: 'mediaSource',
18939 sourceUpdater: this,
18940 action: actions.removeSourceBuffer(type),
18941 name: 'removeSourceBuffer'
18942 });
18943 }
18944 /**
18945 * Whether or not the removeSourceBuffer function is supported
18946 * on the mediaSource.
18947 *
18948 * @return {boolean}
18949 * if removeSourceBuffer can be called.
18950 */
18951 ;
18952
18953 _proto.canRemoveSourceBuffer = function canRemoveSourceBuffer() {
18954 // IE reports that it supports removeSourceBuffer, but often throws
18955 // errors when attempting to use the function. So we report that it
18956 // does not support removeSourceBuffer. As of Firefox 83 removeSourceBuffer
18957 // throws errors, so we report that it does not support this as well.
18958 return !videojs.browser.IE_VERSION && !videojs.browser.IS_FIREFOX && window$1.MediaSource && window$1.MediaSource.prototype && typeof window$1.MediaSource.prototype.removeSourceBuffer === 'function';
18959 }
18960 /**
18961 * Whether or not the changeType function is supported
18962 * on our SourceBuffers.
18963 *
18964 * @return {boolean}
18965 * if changeType can be called.
18966 */
18967 ;
18968
18969 SourceUpdater.canChangeType = function canChangeType() {
18970 return window$1.SourceBuffer && window$1.SourceBuffer.prototype && typeof window$1.SourceBuffer.prototype.changeType === 'function';
18971 }
18972 /**
18973 * Whether or not the changeType function is supported
18974 * on our SourceBuffers.
18975 *
18976 * @return {boolean}
18977 * if changeType can be called.
18978 */
18979 ;
18980
18981 _proto.canChangeType = function canChangeType() {
18982 return this.constructor.canChangeType();
18983 }
18984 /**
18985 * Call the changeType function on a source buffer, given the code and type.
18986 *
18987 * @param {string} type
18988 * The type of source buffer to call changeType on.
18989 *
18990 * @param {string} codec
18991 * The codec string to change type with on the source buffer.
18992 */
18993 ;
18994
18995 _proto.changeType = function changeType(type, codec) {
18996 if (!this.canChangeType()) {
18997 videojs.log.error('changeType is not supported!');
18998 return;
18999 }
19000
19001 pushQueue({
19002 type: type,
19003 sourceUpdater: this,
19004 action: actions.changeType(codec),
19005 name: 'changeType'
19006 });
19007 }
19008 /**
19009 * Add source buffers with a codec or, if they are already created,
19010 * call changeType on source buffers using changeType.
19011 *
19012 * @param {Object} codecs
19013 * Codecs to switch to
19014 */
19015 ;
19016
19017 _proto.addOrChangeSourceBuffers = function addOrChangeSourceBuffers(codecs) {
19018 var _this2 = this;
19019
19020 if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
19021 throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
19022 }
19023
19024 Object.keys(codecs).forEach(function (type) {
19025 var codec = codecs[type];
19026
19027 if (!_this2.hasCreatedSourceBuffers()) {
19028 return _this2.addSourceBuffer(type, codec);
19029 }
19030
19031 if (_this2.canChangeType()) {
19032 _this2.changeType(type, codec);
19033 }
19034 });
19035 }
19036 /**
19037 * Queue an update to append an ArrayBuffer.
19038 *
19039   * @param {Object} options - object containing `type` ('audio' or 'video'), `bytes`, and an optional `segmentInfo`
19040   * @param {Function} doneFn - the function to call when the append is done
19041 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
19042 */
19043 ;
19044
19045 _proto.appendBuffer = function appendBuffer(options, doneFn) {
19046 var _this3 = this;
19047
19048 var segmentInfo = options.segmentInfo,
19049 type = options.type,
19050 bytes = options.bytes;
19051 this.processedAppend_ = true;
19052
19053 if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
19054 this.delayedAudioAppendQueue_.push([options, doneFn]);
19055      this.logger_("delayed audio append of " + bytes.length + " bytes until video append");
19056 return;
19057 } // In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
19058 // not be fired. This means that the queue will be blocked until the next action
19059 // taken by the segment-loader. Provide a mechanism for segment-loader to handle
19060 // these errors by calling the doneFn with the specific error.
19061
19062
19063 var onError = doneFn;
19064 pushQueue({
19065 type: type,
19066 sourceUpdater: this,
19067 action: actions.appendBuffer(bytes, segmentInfo || {
19068 mediaIndex: -1
19069 }, onError),
19070 doneFn: doneFn,
19071 name: 'appendBuffer'
19072 });
19073
19074 if (type === 'video') {
19075 this.videoAppendQueued_ = true;
19076
19077 if (!this.delayedAudioAppendQueue_.length) {
19078 return;
19079 }
19080
19081 var queue = this.delayedAudioAppendQueue_.slice();
19082      this.logger_("queuing " + queue.length + " delayed audio appendBuffers");
19083 this.delayedAudioAppendQueue_.length = 0;
19084 queue.forEach(function (que) {
19085 _this3.appendBuffer.apply(_this3, que);
19086 });
19087 }
19088 }
19089 /**
19090 * Get the audio buffer's buffered timerange.
19091 *
19092 * @return {TimeRange}
19093 * The audio buffer's buffered time range
19094 */
19095 ;
19096
19097 _proto.audioBuffered = function audioBuffered() {
19098 // no media source/source buffer or it isn't in the media sources
19099 // source buffer list
19100 if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
19101 return videojs.createTimeRange();
19102 }
19103
19104 return this.audioBuffer.buffered ? this.audioBuffer.buffered : videojs.createTimeRange();
19105 }
19106 /**
19107 * Get the video buffer's buffered timerange.
19108 *
19109 * @return {TimeRange}
19110 * The video buffer's buffered time range
19111 */
19112 ;
19113
19114 _proto.videoBuffered = function videoBuffered() {
19115 // no media source/source buffer or it isn't in the media sources
19116 // source buffer list
19117 if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
19118 return videojs.createTimeRange();
19119 }
19120
19121 return this.videoBuffer.buffered ? this.videoBuffer.buffered : videojs.createTimeRange();
19122 }
19123 /**
19124 * Get a combined video/audio buffer's buffered timerange.
19125 *
19126 * @return {TimeRange}
19127 * the combined time range
19128 */
19129 ;
19130
19131 _proto.buffered = function buffered() {
19132 var video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
19133 var audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;
19134
19135 if (audio && !video) {
19136 return this.audioBuffered();
19137 }
19138
19139 if (video && !audio) {
19140 return this.videoBuffered();
19141 }
19142
19143 return bufferIntersection(this.audioBuffered(), this.videoBuffered());
19144 }
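
// Editor's sketch (hypothetical ranges): with video buffered [0, 10] and audio
// buffered [2, 8], buffered() reports the intersection [2, 8] -- the span that is
// actually playable when both tracks are required. With only one source buffer
// present, that buffer's ranges are returned unmodified.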
19145 /**
19146 * Add a callback to the queue that will set duration on the mediaSource.
19147 *
19148 * @param {number} duration
19149 * The duration to set
19150 *
19151 * @param {Function} [doneFn]
19152 * function to run after duration has been set.
19153 */
19154 ;
19155
19156 _proto.setDuration = function setDuration(duration, doneFn) {
19157 if (doneFn === void 0) {
19158 doneFn = noop;
19159 }
19160
19161 // In order to set the duration on the media source, it's necessary to wait for all
19162 // source buffers to no longer be updating. "If the updating attribute equals true on
19163 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
19164 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
19165 pushQueue({
19166 type: 'mediaSource',
19167 sourceUpdater: this,
19168 action: actions.duration(duration),
19169 name: 'duration',
19170 doneFn: doneFn
19171 });
19172 }
19173 /**
19174 * Add a mediaSource endOfStream call to the queue
19175 *
19176 * @param {Error} [error]
19177 * Call endOfStream with an error
19178 *
19179 * @param {Function} [doneFn]
19180 * A function that should be called when the
19181 * endOfStream call has finished.
19182 */
19183 ;
19184
19185 _proto.endOfStream = function endOfStream(error, doneFn) {
19186 if (error === void 0) {
19187 error = null;
19188 }
19189
19190 if (doneFn === void 0) {
19191 doneFn = noop;
19192 }
19193
19194 if (typeof error !== 'string') {
19195 error = undefined;
19196 } // In order to set the duration on the media source, it's necessary to wait for all
19197 // source buffers to no longer be updating. "If the updating attribute equals true on
19198 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
19199 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
19200
19201
19202 pushQueue({
19203 type: 'mediaSource',
19204 sourceUpdater: this,
19205 action: actions.endOfStream(error),
19206 name: 'endOfStream',
19207 doneFn: doneFn
19208 });
19209 }
19210 /**
19211 * Queue an update to remove a time range from the buffer.
19212 *
19213 * @param {number} start where to start the removal
19214 * @param {number} end where to end the removal
19215 * @param {Function} [done=noop] optional callback to be executed when the remove
19216 * operation is complete
19217 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
19218 */
19219 ;
19220
19221 _proto.removeAudio = function removeAudio(start, end, done) {
19222 if (done === void 0) {
19223 done = noop;
19224 }
19225
19226 if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
19227 done();
19228 return;
19229 }
19230
19231 pushQueue({
19232 type: 'audio',
19233 sourceUpdater: this,
19234 action: actions.remove(start, end),
19235 doneFn: done,
19236 name: 'remove'
19237 });
19238 }
19239 /**
19240 * Queue an update to remove a time range from the buffer.
19241 *
19242 * @param {number} start where to start the removal
19243 * @param {number} end where to end the removal
19244 * @param {Function} [done=noop] optional callback to be executed when the remove
19245 * operation is complete
19246 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
19247 */
19248 ;
19249
19250 _proto.removeVideo = function removeVideo(start, end, done) {
19251 if (done === void 0) {
19252 done = noop;
19253 }
19254
19255 if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
19256 done();
19257 return;
19258 }
19259
19260 pushQueue({
19261 type: 'video',
19262 sourceUpdater: this,
19263 action: actions.remove(start, end),
19264 doneFn: done,
19265 name: 'remove'
19266 });
19267 }
19268 /**
19269 * Whether the underlying sourceBuffer is updating or not
19270 *
19271 * @return {boolean} the updating status of the SourceBuffer
19272 */
19273 ;
19274
19275 _proto.updating = function updating() {
19276 // the audio/video source buffer is updating
19277 if (_updating('audio', this) || _updating('video', this)) {
19278 return true;
19279 }
19280
19281 return false;
19282 }
19283 /**
19284 * Set/get the timestampoffset on the audio SourceBuffer
19285 *
19286 * @return {number} the timestamp offset
19287 */
19288 ;
19289
19290 _proto.audioTimestampOffset = function audioTimestampOffset(offset) {
19291 if (typeof offset !== 'undefined' && this.audioBuffer && // no point in updating if it's the same
19292 this.audioTimestampOffset_ !== offset) {
19293 pushQueue({
19294 type: 'audio',
19295 sourceUpdater: this,
19296 action: actions.timestampOffset(offset),
19297 name: 'timestampOffset'
19298 });
19299 this.audioTimestampOffset_ = offset;
19300 }
19301
19302 return this.audioTimestampOffset_;
19303 }
19304 /**
19305 * Set/get the timestampoffset on the video SourceBuffer
19306 *
19307 * @return {number} the timestamp offset
19308 */
19309 ;
19310
19311 _proto.videoTimestampOffset = function videoTimestampOffset(offset) {
19312 if (typeof offset !== 'undefined' && this.videoBuffer && // no point in updating if it's the same
19313    this.videoTimestampOffset_ !== offset) {
19314 pushQueue({
19315 type: 'video',
19316 sourceUpdater: this,
19317 action: actions.timestampOffset(offset),
19318 name: 'timestampOffset'
19319 });
19320 this.videoTimestampOffset_ = offset;
19321 }
19322
19323 return this.videoTimestampOffset_;
19324 }
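
// Editor's sketch: both methods above are combined getter/setters, e.g.
//
//   sourceUpdater.videoTimestampOffset(10); // queues the offset change, caches 10
//   sourceUpdater.videoTimestampOffset();   // -> 10 (cached, even before the queued
//                                           //    action reaches the source buffer)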
19325 /**
19326 * Add a function to the queue that will be called
19327 * when it is its turn to run in the audio queue.
19328 *
19329 * @param {Function} callback
19330 * The callback to queue.
19331 */
19332 ;
19333
19334 _proto.audioQueueCallback = function audioQueueCallback(callback) {
19335 if (!this.audioBuffer) {
19336 return;
19337 }
19338
19339 pushQueue({
19340 type: 'audio',
19341 sourceUpdater: this,
19342 action: actions.callback(callback),
19343 name: 'callback'
19344 });
19345 }
19346 /**
19347 * Add a function to the queue that will be called
19348 * when it is its turn to run in the video queue.
19349 *
19350 * @param {Function} callback
19351 * The callback to queue.
19352 */
19353 ;
19354
19355 _proto.videoQueueCallback = function videoQueueCallback(callback) {
19356 if (!this.videoBuffer) {
19357 return;
19358 }
19359
19360 pushQueue({
19361 type: 'video',
19362 sourceUpdater: this,
19363 action: actions.callback(callback),
19364 name: 'callback'
19365 });
19366 }
19367 /**
19368 * dispose of the source updater and the underlying sourceBuffer
19369 */
19370 ;
19371
19372 _proto.dispose = function dispose() {
19373 var _this4 = this;
19374
19375 this.trigger('dispose');
19376 bufferTypes.forEach(function (type) {
19377 _this4.abort(type);
19378
19379 if (_this4.canRemoveSourceBuffer()) {
19380 _this4.removeSourceBuffer(type);
19381 } else {
19382 _this4[type + "QueueCallback"](function () {
19383 return cleanupBuffer(type, _this4);
19384 });
19385 }
19386 });
19387 this.videoAppendQueued_ = false;
19388 this.delayedAudioAppendQueue_.length = 0;
19389
19390 if (this.sourceopenListener_) {
19391 this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
19392 }
19393
19394 this.off();
19395 };
19396
19397 return SourceUpdater;
19398}(videojs.EventTarget);
19399
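// Editor's sketch (minimal usage, not part of the bundle): the lifecycle the class
// above expects -- construct with a MediaSource, create source buffers once codecs
// are known, then queue appends. initAndSegmentBytes and done are placeholders:
//
//   var ms = new window.MediaSource();
//   var updater = new SourceUpdater(ms);
//   updater.on('ready', function () {
//     updater.appendBuffer({ type: 'video', bytes: initAndSegmentBytes }, done);
//   });
//   updater.createSourceBuffers({ video: 'avc1.4d001e', audio: 'mp4a.40.2' });
//   updater.initializedEme(); // 'ready' fires only after buffers AND eme init
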
19400var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
19401 return decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));
19402};
19403
19404var VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(function (char) {
19405 return char.charCodeAt(0);
19406}));
19407/**
19408 * An object that manages segment loading and appending.
19409 *
19410 * @class VTTSegmentLoader
19411 * @param {Object} options required and optional options
19412 * @extends videojs.EventTarget
19413 */
19414
19415var VTTSegmentLoader = /*#__PURE__*/function (_SegmentLoader) {
19416 _inheritsLoose(VTTSegmentLoader, _SegmentLoader);
19417
19418 function VTTSegmentLoader(settings, options) {
19419 var _this;
19420
19421 if (options === void 0) {
19422 options = {};
19423 }
19424
19425 _this = _SegmentLoader.call(this, settings, options) || this; // SegmentLoader requires a MediaSource be specified or it will throw an error;
19426 // however, VTTSegmentLoader has no need of a media source, so delete the reference
19427
19428 _this.mediaSource_ = null;
19429 _this.subtitlesTrack_ = null;
19430 _this.loaderType_ = 'subtitle';
19431 _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks; // The VTT segment will have its own time mappings. Saving VTT segment timing info in
19432 // the sync controller leads to improper behavior.
19433
19434 _this.shouldSaveSegmentTimingInfo_ = false;
19435 return _this;
19436 }
19437
19438 var _proto = VTTSegmentLoader.prototype;
19439
19440 _proto.createTransmuxer_ = function createTransmuxer_() {
19441 // don't need to transmux any subtitles
19442 return null;
19443 }
19444 /**
19445 * Indicates which time ranges are buffered
19446 *
19447 * @return {TimeRange}
19448 * TimeRange object representing the current buffered ranges
19449 */
19450 ;
19451
19452 _proto.buffered_ = function buffered_() {
19453 if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
19454 return videojs.createTimeRanges();
19455 }
19456
19457 var cues = this.subtitlesTrack_.cues;
19458 var start = cues[0].startTime;
19459 var end = cues[cues.length - 1].startTime;
19460 return videojs.createTimeRanges([[start, end]]);
19461 }
19462 /**
19463 * Gets and sets init segment for the provided map
19464 *
19465 * @param {Object} map
19466 * The map object representing the init segment to get or set
19467 * @param {boolean=} set
19468 * If true, the init segment for the provided map should be saved
19469 * @return {Object}
19470 * map object for desired init segment
19471 */
19472 ;
19473
19474 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
19475 if (set === void 0) {
19476 set = false;
19477 }
19478
19479 if (!map) {
19480 return null;
19481 }
19482
19483 var id = initSegmentId(map);
19484 var storedMap = this.initSegments_[id];
19485
19486 if (set && !storedMap && map.bytes) {
19487 // append WebVTT line terminators to the media initialization segment if it exists
19488 // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
19489 // requires two or more WebVTT line terminators between the WebVTT header and the
19490 // rest of the file
19491 var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
19492 var combinedSegment = new Uint8Array(combinedByteLength);
19493 combinedSegment.set(map.bytes);
19494 combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
19495 this.initSegments_[id] = storedMap = {
19496 resolvedUri: map.resolvedUri,
19497 byterange: map.byterange,
19498 bytes: combinedSegment
19499 };
19500 }
19501
19502 return storedMap || map;
19503 }
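
// Editor's sketch (hypothetical bytes): given an init segment containing the header
// "WEBVTT" with no trailing blank line, the code above stores
//
//   "WEBVTT" + "\n\n"   // VTT_LINE_TERMINATORS appended after map.bytes
//
// so concatenating the init segment with cue payloads yields a spec-valid VTT file.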
19504 /**
19505 * Returns true if all configuration required for loading is present, otherwise false.
19506 *
19507 * @return {boolean} True if the all configuration is ready for loading
19508 * @private
19509 */
19510 ;
19511
19512 _proto.couldBeginLoading_ = function couldBeginLoading_() {
19513 return this.playlist_ && this.subtitlesTrack_ && !this.paused();
19514 }
19515 /**
19516 * Once all the starting parameters have been specified, begin
19517 * operation. This method should only be invoked from the INIT
19518 * state.
19519 *
19520 * @private
19521 */
19522 ;
19523
19524 _proto.init_ = function init_() {
19525 this.state = 'READY';
19526 this.resetEverything();
19527 return this.monitorBuffer_();
19528 }
19529 /**
19530 * Set a subtitle track on the segment loader to add subtitles to
19531 *
19532 * @param {TextTrack=} track
19533 * The text track to add loaded subtitles to
19534 * @return {TextTrack}
19535 * Returns the subtitles track
19536 */
19537 ;
19538
19539 _proto.track = function track(_track) {
19540 if (typeof _track === 'undefined') {
19541 return this.subtitlesTrack_;
19542 }
19543
19544 this.subtitlesTrack_ = _track; // if we were unpaused but waiting for a sourceUpdater, start
19545 // buffering now
19546
19547 if (this.state === 'INIT' && this.couldBeginLoading_()) {
19548 this.init_();
19549 }
19550
19551 return this.subtitlesTrack_;
19552 }
19553 /**
19554 * Remove any data in the source buffer between start and end times
19555 *
19556 * @param {number} start - the start time of the region to remove from the buffer
19557 * @param {number} end - the end time of the region to remove from the buffer
19558 */
19559 ;
19560
19561 _proto.remove = function remove(start, end) {
19562 removeCuesFromTrack(start, end, this.subtitlesTrack_);
19563 }
19564 /**
19565   * fill the buffer with segments unless the sourceBuffers are
19566 * currently updating
19567 *
19568 * Note: this function should only ever be called by monitorBuffer_
19569 * and never directly
19570 *
19571 * @private
19572 */
19573 ;
19574
19575 _proto.fillBuffer_ = function fillBuffer_() {
19576 var _this2 = this;
19577
19578 // see if we need to begin loading immediately
19579 var segmentInfo = this.chooseNextRequest_();
19580
19581 if (!segmentInfo) {
19582 return;
19583 }
19584
19585 if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
19586 // We don't have the timestamp offset that we need to sync subtitles.
19587 // Rerun on a timestamp offset or user interaction.
19588 var checkTimestampOffset = function checkTimestampOffset() {
19589 _this2.state = 'READY';
19590
19591 if (!_this2.paused()) {
19592 // if not paused, queue a buffer check as soon as possible
19593 _this2.monitorBuffer_();
19594 }
19595 };
19596
19597 this.syncController_.one('timestampoffset', checkTimestampOffset);
19598 this.state = 'WAITING_ON_TIMELINE';
19599 return;
19600 }
19601
19602 this.loadSegment_(segmentInfo);
19603 } // never set a timestamp offset for vtt segments.
19604 ;
19605
19606 _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_() {
19607 return null;
19608 };
19609
19610 _proto.chooseNextRequest_ = function chooseNextRequest_() {
19611 return this.skipEmptySegments_(_SegmentLoader.prototype.chooseNextRequest_.call(this));
19612 }
19613 /**
19614 * Prevents the segment loader from requesting segments we know contain no subtitles
19615 * by walking forward until we find the next segment that we don't know whether it is
19616 * empty or not.
19617 *
19618 * @param {Object} segmentInfo
19619 * a segment info object that describes the current segment
19620 * @return {Object}
19621 * a segment info object that describes the current segment
19622 */
19623 ;
19624
19625 _proto.skipEmptySegments_ = function skipEmptySegments_(segmentInfo) {
19626 while (segmentInfo && segmentInfo.segment.empty) {
19627 // stop at the last possible segmentInfo
19628 if (segmentInfo.mediaIndex + 1 >= segmentInfo.playlist.segments.length) {
19629 segmentInfo = null;
19630 break;
19631 }
19632
19633 segmentInfo = this.generateSegmentInfo_({
19634 playlist: segmentInfo.playlist,
19635 mediaIndex: segmentInfo.mediaIndex + 1,
19636 startOfSegment: segmentInfo.startOfSegment + segmentInfo.duration,
19637 isSyncRequest: segmentInfo.isSyncRequest
19638 });
19639 }
19640
19641 return segmentInfo;
19642 };
19643
19644 _proto.stopForError = function stopForError(error) {
19645 this.error(error);
19646 this.state = 'READY';
19647 this.pause();
19648 this.trigger('error');
19649 }
19650 /**
19651   * append a decrypted segment to the SourceBuffer through a SourceUpdater
19652 *
19653 * @private
19654 */
19655 ;
19656
19657 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
19658 var _this3 = this;
19659
19660 if (!this.subtitlesTrack_) {
19661 this.state = 'READY';
19662 return;
19663 }
19664
19665 this.saveTransferStats_(simpleSegment.stats); // the request was aborted
19666
19667 if (!this.pendingSegment_) {
19668 this.state = 'READY';
19669 this.mediaRequestsAborted += 1;
19670 return;
19671 }
19672
19673 if (error) {
19674 if (error.code === REQUEST_ERRORS.TIMEOUT) {
19675 this.handleTimeout_();
19676 }
19677
19678 if (error.code === REQUEST_ERRORS.ABORTED) {
19679 this.mediaRequestsAborted += 1;
19680 } else {
19681 this.mediaRequestsErrored += 1;
19682 }
19683
19684 this.stopForError(error);
19685 return;
19686 }
19687
19688 var segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to
19689 // maintain functionality between segment loaders
19690
19691 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
19692 this.state = 'APPENDING'; // used for tests
19693
19694 this.trigger('appending');
19695 var segment = segmentInfo.segment;
19696
19697 if (segment.map) {
19698 segment.map.bytes = simpleSegment.map.bytes;
19699 }
19700
19701 segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded, otherwise, wait till it finished loading
19702
19703 if (typeof window$1.WebVTT !== 'function' && this.subtitlesTrack_ && this.subtitlesTrack_.tech_) {
19704 var loadHandler;
19705
19706 var errorHandler = function errorHandler() {
19707 _this3.subtitlesTrack_.tech_.off('vttjsloaded', loadHandler);
19708
19709 _this3.stopForError({
19710 message: 'Error loading vtt.js'
19711 });
19712
19713 return;
19714 };
19715
19716 loadHandler = function loadHandler() {
19717 _this3.subtitlesTrack_.tech_.off('vttjserror', errorHandler);
19718
19719 _this3.segmentRequestFinished_(error, simpleSegment, result);
19720 };
19721
19722 this.state = 'WAITING_ON_VTTJS';
19723 this.subtitlesTrack_.tech_.one('vttjsloaded', loadHandler);
19724 this.subtitlesTrack_.tech_.one('vttjserror', errorHandler);
19725 return;
19726 }
19727
19728 segment.requested = true;
19729
19730 try {
19731 this.parseVTTCues_(segmentInfo);
19732 } catch (e) {
19733 this.stopForError({
19734 message: e.message
19735 });
19736 return;
19737 }
19738
19739 this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);
19740
19741 if (segmentInfo.cues.length) {
19742 segmentInfo.timingInfo = {
19743 start: segmentInfo.cues[0].startTime,
19744 end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
19745 };
19746 } else {
19747 segmentInfo.timingInfo = {
19748 start: segmentInfo.startOfSegment,
19749 end: segmentInfo.startOfSegment + segmentInfo.duration
19750 };
19751 }
19752
19753 if (segmentInfo.isSyncRequest) {
19754 this.trigger('syncinfoupdate');
19755 this.pendingSegment_ = null;
19756 this.state = 'READY';
19757 return;
19758 }
19759
19760 segmentInfo.byteLength = segmentInfo.bytes.byteLength;
19761 this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to
19762 // the subtitle track
19763
19764 segmentInfo.cues.forEach(function (cue) {
19765 _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window$1.VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
19766 }); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows
19767 // cues to have identical time-intervals, but if the text is also identical
19768 // we can safely assume it is a duplicate that can be removed (ex. when a cue
19769 // "overlaps" VTT segments)
19770
19771 removeDuplicateCuesFromTrack(this.subtitlesTrack_);
19772 this.handleAppendsDone_();
19773 };
19774
19775 _proto.handleData_ = function handleData_() {// noop as we shouldn't be getting video/audio data captions
19776 // that we do not support here.
19777 };
19778
19779 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_() {// noop
19780 }
19781 /**
19782 * Uses the WebVTT parser to parse the segment response
19783 *
19784 * @param {Object} segmentInfo
19785 * a segment info object that describes the current segment
19786 * @private
19787 */
19788 ;
19789
19790 _proto.parseVTTCues_ = function parseVTTCues_(segmentInfo) {
19791 var decoder;
19792 var decodeBytesToString = false;
19793
19794 if (typeof window$1.TextDecoder === 'function') {
19795 decoder = new window$1.TextDecoder('utf8');
19796 } else {
19797 decoder = window$1.WebVTT.StringDecoder();
19798 decodeBytesToString = true;
19799 }
19800
19801 var parser = new window$1.WebVTT.Parser(window$1, window$1.vttjs, decoder);
19802 segmentInfo.cues = [];
19803 segmentInfo.timestampmap = {
19804 MPEGTS: 0,
19805 LOCAL: 0
19806 };
19807 parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
19808
19809 parser.ontimestampmap = function (map) {
19810 segmentInfo.timestampmap = map;
19811 };
19812
19813 parser.onparsingerror = function (error) {
19814 videojs.log.warn('Error encountered when parsing cues: ' + error.message);
19815 };
19816
19817 if (segmentInfo.segment.map) {
19818 var mapData = segmentInfo.segment.map.bytes;
19819
19820 if (decodeBytesToString) {
19821 mapData = uint8ToUtf8(mapData);
19822 }
19823
19824 parser.parse(mapData);
19825 }
19826
19827 var segmentData = segmentInfo.bytes;
19828
19829 if (decodeBytesToString) {
19830 segmentData = uint8ToUtf8(segmentData);
19831 }
19832
19833 parser.parse(segmentData);
19834 parser.flush();
19835 }
19836 /**
19837 * Updates the start and end times of any cues parsed by the WebVTT parser using
19838 * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
19839 * from the SyncController
19840 *
19841 * @param {Object} segmentInfo
19842 * a segment info object that describes the current segment
19843 * @param {Object} mappingObj
19844 * object containing a mapping from TS to media time
19845 * @param {Object} playlist
19846 * the playlist object containing the segment
19847 * @private
19848 */
19849 ;
19850
19851 _proto.updateTimeMapping_ = function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
19852 var segment = segmentInfo.segment;
19853
19854 if (!mappingObj) {
19855 // If the sync controller does not have a mapping of TS to Media Time for the
19856 // timeline, then we don't have enough information to update the cue
19857 // start/end times
19858 return;
19859 }
19860
19861 if (!segmentInfo.cues.length) {
19862 // If there are no cues, we also do not have enough information to figure out
19863 // segment timing. Mark that the segment contains no cues so we don't re-request
19864 // an empty segment.
19865 segment.empty = true;
19866 return;
19867 }
19868
19869 var timestampmap = segmentInfo.timestampmap;
19870 var diff = timestampmap.MPEGTS / ONE_SECOND_IN_TS - timestampmap.LOCAL + mappingObj.mapping;
19871 segmentInfo.cues.forEach(function (cue) {
19872 // First convert cue time to TS time using the timestamp-map provided within the vtt
19873 cue.startTime += diff;
19874 cue.endTime += diff;
19875 });
19876
19877 if (!playlist.syncInfo) {
19878 var firstStart = segmentInfo.cues[0].startTime;
19879 var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
19880 playlist.syncInfo = {
19881 mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
19882 time: Math.min(firstStart, lastStart - segment.duration)
19883 };
19884 }
19885 };
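  // Worked example (illustrative, hypothetical numbers): updateTimeMapping_
  // shifts every cue by MPEGTS / ONE_SECOND_IN_TS - LOCAL + mapping. With the
  // 90kHz MPEG-TS clock, X-TIMESTAMP-MAP=MPEGTS:900000,LOCAL:00:00:00.000 and
  // a timeline mapping of -10, the diff is 900000 / 90000 - 0 + (-10) === 0,
  // so cue times pass through unchanged.
  var exampleCueDiff = function exampleCueDiff(timestampmap, mapping) {
    return timestampmap.MPEGTS / ONE_SECOND_IN_TS - timestampmap.LOCAL + mapping;
  };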
19886
19887 return VTTSegmentLoader;
19888}(SegmentLoader);
19889
19890/**
19891 * @file ad-cue-tags.js
19892 */
19893/**
19894 * Searches for an ad cue that overlaps with the given mediaTime
19895 *
19896 * @param {Object} track
19897 * the track to find the cue for
19898 *
19899 * @param {number} mediaTime
19900 * the time to find the cue at
19901 *
19902 * @return {Object|null}
19903 * the found cue or null
19904 */
19905
19906var findAdCue = function findAdCue(track, mediaTime) {
19907 var cues = track.cues;
19908
19909 for (var i = 0; i < cues.length; i++) {
19910 var cue = cues[i];
19911
19912 if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
19913 return cue;
19914 }
19915 }
19916
19917 return null;
19918};
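// Illustrative usage sketch (hypothetical track shape, not library code):
// findAdCue is a linear scan returning the first cue whose
// [adStartTime, adEndTime] window contains mediaTime.
var exampleFindAdCue = function exampleFindAdCue() {
  var track = {
    cues: [{
      adStartTime: 0,
      adEndTime: 30
    }, {
      adStartTime: 60,
      adEndTime: 90
    }]
  };
  return findAdCue(track, 75); // the second cue; findAdCue(track, 45) would be null
};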
19919var updateAdCues = function updateAdCues(media, track, offset) {
19920 if (offset === void 0) {
19921 offset = 0;
19922 }
19923
19924 if (!media.segments) {
19925 return;
19926 }
19927
19928 var mediaTime = offset;
19929 var cue;
19930
19931 for (var i = 0; i < media.segments.length; i++) {
19932 var segment = media.segments[i];
19933
19934 if (!cue) {
19935 // Since the cues will span for at least the segment duration, adding a fudge
19936 // factor of half segment duration will prevent duplicate cues from being
19937 // created when timing info is not exact (e.g. cue start time initialized
19938 // at 10.006677, but next call mediaTime is 10.003332 )
19939 cue = findAdCue(track, mediaTime + segment.duration / 2);
19940 }
19941
19942 if (cue) {
19943 if ('cueIn' in segment) {
19944 // Found a CUE-IN so end the cue
19945 cue.endTime = mediaTime;
19946 cue.adEndTime = mediaTime;
19947 mediaTime += segment.duration;
19948 cue = null;
19949 continue;
19950 }
19951
19952 if (mediaTime < cue.endTime) {
19953 // Already processed this mediaTime for this cue
19954 mediaTime += segment.duration;
19955 continue;
19956 } // otherwise extend cue until a CUE-IN is found
19957
19958
19959 cue.endTime += segment.duration;
19960 } else {
19961 if ('cueOut' in segment) {
19962 cue = new window$1.VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
19963 cue.adStartTime = mediaTime; // Assumes tag format to be
19964 // #EXT-X-CUE-OUT:30
19965
19966 cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
19967 track.addCue(cue);
19968 }
19969
19970 if ('cueOutCont' in segment) {
19971 // Entered into the middle of an ad cue
19972 // Assumes tag format to be
19973 // #EXT-X-CUE-OUT-CONT:10/30
19974 var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat),
19975 adOffset = _segment$cueOutCont$s[0],
19976 adTotal = _segment$cueOutCont$s[1];
19977
19978 cue = new window$1.VTTCue(mediaTime, mediaTime + segment.duration, '');
19979 cue.adStartTime = mediaTime - adOffset;
19980 cue.adEndTime = cue.adStartTime + adTotal;
19981 track.addCue(cue);
19982 }
19983 }
19984
19985 mediaTime += segment.duration;
19986 }
19987};
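// Worked example (illustrative, hypothetical values) of the
// #EXT-X-CUE-OUT-CONT math above: "10/30" at mediaTime 100 means we joined
// 10 seconds into a 30 second break, so the ad window is [90, 120].
var exampleCueOutCont = function exampleCueOutCont(cueOutCont, mediaTime) {
  var parts = cueOutCont.split('/').map(parseFloat);
  return {
    adStartTime: mediaTime - parts[0],
    adEndTime: mediaTime - parts[0] + parts[1]
  };
};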
19988
19989// The maximum media sequence diff allowed when trying to
19990// synchronize expired playlist segments. 86400 is 48 hours of live
19991// stream content with two second segments. Anything larger than that
19992// will likely be invalid.
19993
19994var MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC = 86400;
19995var syncPointStrategies = [// Strategy "VOD": Handle the VOD-case where the sync-point is *always*
19996// the equivalence display-time 0 === segment-index 0
19997{
19998 name: 'VOD',
19999 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
20000 if (duration !== Infinity) {
20001 var syncPoint = {
20002 time: 0,
20003 segmentIndex: 0,
20004 partIndex: null
20005 };
20006 return syncPoint;
20007 }
20008
20009 return null;
20010 }
20011}, // Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
20012{
20013 name: 'ProgramDateTime',
20014 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
20015 if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
20016 return null;
20017 }
20018
20019 var syncPoint = null;
20020 var lastDistance = null;
20021 var partsAndSegments = getPartsAndSegments(playlist);
20022 currentTime = currentTime || 0;
20023
20024 for (var i = 0; i < partsAndSegments.length; i++) {
20025 // start from the end and loop backwards for live
20026 // or start from the front and loop forwards for non-live
20027 var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
20028 var partAndSegment = partsAndSegments[index];
20029 var segment = partAndSegment.segment;
20030 var datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];
20031
20032 if (!datetimeMapping || !segment.dateTimeObject) {
20033 continue;
20034 }
20035
20036 var segmentTime = segment.dateTimeObject.getTime() / 1000;
20037 var start = segmentTime + datetimeMapping; // take part duration into account.
20038
20039 if (segment.parts && typeof partAndSegment.partIndex === 'number') {
20040 for (var z = 0; z < partAndSegment.partIndex; z++) {
20041 start += segment.parts[z].duration;
20042 }
20043 }
20044
20045 var distance = Math.abs(currentTime - start); // Once the distance begins to increase, or if distance is 0, we have passed
20046 // currentTime and can stop looking for better candidates
20047
20048 if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
20049 break;
20050 }
20051
20052 lastDistance = distance;
20053 syncPoint = {
20054 time: start,
20055 segmentIndex: partAndSegment.segmentIndex,
20056 partIndex: partAndSegment.partIndex
20057 };
20058 }
20059
20060 return syncPoint;
20061 }
20062}, // Strategy "Segment": We have a known time mapping for a timeline and a
20063// segment in the current timeline with timing data
20064{
20065 name: 'Segment',
20066 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
20067 var syncPoint = null;
20068 var lastDistance = null;
20069 currentTime = currentTime || 0;
20070 var partsAndSegments = getPartsAndSegments(playlist);
20071
20072 for (var i = 0; i < partsAndSegments.length; i++) {
20073 // start from the end and loop backwards for live
20074 // or start from the front and loop forwards for non-live
20075 var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
20076 var partAndSegment = partsAndSegments[index];
20077 var segment = partAndSegment.segment;
20078 var start = partAndSegment.part && partAndSegment.part.start || segment && segment.start;
20079
20080 if (segment.timeline === currentTimeline && typeof start !== 'undefined') {
20081 var distance = Math.abs(currentTime - start); // Once the distance begins to increase, we have passed
20082 // currentTime and can stop looking for better candidates
20083
20084 if (lastDistance !== null && lastDistance < distance) {
20085 break;
20086 }
20087
20088 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
20089 lastDistance = distance;
20090 syncPoint = {
20091 time: start,
20092 segmentIndex: partAndSegment.segmentIndex,
20093 partIndex: partAndSegment.partIndex
20094 };
20095 }
20096 }
20097 }
20098
20099 return syncPoint;
20100 }
20101}, // Strategy "Discontinuity": We have a discontinuity with a known
20102// display-time
20103{
20104 name: 'Discontinuity',
20105 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
20106 var syncPoint = null;
20107 currentTime = currentTime || 0;
20108
20109 if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
20110 var lastDistance = null;
20111
20112 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
20113 var segmentIndex = playlist.discontinuityStarts[i];
20114 var discontinuity = playlist.discontinuitySequence + i + 1;
20115 var discontinuitySync = syncController.discontinuities[discontinuity];
20116
20117 if (discontinuitySync) {
20118 var distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed
20119 // currentTime and can stop looking for better candidates
20120
20121 if (lastDistance !== null && lastDistance < distance) {
20122 break;
20123 }
20124
20125 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
20126 lastDistance = distance;
20127 syncPoint = {
20128 time: discontinuitySync.time,
20129 segmentIndex: segmentIndex,
20130 partIndex: null
20131 };
20132 }
20133 }
20134 }
20135 }
20136
20137 return syncPoint;
20138 }
20139}, // Strategy "Playlist": We have a playlist with a known mapping of
20140// segment index to display time
20141{
20142 name: 'Playlist',
20143 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
20144 if (playlist.syncInfo) {
20145 var syncPoint = {
20146 time: playlist.syncInfo.time,
20147 segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence,
20148 partIndex: null
20149 };
20150 return syncPoint;
20151 }
20152
20153 return null;
20154 }
20155}];
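// Illustrative sketch, not library code: every strategy returns either null
// or a sync-point of the same shape. The "VOD" strategy above, for instance,
// maps display-time 0 to segment-index 0 whenever the duration is finite
// (the first argument, the SyncController, is unused by that strategy):
var exampleVodSyncPoint = syncPointStrategies[0].run(null, {}, 60, 0, 0); // { time: 0, segmentIndex: 0, partIndex: null }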
20156
20157var SyncController = /*#__PURE__*/function (_videojs$EventTarget) {
20158 _inheritsLoose(SyncController, _videojs$EventTarget);
20159
20160 function SyncController(options) {
20161 var _this;
20162
20163 _this = _videojs$EventTarget.call(this) || this; // ...for syncing across variants
20164
20165 _this.timelines = [];
20166 _this.discontinuities = [];
20167 _this.timelineToDatetimeMappings = {};
20168 _this.logger_ = logger('SyncController');
20169 return _this;
20170 }
20171 /**
20172 * Find a sync-point for the playlist specified
20173 *
20174 * A sync-point is defined as a known mapping from display-time to
20175 * a segment-index in the current playlist.
20176 *
20177 * @param {Playlist} playlist
20178 * The playlist that needs a sync-point
20179 * @param {number} duration
20180 * Duration of the MediaSource (Infinity if playing a live source)
20181 * @param {number} currentTimeline
20182 * The last timeline from which a segment was loaded
20183 * @return {Object}
20184 * A sync-point object
20185 */
20186
20187
20188 var _proto = SyncController.prototype;
20189
20190 _proto.getSyncPoint = function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
20191 var syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime);
20192
20193 if (!syncPoints.length) {
20194 // Signal that we need to attempt to get a sync-point manually
20195 // by fetching a segment in the playlist and constructing
20196 // a sync-point from that information
20197 return null;
20198 } // Now find the sync-point that is closest to the currentTime because
20199 // that should result in the most accurate guess about which segment
20200 // to fetch
20201
20202
20203 return this.selectSyncPoint_(syncPoints, {
20204 key: 'time',
20205 value: currentTime
20206 });
20207 }
20208 /**
20209 * Calculate the amount of time that has expired off the playlist during playback
20210 *
20211 * @param {Playlist} playlist
20212 * Playlist object to calculate expired from
20213 * @param {number} duration
20214 * Duration of the MediaSource (Infinity if playing a live source)
20215 * @return {number|null}
20216 * The amount of time that has expired off the playlist during playback. Null
20217 * if no sync-points for the playlist can be found.
20218 */
20219 ;
20220
20221 _proto.getExpiredTime = function getExpiredTime(playlist, duration) {
20222 if (!playlist || !playlist.segments) {
20223 return null;
20224 }
20225
20226 var syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0); // Without sync-points, there is not enough information to determine the expired time
20227
20228 if (!syncPoints.length) {
20229 return null;
20230 }
20231
20232 var syncPoint = this.selectSyncPoint_(syncPoints, {
20233 key: 'segmentIndex',
20234 value: 0
20235 }); // If the sync-point is beyond the start of the playlist, we want to subtract the
20236 // duration from index 0 to syncPoint.segmentIndex instead of adding.
20237
20238 if (syncPoint.segmentIndex > 0) {
20239 syncPoint.time *= -1;
20240 }
20241
20242 return Math.abs(syncPoint.time + sumDurations({
20243 defaultDuration: playlist.targetDuration,
20244 durationList: playlist.segments,
20245 startIndex: syncPoint.segmentIndex,
20246 endIndex: 0
20247 }));
20248 }
20249 /**
20250 * Runs each sync-point strategy and returns a list of sync-points returned by the
20251 * strategies
20252 *
20253 * @private
20254 * @param {Playlist} playlist
20255 * The playlist that needs a sync-point
20256 * @param {number} duration
20257 * Duration of the MediaSource (Infinity if playing a live source)
20258 * @param {number} currentTimeline
20259 * The last timeline from which a segment was loaded
20260 * @return {Array}
20261 * A list of sync-point objects
20262 */
20263 ;
20264
20265 _proto.runStrategies_ = function runStrategies_(playlist, duration, currentTimeline, currentTime) {
20266 var syncPoints = []; // Try to find a sync-point by utilizing various strategies...
20267
20268 for (var i = 0; i < syncPointStrategies.length; i++) {
20269 var strategy = syncPointStrategies[i];
20270 var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);
20271
20272 if (syncPoint) {
20273 syncPoint.strategy = strategy.name;
20274 syncPoints.push({
20275 strategy: strategy.name,
20276 syncPoint: syncPoint
20277 });
20278 }
20279 }
20280
20281 return syncPoints;
20282 }
20283 /**
20284 * Selects the sync-point nearest the specified target
20285 *
20286 * @private
20287 * @param {Array} syncPoints
20288 * List of sync-points to select from
20289 * @param {Object} target
20290 * Object specifying the property and value we are targeting
20291 * @param {string} target.key
20292 * Specifies the property to target. Must be either 'time' or 'segmentIndex'
20293 * @param {number} target.value
20294 * The value to target for the specified key.
20295 * @return {Object}
20296 * The sync-point nearest the target
20297 */
20298 ;
20299
20300 _proto.selectSyncPoint_ = function selectSyncPoint_(syncPoints, target) {
20301 var bestSyncPoint = syncPoints[0].syncPoint;
20302 var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
20303 var bestStrategy = syncPoints[0].strategy;
20304
20305 for (var i = 1; i < syncPoints.length; i++) {
20306 var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
20307
20308 if (newDistance < bestDistance) {
20309 bestDistance = newDistance;
20310 bestSyncPoint = syncPoints[i].syncPoint;
20311 bestStrategy = syncPoints[i].strategy;
20312 }
20313 }
20314
20315 this.logger_("syncPoint for [" + target.key + ": " + target.value + "] chosen with strategy" + (" [" + bestStrategy + "]: [time:" + bestSyncPoint.time + ",") + (" segmentIndex:" + bestSyncPoint.segmentIndex) + (typeof bestSyncPoint.partIndex === 'number' ? ",partIndex:" + bestSyncPoint.partIndex : '') + ']');
20316 return bestSyncPoint;
20317 }
20318 /**
20319 * Save any meta-data present on the segments when segments leave
20320 * the live window to the playlist to allow for synchronization at the
20321 * playlist level later.
20322 *
20323 * @param {Playlist} oldPlaylist - The previous active playlist
20324 * @param {Playlist} newPlaylist - The updated and most current playlist
20325 */
20326 ;
20327
20328 _proto.saveExpiredSegmentInfo = function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
20329 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence; // Ignore large media sequence gaps
20330
20331 if (mediaSequenceDiff > MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC) {
20332 videojs.log.warn("Not saving expired segment info. Media sequence gap " + mediaSequenceDiff + " is too large.");
20333 return;
20334 } // When a segment expires from the playlist and it has a start time
20335 // save that information as a possible sync-point reference in future
20336
20337
20338 for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
20339 var lastRemovedSegment = oldPlaylist.segments[i];
20340
20341 if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
20342 newPlaylist.syncInfo = {
20343 mediaSequence: oldPlaylist.mediaSequence + i,
20344 time: lastRemovedSegment.start
20345 };
20346 this.logger_("playlist refresh sync: [time:" + newPlaylist.syncInfo.time + "," + (" mediaSequence: " + newPlaylist.syncInfo.mediaSequence + "]"));
20347 this.trigger('syncinfoupdate');
20348 break;
20349 }
20350 }
20351 }
20352 /**
20353 * Save the mapping from playlist's ProgramDateTime to display time. This should only happen
20354 * before segments start to load.
20355 *
20356 * @param {Playlist} playlist - The currently active playlist
20357 */
20358 ;
20359
20360 _proto.setDateTimeMappingForStart = function setDateTimeMappingForStart(playlist) {
20361 // It's possible for the playlist to be updated before playback starts, meaning time
20362 // zero is not yet set. If, during these playlist refreshes, a discontinuity is
20363 // crossed, then the old time zero mapping (for the prior timeline) would be retained
20364 // unless the mappings are cleared.
20365 this.timelineToDatetimeMappings = {};
20366
20367 if (playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
20368 var firstSegment = playlist.segments[0];
20369 var playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
20370 this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
20371 }
20372 }
20373 /**
20374 * Calculates and saves timeline mappings, playlist sync info, and segment timing values
20375 * based on the latest timing information.
20376 *
20377 * @param {Object} options
20378 * Options object
20379 * @param {SegmentInfo} options.segmentInfo
20380 * The current active request information
20381 * @param {boolean} options.shouldSaveTimelineMapping
20382 * If there's a timeline change, determines if the timeline mapping should be
20383 * saved for timeline mapping and program date time mappings.
20384 */
20385 ;
20386
20387 _proto.saveSegmentTimingInfo = function saveSegmentTimingInfo(_ref) {
20388 var segmentInfo = _ref.segmentInfo,
20389 shouldSaveTimelineMapping = _ref.shouldSaveTimelineMapping;
20390 var didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);
20391 var segment = segmentInfo.segment;
20392
20393 if (didCalculateSegmentTimeMapping) {
20394 this.saveDiscontinuitySyncInfo_(segmentInfo); // If the playlist does not have sync information yet, record that information
20395 // now with segment timing information
20396
20397 if (!segmentInfo.playlist.syncInfo) {
20398 segmentInfo.playlist.syncInfo = {
20399 mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
20400 time: segment.start
20401 };
20402 }
20403 }
20404
20405 var dateTime = segment.dateTimeObject;
20406
20407 if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
20408 this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
20409 }
20410 };
20411
20412 _proto.timestampOffsetForTimeline = function timestampOffsetForTimeline(timeline) {
20413 if (typeof this.timelines[timeline] === 'undefined') {
20414 return null;
20415 }
20416
20417 return this.timelines[timeline].time;
20418 };
20419
20420 _proto.mappingForTimeline = function mappingForTimeline(timeline) {
20421 if (typeof this.timelines[timeline] === 'undefined') {
20422 return null;
20423 }
20424
20425 return this.timelines[timeline].mapping;
20426 }
20427 /**
20428 * Use the "media time" for a segment to generate a mapping to "display time" and
20429 * save that display time to the segment.
20430 *
20431 * @private
20432 * @param {SegmentInfo} segmentInfo
20433 * The current active request information
20434 * @param {Object} timingInfo
20435 * The start and end time of the current segment in "media time"
20436 * @param {boolean} shouldSaveTimelineMapping
20437 * If there's a timeline change, determines if the timeline mapping should be
20438 * saved in timelines.
20439 * @return {boolean}
20440 * Returns false if segment time mapping could not be calculated
20441 */
20442 ;
20443
20444 _proto.calculateSegmentTimeMapping_ = function calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
20445 // TODO: remove side effects
20446 var segment = segmentInfo.segment;
20447 var part = segmentInfo.part;
20448 var mappingObj = this.timelines[segmentInfo.timeline];
20449 var start;
20450 var end;
20451
20452 if (typeof segmentInfo.timestampOffset === 'number') {
20453 mappingObj = {
20454 time: segmentInfo.startOfSegment,
20455 mapping: segmentInfo.startOfSegment - timingInfo.start
20456 };
20457
20458 if (shouldSaveTimelineMapping) {
20459 this.timelines[segmentInfo.timeline] = mappingObj;
20460 this.trigger('timestampoffset');
20461 this.logger_("time mapping for timeline " + segmentInfo.timeline + ": " + ("[time: " + mappingObj.time + "] [mapping: " + mappingObj.mapping + "]"));
20462 }
20463
20464 start = segmentInfo.startOfSegment;
20465 end = timingInfo.end + mappingObj.mapping;
20466 } else if (mappingObj) {
20467 start = timingInfo.start + mappingObj.mapping;
20468 end = timingInfo.end + mappingObj.mapping;
20469 } else {
20470 return false;
20471 }
20472
20473 if (part) {
20474 part.start = start;
20475 part.end = end;
20476 } // If we don't have a segment start yet or the start value we got
20477 // is less than our current segment.start value, save a new start value.
20478 // We have to do this because parts will have segment timing info saved
20479 // multiple times and we want segment start to be the earliest part start
20480 // value for that segment.
20481
20482
20483 if (!segment.start || start < segment.start) {
20484 segment.start = start;
20485 }
20486
20487 segment.end = end;
20488 return true;
20489 }
20490 /**
20491 * Each time we have a discontinuity in the playlist, attempt to calculate the
20492 * display-time location of the start of the discontinuity and save that. We also
20493 * save an accuracy value so that we save the most accurate values (closest to 0).
20494 *
20495 * @private
20496 * @param {SegmentInfo} segmentInfo - The current active request information
20497 */
20498 ;
20499
20500 _proto.saveDiscontinuitySyncInfo_ = function saveDiscontinuitySyncInfo_(segmentInfo) {
20501 var playlist = segmentInfo.playlist;
20502 var segment = segmentInfo.segment; // If the current segment is a discontinuity then we know exactly where
20503 // the range starts and its accuracy is 0 (greater accuracy values
20504 // mean more approximation)
20505
20506 if (segment.discontinuity) {
20507 this.discontinuities[segment.timeline] = {
20508 time: segment.start,
20509 accuracy: 0
20510 };
20511 } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
20512 // Search for future discontinuities that we can provide better timing
20513 // information for and save that information for sync purposes
20514 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
20515 var segmentIndex = playlist.discontinuityStarts[i];
20516 var discontinuity = playlist.discontinuitySequence + i + 1;
20517 var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
20518 var accuracy = Math.abs(mediaIndexDiff);
20519
20520 if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
20521 var time = void 0;
20522
20523 if (mediaIndexDiff < 0) {
20524 time = segment.start - sumDurations({
20525 defaultDuration: playlist.targetDuration,
20526 durationList: playlist.segments,
20527 startIndex: segmentInfo.mediaIndex,
20528 endIndex: segmentIndex
20529 });
20530 } else {
20531 time = segment.end + sumDurations({
20532 defaultDuration: playlist.targetDuration,
20533 durationList: playlist.segments,
20534 startIndex: segmentInfo.mediaIndex + 1,
20535 endIndex: segmentIndex
20536 });
20537 }
20538
20539 this.discontinuities[discontinuity] = {
20540 time: time,
20541 accuracy: accuracy
20542 };
20543 }
20544 }
20545 }
20546 };
20547
20548 _proto.dispose = function dispose() {
20549 this.trigger('dispose');
20550 this.off();
20551 };
20552
20553 return SyncController;
20554}(videojs.EventTarget);
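// Minimal usage sketch (illustrative, hypothetical playlist object): for a
// finite-duration source the "VOD" strategy wins, and getSyncPoint tags the
// result with the strategy that produced it.
var exampleGetSyncPoint = function exampleGetSyncPoint() {
  var sc = new SyncController({});
  return sc.getSyncPoint({ segments: [], endList: true }, 60, 0, 0);
  // → { time: 0, segmentIndex: 0, partIndex: null, strategy: 'VOD' }
};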
20555
20556/**
20557 * The TimelineChangeController acts as a source for segment loaders to listen for and
20558 * keep track of latest and pending timeline changes. This is useful to ensure proper
20559 * sync, as each loader may need to make a consideration for what timeline the other
20560 * loader is on before making changes which could impact the other loader's media.
20561 *
20562 * @class TimelineChangeController
20563 * @extends videojs.EventTarget
20564 */
20565
20566var TimelineChangeController = /*#__PURE__*/function (_videojs$EventTarget) {
20567 _inheritsLoose(TimelineChangeController, _videojs$EventTarget);
20568
20569 function TimelineChangeController() {
20570 var _this;
20571
20572 _this = _videojs$EventTarget.call(this) || this;
20573 _this.pendingTimelineChanges_ = {};
20574 _this.lastTimelineChanges_ = {};
20575 return _this;
20576 }
20577
20578 var _proto = TimelineChangeController.prototype;
20579
20580 _proto.clearPendingTimelineChange = function clearPendingTimelineChange(type) {
20581 this.pendingTimelineChanges_[type] = null;
20582 this.trigger('pendingtimelinechange');
20583 };
20584
20585 _proto.pendingTimelineChange = function pendingTimelineChange(_ref) {
20586 var type = _ref.type,
20587 from = _ref.from,
20588 to = _ref.to;
20589
20590 if (typeof from === 'number' && typeof to === 'number') {
20591 this.pendingTimelineChanges_[type] = {
20592 type: type,
20593 from: from,
20594 to: to
20595 };
20596 this.trigger('pendingtimelinechange');
20597 }
20598
20599 return this.pendingTimelineChanges_[type];
20600 };
20601
20602 _proto.lastTimelineChange = function lastTimelineChange(_ref2) {
20603 var type = _ref2.type,
20604 from = _ref2.from,
20605 to = _ref2.to;
20606
20607 if (typeof from === 'number' && typeof to === 'number') {
20608 this.lastTimelineChanges_[type] = {
20609 type: type,
20610 from: from,
20611 to: to
20612 };
20613 delete this.pendingTimelineChanges_[type];
20614 this.trigger('timelinechange');
20615 }
20616
20617 return this.lastTimelineChanges_[type];
20618 };
20619
20620 _proto.dispose = function dispose() {
20621 this.trigger('dispose');
20622 this.pendingTimelineChanges_ = {};
20623 this.lastTimelineChanges_ = {};
20624 this.off();
20625 };
20626
20627 return TimelineChangeController;
20628}(videojs.EventTarget);
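// Illustrative usage sketch, not library code: a loader records a pending
// change before crossing timelines, then commits it (clearing the pending
// entry) once the switch lands.
var exampleTimelineChange = function exampleTimelineChange() {
  var controller = new TimelineChangeController();
  controller.pendingTimelineChange({ type: 'main', from: 0, to: 1 });
  controller.lastTimelineChange({ type: 'main', from: 0, to: 1 });
  return controller.lastTimelineChange({ type: 'main' }); // { type: 'main', from: 0, to: 1 }
};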
20629
20630/* rollup-plugin-worker-factory start for worker!/Users/bclifford/Code/vhs-release-test/src/decrypter-worker.js */
20631var workerCode = transform(getWorkerString(function () {
20632
20633 var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
20634
20635 function createCommonjsModule(fn, basedir, module) {
20636 return module = {
20637 path: basedir,
20638 exports: {},
20639 require: function require(path, base) {
20640 return commonjsRequire(path, base === undefined || base === null ? module.path : base);
20641 }
20642 }, fn(module, module.exports), module.exports;
20643 }
20644
20645 function commonjsRequire() {
20646 throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
20647 }
20648
20649 var createClass = createCommonjsModule(function (module) {
20650 function _defineProperties(target, props) {
20651 for (var i = 0; i < props.length; i++) {
20652 var descriptor = props[i];
20653 descriptor.enumerable = descriptor.enumerable || false;
20654 descriptor.configurable = true;
20655 if ("value" in descriptor) descriptor.writable = true;
20656 Object.defineProperty(target, descriptor.key, descriptor);
20657 }
20658 }
20659
20660 function _createClass(Constructor, protoProps, staticProps) {
20661 if (protoProps) _defineProperties(Constructor.prototype, protoProps);
20662 if (staticProps) _defineProperties(Constructor, staticProps);
20663 return Constructor;
20664 }
20665
20666 module.exports = _createClass;
20667 module.exports["default"] = module.exports, module.exports.__esModule = true;
20668 });
20669 var setPrototypeOf = createCommonjsModule(function (module) {
20670 function _setPrototypeOf(o, p) {
20671 module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
20672 o.__proto__ = p;
20673 return o;
20674 };
20675
20676 module.exports["default"] = module.exports, module.exports.__esModule = true;
20677 return _setPrototypeOf(o, p);
20678 }
20679
20680 module.exports = _setPrototypeOf;
20681 module.exports["default"] = module.exports, module.exports.__esModule = true;
20682 });
20683 var inheritsLoose = createCommonjsModule(function (module) {
20684 function _inheritsLoose(subClass, superClass) {
20685 subClass.prototype = Object.create(superClass.prototype);
20686 subClass.prototype.constructor = subClass;
20687 setPrototypeOf(subClass, superClass);
20688 }
20689
20690 module.exports = _inheritsLoose;
20691 module.exports["default"] = module.exports, module.exports.__esModule = true;
20692 });
20693 /**
20694 * @file stream.js
20695 */
20696
20697 /**
20698 * A lightweight readable stream implementation that handles event dispatching.
20699 *
20700 * @class Stream
20701 */
20702
20703 var Stream = /*#__PURE__*/function () {
20704 function Stream() {
20705 this.listeners = {};
20706 }
20707 /**
20708 * Add a listener for a specified event type.
20709 *
20710 * @param {string} type the event name
20711 * @param {Function} listener the callback to be invoked when an event of
20712 * the specified type occurs
20713 */
20714
20715
20716 var _proto = Stream.prototype;
20717
20718 _proto.on = function on(type, listener) {
20719 if (!this.listeners[type]) {
20720 this.listeners[type] = [];
20721 }
20722
20723 this.listeners[type].push(listener);
20724 }
20725 /**
20726 * Remove a listener for a specified event type.
20727 *
20728 * @param {string} type the event name
20729 * @param {Function} listener a function previously registered for this
20730 * type of event through `on`
20731 * @return {boolean} if we could turn it off or not
20732 */
20733 ;
20734
20735 _proto.off = function off(type, listener) {
20736 if (!this.listeners[type]) {
20737 return false;
20738 }
20739
20740 var index = this.listeners[type].indexOf(listener); // TODO: which is better?
20741 // In Video.js we slice listener functions
20742 // on trigger so that it does not mess up the order
20743 // while we loop through.
20744 //
20745 // Here we slice in `off` so that the loop in trigger
20746 // can continue using its old reference to loop without
20747 // messing up the order.
20748
20749 this.listeners[type] = this.listeners[type].slice(0);
20750 this.listeners[type].splice(index, 1);
20751 return index > -1;
20752 }
20753 /**
20754 * Trigger an event of the specified type on this stream. Any additional
20755 * arguments to this function are passed as parameters to event listeners.
20756 *
20757 * @param {string} type the event name
20758 */
20759 ;
20760
20761 _proto.trigger = function trigger(type) {
20762 var callbacks = this.listeners[type];
20763
20764 if (!callbacks) {
20765 return;
20766 } // Slicing the arguments on every invocation of this method
20767 // can add a significant amount of overhead. Avoid the
20768 // intermediate object creation for the common case of a
20769 // single callback argument
20770
20771
20772 if (arguments.length === 2) {
20773 var length = callbacks.length;
20774
20775 for (var i = 0; i < length; ++i) {
20776 callbacks[i].call(this, arguments[1]);
20777 }
20778 } else {
20779 var args = Array.prototype.slice.call(arguments, 1);
20780 var _length = callbacks.length;
20781
20782 for (var _i = 0; _i < _length; ++_i) {
20783 callbacks[_i].apply(this, args);
20784 }
20785 }
20786 }
20787 /**
20788 * Destroys the stream and cleans up.
20789 */
20790 ;
20791
20792 _proto.dispose = function dispose() {
20793 this.listeners = {};
20794 }
20795 /**
20796 * Forwards all `data` events on this stream to the destination stream. The
20797 * destination stream should provide a method `push` to receive the data
20798 * events as they arrive.
20799 *
20800 * @param {Stream} destination the stream that will receive all `data` events
20801 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
20802 */
20803 ;
20804
20805 _proto.pipe = function pipe(destination) {
20806 this.on('data', function (data) {
20807 destination.push(data);
20808 });
20809 };
20810
20811 return Stream;
20812 }();
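  // Illustrative usage sketch, not part of the worker: listeners subscribe
  // with on(), trigger() dispatches, and pipe() forwards 'data' events to
  // anything exposing a push() method.
  var exampleStreamUsage = function exampleStreamUsage() {
    var source = new Stream();
    var received = [];
    source.pipe({
      push: function push(data) {
        received.push(data);
      }
    });
    source.trigger('data', 1);
    return received; // [1]
  };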
20813 /*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
20814
20815 /**
20816 * Returns the subarray of a Uint8Array without PKCS#7 padding.
20817 *
20818 * @param padded {Uint8Array} unencrypted bytes that have been padded
20819 * @return {Uint8Array} the unpadded bytes
20820 * @see http://tools.ietf.org/html/rfc5652
20821 */
20822
20823
20824 function unpad(padded) {
20825 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
20826 }
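  // Worked example (illustrative): PKCS#7 pads with N trailing bytes that
  // each hold the value N, so a block ending in 0x02 0x02 sheds two bytes:
  var exampleUnpadded = unpad(new Uint8Array([104, 105, 2, 2])); // Uint8Array [104, 105]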
20827 /*! @name aes-decrypter @version 3.1.3 @license Apache-2.0 */
20828
20829 /**
20830 * @file aes.js
20831 *
20832 * This file contains an adaptation of the AES decryption algorithm
20833 * from the Stanford Javascript Crypto Library. That work is
20834 * covered by the following copyright and permissions notice:
20835 *
20836 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
20837 * All rights reserved.
20838 *
20839 * Redistribution and use in source and binary forms, with or without
20840 * modification, are permitted provided that the following conditions are
20841 * met:
20842 *
20843 * 1. Redistributions of source code must retain the above copyright
20844 * notice, this list of conditions and the following disclaimer.
20845 *
20846 * 2. Redistributions in binary form must reproduce the above
20847 * copyright notice, this list of conditions and the following
20848 * disclaimer in the documentation and/or other materials provided
20849 * with the distribution.
20850 *
20851 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
20852 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
20853 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20854 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
20855 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
20856 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
20857 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
20858 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
20859 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
20860 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
20861 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
20862 *
20863 * The views and conclusions contained in the software and documentation
20864 * are those of the authors and should not be interpreted as representing
20865 * official policies, either expressed or implied, of the authors.
20866 */
20867
20868 /**
20869 * Expand the S-box tables.
20870 *
20871 * @private
20872 */
20873
20874
20875 var precompute = function precompute() {
20876 var tables = [[[], [], [], [], []], [[], [], [], [], []]];
20877 var encTable = tables[0];
20878 var decTable = tables[1];
20879 var sbox = encTable[4];
20880 var sboxInv = decTable[4];
20881 var i;
20882 var x;
20883 var xInv;
20884 var d = [];
20885 var th = [];
20886 var x2;
20887 var x4;
20888 var x8;
20889 var s;
20890 var tEnc;
20891 var tDec; // Compute double and third tables
20892
20893 for (i = 0; i < 256; i++) {
20894 th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
20895 }
20896
20897 for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
20898 // Compute sbox
20899 s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
20900 s = s >> 8 ^ s & 255 ^ 99;
20901 sbox[x] = s;
20902 sboxInv[s] = x; // Compute MixColumns
20903
20904 x8 = d[x4 = d[x2 = d[x]]];
20905 tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
20906 tEnc = d[s] * 0x101 ^ s * 0x1010100;
20907
20908 for (i = 0; i < 4; i++) {
20909 encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
20910 decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
20911 }
20912 } // Compactify. Considerable speedup on Firefox.
20913
20914
20915 for (i = 0; i < 5; i++) {
20916 encTable[i] = encTable[i].slice(0);
20917 decTable[i] = decTable[i].slice(0);
20918 }
20919
20920 return tables;
20921 };
20922
20923 var aesTables = null;
20924 /**
20925 * Schedule out an AES key for both encryption and decryption. This
20926 * is a low-level class. Use a cipher mode to do bulk encryption.
20927 *
20928 * @class AES
20929 * @param key {Array} The key as an array of 4, 6 or 8 words.
20930 */
20931
20932 var AES = /*#__PURE__*/function () {
20933 function AES(key) {
20934 /**
20935 * The expanded S-box and inverse S-box tables. These will be computed
20936 * on the client so that we don't have to send them down the wire.
20937 *
20938 * There are two tables, _tables[0] is for encryption and
20939 * _tables[1] is for decryption.
20940 *
20941 * The first 4 sub-tables are the expanded S-box with MixColumns. The
20942 * last (_tables[01][4]) is the S-box itself.
20943 *
20944 * @private
20945 */
20946 // if we have yet to precompute the S-box tables
20947 // do so now
20948 if (!aesTables) {
20949 aesTables = precompute();
20950 } // then make a copy of that object for use
20951
20952
20953 this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
20954 var i;
20955 var j;
20956 var tmp;
20957 var sbox = this._tables[0][4];
20958 var decTable = this._tables[1];
20959 var keyLen = key.length;
20960 var rcon = 1;
20961
20962 if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
20963 throw new Error('Invalid aes key size');
20964 }
20965
20966 var encKey = key.slice(0);
20967 var decKey = [];
20968 this._key = [encKey, decKey]; // schedule encryption keys
20969
20970 for (i = keyLen; i < 4 * keyLen + 28; i++) {
20971 tmp = encKey[i - 1]; // apply sbox
20972
20973 if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
20974 tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon
20975
20976 if (i % keyLen === 0) {
20977 tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
20978 rcon = rcon << 1 ^ (rcon >> 7) * 283;
20979 }
20980 }
20981
20982 encKey[i] = encKey[i - keyLen] ^ tmp;
20983 } // schedule decryption keys
20984
20985
20986 for (j = 0; i; j++, i--) {
20987 tmp = encKey[j & 3 ? i : i - 4];
20988
20989 if (i <= 4 || j < 4) {
20990 decKey[j] = tmp;
20991 } else {
20992 decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
20993 }
20994 }
20995 }
20996 /**
20997 * Decrypt 16 bytes, specified as four 32-bit words.
20998 *
20999 * @param {number} encrypted0 the first word to decrypt
21000 * @param {number} encrypted1 the second word to decrypt
21001 * @param {number} encrypted2 the third word to decrypt
21002 * @param {number} encrypted3 the fourth word to decrypt
21003 * @param {Int32Array} out the array to write the decrypted words
21004 * into
21005 * @param {number} offset the offset into the output array to start
21006 * writing results
21007 * @return {Array} The plaintext.
21008 */
21009
21010
21011 var _proto = AES.prototype;
21012
21013 _proto.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
21014 var key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data
21015
21016 var a = encrypted0 ^ key[0];
21017 var b = encrypted3 ^ key[1];
21018 var c = encrypted2 ^ key[2];
21019 var d = encrypted1 ^ key[3];
21020 var a2;
21021 var b2;
21022 var c2; // key.length === 2 ?
21023
21024 var nInnerRounds = key.length / 4 - 2;
21025 var i;
21026 var kIndex = 4;
21027 var table = this._tables[1]; // load up the tables
21028
21029 var table0 = table[0];
21030 var table1 = table[1];
21031 var table2 = table[2];
21032 var table3 = table[3];
21033 var sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.
21034
21035 for (i = 0; i < nInnerRounds; i++) {
21036 a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
21037 b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
21038 c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
21039 d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
21040 kIndex += 4;
21041 a = a2;
21042 b = b2;
21043 c = c2;
21044 } // Last round.
21045
21046
21047 for (i = 0; i < 4; i++) {
21048 out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
21049 a2 = a;
21050 a = b;
21051 b = c;
21052 c = d;
21053 d = a2;
21054 }
21055 };
21056
21057 return AES;
21058 }();
21059 /**
21060 * A wrapper around the Stream class to use setTimeout
21061 * and run stream "jobs" asynchronously
21062 *
21063 * @class AsyncStream
21064 * @extends Stream
21065 */
21066
21067
21068 var AsyncStream = /*#__PURE__*/function (_Stream) {
21069 inheritsLoose(AsyncStream, _Stream);
21070
21071 function AsyncStream() {
21072 var _this;
21073
21074 _this = _Stream.call(this) || this;
21075 _this.jobs = [];
21076 _this.delay = 1;
21077 _this.timeout_ = null;
21078 return _this;
21079 }
21080 /**
21081 * process an async job
21082 *
21083 * @private
21084 */
21085
21086
21087 var _proto = AsyncStream.prototype;
21088
21089 _proto.processJob_ = function processJob_() {
21090 this.jobs.shift()();
21091
21092 if (this.jobs.length) {
21093 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
21094 } else {
21095 this.timeout_ = null;
21096 }
21097 }
21098 /**
21099 * push a job into the stream
21100 *
21101 * @param {Function} job the job to push into the stream
21102 */
21103 ;
21104
21105 _proto.push = function push(job) {
21106 this.jobs.push(job);
21107
21108 if (!this.timeout_) {
21109 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
21110 }
21111 };
21112
21113 return AsyncStream;
21114 }(Stream);
21115 /**
21116 * Convert network-order (big-endian) bytes into their little-endian
21117 * representation.
21118 */
21119
21120
21121 var ntoh = function ntoh(word) {
21122 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
21123 };
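  // Worked example (illustrative): ntoh reverses the byte order of a 32-bit
  // word, e.g. 0x01020304 becomes 0x04030201.
  var exampleNtoh = ntoh(0x01020304); // 0x04030201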
21124 /**
21125 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
21126 *
21127 * @param {Uint8Array} encrypted the encrypted bytes
21128 * @param {Uint32Array} key the bytes of the decryption key
21129 * @param {Uint32Array} initVector the initialization vector (IV) to
21130 * use for the first round of CBC.
21131 * @return {Uint8Array} the decrypted bytes
21132 *
21133 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
21134 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
21135 * @see https://tools.ietf.org/html/rfc2315
21136 */
21137
21138
21139 var decrypt = function decrypt(encrypted, key, initVector) {
21140 // word-level access to the encrypted bytes
21141 var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
21142 var decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output
21143
21144 var decrypted = new Uint8Array(encrypted.byteLength);
21145 var decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
21146 // decrypted data
21147
21148 var init0;
21149 var init1;
21150 var init2;
21151 var init3;
21152 var encrypted0;
21153 var encrypted1;
21154 var encrypted2;
21155 var encrypted3; // iteration variable
21156
21157 var wordIx; // pull out the words of the IV to ensure we don't modify the
21158 // passed-in reference and for easier access
21159
21160 init0 = initVector[0];
21161 init1 = initVector[1];
21162 init2 = initVector[2];
21163 init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
21164 // to each decrypted block
21165
21166 for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
21167 // convert big-endian (network order) words into little-endian
21168 // (javascript order)
21169 encrypted0 = ntoh(encrypted32[wordIx]);
21170 encrypted1 = ntoh(encrypted32[wordIx + 1]);
21171 encrypted2 = ntoh(encrypted32[wordIx + 2]);
21172 encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block
21173
21174 decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
21175 // plaintext
21176
21177 decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
21178 decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
21179 decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
21180 decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round
21181
21182 init0 = encrypted0;
21183 init1 = encrypted1;
21184 init2 = encrypted2;
21185 init3 = encrypted3;
21186 }
21187
21188 return decrypted;
21189 };
21190 /**
21191 * The `Decrypter` class that manages decryption of AES
21192 * data through `AsyncStream` objects and the `decrypt`
21193 * function
21194 *
21195 * @param {Uint8Array} encrypted the encrypted bytes
21196 * @param {Uint32Array} key the bytes of the decryption key
21197 * @param {Uint32Array} initVector the initialization vector (IV) to use for the first round of CBC
21198 * @param {Function} done the function to run when done
21199 * @class Decrypter
21200 */
21201
21202
21203 var Decrypter = /*#__PURE__*/function () {
21204 function Decrypter(encrypted, key, initVector, done) {
21205 var step = Decrypter.STEP;
21206 var encrypted32 = new Int32Array(encrypted.buffer);
21207 var decrypted = new Uint8Array(encrypted.byteLength);
21208 var i = 0;
21209 this.asyncStream_ = new AsyncStream(); // split up the decryption job and do the individual chunks asynchronously
21210
21211 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
21212
21213 for (i = step; i < encrypted32.length; i += step) {
21214 initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
21215 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
21216 } // invoke the done() callback when everything is finished
21217
21218
21219 this.asyncStream_.push(function () {
21220 // remove pkcs#7 padding from the decrypted bytes
21221 done(null, unpad(decrypted));
21222 });
21223 }
21224 /**
21225 * A getter for STEP, the maximum number of bytes to process at one time
21226 *
21227 * @return {number} the value of STEP, 32000
21228 */
21229
21230
21231 var _proto = Decrypter.prototype;
21232 /**
21233 * @private
21234 */
21235
21236 _proto.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
21237 return function () {
21238 var bytes = decrypt(encrypted, key, initVector);
21239 decrypted.set(bytes, encrypted.byteOffset);
21240 };
21241 };
21242
21243 createClass(Decrypter, null, [{
21244 key: "STEP",
21245 get: function get() {
21246 // 4 * 8000;
21247 return 32000;
21248 }
21249 }]);
21250 return Decrypter;
21251 }();
21252
21253 var win;
21254
21255 if (typeof window !== "undefined") {
21256 win = window;
21257 } else if (typeof commonjsGlobal !== "undefined") {
21258 win = commonjsGlobal;
21259 } else if (typeof self !== "undefined") {
21260 win = self;
21261 } else {
21262 win = {};
21263 }
21264
21265 var window_1 = win;
21266
21267 var isArrayBufferView = function isArrayBufferView(obj) {
21268 if (typeof ArrayBuffer.isView === 'function') {
21269 return ArrayBuffer.isView(obj);
21270 }
21271
21272 return obj && obj.buffer instanceof ArrayBuffer;
21273 };
21274
21275 var BigInt = window_1.BigInt || Number;
21276 [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
21277
21278 (function () {
21279 var a = new Uint16Array([0xFFCC]);
21280 var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);
21281
21282 if (b[0] === 0xFF) {
21283 return 'big';
21284 }
21285
21286 if (b[0] === 0xCC) {
21287 return 'little';
21288 }
21289
21290 return 'unknown';
21291 })();
21292 /**
21293 * Creates an object for sending to a web worker modifying properties that are TypedArrays
21294 * into a new object with separated properties for the buffer, byteOffset, and byteLength.
21295 *
21296 * @param {Object} message
21297 * Object of properties and values to send to the web worker
21298 * @return {Object}
21299 * Modified message with TypedArray values expanded
21300 * @function createTransferableMessage
21301 */
21302
21303
21304 var createTransferableMessage = function createTransferableMessage(message) {
21305 var transferable = {};
21306 Object.keys(message).forEach(function (key) {
21307 var value = message[key];
21308
21309 if (isArrayBufferView(value)) {
21310 transferable[key] = {
21311 bytes: value.buffer,
21312 byteOffset: value.byteOffset,
21313 byteLength: value.byteLength
21314 };
21315 } else {
21316 transferable[key] = value;
21317 }
21318 });
21319 return transferable;
21320 };
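  // Illustrative sketch (hypothetical message): a typed-array property is
  // flattened so its underlying ArrayBuffer can be listed as transferable
  // when posting across the worker boundary:
  var exampleTransferable = createTransferableMessage({
    decrypted: new Uint8Array(4)
  }); // { decrypted: { bytes: ArrayBuffer(4), byteOffset: 0, byteLength: 4 } }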
21321 /* global self */
21322
21323 /**
21324 * Our web worker interface so that things can talk to aes-decrypter
21325 * that will be running in a web worker. The scope is passed to this by
21326 * webworkify.
21327 */
21328
21329
21330 self.onmessage = function (event) {
21331 var data = event.data;
21332 var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
21333 var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
21334 var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
21335 /* eslint-disable no-new, handle-callback-err */
21336
21337 new Decrypter(encrypted, key, iv, function (err, bytes) {
21338 self.postMessage(createTransferableMessage({
21339 source: data.source,
21340 decrypted: bytes
21341 }), [bytes.buffer]);
21342 });
21343 /* eslint-enable */
21344 };
21345}));
21346var Decrypter = factory(workerCode);
21347/* rollup-plugin-worker-factory end for worker!/Users/bclifford/Code/vhs-release-test/src/decrypter-worker.js */
21348
21349/**
21350 * Convert the properties of an HLS track into an audioTrackKind.
21351 *
21352 * @private
21353 */
21354
21355var audioTrackKind_ = function audioTrackKind_(properties) {
21356 var kind = properties.default ? 'main' : 'alternative';
21357
21358 if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
21359 kind = 'main-desc';
21360 }
21361
21362 return kind;
21363};
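// Illustrative sketch (hypothetical HLS rendition properties): a DEFAULT
// rendition maps to 'main', and the describes-video accessibility
// characteristic upgrades it to 'main-desc'.
var exampleAudioKind = audioTrackKind_({
  'default': true,
  characteristics: 'public.accessibility.describes-video'
}); // 'main-desc'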
21364/**
21365 * Pause provided segment loader and playlist loader if active
21366 *
21367 * @param {SegmentLoader} segmentLoader
21368 * SegmentLoader to pause
21369 * @param {Object} mediaType
21370 * Active media type
21371 * @function stopLoaders
21372 */
21373
21374
21375var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
21376 segmentLoader.abort();
21377 segmentLoader.pause();
21378
21379 if (mediaType && mediaType.activePlaylistLoader) {
21380 mediaType.activePlaylistLoader.pause();
21381 mediaType.activePlaylistLoader = null;
21382 }
21383};
21384/**
21385 * Start loading provided segment loader and playlist loader
21386 *
21387 * @param {PlaylistLoader} playlistLoader
21388 * PlaylistLoader to start loading
21389 * @param {Object} mediaType
21390 * Active media type
21391 * @function startLoaders
21392 */
21393
21394var startLoaders = function startLoaders(playlistLoader, mediaType) {
21395 // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
21396 // playlist loader
21397 mediaType.activePlaylistLoader = playlistLoader;
21398 playlistLoader.load();
21399};
21400/**
21401 * Returns a function to be called when the media group changes. It performs a
21402 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
21403 * change of group is merely a rendition switch of the same content at another encoding,
21404 * rather than a change of content, such as switching audio from English to Spanish.
21405 *
21406 * @param {string} type
21407 * MediaGroup type
21408 * @param {Object} settings
21409 * Object containing required information for media groups
21410 * @return {Function}
21411 * Handler for a non-destructive resync of SegmentLoader when the active media
21412 * group changes.
21413 * @function onGroupChanged
21414 */
21415
21416var onGroupChanged = function onGroupChanged(type, settings) {
21417 return function () {
21418 var _settings$segmentLoad = settings.segmentLoaders,
21419 segmentLoader = _settings$segmentLoad[type],
21420 mainSegmentLoader = _settings$segmentLoad.main,
21421 mediaType = settings.mediaTypes[type];
21422 var activeTrack = mediaType.activeTrack();
21423 var activeGroup = mediaType.getActiveGroup();
21424 var previousActiveLoader = mediaType.activePlaylistLoader;
21425 var lastGroup = mediaType.lastGroup_; // the group did not change, do nothing
21426
21427 if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
21428 return;
21429 }
21430
21431 mediaType.lastGroup_ = activeGroup;
21432 mediaType.lastTrack_ = activeTrack;
21433 stopLoaders(segmentLoader, mediaType);
21434
21435 if (!activeGroup || activeGroup.isMasterPlaylist) {
21436 // there is no active group, or the active group is a main playlist and won't change
21437 return;
21438 }
21439
21440 if (!activeGroup.playlistLoader) {
21441 if (previousActiveLoader) {
21442 // The previous group had a playlist loader but the new active group does not
21443 // this means we are switching from demuxed to muxed audio. In this case we want to
21444 // do a destructive reset of the main segment loader and not restart the audio
21445 // loaders.
21446 mainSegmentLoader.resetEverything();
21447 }
21448
21449 return;
21450 } // Non-destructive resync
21451
21452
21453 segmentLoader.resyncLoader();
21454 startLoaders(activeGroup.playlistLoader, mediaType);
21455 };
21456};
21457var onGroupChanging = function onGroupChanging(type, settings) {
21458 return function () {
21459 var segmentLoader = settings.segmentLoaders[type],
21460 mediaType = settings.mediaTypes[type];
21461 mediaType.lastGroup_ = null;
21462 segmentLoader.abort();
21463 segmentLoader.pause();
21464 };
21465};
21466/**
21467 * Returns a function to be called when the media track changes. It performs a
21468 * destructive reset of the SegmentLoader to ensure we start loading as close to
21469 * currentTime as possible.
21470 *
21471 * @param {string} type
21472 * MediaGroup type
21473 * @param {Object} settings
21474 * Object containing required information for media groups
21475 * @return {Function}
21476 * Handler for a destructive reset of SegmentLoader when the active media
21477 * track changes.
21478 * @function onTrackChanged
21479 */
21480
21481var onTrackChanged = function onTrackChanged(type, settings) {
21482 return function () {
21483 var masterPlaylistLoader = settings.masterPlaylistLoader,
21484 _settings$segmentLoad2 = settings.segmentLoaders,
21485 segmentLoader = _settings$segmentLoad2[type],
21486 mainSegmentLoader = _settings$segmentLoad2.main,
21487 mediaType = settings.mediaTypes[type];
21488 var activeTrack = mediaType.activeTrack();
21489 var activeGroup = mediaType.getActiveGroup();
21490 var previousActiveLoader = mediaType.activePlaylistLoader;
21491 var lastTrack = mediaType.lastTrack_; // track did not change, do nothing
21492
21493 if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
21494 return;
21495 }
21496
21497 mediaType.lastGroup_ = activeGroup;
21498 mediaType.lastTrack_ = activeTrack;
21499 stopLoaders(segmentLoader, mediaType);
21500
21501 if (!activeGroup) {
21502 // there is no group active so we do not want to restart loaders
21503 return;
21504 }
21505
21506 if (activeGroup.isMasterPlaylist) {
21507 // track did not change, do nothing
21508 if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
21509 return;
21510 }
21511
21512 var mpc = settings.vhs.masterPlaylistController_;
21513 var newPlaylist = mpc.selectPlaylist(); // media will not change, so do nothing
21514
21515 if (mpc.media() === newPlaylist) {
21516 return;
21517 }
21518
21519 mediaType.logger_("track change. Switching master audio from " + lastTrack.id + " to " + activeTrack.id);
21520 masterPlaylistLoader.pause();
21521 mainSegmentLoader.resetEverything();
21522 mpc.fastQualityChange_(newPlaylist);
21523 return;
21524 }
21525
21526 if (type === 'AUDIO') {
21527 if (!activeGroup.playlistLoader) {
21528 // when switching from demuxed audio/video to muxed audio/video (noted by no
21529 // playlist loader for the audio group), we want to do a destructive reset of the
21530 // main segment loader and not restart the audio loaders
21531 mainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since
21532 // it should be stopped
21533
21534 mainSegmentLoader.resetEverything();
21535 return;
21536 } // although the segment loader is an audio segment loader, call the setAudio
21537 // function to ensure it is prepared to re-append the init segment (or handle other
21538 // config changes)
21539
21540
21541 segmentLoader.setAudio(true);
21542 mainSegmentLoader.setAudio(false);
21543 }
21544
21545 if (previousActiveLoader === activeGroup.playlistLoader) {
21546 // Nothing has actually changed. This can happen because track change events can fire
21547 // multiple times for a "single" change. One for enabling the new active track, and
21548 // one for disabling the track that was active
21549 startLoaders(activeGroup.playlistLoader, mediaType);
21550 return;
21551 }
21552
21553 if (segmentLoader.track) {
21554 // For WebVTT, set the new text track in the segment loader
21555 segmentLoader.track(activeTrack);
21556 } // destructive reset
21557
21558
21559 segmentLoader.resetEverything();
21560 startLoaders(activeGroup.playlistLoader, mediaType);
21561 };
21562};
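/**
 * For illustration (assuming the same `settings` object as above), the two
 * handlers differ in how they treat the buffer: a group change resyncs the
 * segment loader and keeps buffered content, while a track change resets the
 * loader so it reloads as close to currentTime as possible:
 *
 * @example
 * mediaTypes.AUDIO.onGroupChanged = onGroupChanged('AUDIO', settings); // resyncLoader()
 * mediaTypes.AUDIO.onTrackChanged = onTrackChanged('AUDIO', settings); // resetEverything()
 */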
21563var onError = {
21564 /**
21565 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
21566 * an error.
21567 *
21568 * @param {string} type
21569 * MediaGroup type
21570 * @param {Object} settings
21571 * Object containing required information for media groups
21572 * @return {Function}
21573 * Error handler. Logs warning (or error if the playlist is blacklisted) to
21574 * console and switches back to default audio track.
21575 * @function onError.AUDIO
21576 */
21577 AUDIO: function AUDIO(type, settings) {
21578 return function () {
21579 var segmentLoader = settings.segmentLoaders[type],
21580 mediaType = settings.mediaTypes[type],
21581 blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;
21582 stopLoaders(segmentLoader, mediaType); // switch back to default audio track
21583
21584 var activeTrack = mediaType.activeTrack();
21585 var activeGroup = mediaType.activeGroup();
21586 var id = (activeGroup.filter(function (group) {
21587 return group.default;
21588 })[0] || activeGroup[0]).id;
21589 var defaultTrack = mediaType.tracks[id];
21590
21591 if (activeTrack === defaultTrack) {
21592 // Default track encountered an error. All we can do now is blacklist the current
21593 // rendition and hope the resulting switch moves us to another audio group
21594 blacklistCurrentPlaylist({
21595 message: 'Problem encountered loading the default audio track.'
21596 });
21597 return;
21598 }
21599
21600 videojs.log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');
21601
21602 for (var trackId in mediaType.tracks) {
21603 mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
21604 }
21605
21606 mediaType.onTrackChanged();
21607 };
21608 },
21609
21610 /**
21611 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
21612 * an error.
21613 *
21614 * @param {string} type
21615 * MediaGroup type
21616 * @param {Object} settings
21617 * Object containing required information for media groups
21618 * @return {Function}
21619 * Error handler. Logs warning to console and disables the active subtitle track
21620 * @function onError.SUBTITLES
21621 */
21622 SUBTITLES: function SUBTITLES(type, settings) {
21623 return function () {
21624 var segmentLoader = settings.segmentLoaders[type],
21625 mediaType = settings.mediaTypes[type];
21626 videojs.log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');
21627 stopLoaders(segmentLoader, mediaType);
21628 var track = mediaType.activeTrack();
21629
21630 if (track) {
21631 track.mode = 'disabled';
21632 }
21633
21634 mediaType.onTrackChanged();
21635 };
21636 }
21637};
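/**
 * A sketch of the AUDIO error path (the playlist loader here is hypothetical):
 * on error, every track except the default is disabled and onTrackChanged is
 * re-run, which restarts the loaders against the default track's group.
 *
 * @example
 * audioPlaylistLoader.on('error', onError.AUDIO('AUDIO', settings));
 */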
21638var setupListeners = {
21639 /**
21640 * Setup event listeners for audio playlist loader
21641 *
21642 * @param {string} type
21643 * MediaGroup type
21644 * @param {PlaylistLoader|null} playlistLoader
21645 * PlaylistLoader to register listeners on
21646 * @param {Object} settings
21647 * Object containing required information for media groups
21648 * @function setupListeners.AUDIO
21649 */
21650 AUDIO: function AUDIO(type, playlistLoader, settings) {
21651 if (!playlistLoader) {
21652 // no playlist loader means audio will be muxed with the video
21653 return;
21654 }
21655
21656 var tech = settings.tech,
21657 requestOptions = settings.requestOptions,
21658 segmentLoader = settings.segmentLoaders[type];
21659 playlistLoader.on('loadedmetadata', function () {
21660 var media = playlistLoader.media();
21661 segmentLoader.playlist(media, requestOptions); // if the video is already playing, or if this isn't a live video and preload
21662 // permits, start downloading segments
21663
21664 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
21665 segmentLoader.load();
21666 }
21667 });
21668 playlistLoader.on('loadedplaylist', function () {
21669 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
21670
21671 if (!tech.paused()) {
21672 segmentLoader.load();
21673 }
21674 });
21675 playlistLoader.on('error', onError[type](type, settings));
21676 },
21677
21678 /**
21679 * Setup event listeners for subtitle playlist loader
21680 *
21681 * @param {string} type
21682 * MediaGroup type
21683 * @param {PlaylistLoader|null} playlistLoader
21684 * PlaylistLoader to register listeners on
21685 * @param {Object} settings
21686 * Object containing required information for media groups
21687 * @function setupListeners.SUBTITLES
21688 */
21689 SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
21690 var tech = settings.tech,
21691 requestOptions = settings.requestOptions,
21692 segmentLoader = settings.segmentLoaders[type],
21693 mediaType = settings.mediaTypes[type];
21694 playlistLoader.on('loadedmetadata', function () {
21695 var media = playlistLoader.media();
21696 segmentLoader.playlist(media, requestOptions);
21697 segmentLoader.track(mediaType.activeTrack()); // if the video is already playing, or if this isn't a live video and preload
21698 // permits, start downloading segments
21699
21700 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
21701 segmentLoader.load();
21702 }
21703 });
21704 playlistLoader.on('loadedplaylist', function () {
21705 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
21706
21707 if (!tech.paused()) {
21708 segmentLoader.load();
21709 }
21710 });
21711 playlistLoader.on('error', onError[type](type, settings));
21712 }
21713};
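/**
 * Listeners are attached per variant as playlist loaders are created by the
 * `initialize` functions below; roughly (with `properties` standing in for a
 * variant's merged properties object):
 *
 * @example
 * setupListeners.SUBTITLES('SUBTITLES', properties.playlistLoader, settings);
 */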
21714var initialize = {
21715 /**
21716 * Setup PlaylistLoaders and AudioTracks for the audio groups
21717 *
21718 * @param {string} type
21719 * MediaGroup type
21720 * @param {Object} settings
21721 * Object containing required information for media groups
21722 * @function initialize.AUDIO
21723 */
21724 'AUDIO': function AUDIO(type, settings) {
21725 var vhs = settings.vhs,
21726 sourceType = settings.sourceType,
21727 segmentLoader = settings.segmentLoaders[type],
21728 requestOptions = settings.requestOptions,
21729 mediaGroups = settings.master.mediaGroups,
21730 _settings$mediaTypes$ = settings.mediaTypes[type],
21731 groups = _settings$mediaTypes$.groups,
21732 tracks = _settings$mediaTypes$.tracks,
21733 logger_ = _settings$mediaTypes$.logger_,
21734 masterPlaylistLoader = settings.masterPlaylistLoader;
21735 var audioOnlyMaster = isAudioOnly(masterPlaylistLoader.master); // force a default if we have none
21736
21737 if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
21738 mediaGroups[type] = {
21739 main: {
21740 default: {
21741 default: true
21742 }
21743 }
21744 };
21745
21746 if (audioOnlyMaster) {
21747 mediaGroups[type].main.default.playlists = masterPlaylistLoader.master.playlists;
21748 }
21749 }
21750
21751 for (var groupId in mediaGroups[type]) {
21752 if (!groups[groupId]) {
21753 groups[groupId] = [];
21754 }
21755
21756 for (var variantLabel in mediaGroups[type][groupId]) {
21757 var properties = mediaGroups[type][groupId][variantLabel];
21758 var playlistLoader = void 0;
21759
21760 if (audioOnlyMaster) {
21761 logger_("AUDIO group '" + groupId + "' label '" + variantLabel + "' is a master playlist");
21762 properties.isMasterPlaylist = true;
21763 playlistLoader = null; // if vhs-json was provided as the source, and the media playlist was resolved,
21764 // use the resolved media playlist object
21765 } else if (sourceType === 'vhs-json' && properties.playlists) {
21766 playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);
21767 } else if (properties.resolvedUri) {
21768 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions); // TODO: dash isn't the only source type with properties.playlists,
21769 // so should we even have properties.playlists in this check?
21770 } else if (properties.playlists && sourceType === 'dash') {
21771 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
21772 } else {
21773 // no resolvedUri means the audio is muxed with the video when using this
21774 // audio track
21775 playlistLoader = null;
21776 }
21777
21778 properties = videojs.mergeOptions({
21779 id: variantLabel,
21780 playlistLoader: playlistLoader
21781 }, properties);
21782 setupListeners[type](type, properties.playlistLoader, settings);
21783 groups[groupId].push(properties);
21784
21785 if (typeof tracks[variantLabel] === 'undefined') {
21786 var track = new videojs.AudioTrack({
21787 id: variantLabel,
21788 kind: audioTrackKind_(properties),
21789 enabled: false,
21790 language: properties.language,
21791 default: properties.default,
21792 label: variantLabel
21793 });
21794 tracks[variantLabel] = track;
21795 }
21796 }
21797 } // setup single error event handler for the segment loader
21798
21799
21800 segmentLoader.on('error', onError[type](type, settings));
21801 },
21802
21803 /**
21804 * Setup PlaylistLoaders and TextTracks for the subtitle groups
21805 *
21806 * @param {string} type
21807 * MediaGroup type
21808 * @param {Object} settings
21809 * Object containing required information for media groups
21810 * @function initialize.SUBTITLES
21811 */
21812 'SUBTITLES': function SUBTITLES(type, settings) {
21813 var tech = settings.tech,
21814 vhs = settings.vhs,
21815 sourceType = settings.sourceType,
21816 segmentLoader = settings.segmentLoaders[type],
21817 requestOptions = settings.requestOptions,
21818 mediaGroups = settings.master.mediaGroups,
21819 _settings$mediaTypes$2 = settings.mediaTypes[type],
21820 groups = _settings$mediaTypes$2.groups,
21821 tracks = _settings$mediaTypes$2.tracks,
21822 masterPlaylistLoader = settings.masterPlaylistLoader;
21823
21824 for (var groupId in mediaGroups[type]) {
21825 if (!groups[groupId]) {
21826 groups[groupId] = [];
21827 }
21828
21829 for (var variantLabel in mediaGroups[type][groupId]) {
21830 if (mediaGroups[type][groupId][variantLabel].forced) {
21831 // Subtitle playlists with the forced attribute are not selectable in Safari.
21832 // According to Apple's HLS Authoring Specification:
21833 // If content has forced subtitles and regular subtitles in a given language,
21834 // the regular subtitles track in that language MUST contain both the forced
21835 // subtitles and the regular subtitles for that language.
21836 // Because of this requirement and that Safari does not add forced subtitles,
21837 // forced subtitles are skipped here to maintain a consistent experience across
21838 // all platforms
21839 continue;
21840 }
21841
21842 var properties = mediaGroups[type][groupId][variantLabel];
21843 var playlistLoader = void 0;
21844
21845 if (sourceType === 'hls') {
21846 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
21847 } else if (sourceType === 'dash') {
21848 var playlists = properties.playlists.filter(function (p) {
21849 return p.excludeUntil !== Infinity;
21850 });
21851
21852 if (!playlists.length) {
21853 return;
21854 }
21855
21856 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
21857 } else if (sourceType === 'vhs-json') {
21858 playlistLoader = new PlaylistLoader( // if the vhs-json object included the media playlist, use the media playlist
21859 // as provided, otherwise use the resolved URI to load the playlist
21860 properties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);
21861 }
21862
21863 properties = videojs.mergeOptions({
21864 id: variantLabel,
21865 playlistLoader: playlistLoader
21866 }, properties);
21867 setupListeners[type](type, properties.playlistLoader, settings);
21868 groups[groupId].push(properties);
21869
21870 if (typeof tracks[variantLabel] === 'undefined') {
21871 var track = tech.addRemoteTextTrack({
21872 id: variantLabel,
21873 kind: 'subtitles',
21874 default: properties.default && properties.autoselect,
21875 language: properties.language,
21876 label: variantLabel
21877 }, false).track;
21878 tracks[variantLabel] = track;
21879 }
21880 }
21881 } // setup single error event handler for the segment loader
21882
21883
21884 segmentLoader.on('error', onError[type](type, settings));
21885 },
21886
21887 /**
21888 * Setup TextTracks for the closed-caption groups
21889 *
21890 * @param {String} type
21891 * MediaGroup type
21892 * @param {Object} settings
21893 * Object containing required information for media groups
21894 * @function initialize['CLOSED-CAPTIONS']
21895 */
21896 'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
21897 var tech = settings.tech,
21898 mediaGroups = settings.master.mediaGroups,
21899 _settings$mediaTypes$3 = settings.mediaTypes[type],
21900 groups = _settings$mediaTypes$3.groups,
21901 tracks = _settings$mediaTypes$3.tracks;
21902
21903 for (var groupId in mediaGroups[type]) {
21904 if (!groups[groupId]) {
21905 groups[groupId] = [];
21906 }
21907
21908 for (var variantLabel in mediaGroups[type][groupId]) {
21909 var properties = mediaGroups[type][groupId][variantLabel]; // Look for either 608 (CCn) or 708 (SERVICEn) caption services
21910
21911 if (!/^(?:CC|SERVICE)/.test(properties.instreamId)) {
21912 continue;
21913 }
21914
21915 var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
21916 var newProps = {
21917 label: variantLabel,
21918 language: properties.language,
21919 instreamId: properties.instreamId,
21920 default: properties.default && properties.autoselect
21921 };
21922
21923 if (captionServices[newProps.instreamId]) {
21924 newProps = videojs.mergeOptions(newProps, captionServices[newProps.instreamId]);
21925 }
21926
21927 if (newProps.default === undefined) {
21928 delete newProps.default;
21929 } // No PlaylistLoader is required for Closed-Captions because the captions are
21930 // embedded within the video stream
21931
21932
21933 groups[groupId].push(videojs.mergeOptions({
21934 id: variantLabel
21935 }, properties));
21936
21937 if (typeof tracks[variantLabel] === 'undefined') {
21938 var track = tech.addRemoteTextTrack({
21939 id: newProps.instreamId,
21940 kind: 'captions',
21941 default: newProps.default,
21942 language: newProps.language,
21943 label: newProps.label
21944 }, false).track;
21945 tracks[variantLabel] = track;
21946 }
21947 }
21948 }
21949 }
21950};
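/**
 * When a master manifest declares no AUDIO media groups, initialize.AUDIO
 * forces a skeleton group so the rest of the pipeline has something to select;
 * the resulting shape is roughly:
 *
 * @example
 * master.mediaGroups.AUDIO = { main: { default: { default: true } } };
 */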
21951
21952var groupMatch = function groupMatch(list, media) {
21953 for (var i = 0; i < list.length; i++) {
21954 if (playlistMatch(media, list[i])) {
21955 return true;
21956 }
21957
21958 if (list[i].playlists && groupMatch(list[i].playlists, media)) {
21959 return true;
21960 }
21961 }
21962
21963 return false;
21964};
21965/**
21966 * Returns a function used to get the active group of the provided type
21967 *
21968 * @param {string} type
21969 * MediaGroup type
21970 * @param {Object} settings
21971 * Object containing required information for media groups
21972 * @return {Function}
21973 * Function that returns the active media group for the provided type. Takes an
21974 * optional parameter {TextTrack} track. If no track is provided, a list of all
21975 * variants in the group is returned; otherwise, the variant corresponding to
21976 * the provided track is returned.
21977 * @function activeGroup
21978 */
21979
21980
21981var activeGroup = function activeGroup(type, settings) {
21982 return function (track) {
21983 var masterPlaylistLoader = settings.masterPlaylistLoader,
21984 groups = settings.mediaTypes[type].groups;
21985 var media = masterPlaylistLoader.media();
21986
21987 if (!media) {
21988 return null;
21989 }
21990
21991 var variants = null; // set variants to the main media's active group
21992
21993 if (media.attributes[type]) {
21994 variants = groups[media.attributes[type]];
21995 }
21996
21997 var groupKeys = Object.keys(groups);
21998
21999 if (!variants) {
22000 // find the masterPlaylistLoader media
22001 // that is in a media group if we are dealing
22002 // with audio only
22003 if (type === 'AUDIO' && groupKeys.length > 1 && isAudioOnly(settings.master)) {
22004 for (var i = 0; i < groupKeys.length; i++) {
22005 var groupPropertyList = groups[groupKeys[i]];
22006
22007 if (groupMatch(groupPropertyList, media)) {
22008 variants = groupPropertyList;
22009 break;
22010 }
22011 } // use the main group if it exists
22012
22013 } else if (groups.main) {
22014 variants = groups.main; // only one group, use that one
22015 } else if (groupKeys.length === 1) {
22016 variants = groups[groupKeys[0]];
22017 }
22018 }
22019
22020 if (typeof track === 'undefined') {
22021 return variants;
22022 }
22023
22024 if (track === null || !variants) {
22025 // An active track was specified so a corresponding group is expected. track === null
22026 // means no track is currently active so there is no corresponding group
22027 return null;
22028 }
22029
22030 return variants.filter(function (props) {
22031 return props.id === track.id;
22032 })[0] || null;
22033 };
22034};
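/**
 * Usage sketch (the track object is hypothetical): called with no argument,
 * the getter returns every variant in the active group; called with a track,
 * it returns only the variant whose id matches, or null.
 *
 * @example
 * var getAudioGroup = activeGroup('AUDIO', settings);
 * var allVariants = getAudioGroup();
 * var variant = getAudioGroup({ id: 'en' }); // matching variant or null
 */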
22035var activeTrack = {
22036 /**
22037 * Returns a function used to get the active track of type provided
22038 *
22039 * @param {string} type
22040 * MediaGroup type
22041 * @param {Object} settings
22042 * Object containing required information for media groups
22043 * @return {Function}
22044 * Function that returns the active media track for the provided type. Returns
22045 * null if no track is active
22046 * @function activeTrack.AUDIO
22047 */
22048 AUDIO: function AUDIO(type, settings) {
22049 return function () {
22050 var tracks = settings.mediaTypes[type].tracks;
22051
22052 for (var id in tracks) {
22053 if (tracks[id].enabled) {
22054 return tracks[id];
22055 }
22056 }
22057
22058 return null;
22059 };
22060 },
22061
22062 /**
22063 * Returns a function used to get the active track of type provided
22064 *
22065 * @param {string} type
22066 * MediaGroup type
22067 * @param {Object} settings
22068 * Object containing required information for media groups
22069 * @return {Function}
22070 * Function that returns the active media track for the provided type. Returns
22071 * null if no track is active
22072 * @function activeTrack.SUBTITLES
22073 */
22074 SUBTITLES: function SUBTITLES(type, settings) {
22075 return function () {
22076 var tracks = settings.mediaTypes[type].tracks;
22077
22078 for (var id in tracks) {
22079 if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {
22080 return tracks[id];
22081 }
22082 }
22083
22084 return null;
22085 };
22086 }
22087};
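/**
 * The two getters use different notions of "active": audio tracks are active
 * when `enabled` is true, while text tracks are active when their mode is
 * 'showing' or 'hidden' (hidden tracks still load cues). For example:
 *
 * @example
 * var currentAudio = activeTrack.AUDIO('AUDIO', settings)();
 * var currentSubtitles = activeTrack.SUBTITLES('SUBTITLES', settings)();
 */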
22088var getActiveGroup = function getActiveGroup(type, _ref) {
22089 var mediaTypes = _ref.mediaTypes;
22090 return function () {
22091 var activeTrack_ = mediaTypes[type].activeTrack();
22092
22093 if (!activeTrack_) {
22094 return null;
22095 }
22096
22097 return mediaTypes[type].activeGroup(activeTrack_);
22098 };
22099};
22100/**
22101 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
22102 * Closed-Captions) specified in the master manifest.
22103 *
22104 * @param {Object} settings
22105 * Object containing required information for setting up the media groups
22106 * @param {Tech} settings.tech
22107 * The tech of the player
22108 * @param {Object} settings.requestOptions
22109 * XHR request options used by the segment loaders
22110 * @param {PlaylistLoader} settings.masterPlaylistLoader
22111 * PlaylistLoader for the master source
22112 * @param {VhsHandler} settings.vhs
22113 * VHS SourceHandler
22114 * @param {Object} settings.master
22115 * The parsed master manifest
22116 * @param {Object} settings.mediaTypes
22117 * Object to store the loaders, tracks, and utility methods for each media type
22118 * @param {Function} settings.blacklistCurrentPlaylist
22119 * Blacklists the current rendition and forces a rendition switch.
22120 * @function setupMediaGroups
22121 */
22122
22123var setupMediaGroups = function setupMediaGroups(settings) {
22124 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
22125 initialize[type](type, settings);
22126 });
22127 var mediaTypes = settings.mediaTypes,
22128 masterPlaylistLoader = settings.masterPlaylistLoader,
22129 tech = settings.tech,
22130 vhs = settings.vhs,
22131 _settings$segmentLoad3 = settings.segmentLoaders,
22132 audioSegmentLoader = _settings$segmentLoad3['AUDIO'],
22133 mainSegmentLoader = _settings$segmentLoad3.main; // setup active group and track getters and change event handlers
22134
22135 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
22136 mediaTypes[type].activeGroup = activeGroup(type, settings);
22137 mediaTypes[type].activeTrack = activeTrack[type](type, settings);
22138 mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
22139 mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
22140 mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
22141 mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
22142 }); // DO NOT enable the default subtitle or caption track.
22143 // DO enable the default audio track
22144
22145 var audioGroup = mediaTypes.AUDIO.activeGroup();
22146
22147 if (audioGroup) {
22148 var groupId = (audioGroup.filter(function (group) {
22149 return group.default;
22150 })[0] || audioGroup[0]).id;
22151 mediaTypes.AUDIO.tracks[groupId].enabled = true;
22152 mediaTypes.AUDIO.onGroupChanged();
22153 mediaTypes.AUDIO.onTrackChanged();
22154 var activeAudioGroup = mediaTypes.AUDIO.getActiveGroup(); // a similar check for handling setAudio on each loader is run again each time the
22155 // track is changed, but needs to be handled here since the track may not be considered
22156 // changed on the first call to onTrackChanged
22157
22158 if (!activeAudioGroup.playlistLoader) {
22159 // either audio is muxed with video or the stream is audio only
22160 mainSegmentLoader.setAudio(true);
22161 } else {
22162 // audio is demuxed
22163 mainSegmentLoader.setAudio(false);
22164 audioSegmentLoader.setAudio(true);
22165 }
22166 }
22167
22168 masterPlaylistLoader.on('mediachange', function () {
22169 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
22170 return mediaTypes[type].onGroupChanged();
22171 });
22172 });
22173 masterPlaylistLoader.on('mediachanging', function () {
22174 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
22175 return mediaTypes[type].onGroupChanging();
22176 });
22177 }); // custom audio track change event handler for usage event
22178
22179 var onAudioTrackChanged = function onAudioTrackChanged() {
22180 mediaTypes.AUDIO.onTrackChanged();
22181 tech.trigger({
22182 type: 'usage',
22183 name: 'vhs-audio-change'
22184 });
22185 tech.trigger({
22186 type: 'usage',
22187 name: 'hls-audio-change'
22188 });
22189 };
22190
22191 tech.audioTracks().addEventListener('change', onAudioTrackChanged);
22192 tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
22193 vhs.on('dispose', function () {
22194 tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
22195 tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
22196 }); // clear existing audio tracks and add the ones we just created
22197
22198 tech.clearTracks('audio');
22199
22200 for (var id in mediaTypes.AUDIO.tracks) {
22201 tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
22202 }
22203};
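/**
 * The call below mirrors how the MasterPlaylistController invokes this
 * function (see setupMasterPlaylistLoaderListeners_); the property names are
 * what matter, the values here are stand-ins:
 *
 * @example
 * setupMediaGroups({
 *   sourceType: 'hls',
 *   segmentLoaders: { AUDIO: audioSegmentLoader, SUBTITLES: subtitleSegmentLoader, main: mainSegmentLoader },
 *   tech: tech,
 *   requestOptions: requestOptions,
 *   masterPlaylistLoader: masterPlaylistLoader,
 *   vhs: vhs,
 *   master: master,
 *   mediaTypes: mediaTypes,
 *   blacklistCurrentPlaylist: blacklistCurrentPlaylist
 * });
 */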
22204/**
22205 * Creates skeleton object used to store the loaders, tracks, and utility methods for each
22206 * media type
22207 *
22208 * @return {Object}
22209 * Object to store the loaders, tracks, and utility methods for each media type
22210 * @function createMediaTypes
22211 */
22212
22213var createMediaTypes = function createMediaTypes() {
22214 var mediaTypes = {};
22215 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
22216 mediaTypes[type] = {
22217 groups: {},
22218 tracks: {},
22219 activePlaylistLoader: null,
22220 activeGroup: noop,
22221 activeTrack: noop,
22222 getActiveGroup: noop,
22223 onGroupChanged: noop,
22224 onTrackChanged: noop,
22225 lastTrack_: null,
22226 logger_: logger("MediaGroups[" + type + "]")
22227 };
22228 });
22229 return mediaTypes;
22230};
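/**
 * The skeleton each type receives looks like this (the noop placeholders are
 * replaced with real implementations by setupMediaGroups):
 *
 * @example
 * var mediaTypes = createMediaTypes();
 *
 * // mediaTypes.AUDIO => {
 * //   groups: {}, tracks: {}, activePlaylistLoader: null,
 * //   activeGroup: noop, activeTrack: noop, getActiveGroup: noop,
 * //   onGroupChanged: noop, onTrackChanged: noop,
 * //   lastTrack_: null, logger_: logger('MediaGroups[AUDIO]')
 * // }
 */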
22231
22232var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;
22233var Vhs$1; // SegmentLoader stats that need to have each loader's
22234// values summed to calculate the final value
22235
22236var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred', 'mediaAppends'];
22237
22238var sumLoaderStat = function sumLoaderStat(stat) {
22239 return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
22240};
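/**
 * Each stat getter is bound in the MasterPlaylistController constructor below,
 * so calling it sums the audio and main loader values; roughly:
 *
 * @example
 * mpc.mediaBytesTransferred_ = sumLoaderStat.bind(mpc, 'mediaBytesTransferred');
 *
 * mpc.mediaBytesTransferred_(); // audio loader bytes + main loader bytes
 */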
22241
22242var shouldSwitchToMedia = function shouldSwitchToMedia(_ref) {
22243 var currentPlaylist = _ref.currentPlaylist,
22244 buffered = _ref.buffered,
22245 currentTime = _ref.currentTime,
22246 nextPlaylist = _ref.nextPlaylist,
22247 bufferLowWaterLine = _ref.bufferLowWaterLine,
22248 bufferHighWaterLine = _ref.bufferHighWaterLine,
22249 duration = _ref.duration,
22250 experimentalBufferBasedABR = _ref.experimentalBufferBasedABR,
22251 log = _ref.log;
22252
22253 // we have no other playlist to switch to
22254 if (!nextPlaylist) {
22255 videojs.log.warn('We received no playlist to switch to. Please check your stream.');
22256 return false;
22257 }
22258
22259 var sharedLogLine = "allowing switch " + (currentPlaylist && currentPlaylist.id || 'null') + " -> " + nextPlaylist.id;
22260
22261 if (!currentPlaylist) {
22262 log(sharedLogLine + " as current playlist is not set");
22263 return true;
22264 } // no need to switch if playlist is the same
22265
22266
22267 if (nextPlaylist.id === currentPlaylist.id) {
22268 return false;
22269 } // determine if current time is in a buffered range.
22270
22271
22272 var isBuffered = Boolean(findRange(buffered, currentTime).length); // If the playlist is live, we don't want to take the low water line into account.
22273 // This is because in LIVE, the player plays 3 segments from the end of the
22274 // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration available
22275 // in those segments, a viewer will never experience a rendition upswitch.
22276
22277 if (!currentPlaylist.endList) {
22278 // For LLHLS live streams, don't switch renditions before playback has started, as it almost
22279 // doubles the time to first playback.
22280 if (!isBuffered && typeof currentPlaylist.partTargetDuration === 'number') {
22281 log("not " + sharedLogLine + " as current playlist is live llhls, but currentTime isn't in buffered.");
22282 return false;
22283 }
22284
22285 log(sharedLogLine + " as current playlist is live");
22286 return true;
22287 }
22288
22289 var forwardBuffer = timeAheadOf(buffered, currentTime);
22290 var maxBufferLowWaterLine = experimentalBufferBasedABR ? Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE; // For the same reason as LIVE, we ignore the low water line when the VOD
22291 // duration is below the max potential low water line
22292
22293 if (duration < maxBufferLowWaterLine) {
22294 log(sharedLogLine + " as duration < max low water line (" + duration + " < " + maxBufferLowWaterLine + ")");
22295 return true;
22296 }
22297
22298 var nextBandwidth = nextPlaylist.attributes.BANDWIDTH;
22299 var currBandwidth = currentPlaylist.attributes.BANDWIDTH; // when switching down, if our buffer is lower than the high water line,
22300 // we can switch down
22301
22302 if (nextBandwidth < currBandwidth && (!experimentalBufferBasedABR || forwardBuffer < bufferHighWaterLine)) {
22303 var logLine = sharedLogLine + " as next bandwidth < current bandwidth (" + nextBandwidth + " < " + currBandwidth + ")";
22304
22305 if (experimentalBufferBasedABR) {
22306 logLine += " and forwardBuffer < bufferHighWaterLine (" + forwardBuffer + " < " + bufferHighWaterLine + ")";
22307 }
22308
22309 log(logLine);
22310 return true;
22311 } // and if our buffer is higher than the low water line,
22312 // we can switch up
22313
22314
22315 if ((!experimentalBufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {
22316 var _logLine = sharedLogLine + " as forwardBuffer >= bufferLowWaterLine (" + forwardBuffer + " >= " + bufferLowWaterLine + ")";
22317
22318 if (experimentalBufferBasedABR) {
22319 _logLine += " and next bandwidth > current bandwidth (" + nextBandwidth + " > " + currBandwidth + ")";
22320 }
22321
22322 log(_logLine);
22323 return true;
22324 }
22325
22326 log("not " + sharedLogLine + " as no switching criteria met");
22327 return false;
22328};
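/**
 * A worked example with hypothetical numbers, assuming default Config water
 * lines: a VOD stream with 30 seconds of forward buffer and a 16 second low
 * water line allows the upswitch because forwardBuffer >= bufferLowWaterLine.
 *
 * @example
 * shouldSwitchToMedia({
 *   currentPlaylist: { id: 'low', endList: true, attributes: { BANDWIDTH: 1e6 } },
 *   nextPlaylist: { id: 'high', attributes: { BANDWIDTH: 4e6 } },
 *   buffered: videojs.createTimeRanges([[0, 40]]),
 *   currentTime: 10,
 *   bufferLowWaterLine: 16,
 *   bufferHighWaterLine: 30,
 *   duration: 600,
 *   experimentalBufferBasedABR: false,
 *   log: function () {}
 * }); // => true
 */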
22329/**
22330 * the master playlist controller controls all interactions
22331 * between playlists and segment loaders. At this time this mainly
22332 * involves a master playlist and a series of audio playlists,
22333 * if they are available
22334 *
22335 * @class MasterPlaylistController
22336 * @extends videojs.EventTarget
22337 */
22338
22339
22340var MasterPlaylistController = /*#__PURE__*/function (_videojs$EventTarget) {
22341 _inheritsLoose(MasterPlaylistController, _videojs$EventTarget);
22342
22343 function MasterPlaylistController(options) {
22344 var _this;
22345
22346 _this = _videojs$EventTarget.call(this) || this;
22347 var src = options.src,
22348 handleManifestRedirects = options.handleManifestRedirects,
22349 withCredentials = options.withCredentials,
22350 tech = options.tech,
22351 bandwidth = options.bandwidth,
22352 externVhs = options.externVhs,
22353 useCueTags = options.useCueTags,
22354 blacklistDuration = options.blacklistDuration,
22355 enableLowInitialPlaylist = options.enableLowInitialPlaylist,
22356 sourceType = options.sourceType,
22357 cacheEncryptionKeys = options.cacheEncryptionKeys,
22358 experimentalBufferBasedABR = options.experimentalBufferBasedABR,
22359 experimentalLeastPixelDiffSelector = options.experimentalLeastPixelDiffSelector,
22360 captionServices = options.captionServices;
22361
22362 if (!src) {
22363 throw new Error('A non-empty playlist URL or JSON manifest string is required');
22364 }
22365
22366 var maxPlaylistRetries = options.maxPlaylistRetries;
22367
22368 if (maxPlaylistRetries === null || typeof maxPlaylistRetries === 'undefined') {
22369 maxPlaylistRetries = Infinity;
22370 }
22371
22372 Vhs$1 = externVhs;
22373 _this.experimentalBufferBasedABR = Boolean(experimentalBufferBasedABR);
22374 _this.experimentalLeastPixelDiffSelector = Boolean(experimentalLeastPixelDiffSelector);
22375 _this.withCredentials = withCredentials;
22376 _this.tech_ = tech;
22377 _this.vhs_ = tech.vhs;
22378 _this.sourceType_ = sourceType;
22379 _this.useCueTags_ = useCueTags;
22380 _this.blacklistDuration = blacklistDuration;
22381 _this.maxPlaylistRetries = maxPlaylistRetries;
22382 _this.enableLowInitialPlaylist = enableLowInitialPlaylist;
22383
22384 if (_this.useCueTags_) {
22385 _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
22386 _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
22387 }
22388
22389 _this.requestOptions_ = {
22390 withCredentials: withCredentials,
22391 handleManifestRedirects: handleManifestRedirects,
22392 maxPlaylistRetries: maxPlaylistRetries,
22393 timeout: null
22394 };
22395
22396 _this.on('error', _this.pauseLoading);
22397
22398 _this.mediaTypes_ = createMediaTypes();
22399 _this.mediaSource = new window$1.MediaSource();
22400 _this.handleDurationChange_ = _this.handleDurationChange_.bind(_assertThisInitialized(_this));
22401 _this.handleSourceOpen_ = _this.handleSourceOpen_.bind(_assertThisInitialized(_this));
22402 _this.handleSourceEnded_ = _this.handleSourceEnded_.bind(_assertThisInitialized(_this));
22403
22404 _this.mediaSource.addEventListener('durationchange', _this.handleDurationChange_); // load the media source into the player
22405
22406
22407 _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_);
22408
22409 _this.mediaSource.addEventListener('sourceended', _this.handleSourceEnded_); // we don't have to handle sourceclose since dispose will handle termination of
22410 // everything, and the MediaSource should not be detached without a proper disposal
22411
22412
22413 _this.seekable_ = videojs.createTimeRanges();
22414 _this.hasPlayed_ = false;
22415 _this.syncController_ = new SyncController(options);
22416 _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
22417 kind: 'metadata',
22418 label: 'segment-metadata'
22419 }, false).track;
22420 _this.decrypter_ = new Decrypter();
22421 _this.sourceUpdater_ = new SourceUpdater(_this.mediaSource);
22422 _this.inbandTextTracks_ = {};
22423 _this.timelineChangeController_ = new TimelineChangeController();
22424 var segmentLoaderSettings = {
22425 vhs: _this.vhs_,
22426 parse708captions: options.parse708captions,
22427 useDtsForTimestampOffset: options.useDtsForTimestampOffset,
22428 captionServices: captionServices,
22429 mediaSource: _this.mediaSource,
22430 currentTime: _this.tech_.currentTime.bind(_this.tech_),
22431 seekable: function seekable() {
22432 return _this.seekable();
22433 },
22434 seeking: function seeking() {
22435 return _this.tech_.seeking();
22436 },
22437 duration: function duration() {
22438 return _this.duration();
22439 },
22440 hasPlayed: function hasPlayed() {
22441 return _this.hasPlayed_;
22442 },
22443 goalBufferLength: function goalBufferLength() {
22444 return _this.goalBufferLength();
22445 },
22446 bandwidth: bandwidth,
22447 syncController: _this.syncController_,
22448 decrypter: _this.decrypter_,
22449 sourceType: _this.sourceType_,
22450 inbandTextTracks: _this.inbandTextTracks_,
22451 cacheEncryptionKeys: cacheEncryptionKeys,
22452 sourceUpdater: _this.sourceUpdater_,
22453 timelineChangeController: _this.timelineChangeController_,
22454 experimentalExactManifestTimings: options.experimentalExactManifestTimings
22455 }; // The source type check not only determines whether a special DASH playlist loader
22456 // should be used, but also covers the case where the provided src is a vhs-json
22457 // manifest object (instead of a URL). In the case of vhs-json, the default
22458 // PlaylistLoader should be used.
22459
22460 _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, _this.vhs_, _this.requestOptions_) : new PlaylistLoader(src, _this.vhs_, _this.requestOptions_);
22461
22462 _this.setupMasterPlaylistLoaderListeners_(); // setup segment loaders
22463 // combined audio/video or just video when alternate audio track is selected
22464
22465
22466 _this.mainSegmentLoader_ = new SegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
22467 segmentMetadataTrack: _this.segmentMetadataTrack_,
22468 loaderType: 'main'
22469 }), options); // alternate audio track
22470
22471 _this.audioSegmentLoader_ = new SegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
22472 loaderType: 'audio'
22473 }), options);
22474 _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
22475 loaderType: 'vtt',
22476 featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks
22477 }), options);
22478
22479 _this.setupSegmentLoaderListeners_();
22480
22481 if (_this.experimentalBufferBasedABR) {
22482 _this.masterPlaylistLoader_.one('loadedplaylist', function () {
22483 return _this.startABRTimer_();
22484 });
22485
22486 _this.tech_.on('pause', function () {
22487 return _this.stopABRTimer_();
22488 });
22489
22490 _this.tech_.on('play', function () {
22491 return _this.startABRTimer_();
22492 });
22493 } // Create SegmentLoader stat-getters
22494 // mediaRequests_
22495 // mediaRequestsAborted_
22496 // mediaRequestsTimedout_
22497 // mediaRequestsErrored_
22498 // mediaTransferDuration_
22499 // mediaBytesTransferred_
22500 // mediaAppends_
22501
22502
22503 loaderStats.forEach(function (stat) {
22504 _this[stat + '_'] = sumLoaderStat.bind(_assertThisInitialized(_this), stat);
22505 });
22506 _this.logger_ = logger('MPC');
22507 _this.triggeredFmp4Usage = false;
22508
22509 if (_this.tech_.preload() === 'none') {
22510 _this.loadOnPlay_ = function () {
22511 _this.loadOnPlay_ = null;
22512
22513 _this.masterPlaylistLoader_.load();
22514 };
22515
22516 _this.tech_.one('play', _this.loadOnPlay_);
22517 } else {
22518 _this.masterPlaylistLoader_.load();
22519 }
22520
22521 _this.timeToLoadedData__ = -1;
22522 _this.mainAppendsToLoadedData__ = -1;
22523 _this.audioAppendsToLoadedData__ = -1;
22524 var event = _this.tech_.preload() === 'none' ? 'play' : 'loadstart'; // start the first frame timer on loadstart or play (for preload none)
22525
22526 _this.tech_.one(event, function () {
22527 var timeToLoadedDataStart = Date.now();
22528
22529 _this.tech_.one('loadeddata', function () {
22530 _this.timeToLoadedData__ = Date.now() - timeToLoadedDataStart;
22531 _this.mainAppendsToLoadedData__ = _this.mainSegmentLoader_.mediaAppends;
22532 _this.audioAppendsToLoadedData__ = _this.audioSegmentLoader_.mediaAppends;
22533 });
22534 });
22535
22536 return _this;
22537 }
22538
22539 var _proto = MasterPlaylistController.prototype;
22540
22541 _proto.mainAppendsToLoadedData_ = function mainAppendsToLoadedData_() {
22542 return this.mainAppendsToLoadedData__;
22543 };
22544
22545 _proto.audioAppendsToLoadedData_ = function audioAppendsToLoadedData_() {
22546 return this.audioAppendsToLoadedData__;
22547 };
22548
22549 _proto.appendsToLoadedData_ = function appendsToLoadedData_() {
22550 var main = this.mainAppendsToLoadedData_();
22551 var audio = this.audioAppendsToLoadedData_();
22552
22553 if (main === -1 || audio === -1) {
22554 return -1;
22555 }
22556
22557 return main + audio;
22558 };
22559
22560 _proto.timeToLoadedData_ = function timeToLoadedData_() {
22561 return this.timeToLoadedData__;
22562 }
22563 /**
22564 * Run selectPlaylist and switch to the new playlist if we should
22565 *
22566 * @private
22567 *
22568 */
22569 ;
22570
22571 _proto.checkABR_ = function checkABR_() {
22572 var nextPlaylist = this.selectPlaylist();
22573
22574 if (nextPlaylist && this.shouldSwitchToMedia_(nextPlaylist)) {
22575 this.switchMedia_(nextPlaylist, 'abr');
22576 }
22577 };
22578
22579 _proto.switchMedia_ = function switchMedia_(playlist, cause, delay) {
22580 var oldMedia = this.media();
22581 var oldId = oldMedia && (oldMedia.id || oldMedia.uri);
22582 var newId = playlist.id || playlist.uri;
22583
22584 if (oldId && oldId !== newId) {
22585 this.logger_("switch media " + oldId + " -> " + newId + " from " + cause);
22586 this.tech_.trigger({
22587 type: 'usage',
22588 name: "vhs-rendition-change-" + cause
22589 });
22590 }
22591
22592 this.masterPlaylistLoader_.media(playlist, delay);
22593 }
22594 /**
22595 * Start a timer that periodically calls checkABR_
22596 *
22597 * @private
22598 */
22599 ;
22600
22601 _proto.startABRTimer_ = function startABRTimer_() {
22602 var _this2 = this;
22603
22604 this.stopABRTimer_();
22605 this.abrTimer_ = window$1.setInterval(function () {
22606 return _this2.checkABR_();
22607 }, 250);
22608 }
22609 /**
22610 * Stop the timer that periodically calls checkABR_
22611 *
22612 * @private
22613 */
22614 ;
22615
22616 _proto.stopABRTimer_ = function stopABRTimer_() {
22617 // if we're scrubbing, we don't need to pause.
22618 // This getter will be added to Video.js in version 7.11.
22619 if (this.tech_.scrubbing && this.tech_.scrubbing()) {
22620 return;
22621 }
22622
22623 window$1.clearInterval(this.abrTimer_);
22624 this.abrTimer_ = null;
22625 }
22626 /**
22627 * Get a list of playlists for the currently selected audio playlist
22628 *
22629 * @return {Array} the array of audio playlists
22630 */
22631 ;
22632
22633 _proto.getAudioTrackPlaylists_ = function getAudioTrackPlaylists_() {
22634 var master = this.master();
22635 var defaultPlaylists = master && master.playlists || []; // if we don't have any audio groups then we can only
22636 // assume that the audio tracks are contained in the master's
22637 // playlist array; use that or an empty array.
22638
22639 if (!master || !master.mediaGroups || !master.mediaGroups.AUDIO) {
22640 return defaultPlaylists;
22641 }
22642
22643 var AUDIO = master.mediaGroups.AUDIO;
22644 var groupKeys = Object.keys(AUDIO);
22645 var track; // get the current active track
22646
22647 if (Object.keys(this.mediaTypes_.AUDIO.groups).length) {
22648 track = this.mediaTypes_.AUDIO.activeTrack(); // or get the default track from the master if mediaTypes_ isn't set up yet
22649 } else {
22650 // default group is `main` or just the first group.
22651 var defaultGroup = AUDIO.main || groupKeys.length && AUDIO[groupKeys[0]];
22652
22653 for (var label in defaultGroup) {
22654 if (defaultGroup[label].default) {
22655 track = {
22656 label: label
22657 };
22658 break;
22659 }
22660 }
22661 } // no active track, no playlists.
22662
22663
22664 if (!track) {
22665 return defaultPlaylists;
22666 }
22667
22668 var playlists = []; // get all of the playlists that are possible for the
22669 // active track.
22670
22671 for (var group in AUDIO) {
22672 if (AUDIO[group][track.label]) {
22673 var properties = AUDIO[group][track.label];
22674
22675 if (properties.playlists && properties.playlists.length) {
22676 playlists.push.apply(playlists, properties.playlists);
22677 } else if (properties.uri) {
22678 playlists.push(properties);
22679 } else if (master.playlists.length) {
22680 // if an audio group does not have a uri
22681 // see if we have main playlists that use it as a group.
22682 // if we do then add those to the playlists list.
22683 for (var i = 0; i < master.playlists.length; i++) {
22684 var playlist = master.playlists[i];
22685
22686 if (playlist.attributes && playlist.attributes.AUDIO && playlist.attributes.AUDIO === group) {
22687 playlists.push(playlist);
22688 }
22689 }
22690 }
22691 }
22692 }
22693
22694 if (!playlists.length) {
22695 return defaultPlaylists;
22696 }
22697
22698 return playlists;
22699 }
22700 /**
22701 * Register event handlers on the master playlist loader. A helper
22702 * function for construction time.
22703 *
22704 * @private
22705 */
22706 ;
22707
22708 _proto.setupMasterPlaylistLoaderListeners_ = function setupMasterPlaylistLoaderListeners_() {
22709 var _this3 = this;
22710
22711 this.masterPlaylistLoader_.on('loadedmetadata', function () {
22712 var media = _this3.masterPlaylistLoader_.media();
22713
22714 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
22715 // timeout the request.
22716
22717 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
22718 _this3.requestOptions_.timeout = 0;
22719 } else {
22720 _this3.requestOptions_.timeout = requestTimeout;
22721 } // if this isn't a live video and preload permits, start
22722 // downloading segments
22723
22724
22725 if (media.endList && _this3.tech_.preload() !== 'none') {
22726 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
22727
22728 _this3.mainSegmentLoader_.load();
22729 }
22730
22731 setupMediaGroups({
22732 sourceType: _this3.sourceType_,
22733 segmentLoaders: {
22734 AUDIO: _this3.audioSegmentLoader_,
22735 SUBTITLES: _this3.subtitleSegmentLoader_,
22736 main: _this3.mainSegmentLoader_
22737 },
22738 tech: _this3.tech_,
22739 requestOptions: _this3.requestOptions_,
22740 masterPlaylistLoader: _this3.masterPlaylistLoader_,
22741 vhs: _this3.vhs_,
22742 master: _this3.master(),
22743 mediaTypes: _this3.mediaTypes_,
22744 blacklistCurrentPlaylist: _this3.blacklistCurrentPlaylist.bind(_this3)
22745 });
22746
22747 _this3.triggerPresenceUsage_(_this3.master(), media);
22748
22749 _this3.setupFirstPlay();
22750
22751 if (!_this3.mediaTypes_.AUDIO.activePlaylistLoader || _this3.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
22752 _this3.trigger('selectedinitialmedia');
22753 } else {
22754 // We must wait for the active audio playlist loader to
22755 // finish setting up before triggering this event so the
22756 // representations API and EME setup is correct
22757 _this3.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
22758 _this3.trigger('selectedinitialmedia');
22759 });
22760 }
22761 });
22762 this.masterPlaylistLoader_.on('loadedplaylist', function () {
22763 if (_this3.loadOnPlay_) {
22764 _this3.tech_.off('play', _this3.loadOnPlay_);
22765 }
22766
22767 var updatedPlaylist = _this3.masterPlaylistLoader_.media();
22768
22769 if (!updatedPlaylist) {
22770 // exclude any variants that are not supported by the browser before selecting
22771 // an initial media as the playlist selectors do not consider browser support
22772 _this3.excludeUnsupportedVariants_();
22773
22774 var selectedMedia;
22775
22776 if (_this3.enableLowInitialPlaylist) {
22777 selectedMedia = _this3.selectInitialPlaylist();
22778 }
22779
22780 if (!selectedMedia) {
22781 selectedMedia = _this3.selectPlaylist();
22782 }
22783
22784 if (!selectedMedia || !_this3.shouldSwitchToMedia_(selectedMedia)) {
22785 return;
22786 }
22787
22788 _this3.initialMedia_ = selectedMedia;
22789
22790 _this3.switchMedia_(_this3.initialMedia_, 'initial'); // Under the standard case where a source URL is provided, loadedplaylist will
22791 // fire again since the playlist will be requested. In the case of vhs-json
22792 // (where the manifest object is provided as the source), when the media
22793 // playlist's `segments` list is already available, a media playlist won't be
22794 // requested, and loadedplaylist won't fire again, so the playlist handler must be
22795 // called on its own here.
22796
22797
22798 var haveJsonSource = _this3.sourceType_ === 'vhs-json' && _this3.initialMedia_.segments;
22799
22800 if (!haveJsonSource) {
22801 return;
22802 }
22803
22804 updatedPlaylist = _this3.initialMedia_;
22805 }
22806
22807 _this3.handleUpdatedMediaPlaylist(updatedPlaylist);
22808 });
22809 this.masterPlaylistLoader_.on('error', function () {
22810 _this3.blacklistCurrentPlaylist(_this3.masterPlaylistLoader_.error);
22811 });
22812 this.masterPlaylistLoader_.on('mediachanging', function () {
22813 _this3.mainSegmentLoader_.abort();
22814
22815 _this3.mainSegmentLoader_.pause();
22816 });
22817 this.masterPlaylistLoader_.on('mediachange', function () {
22818 var media = _this3.masterPlaylistLoader_.media();
22819
22820 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
22821 // timeout the request.
22822
22823 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
22824 _this3.requestOptions_.timeout = 0;
22825 } else {
22826 _this3.requestOptions_.timeout = requestTimeout;
22827 } // TODO: Create a new event on the PlaylistLoader that signals
22828 // that the segments have changed in some way and use that to
22829 // update the SegmentLoader instead of doing it twice here and
22830 // on `loadedplaylist`
22831
22832
22833 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
22834
22835 _this3.mainSegmentLoader_.load();
22836
22837 _this3.tech_.trigger({
22838 type: 'mediachange',
22839 bubbles: true
22840 });
22841 });
22842 this.masterPlaylistLoader_.on('playlistunchanged', function () {
22843 var updatedPlaylist = _this3.masterPlaylistLoader_.media(); // ignore unchanged playlists that have already been
22844 // excluded for not-changing. We likely just have a really slowly updating
22845 // playlist.
22846
22847
22848 if (updatedPlaylist.lastExcludeReason_ === 'playlist-unchanged') {
22849 return;
22850 }
22851
22852 var playlistOutdated = _this3.stuckAtPlaylistEnd_(updatedPlaylist);
22853
22854 if (playlistOutdated) {
22855 // Playlist has stopped updating and we're stuck at its end. Try to
22856 // blacklist it and switch to another playlist in the hope that that
22857 // one is updating (and give the player a chance to re-adjust to the
22858 // safe live point).
22859 _this3.blacklistCurrentPlaylist({
22860 message: 'Playlist no longer updating.',
22861 reason: 'playlist-unchanged'
22862 }); // useful for monitoring QoS
22863
22864
22865 _this3.tech_.trigger('playliststuck');
22866 }
22867 });
22868 this.masterPlaylistLoader_.on('renditiondisabled', function () {
22869 _this3.tech_.trigger({
22870 type: 'usage',
22871 name: 'vhs-rendition-disabled'
22872 });
22873
22874 _this3.tech_.trigger({
22875 type: 'usage',
22876 name: 'hls-rendition-disabled'
22877 });
22878 });
22879 this.masterPlaylistLoader_.on('renditionenabled', function () {
22880 _this3.tech_.trigger({
22881 type: 'usage',
22882 name: 'vhs-rendition-enabled'
22883 });
22884
22885 _this3.tech_.trigger({
22886 type: 'usage',
22887 name: 'hls-rendition-enabled'
22888 });
22889 });
22890 }
22891 /**
22892 * Given an updated media playlist (whether it was loaded for the first time, or
22893 * refreshed for live playlists), update any relevant properties and state to reflect
22894 * changes in the media that should be accounted for (e.g., cues and duration).
22895 *
22896 * @param {Object} updatedPlaylist the updated media playlist object
22897 *
22898 * @private
22899 */
22900 ;
22901
22902 _proto.handleUpdatedMediaPlaylist = function handleUpdatedMediaPlaylist(updatedPlaylist) {
22903 if (this.useCueTags_) {
22904 this.updateAdCues_(updatedPlaylist);
22905 } // TODO: Create a new event on the PlaylistLoader that signals
22906 // that the segments have changed in some way and use that to
22907 // update the SegmentLoader instead of doing it twice here and
22908 // on `mediachange`
22909
22910
22911 this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);
22912 this.updateDuration(!updatedPlaylist.endList); // If the player isn't paused, ensure that the segment loader is running,
22913 // as it is possible that it was temporarily stopped while waiting for
22914 // a playlist (e.g., in case the playlist errored and we re-requested it).
22915
22916 if (!this.tech_.paused()) {
22917 this.mainSegmentLoader_.load();
22918
22919 if (this.audioSegmentLoader_) {
22920 this.audioSegmentLoader_.load();
22921 }
22922 }
22923 }
22924 /**
22925 * A helper function for triggering presence usage events once per source
22926 *
22927 * @private
22928 */
22929 ;
22930
22931 _proto.triggerPresenceUsage_ = function triggerPresenceUsage_(master, media) {
22932 var mediaGroups = master.mediaGroups || {};
22933 var defaultDemuxed = true;
22934 var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
22935
22936 for (var mediaGroup in mediaGroups.AUDIO) {
22937 for (var label in mediaGroups.AUDIO[mediaGroup]) {
22938 var properties = mediaGroups.AUDIO[mediaGroup][label];
22939
22940 if (!properties.uri) {
22941 defaultDemuxed = false;
22942 }
22943 }
22944 }
22945
22946 if (defaultDemuxed) {
22947 this.tech_.trigger({
22948 type: 'usage',
22949 name: 'vhs-demuxed'
22950 });
22951 this.tech_.trigger({
22952 type: 'usage',
22953 name: 'hls-demuxed'
22954 });
22955 }
22956
22957 if (Object.keys(mediaGroups.SUBTITLES).length) {
22958 this.tech_.trigger({
22959 type: 'usage',
22960 name: 'vhs-webvtt'
22961 });
22962 this.tech_.trigger({
22963 type: 'usage',
22964 name: 'hls-webvtt'
22965 });
22966 }
22967
22968 if (Vhs$1.Playlist.isAes(media)) {
22969 this.tech_.trigger({
22970 type: 'usage',
22971 name: 'vhs-aes'
22972 });
22973 this.tech_.trigger({
22974 type: 'usage',
22975 name: 'hls-aes'
22976 });
22977 }
22978
22979 if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
22980 this.tech_.trigger({
22981 type: 'usage',
22982 name: 'vhs-alternate-audio'
22983 });
22984 this.tech_.trigger({
22985 type: 'usage',
22986 name: 'hls-alternate-audio'
22987 });
22988 }
22989
22990 if (this.useCueTags_) {
22991 this.tech_.trigger({
22992 type: 'usage',
22993 name: 'vhs-playlist-cue-tags'
22994 });
22995 this.tech_.trigger({
22996 type: 'usage',
22997 name: 'hls-playlist-cue-tags'
22998 });
22999 }
23000 };
23001
23002 _proto.shouldSwitchToMedia_ = function shouldSwitchToMedia_(nextPlaylist) {
23003 var currentPlaylist = this.masterPlaylistLoader_.media() || this.masterPlaylistLoader_.pendingMedia_;
23004 var currentTime = this.tech_.currentTime();
23005 var bufferLowWaterLine = this.bufferLowWaterLine();
23006 var bufferHighWaterLine = this.bufferHighWaterLine();
23007 var buffered = this.tech_.buffered();
23008 return shouldSwitchToMedia({
23009 buffered: buffered,
23010 currentTime: currentTime,
23011 currentPlaylist: currentPlaylist,
23012 nextPlaylist: nextPlaylist,
23013 bufferLowWaterLine: bufferLowWaterLine,
23014 bufferHighWaterLine: bufferHighWaterLine,
23015 duration: this.duration(),
23016 experimentalBufferBasedABR: this.experimentalBufferBasedABR,
23017 log: this.logger_
23018 });
23019 }
23020 /**
23021 * Register event handlers on the segment loaders. A helper function
23022 * for construction time.
23023 *
23024 * @private
23025 */
23026 ;
23027
23028 _proto.setupSegmentLoaderListeners_ = function setupSegmentLoaderListeners_() {
23029 var _this4 = this;
23030
23031 if (!this.experimentalBufferBasedABR) {
23032 this.mainSegmentLoader_.on('bandwidthupdate', function () {
23033 var nextPlaylist = _this4.selectPlaylist();
23034
23035 if (_this4.shouldSwitchToMedia_(nextPlaylist)) {
23036 _this4.switchMedia_(nextPlaylist, 'bandwidthupdate');
23037 }
23038
23039 _this4.tech_.trigger('bandwidthupdate');
23040 });
23041 this.mainSegmentLoader_.on('progress', function () {
23042 _this4.trigger('progress');
23043 });
23044 }
23045
23046 this.mainSegmentLoader_.on('error', function () {
23047 _this4.blacklistCurrentPlaylist(_this4.mainSegmentLoader_.error());
23048 });
23049 this.mainSegmentLoader_.on('appenderror', function () {
23050 _this4.error = _this4.mainSegmentLoader_.error_;
23051
23052 _this4.trigger('error');
23053 });
23054 this.mainSegmentLoader_.on('syncinfoupdate', function () {
23055 _this4.onSyncInfoUpdate_();
23056 });
23057 this.mainSegmentLoader_.on('timestampoffset', function () {
23058 _this4.tech_.trigger({
23059 type: 'usage',
23060 name: 'vhs-timestamp-offset'
23061 });
23062
23063 _this4.tech_.trigger({
23064 type: 'usage',
23065 name: 'hls-timestamp-offset'
23066 });
23067 });
23068 this.audioSegmentLoader_.on('syncinfoupdate', function () {
23069 _this4.onSyncInfoUpdate_();
23070 });
23071 this.audioSegmentLoader_.on('appenderror', function () {
23072 _this4.error = _this4.audioSegmentLoader_.error_;
23073
23074 _this4.trigger('error');
23075 });
23076 this.mainSegmentLoader_.on('ended', function () {
23077 _this4.logger_('main segment loader ended');
23078
23079 _this4.onEndOfStream();
23080 });
23081 this.mainSegmentLoader_.on('earlyabort', function (event) {
23082 // never try to early abort with the new ABR algorithm
23083 if (_this4.experimentalBufferBasedABR) {
23084 return;
23085 }
23086
23087 _this4.delegateLoaders_('all', ['abort']);
23088
23089 _this4.blacklistCurrentPlaylist({
23090 message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
23091 }, ABORT_EARLY_BLACKLIST_SECONDS);
23092 });
23093
23094 var updateCodecs = function updateCodecs() {
23095 if (!_this4.sourceUpdater_.hasCreatedSourceBuffers()) {
23096 return _this4.tryToCreateSourceBuffers_();
23097 }
23098
23099 var codecs = _this4.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
23100
23101
23102 if (!codecs) {
23103 return;
23104 }
23105
23106 _this4.sourceUpdater_.addOrChangeSourceBuffers(codecs);
23107 };
23108
23109 this.mainSegmentLoader_.on('trackinfo', updateCodecs);
23110 this.audioSegmentLoader_.on('trackinfo', updateCodecs);
23111 this.mainSegmentLoader_.on('fmp4', function () {
23112 if (!_this4.triggeredFmp4Usage) {
23113 _this4.tech_.trigger({
23114 type: 'usage',
23115 name: 'vhs-fmp4'
23116 });
23117
23118 _this4.tech_.trigger({
23119 type: 'usage',
23120 name: 'hls-fmp4'
23121 });
23122
23123 _this4.triggeredFmp4Usage = true;
23124 }
23125 });
23126 this.audioSegmentLoader_.on('fmp4', function () {
23127 if (!_this4.triggeredFmp4Usage) {
23128 _this4.tech_.trigger({
23129 type: 'usage',
23130 name: 'vhs-fmp4'
23131 });
23132
23133 _this4.tech_.trigger({
23134 type: 'usage',
23135 name: 'hls-fmp4'
23136 });
23137
23138 _this4.triggeredFmp4Usage = true;
23139 }
23140 });
23141 this.audioSegmentLoader_.on('ended', function () {
23142 _this4.logger_('audioSegmentLoader ended');
23143
23144 _this4.onEndOfStream();
23145 });
23146 };
23147
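// ---------------------------------------------------------------------------
// Editorial sketch (not part of the library): the 'bandwidthupdate' handler
// above defers to shouldSwitchToMedia_, which weighs the buffer low/high
// water lines computed near the end of this class. A simplified, hypothetical
// model of that water-line gating (NOT the library's exact rule):
function exampleWaterLineGate(forwardBufferSeconds, switchingUp, lowWaterLine, highWaterLine) {
  // only switch up once enough is buffered; allow switching down sooner
  return switchingUp ? forwardBufferSeconds >= highWaterLine : forwardBufferSeconds <= lowWaterLine;
}
// ---------------------------------------------------------------------------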
23148 _proto.mediaSecondsLoaded_ = function mediaSecondsLoaded_() {
 23149 return this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded; // Math.max with a single argument is a no-op; the intent is the plain sum
23150 }
23151 /**
23152 * Call load on our SegmentLoaders
23153 */
23154 ;
23155
23156 _proto.load = function load() {
23157 this.mainSegmentLoader_.load();
23158
23159 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
23160 this.audioSegmentLoader_.load();
23161 }
23162
23163 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
23164 this.subtitleSegmentLoader_.load();
23165 }
23166 }
23167 /**
23168 * Re-tune playback quality level for the current player
23169 * conditions without performing destructive actions, like
23170 * removing already buffered content
23171 *
23172 * @private
23173 * @deprecated
23174 */
23175 ;
23176
23177 _proto.smoothQualityChange_ = function smoothQualityChange_(media) {
23178 if (media === void 0) {
23179 media = this.selectPlaylist();
23180 }
23181
23182 this.fastQualityChange_(media);
23183 }
23184 /**
23185 * Re-tune playback quality level for the current player
23186 * conditions. This method will perform destructive actions like removing
23187 * already buffered content in order to readjust the currently active
23188 * playlist quickly. This is good for manual quality changes
23189 *
23190 * @private
23191 */
23192 ;
23193
23194 _proto.fastQualityChange_ = function fastQualityChange_(media) {
23195 var _this5 = this;
23196
23197 if (media === void 0) {
23198 media = this.selectPlaylist();
23199 }
23200
23201 if (media === this.masterPlaylistLoader_.media()) {
23202 this.logger_('skipping fastQualityChange because new media is same as old');
23203 return;
23204 }
23205
23206 this.switchMedia_(media, 'fast-quality'); // Delete all buffered data to allow an immediate quality switch, then seek to give
 23207 // the browser a kick to remove any cached frames from the previous rendition (.04 seconds
23208 // ahead is roughly the minimum that will accomplish this across a variety of content
23209 // in IE and Edge, but seeking in place is sufficient on all other browsers)
23210 // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
23211 // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904
23212
23213 this.mainSegmentLoader_.resetEverything(function () {
23214 // Since this is not a typical seek, we avoid the seekTo method which can cause segments
23215 // from the previously enabled rendition to load before the new playlist has finished loading
23216 if (videojs.browser.IE_VERSION || videojs.browser.IS_EDGE) {
23217 _this5.tech_.setCurrentTime(_this5.tech_.currentTime() + 0.04);
23218 } else {
23219 _this5.tech_.setCurrentTime(_this5.tech_.currentTime());
23220 }
23221 }); // don't need to reset audio as it is reset when media changes
23222 }
23223 /**
23224 * Begin playback.
23225 */
23226 ;
23227
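// ---------------------------------------------------------------------------
// Editorial sketch (not part of the library): fastQualityChange_ above flushes
// the buffer and then "seeks in place" to force the browser to drop decoded
// frames from the previous rendition. The kick in isolation, assuming a
// video.js tech:
function exampleSeekInPlace(tech) {
  // old Edge/IE needed a ~0.04s nudge; elsewhere re-setting the current time suffices
  var nudge = (videojs.browser.IE_VERSION || videojs.browser.IS_EDGE) ? 0.04 : 0;
  tech.setCurrentTime(tech.currentTime() + nudge);
}
// ---------------------------------------------------------------------------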
23228 _proto.play = function play() {
23229 if (this.setupFirstPlay()) {
23230 return;
23231 }
23232
23233 if (this.tech_.ended()) {
23234 this.tech_.setCurrentTime(0);
23235 }
23236
23237 if (this.hasPlayed_) {
23238 this.load();
23239 }
23240
23241 var seekable = this.tech_.seekable(); // if the viewer has paused and we fell out of the live window,
23242 // seek forward to the live point
23243
23244 if (this.tech_.duration() === Infinity) {
23245 if (this.tech_.currentTime() < seekable.start(0)) {
23246 return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
23247 }
23248 }
23249 }
23250 /**
23251 * Seek to the latest media position if this is a live video and the
23252 * player and video are loaded and initialized.
23253 */
23254 ;
23255
23256 _proto.setupFirstPlay = function setupFirstPlay() {
23257 var _this6 = this;
23258
23259 var media = this.masterPlaylistLoader_.media(); // Check that everything is ready to begin buffering for the first call to play
23260 // If 1) there is no active media
23261 // 2) the player is paused
23262 // 3) the first play has already been setup
23263 // then exit early
23264
23265 if (!media || this.tech_.paused() || this.hasPlayed_) {
23266 return false;
23267 } // when the video is a live stream
23268
23269
23270 if (!media.endList) {
23271 var seekable = this.seekable();
23272
23273 if (!seekable.length) {
23274 // without a seekable range, the player cannot seek to begin buffering at the live
23275 // point
23276 return false;
23277 }
23278
23279 if (videojs.browser.IE_VERSION && this.tech_.readyState() === 0) {
23280 // IE11 throws an InvalidStateError if you try to set currentTime while the
23281 // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
23282 this.tech_.one('loadedmetadata', function () {
23283 _this6.trigger('firstplay');
23284
23285 _this6.tech_.setCurrentTime(seekable.end(0));
23286
23287 _this6.hasPlayed_ = true;
23288 });
23289 return false;
23290 } // trigger firstplay to inform the source handler to ignore the next seek event
23291
23292
23293 this.trigger('firstplay'); // seek to the live point
23294
23295 this.tech_.setCurrentTime(seekable.end(0));
23296 }
23297
23298 this.hasPlayed_ = true; // we can begin loading now that everything is ready
23299
23300 this.load();
23301 return true;
23302 }
23303 /**
23304 * handle the sourceopen event on the MediaSource
23305 *
23306 * @private
23307 */
23308 ;
23309
23310 _proto.handleSourceOpen_ = function handleSourceOpen_() {
23311 // Only attempt to create the source buffer if none already exist.
23312 // handleSourceOpen is also called when we are "re-opening" a source buffer
23313 // after `endOfStream` has been called (in response to a seek for instance)
23314 this.tryToCreateSourceBuffers_(); // if autoplay is enabled, begin playback. This is duplicative of
23315 // code in video.js but is required because play() must be invoked
23316 // *after* the media source has opened.
23317
23318 if (this.tech_.autoplay()) {
23319 var playPromise = this.tech_.play(); // Catch/silence error when a pause interrupts a play request
23320 // on browsers which return a promise
23321
23322 if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
23323 playPromise.then(null, function (e) {});
23324 }
23325 }
23326
23327 this.trigger('sourceopen');
23328 }
23329 /**
23330 * handle the sourceended event on the MediaSource
23331 *
23332 * @private
23333 */
23334 ;
23335
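// ---------------------------------------------------------------------------
// Editorial sketch (not part of the library): the play() promise handling in
// handleSourceOpen_ above, extracted as a helper. It assumes only that play()
// may return a promise (modern browsers) or undefined (older ones).
function exampleSilencePlayPromise(playPromise) {
  if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
    // swallow the rejection that a pause() can cause mid-play-request
    playPromise.then(null, function (e) {});
  }
}
// ---------------------------------------------------------------------------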
23336 _proto.handleSourceEnded_ = function handleSourceEnded_() {
23337 if (!this.inbandTextTracks_.metadataTrack_) {
23338 return;
23339 }
23340
23341 var cues = this.inbandTextTracks_.metadataTrack_.cues;
23342
23343 if (!cues || !cues.length) {
23344 return;
23345 }
23346
23347 var duration = this.duration();
23348 cues[cues.length - 1].endTime = isNaN(duration) || Math.abs(duration) === Infinity ? Number.MAX_VALUE : duration;
23349 }
23350 /**
23351 * handle the durationchange event on the MediaSource
23352 *
23353 * @private
23354 */
23355 ;
23356
23357 _proto.handleDurationChange_ = function handleDurationChange_() {
23358 this.tech_.trigger('durationchange');
23359 }
23360 /**
23361 * Calls endOfStream on the media source when all active stream types have called
23362 * endOfStream
23363 *
 23364 * (Takes no parameters; the ended_ flags on the main and audio segment
 23365 * loaders are inspected directly.)
23366 * @private
23367 */
23368 ;
23369
23370 _proto.onEndOfStream = function onEndOfStream() {
23371 var isEndOfStream = this.mainSegmentLoader_.ended_;
23372
23373 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
23374 var mainMediaInfo = this.mainSegmentLoader_.getCurrentMediaInfo_(); // if the audio playlist loader exists, then alternate audio is active
23375
23376 if (!mainMediaInfo || mainMediaInfo.hasVideo) {
23377 // if we do not know if the main segment loader contains video yet or if we
23378 // definitively know the main segment loader contains video, then we need to wait
23379 // for both main and audio segment loaders to call endOfStream
23380 isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
23381 } else {
23382 // otherwise just rely on the audio loader
23383 isEndOfStream = this.audioSegmentLoader_.ended_;
23384 }
23385 }
23386
23387 if (!isEndOfStream) {
23388 return;
23389 }
23390
23391 this.stopABRTimer_();
23392 this.sourceUpdater_.endOfStream();
23393 }
23394 /**
23395 * Check if a playlist has stopped being updated
23396 *
23397 * @param {Object} playlist the media playlist object
23398 * @return {boolean} whether the playlist has stopped being updated or not
23399 */
23400 ;
23401
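// ---------------------------------------------------------------------------
// Editorial sketch (not part of the library): onEndOfStream above reduces to
// this pure function (mirrors its branches; parameter names are illustrative):
function exampleIsEndOfStream(mainEnded, audioEnded, usingAltAudio, mainHasVideo) {
  if (!usingAltAudio) {
    return mainEnded;
  }
  // unknown main media info is treated like "has video": wait for both loaders
  return mainHasVideo === false ? audioEnded : mainEnded && audioEnded;
}
// ---------------------------------------------------------------------------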
23402 _proto.stuckAtPlaylistEnd_ = function stuckAtPlaylistEnd_(playlist) {
23403 var seekable = this.seekable();
23404
23405 if (!seekable.length) {
23406 // playlist doesn't have enough information to determine whether we are stuck
23407 return false;
23408 }
23409
23410 var expired = this.syncController_.getExpiredTime(playlist, this.duration());
23411
23412 if (expired === null) {
23413 return false;
23414 } // does not use the safe live end to calculate playlist end, since we
23415 // don't want to say we are stuck while there is still content
23416
23417
23418 var absolutePlaylistEnd = Vhs$1.Playlist.playlistEnd(playlist, expired);
23419 var currentTime = this.tech_.currentTime();
23420 var buffered = this.tech_.buffered();
23421
23422 if (!buffered.length) {
23423 // return true if the playhead reached the absolute end of the playlist
23424 return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
23425 }
23426
23427 var bufferedEnd = buffered.end(buffered.length - 1); // return true if there is too little buffer left and buffer has reached absolute
23428 // end of playlist
23429
23430 return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
23431 }
23432 /**
23433 * Blacklists a playlist when an error occurs for a set amount of time
23434 * making it unavailable for selection by the rendition selection algorithm
23435 * and then forces a new playlist (rendition) selection.
23436 *
23437 * @param {Object=} error an optional error that may include the playlist
23438 * to blacklist
23439 * @param {number=} blacklistDuration an optional number of seconds to blacklist the
23440 * playlist
23441 */
23442 ;
23443
23444 _proto.blacklistCurrentPlaylist = function blacklistCurrentPlaylist(error, blacklistDuration) {
23445 if (error === void 0) {
23446 error = {};
23447 }
23448
23449 // If the `error` was generated by the playlist loader, it will contain
23450 // the playlist we were trying to load (but failed) and that should be
23451 // blacklisted instead of the currently selected playlist which is likely
23452 // out-of-date in this scenario
23453 var currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();
23454 blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration; // If there is no current playlist, then an error occurred while we were
23455 // trying to load the master OR while we were disposing of the tech
23456
23457 if (!currentPlaylist) {
23458 this.error = error;
23459
23460 if (this.mediaSource.readyState !== 'open') {
23461 this.trigger('error');
23462 } else {
23463 this.sourceUpdater_.endOfStream('network');
23464 }
23465
23466 return;
23467 }
23468
23469 currentPlaylist.playlistErrors_++;
23470 var playlists = this.masterPlaylistLoader_.master.playlists;
23471 var enabledPlaylists = playlists.filter(isEnabled);
23472 var isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === currentPlaylist; // Don't blacklist the only playlist unless it was blacklisted
23473 // forever
23474
23475 if (playlists.length === 1 && blacklistDuration !== Infinity) {
23476 videojs.log.warn("Problem encountered with playlist " + currentPlaylist.id + ". " + 'Trying again since it is the only playlist.');
23477 this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delay
23478
23479 return this.masterPlaylistLoader_.load(isFinalRendition);
23480 }
23481
23482 if (isFinalRendition) {
23483 // Since we're on the final non-blacklisted playlist, and we're about to blacklist
23484 // it, instead of erring the player or retrying this playlist, clear out the current
23485 // blacklist. This allows other playlists to be attempted in case any have been
23486 // fixed.
23487 var reincluded = false;
23488 playlists.forEach(function (playlist) {
23489 // skip current playlist which is about to be blacklisted
23490 if (playlist === currentPlaylist) {
23491 return;
23492 }
23493
23494 var excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.
23495
23496 if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {
23497 reincluded = true;
23498 delete playlist.excludeUntil;
23499 }
23500 });
23501
23502 if (reincluded) {
23503 videojs.log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are simply retrying a previous
23504 // playlist. This is needed for users relying on the retryplaylist event to catch a
23505 // case where the player might be stuck and looping through "dead" playlists.
23506
23507 this.tech_.trigger('retryplaylist');
23508 }
23509 } // Blacklist this playlist
23510
23511
23512 var excludeUntil;
23513
23514 if (currentPlaylist.playlistErrors_ > this.maxPlaylistRetries) {
23515 excludeUntil = Infinity;
23516 } else {
23517 excludeUntil = Date.now() + blacklistDuration * 1000;
23518 }
23519
23520 currentPlaylist.excludeUntil = excludeUntil;
23521
23522 if (error.reason) {
23523 currentPlaylist.lastExcludeReason_ = error.reason;
23524 }
23525
23526 this.tech_.trigger('blacklistplaylist');
23527 this.tech_.trigger({
23528 type: 'usage',
23529 name: 'vhs-rendition-blacklisted'
23530 });
23531 this.tech_.trigger({
23532 type: 'usage',
23533 name: 'hls-rendition-blacklisted'
23534 }); // TODO: should we select a new playlist if this blacklist wasn't for the currentPlaylist?
 23535 // Would be something like media().id !== currentPlaylist.id and we would need something
23536 // like `pendingMedia` in playlist loaders to check against that too. This will prevent us
23537 // from loading a new playlist on any blacklist.
23538 // Select a new playlist
23539
23540 var nextPlaylist = this.selectPlaylist();
23541
23542 if (!nextPlaylist) {
23543 this.error = 'Playback cannot continue. No available working or supported playlists.';
23544 this.trigger('error');
23545 return;
23546 }
23547
23548 var logFn = error.internal ? this.logger_ : videojs.log.warn;
23549 var errorMessage = error.message ? ' ' + error.message : '';
23550 logFn((error.internal ? 'Internal problem' : 'Problem') + " encountered with playlist " + currentPlaylist.id + "." + (errorMessage + " Switching to playlist " + nextPlaylist.id + ".")); // if audio group changed reset audio loaders
23551
23552 if (nextPlaylist.attributes.AUDIO !== currentPlaylist.attributes.AUDIO) {
23553 this.delegateLoaders_('audio', ['abort', 'pause']);
23554 } // if subtitle group changed reset subtitle loaders
23555
23556
23557 if (nextPlaylist.attributes.SUBTITLES !== currentPlaylist.attributes.SUBTITLES) {
23558 this.delegateLoaders_('subtitle', ['abort', 'pause']);
23559 }
23560
23561 this.delegateLoaders_('main', ['abort', 'pause']);
23562 var delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;
 23563 var shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay if it's a final rendition or if the last refresh happened within half the target duration
23564
23565 return this.switchMedia_(nextPlaylist, 'exclude', isFinalRendition || shouldDelay);
23566 }
23567 /**
23568 * Pause all segment/playlist loaders
23569 */
23570 ;
23571
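// ---------------------------------------------------------------------------
// Editorial sketch (not part of the library): blacklistCurrentPlaylist above
// records an absolute `excludeUntil` timestamp (or Infinity after too many
// retries). Checking whether a playlist is currently excluded could look like:
function exampleIsCurrentlyExcluded(playlist) {
  return typeof playlist.excludeUntil !== 'undefined' && playlist.excludeUntil > Date.now();
}
// ---------------------------------------------------------------------------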
23572 _proto.pauseLoading = function pauseLoading() {
23573 this.delegateLoaders_('all', ['abort', 'pause']);
23574 this.stopABRTimer_();
23575 }
23576 /**
23577 * Call a set of functions in order on playlist loaders, segment loaders,
23578 * or both types of loaders.
23579 *
23580 * @param {string} filter
23581 * Filter loaders that should call fnNames using a string. Can be:
23582 * * all - run on all loaders
23583 * * audio - run on all audio loaders
23584 * * subtitle - run on all subtitle loaders
23585 * * main - run on the main/master loaders
23586 *
23587 * @param {Array|string} fnNames
23588 * A string or array of function names to call.
23589 */
23590 ;
23591
23592 _proto.delegateLoaders_ = function delegateLoaders_(filter, fnNames) {
23593 var _this7 = this;
23594
23595 var loaders = [];
23596 var dontFilterPlaylist = filter === 'all';
23597
23598 if (dontFilterPlaylist || filter === 'main') {
23599 loaders.push(this.masterPlaylistLoader_);
23600 }
23601
23602 var mediaTypes = [];
23603
23604 if (dontFilterPlaylist || filter === 'audio') {
23605 mediaTypes.push('AUDIO');
23606 }
23607
23608 if (dontFilterPlaylist || filter === 'subtitle') {
23609 mediaTypes.push('CLOSED-CAPTIONS');
23610 mediaTypes.push('SUBTITLES');
23611 }
23612
23613 mediaTypes.forEach(function (mediaType) {
23614 var loader = _this7.mediaTypes_[mediaType] && _this7.mediaTypes_[mediaType].activePlaylistLoader;
23615
23616 if (loader) {
23617 loaders.push(loader);
23618 }
23619 });
23620 ['main', 'audio', 'subtitle'].forEach(function (name) {
23621 var loader = _this7[name + "SegmentLoader_"];
23622
23623 if (loader && (filter === name || filter === 'all')) {
23624 loaders.push(loader);
23625 }
23626 });
23627 loaders.forEach(function (loader) {
23628 return fnNames.forEach(function (fnName) {
23629 if (typeof loader[fnName] === 'function') {
23630 loader[fnName]();
23631 }
23632 });
23633 });
23634 }
23635 /**
23636 * set the current time on all segment loaders
23637 *
 23638 * @param {number} currentTime the current time to set
 23639 * @return {number} the current time
23640 */
23641 ;
23642
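// ---------------------------------------------------------------------------
// Editorial sketch (not part of the library): delegateLoaders_ above is used
// throughout this class, e.g. in pauseLoading and blacklistCurrentPlaylist.
// Typical invocations, assuming `mpc` is a MasterPlaylistController:
function exampleDelegateLoaders(mpc) {
  mpc.delegateLoaders_('all', ['abort', 'pause']); // every playlist and segment loader
  mpc.delegateLoaders_('audio', ['abort']);        // only the audio loaders
}
// ---------------------------------------------------------------------------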
23643 _proto.setCurrentTime = function setCurrentTime(currentTime) {
23644 var buffered = findRange(this.tech_.buffered(), currentTime);
23645
23646 if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
23647 // return immediately if the metadata is not ready yet
23648 return 0;
 23649 } // it's clearly an edge-case but don't throw an error if asked to
23650 // seek within an empty playlist
23651
23652
23653 if (!this.masterPlaylistLoader_.media().segments) {
23654 return 0;
23655 } // if the seek location is already buffered, continue buffering as usual
23656
23657
23658 if (buffered && buffered.length) {
23659 return currentTime;
23660 } // cancel outstanding requests so we begin buffering at the new
23661 // location
23662
23663
23664 this.mainSegmentLoader_.resetEverything();
23665 this.mainSegmentLoader_.abort();
23666
23667 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
23668 this.audioSegmentLoader_.resetEverything();
23669 this.audioSegmentLoader_.abort();
23670 }
23671
23672 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
23673 this.subtitleSegmentLoader_.resetEverything();
23674 this.subtitleSegmentLoader_.abort();
23675 } // start segment loader loading in case they are paused
23676
23677
23678 this.load();
23679 }
23680 /**
23681 * get the current duration
23682 *
 23683 * @return {number} the duration
23684 */
23685 ;
23686
23687 _proto.duration = function duration() {
23688 if (!this.masterPlaylistLoader_) {
23689 return 0;
23690 }
23691
23692 var media = this.masterPlaylistLoader_.media();
23693
23694 if (!media) {
23695 // no playlists loaded yet, so can't determine a duration
23696 return 0;
23697 } // Don't rely on the media source for duration in the case of a live playlist since
23698 // setting the native MediaSource's duration to infinity ends up with consequences to
23699 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
23700 //
23701 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
23702 // however, few browsers have support for setLiveSeekableRange()
23703 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
23704 //
23705 // Until a time when the duration of the media source can be set to infinity, and a
23706 // seekable range specified across browsers, just return Infinity.
23707
23708
23709 if (!media.endList) {
23710 return Infinity;
23711 } // Since this is a VOD video, it is safe to rely on the media source's duration (if
23712 // available). If it's not available, fall back to a playlist-calculated estimate.
23713
23714
23715 if (this.mediaSource) {
23716 return this.mediaSource.duration;
23717 }
23718
23719 return Vhs$1.Playlist.duration(media);
23720 }
23721 /**
23722 * check the seekable range
23723 *
 23724 * @return {TimeRanges} the seekable range
23725 */
23726 ;
23727
23728 _proto.seekable = function seekable() {
23729 return this.seekable_;
23730 };
23731
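// ---------------------------------------------------------------------------
// Editorial sketch (not part of the library): onSyncInfoUpdate_ below merges
// the main and audio seekable ranges by intersecting them. The intersection in
// isolation (illustrative; the code below falls back to main when disjoint):
function exampleIntersectRanges(aStart, aEnd, bStart, bEnd) {
  if (aStart > bEnd || bStart > aEnd) {
    return null; // disjoint ranges
  }
  return [Math.max(aStart, bStart), Math.min(aEnd, bEnd)];
}
// ---------------------------------------------------------------------------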
23732 _proto.onSyncInfoUpdate_ = function onSyncInfoUpdate_() {
23733 var audioSeekable; // TODO check for creation of both source buffers before updating seekable
23734 //
23735 // A fix was made to this function where a check for
23736 // this.sourceUpdater_.hasCreatedSourceBuffers
23737 // was added to ensure that both source buffers were created before seekable was
23738 // updated. However, it originally had a bug where it was checking for a true and
23739 // returning early instead of checking for false. Setting it to check for false to
23740 // return early though created other issues. A call to play() would check for seekable
23741 // end without verifying that a seekable range was present. In addition, even checking
23742 // for that didn't solve some issues, as handleFirstPlay is sometimes worked around
23743 // due to a media update calling load on the segment loaders, skipping a seek to live,
23744 // thereby starting live streams at the beginning of the stream rather than at the end.
23745 //
23746 // This conditional should be fixed to wait for the creation of two source buffers at
23747 // the same time as the other sections of code are fixed to properly seek to live and
23748 // not throw an error due to checking for a seekable end when no seekable range exists.
23749 //
23750 // For now, fall back to the older behavior, with the understanding that the seekable
23751 // range may not be completely correct, leading to a suboptimal initial live point.
23752
23753 if (!this.masterPlaylistLoader_) {
23754 return;
23755 }
23756
23757 var media = this.masterPlaylistLoader_.media();
23758
23759 if (!media) {
23760 return;
23761 }
23762
23763 var expired = this.syncController_.getExpiredTime(media, this.duration());
23764
23765 if (expired === null) {
23766 // not enough information to update seekable
23767 return;
23768 }
23769
23770 var master = this.masterPlaylistLoader_.master;
23771 var mainSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
23772
23773 if (mainSeekable.length === 0) {
23774 return;
23775 }
23776
23777 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
23778 media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
23779 expired = this.syncController_.getExpiredTime(media, this.duration());
23780
23781 if (expired === null) {
23782 return;
23783 }
23784
23785 audioSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
23786
23787 if (audioSeekable.length === 0) {
23788 return;
23789 }
23790 }
23791
23792 var oldEnd;
23793 var oldStart;
23794
23795 if (this.seekable_ && this.seekable_.length) {
23796 oldEnd = this.seekable_.end(0);
23797 oldStart = this.seekable_.start(0);
23798 }
23799
23800 if (!audioSeekable) {
23801 // seekable has been calculated based on buffering video data so it
23802 // can be returned directly
23803 this.seekable_ = mainSeekable;
23804 } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
23805 // seekables are pretty far off, rely on main
23806 this.seekable_ = mainSeekable;
23807 } else {
23808 this.seekable_ = videojs.createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
23809 } // seekable is the same as last time
23810
23811
23812 if (this.seekable_ && this.seekable_.length) {
23813 if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
23814 return;
23815 }
23816 }
23817
23818 this.logger_("seekable updated [" + printableRange(this.seekable_) + "]");
23819 this.tech_.trigger('seekablechanged');
23820 }
23821 /**
23822 * Update the player duration
23823 */
23824 ;
23825
23826 _proto.updateDuration = function updateDuration(isLive) {
23827 if (this.updateDuration_) {
23828 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
23829 this.updateDuration_ = null;
23830 }
23831
23832 if (this.mediaSource.readyState !== 'open') {
23833 this.updateDuration_ = this.updateDuration.bind(this, isLive);
23834 this.mediaSource.addEventListener('sourceopen', this.updateDuration_);
23835 return;
23836 }
23837
23838 if (isLive) {
23839 var seekable = this.seekable();
23840
23841 if (!seekable.length) {
23842 return;
23843 } // Even in the case of a live playlist, the native MediaSource's duration should not
23844 // be set to Infinity (even though this would be expected for a live playlist), since
23845 // setting the native MediaSource's duration to infinity ends up with consequences to
23846 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
23847 //
23848 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
23849 // however, few browsers have support for setLiveSeekableRange()
23850 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
23851 //
23852 // Until a time when the duration of the media source can be set to infinity, and a
23853 // seekable range specified across browsers, the duration should be greater than or
23854 // equal to the last possible seekable value.
23855 // MediaSource duration starts as NaN
23856 // It is possible (and probable) that this case will never be reached for many
23857 // sources, since the MediaSource reports duration as the highest value without
23858 // accounting for timestamp offset. For example, if the timestamp offset is -100 and
23859 // we buffered times 0 to 100 with real times of 100 to 200, even though current
23860 // time will be between 0 and 100, the native media source may report the duration
23861 // as 200. However, since we report duration separate from the media source (as
23862 // Infinity), and as long as the native media source duration value is greater than
23863 // our reported seekable range, seeks will work as expected. The large number as
23864 // duration for live is actually a strategy used by some players to work around the
23865 // issue of live seekable ranges cited above.
23866
23867
23868 if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {
23869 this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));
23870 }
23871
23872 return;
23873 }
23874
23875 var buffered = this.tech_.buffered();
23876 var duration = Vhs$1.Playlist.duration(this.masterPlaylistLoader_.media());
23877
23878 if (buffered.length > 0) {
23879 duration = Math.max(duration, buffered.end(buffered.length - 1));
23880 }
23881
23882 if (this.mediaSource.duration !== duration) {
23883 this.sourceUpdater_.setDuration(duration);
23884 }
23885 }
23886 /**
23887 * dispose of the MasterPlaylistController and everything
23888 * that it controls
23889 */
23890 ;
23891
23892 _proto.dispose = function dispose() {
23893 var _this8 = this;
23894
23895 this.trigger('dispose');
23896 this.decrypter_.terminate();
23897 this.masterPlaylistLoader_.dispose();
23898 this.mainSegmentLoader_.dispose();
23899
23900 if (this.loadOnPlay_) {
23901 this.tech_.off('play', this.loadOnPlay_);
23902 }
23903
23904 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
23905 var groups = _this8.mediaTypes_[type].groups;
23906
23907 for (var id in groups) {
23908 groups[id].forEach(function (group) {
23909 if (group.playlistLoader) {
23910 group.playlistLoader.dispose();
23911 }
23912 });
23913 }
23914 });
23915 this.audioSegmentLoader_.dispose();
23916 this.subtitleSegmentLoader_.dispose();
23917 this.sourceUpdater_.dispose();
23918 this.timelineChangeController_.dispose();
23919 this.stopABRTimer_();
23920
23921 if (this.updateDuration_) {
23922 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
23923 }
23924
 23925 this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_); // detach the remaining MediaSource listeners
23926
23927 this.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);
23928 this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);
23929 this.off();
23930 }
23931 /**
23932 * return the master playlist object if we have one
23933 *
23934 * @return {Object} the master playlist object that we parsed
23935 */
23936 ;
23937
23938 _proto.master = function master() {
23939 return this.masterPlaylistLoader_.master;
23940 }
23941 /**
23942 * return the currently selected playlist
23943 *
23944 * @return {Object} the currently selected playlist object that we parsed
23945 */
23946 ;
23947
23948 _proto.media = function media() {
23949 // playlist loader will not return media if it has not been fully loaded
23950 return this.masterPlaylistLoader_.media() || this.initialMedia_;
23951 };
23952
23953 _proto.areMediaTypesKnown_ = function areMediaTypesKnown_() {
23954 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
23955 var hasMainMediaInfo = !!this.mainSegmentLoader_.getCurrentMediaInfo_(); // if we are not using an audio loader, then we have audio media info
23956 // otherwise check on the segment loader.
23957
 23958 var hasAudioMediaInfo = !usingAudioLoader ? true : !!this.audioSegmentLoader_.getCurrentMediaInfo_(); // one or both loaders have not loaded sufficiently to get codecs
23959
23960 if (!hasMainMediaInfo || !hasAudioMediaInfo) {
23961 return false;
23962 }
23963
23964 return true;
23965 };
23966
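// ---------------------------------------------------------------------------
// Editorial sketch (not part of the library): areMediaTypesKnown_ above
// reduces to: main track info must be present, and audio track info too when
// a separate audio loader is active.
function exampleAreMediaTypesKnown(hasMainInfo, usingAudioLoader, hasAudioInfo) {
  return !!hasMainInfo && (!usingAudioLoader || !!hasAudioInfo);
}
// ---------------------------------------------------------------------------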
23967 _proto.getCodecsOrExclude_ = function getCodecsOrExclude_() {
23968 var _this9 = this;
23969
23970 var media = {
23971 main: this.mainSegmentLoader_.getCurrentMediaInfo_() || {},
23972 audio: this.audioSegmentLoader_.getCurrentMediaInfo_() || {}
23973 }; // set "main" media equal to video
23974
23975 media.video = media.main;
23976 var playlistCodecs = codecsForPlaylist(this.master(), this.media());
23977 var codecs = {};
23978 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
23979
23980 if (media.main.hasVideo) {
23981 codecs.video = playlistCodecs.video || media.main.videoCodec || DEFAULT_VIDEO_CODEC;
23982 }
23983
23984 if (media.main.isMuxed) {
23985 codecs.video += "," + (playlistCodecs.audio || media.main.audioCodec || DEFAULT_AUDIO_CODEC);
23986 }
23987
23988 if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {
23989 codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function below
23990
23991 media.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;
23992 } // no codecs, no playback.
23993
23994
23995 if (!codecs.audio && !codecs.video) {
23996 this.blacklistCurrentPlaylist({
23997 playlist: this.media(),
23998 message: 'Could not determine codecs for playlist.',
23999 blacklistDuration: Infinity
24000 });
24001 return;
24002 } // fmp4 relies on browser support, while ts relies on muxer support
24003
24004
24005 var supportFunction = function supportFunction(isFmp4, codec) {
24006 return isFmp4 ? browserSupportsCodec(codec) : muxerSupportsCodec(codec);
24007 };
24008
24009 var unsupportedCodecs = {};
24010 var unsupportedAudio;
24011 ['video', 'audio'].forEach(function (type) {
24012 if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {
24013 var supporter = media[type].isFmp4 ? 'browser' : 'muxer';
24014 unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];
24015 unsupportedCodecs[supporter].push(codecs[type]);
24016
24017 if (type === 'audio') {
24018 unsupportedAudio = supporter;
24019 }
24020 }
24021 });
24022
24023 if (usingAudioLoader && unsupportedAudio && this.media().attributes.AUDIO) {
24024 var audioGroup = this.media().attributes.AUDIO;
24025 this.master().playlists.forEach(function (variant) {
24026 var variantAudioGroup = variant.attributes && variant.attributes.AUDIO;
24027
24028 if (variantAudioGroup === audioGroup && variant !== _this9.media()) {
24029 variant.excludeUntil = Infinity;
24030 }
24031 });
24032 this.logger_("excluding audio group " + audioGroup + " as " + unsupportedAudio + " does not support codec(s): \"" + codecs.audio + "\"");
24033 } // if we have any unsupported codecs blacklist this playlist.
24034
24035
24036 if (Object.keys(unsupportedCodecs).length) {
24037 var message = Object.keys(unsupportedCodecs).reduce(function (acc, supporter) {
24038 if (acc) {
24039 acc += ', ';
24040 }
24041
24042 acc += supporter + " does not support codec(s): \"" + unsupportedCodecs[supporter].join(',') + "\"";
24043 return acc;
24044 }, '') + '.';
24045 this.blacklistCurrentPlaylist({
24046 playlist: this.media(),
24047 internal: true,
24048 message: message,
24049 blacklistDuration: Infinity
24050 });
24051 return;
24052 } // check if codec switching is happening
24053
24054
24055 if (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {
24056 var switchMessages = [];
24057 ['video', 'audio'].forEach(function (type) {
24058 var newCodec = (parseCodecs(_this9.sourceUpdater_.codecs[type] || '')[0] || {}).type;
24059 var oldCodec = (parseCodecs(codecs[type] || '')[0] || {}).type;
24060
24061 if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {
24062 switchMessages.push("\"" + _this9.sourceUpdater_.codecs[type] + "\" -> \"" + codecs[type] + "\"");
24063 }
24064 });
24065
24066 if (switchMessages.length) {
24067 this.blacklistCurrentPlaylist({
24068 playlist: this.media(),
24069 message: "Codec switching not supported: " + switchMessages.join(', ') + ".",
24070 blacklistDuration: Infinity,
24071 internal: true
24072 });
24073 return;
24074 }
24075 } // TODO: when using the muxer shouldn't we just return
24076 // the codecs that the muxer outputs?
24077
24078
24079 return codecs;
24080 }
24081 /**
 24082 * Create source buffers and exclude any incompatible renditions.
24083 *
24084 * @private
24085 */
24086 ;
24087
24088 _proto.tryToCreateSourceBuffers_ = function tryToCreateSourceBuffers_() {
24089 // media source is not ready yet or sourceBuffers are already
24090 // created.
24091 if (this.mediaSource.readyState !== 'open' || this.sourceUpdater_.hasCreatedSourceBuffers()) {
24092 return;
24093 }
24094
24095 if (!this.areMediaTypesKnown_()) {
24096 return;
24097 }
24098
24099 var codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
24100
24101 if (!codecs) {
24102 return;
24103 }
24104
24105 this.sourceUpdater_.createSourceBuffers(codecs);
24106 var codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
24107 this.excludeIncompatibleVariants_(codecString);
24108 }
24109 /**
24110 * Excludes playlists with codecs that are unsupported by the muxer and browser.
24111 */
24112 ;
24113
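// ---------------------------------------------------------------------------
// Editorial sketch (not part of the library): tryToCreateSourceBuffers_ above
// joins whichever codecs exist into a single string, e.g. (hypothetical
// values) { video: 'avc1.4d401f', audio: 'mp4a.40.2' } -> 'avc1.4d401f,mp4a.40.2'.
function exampleCodecString(codecs) {
  return [codecs.video, codecs.audio].filter(Boolean).join(',');
}
// ---------------------------------------------------------------------------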
24114 _proto.excludeUnsupportedVariants_ = function excludeUnsupportedVariants_() {
24115 var _this10 = this;
24116
24117 var playlists = this.master().playlists;
24118 var ids = []; // TODO: why don't we have a property to loop through all
 24119 // playlists? Why did we ever mix indexes and keys?
24120
24121 Object.keys(playlists).forEach(function (key) {
24122 var variant = playlists[key]; // check if we already processed this playlist.
24123
24124 if (ids.indexOf(variant.id) !== -1) {
24125 return;
24126 }
24127
24128 ids.push(variant.id);
24129 var codecs = codecsForPlaylist(_this10.master, variant);
24130 var unsupported = [];
24131
24132 if (codecs.audio && !muxerSupportsCodec(codecs.audio) && !browserSupportsCodec(codecs.audio)) {
24133 unsupported.push("audio codec " + codecs.audio);
24134 }
24135
24136 if (codecs.video && !muxerSupportsCodec(codecs.video) && !browserSupportsCodec(codecs.video)) {
24137 unsupported.push("video codec " + codecs.video);
24138 }
24139
24140 if (codecs.text && codecs.text === 'stpp.ttml.im1t') {
24141 unsupported.push("text codec " + codecs.text);
24142 }
24143
24144 if (unsupported.length) {
24145 variant.excludeUntil = Infinity;
24146
24147 _this10.logger_("excluding " + variant.id + " for unsupported: " + unsupported.join(', '));
24148 }
24149 });
24150 }
24151 /**
24152 * Blacklist playlists that are known to be codec or
24153 * stream-incompatible with the SourceBuffer configuration. For
24154 * instance, Media Source Extensions would cause the video element to
24155 * stall waiting for video data if you switched from a variant with
24156 * video and audio to an audio-only one.
24157 *
24158 * @param {Object} media a media playlist compatible with the current
24159 * set of SourceBuffers. Variants in the current master playlist that
24160 * do not appear to have compatible codec or stream configurations
24161 * will be excluded from the default playlist selection algorithm
24162 * indefinitely.
24163 * @private
24164 */
24165 ;
24166
24167 _proto.excludeIncompatibleVariants_ = function excludeIncompatibleVariants_(codecString) {
24168 var _this11 = this;
24169
24170 var ids = [];
24171 var playlists = this.master().playlists;
24172 var codecs = unwrapCodecList(parseCodecs(codecString));
24173 var codecCount_ = codecCount(codecs);
24174 var videoDetails = codecs.video && parseCodecs(codecs.video)[0] || null;
24175 var audioDetails = codecs.audio && parseCodecs(codecs.audio)[0] || null;
24176 Object.keys(playlists).forEach(function (key) {
24177 var variant = playlists[key]; // check if we already processed this playlist.
 24178 // or if it is already excluded forever.
24179
24180 if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {
24181 return;
24182 }
24183
24184 ids.push(variant.id);
24185 var blacklistReasons = []; // get codecs from the playlist for this variant
24186
24187 var variantCodecs = codecsForPlaylist(_this11.masterPlaylistLoader_.master, variant);
24188 var variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this
24189 // variant is incompatible. Wait for mux.js to probe
24190
24191 if (!variantCodecs.audio && !variantCodecs.video) {
24192 return;
24193 } // TODO: we can support this by removing the
24194 // old media source and creating a new one, but it will take some work.
24195 // The number of streams cannot change
24196
24197
24198 if (variantCodecCount !== codecCount_) {
24199 blacklistReasons.push("codec count \"" + variantCodecCount + "\" !== \"" + codecCount_ + "\"");
24200 } // only exclude playlists by codec change, if codecs cannot switch
24201 // during playback.
24202
24203
24204 if (!_this11.sourceUpdater_.canChangeType()) {
24205 var variantVideoDetails = variantCodecs.video && parseCodecs(variantCodecs.video)[0] || null;
24206 var variantAudioDetails = variantCodecs.audio && parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot change
24207
24208 if (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {
24209 blacklistReasons.push("video codec \"" + variantVideoDetails.type + "\" !== \"" + videoDetails.type + "\"");
24210 } // the audio codec cannot change
24211
24212
24213 if (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {
24214 blacklistReasons.push("audio codec \"" + variantAudioDetails.type + "\" !== \"" + audioDetails.type + "\"");
24215 }
24216 }
24217
24218 if (blacklistReasons.length) {
24219 variant.excludeUntil = Infinity;
24220
24221 _this11.logger_("blacklisting " + variant.id + ": " + blacklistReasons.join(' && '));
24222 }
24223 });
24224 };
24225
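// ---------------------------------------------------------------------------
// Editorial sketch (not part of the library): the codec comparisons in
// excludeIncompatibleVariants_ above compare only the base codec type.
// parseCodecs comes from @videojs/vhs-utils (imported at the top of this file).
function exampleSameBaseCodec(codecA, codecB) {
  var a = (parseCodecs(codecA || '')[0] || {}).type;
  var b = (parseCodecs(codecB || '')[0] || {}).type;
  // missing details mean "cannot prove incompatible"
  return !a || !b || a.toLowerCase() === b.toLowerCase();
}
// ---------------------------------------------------------------------------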
24226 _proto.updateAdCues_ = function updateAdCues_(media) {
24227 var offset = 0;
24228 var seekable = this.seekable();
24229
24230 if (seekable.length) {
24231 offset = seekable.start(0);
24232 }
24233
24234 updateAdCues(media, this.cueTagsTrack_, offset);
24235 }
24236 /**
24237 * Calculates the desired forward buffer length based on current time
24238 *
24239 * @return {number} Desired forward buffer length in seconds
24240 */
24241 ;
24242
24243 _proto.goalBufferLength = function goalBufferLength() {
24244 var currentTime = this.tech_.currentTime();
24245 var initial = Config.GOAL_BUFFER_LENGTH;
24246 var rate = Config.GOAL_BUFFER_LENGTH_RATE;
24247 var max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
24248 return Math.min(initial + currentTime * rate, max);
24249 }
24250 /**
24251 * Calculates the desired buffer low water line based on current time
24252 *
24253 * @return {number} Desired buffer low water line in seconds
24254 */
24255 ;
24256
24257 _proto.bufferLowWaterLine = function bufferLowWaterLine() {
24258 var currentTime = this.tech_.currentTime();
24259 var initial = Config.BUFFER_LOW_WATER_LINE;
24260 var rate = Config.BUFFER_LOW_WATER_LINE_RATE;
24261 var max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
24262 var newMax = Math.max(initial, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE);
24263 return Math.min(initial + currentTime * rate, this.experimentalBufferBasedABR ? newMax : max);
24264 };
24265
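// ---------------------------------------------------------------------------
// Editorial sketch (not part of the library): goalBufferLength and
// bufferLowWaterLine above both grow linearly with currentTime and clamp at a
// maximum. Assuming shipped defaults of GOAL_BUFFER_LENGTH = 30,
// GOAL_BUFFER_LENGTH_RATE = 1 and MAX_GOAL_BUFFER_LENGTH = 60 (values assumed;
// check Config), min(30 + t, 60) gives 30s at t=0, 50s at t=20 and 60s from
// t=30 onward.
function exampleGoalBufferLength(currentTime) {
  var initial = 30, rate = 1, max = 60; // assumed Config defaults
  return Math.min(initial + currentTime * rate, Math.max(initial, max));
}
// ---------------------------------------------------------------------------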
24266 _proto.bufferHighWaterLine = function bufferHighWaterLine() {
24267 return Config.BUFFER_HIGH_WATER_LINE;
24268 };
24269
24270 return MasterPlaylistController;
24271}(videojs.EventTarget);
24272
24273/**
24274 * Returns a function that acts as the Enable/disable playlist function.
24275 *
24276 * @param {PlaylistLoader} loader - The master playlist loader
24277 * @param {string} playlistID - id of the playlist
24278 * @param {Function} changePlaylistFn - A function to be called after a
24279 * playlist's enabled-state has been changed. Will NOT be called if a
24280 * playlist's enabled-state is unchanged
24281 * @param {boolean=} enable - Value to set the playlist enabled-state to
24282 * or if undefined returns the current enabled-state for the playlist
24283 * @return {Function} Function for setting/getting enabled
24284 */
24285
24286var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
24287 return function (enable) {
24288 var playlist = loader.master.playlists[playlistID];
24289 var incompatible = isIncompatible(playlist);
24290 var currentlyEnabled = isEnabled(playlist);
24291
24292 if (typeof enable === 'undefined') {
24293 return currentlyEnabled;
24294 }
24295
24296 if (enable) {
24297 delete playlist.disabled;
24298 } else {
24299 playlist.disabled = true;
24300 }
24301
24302 if (enable !== currentlyEnabled && !incompatible) {
24303 // Ensure the outside world knows about our changes
24304 changePlaylistFn();
24305
24306 if (enable) {
24307 loader.trigger('renditionenabled');
24308 } else {
24309 loader.trigger('renditiondisabled');
24310 }
24311 }
24312
24313 return enable;
24314 };
24315};
24316/**
24317 * The representation object encapsulates the publicly visible information
24318 * in a media playlist along with a setter/getter-type function (enabled)
24319 * for changing the enabled-state of a particular playlist entry
24320 *
24321 * @class Representation
24322 */
24323
24324
24325var Representation = function Representation(vhsHandler, playlist, id) {
24326 var mpc = vhsHandler.masterPlaylistController_,
24327 smoothQualityChange = vhsHandler.options_.smoothQualityChange; // Get a reference to a bound version of the quality change function
24328
24329 var changeType = smoothQualityChange ? 'smooth' : 'fast';
24330 var qualityChangeFunction = mpc[changeType + "QualityChange_"].bind(mpc); // some playlist attributes are optional
24331
24332 if (playlist.attributes) {
24333 var resolution = playlist.attributes.RESOLUTION;
24334 this.width = resolution && resolution.width;
24335 this.height = resolution && resolution.height;
24336 this.bandwidth = playlist.attributes.BANDWIDTH;
24337 }
24338
24339 this.codecs = codecsForPlaylist(mpc.master(), playlist);
24340 this.playlist = playlist; // The id is simply the ordinality of the media playlist
24341 // within the master playlist
24342
24343 this.id = id; // Partially-apply the enableFunction to create a playlist-
24344 // specific variant
24345
24346 this.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);
24347};
24348/**
24349 * A mixin function that adds the `representations` api to an instance
24350 * of the VhsHandler class
24351 *
24352 * @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
24353 * representation API into
24354 */
24355
24356
24357var renditionSelectionMixin = function renditionSelectionMixin(vhsHandler) {
24358 // Add a single API-specific function to the VhsHandler instance
24359 vhsHandler.representations = function () {
24360 var master = vhsHandler.masterPlaylistController_.master();
24361 var playlists = isAudioOnly(master) ? vhsHandler.masterPlaylistController_.getAudioTrackPlaylists_() : master.playlists;
24362
24363 if (!playlists) {
24364 return [];
24365 }
24366
24367 return playlists.filter(function (media) {
24368 return !isIncompatible(media);
24369 }).map(function (e, i) {
24370 return new Representation(vhsHandler, e, e.id);
24371 });
24372 };
24373};
24374
24375/**
24376 * @file playback-watcher.js
24377 *
24378 * Playback starts, and now my watch begins. It shall not end until my death. I shall
24379 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
24380 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
24381 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
24382 * my life and honor to the Playback Watch, for this Player and all the Players to come.
24383 */
24384
24385var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
24386/**
24387 * @class PlaybackWatcher
24388 */
24389
24390var PlaybackWatcher = /*#__PURE__*/function () {
24391 /**
 24392 * Represents a PlaybackWatcher object.
24393 *
24394 * @class
24395 * @param {Object} options an object that includes the tech and settings
24396 */
24397 function PlaybackWatcher(options) {
24398 var _this = this;
24399
24400 this.masterPlaylistController_ = options.masterPlaylistController;
24401 this.tech_ = options.tech;
24402 this.seekable = options.seekable;
24403 this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
24404 this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
24405 this.media = options.media;
24406 this.consecutiveUpdates = 0;
24407 this.lastRecordedTime = null;
24408 this.timer_ = null;
24409 this.checkCurrentTimeTimeout_ = null;
24410 this.logger_ = logger('PlaybackWatcher');
24411 this.logger_('initialize');
24412
24413 var playHandler = function playHandler() {
24414 return _this.monitorCurrentTime_();
24415 };
24416
24417 var canPlayHandler = function canPlayHandler() {
24418 return _this.monitorCurrentTime_();
24419 };
24420
24421 var waitingHandler = function waitingHandler() {
24422 return _this.techWaiting_();
24423 };
24424
24425 var cancelTimerHandler = function cancelTimerHandler() {
24426 return _this.cancelTimer_();
24427 };
24428
24429 var mpc = this.masterPlaylistController_;
24430 var loaderTypes = ['main', 'subtitle', 'audio'];
24431 var loaderChecks = {};
24432 loaderTypes.forEach(function (type) {
24433 loaderChecks[type] = {
24434 reset: function reset() {
24435 return _this.resetSegmentDownloads_(type);
24436 },
24437 updateend: function updateend() {
24438 return _this.checkSegmentDownloads_(type);
24439 }
24440 };
24441 mpc[type + "SegmentLoader_"].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer
 24442 // isn't changing, we want to reset. We cannot assume that the new rendition
 24443 // will also be stalled until we have seen new appends.
24444
24445 mpc[type + "SegmentLoader_"].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.
 24446 // This prevents one-segment playlists (single vtt or single segment content)
 24447 // from being detected as stalling, since the buffer will not change in those
 24448 // cases: the buffer already spans the entire video duration.
24449
24450 _this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
24451 });
24452 /**
24453 * We check if a seek was into a gap through the following steps:
24454 * 1. We get a seeking event and we do not get a seeked event. This means that
24455 * a seek was attempted but not completed.
24456 * 2. We run `fixesBadSeeks_` on segment loader appends. This means that we already
24457 * removed everything from our buffer and appended a segment, and should be ready
24458 * to check for gaps.
24459 */
24460
24461 var setSeekingHandlers = function setSeekingHandlers(fn) {
24462 ['main', 'audio'].forEach(function (type) {
24463 mpc[type + "SegmentLoader_"][fn]('appended', _this.seekingAppendCheck_);
24464 });
24465 };
24466
24467 this.seekingAppendCheck_ = function () {
24468 if (_this.fixesBadSeeks_()) {
24469 _this.consecutiveUpdates = 0;
24470 _this.lastRecordedTime = _this.tech_.currentTime();
24471 setSeekingHandlers('off');
24472 }
24473 };
24474
24475 this.clearSeekingAppendCheck_ = function () {
24476 return setSeekingHandlers('off');
24477 };
24478
24479 this.watchForBadSeeking_ = function () {
24480 _this.clearSeekingAppendCheck_();
24481
24482 setSeekingHandlers('on');
24483 };
24484
24485 this.tech_.on('seeked', this.clearSeekingAppendCheck_);
24486 this.tech_.on('seeking', this.watchForBadSeeking_);
24487 this.tech_.on('waiting', waitingHandler);
24488 this.tech_.on(timerCancelEvents, cancelTimerHandler);
24489 this.tech_.on('canplay', canPlayHandler);
24490 /*
24491 An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
24492 is surfaced in one of two ways:
24493 1) The `waiting` event is fired before the player has buffered content, making it impossible
24494 to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
24495 we can check if playback is stalled due to a gap, and skip the gap if necessary.
 24496 2) A source with a gap at the beginning of the stream is loaded programmatically while the player
24497 is in a playing state. To catch this case, it's important that our one-time play listener is setup
24498 even if the player is in a playing state
24499 */
24500
24501 this.tech_.one('play', playHandler); // Define the dispose function to clean up our events
24502
24503 this.dispose = function () {
24504 _this.clearSeekingAppendCheck_();
24505
24506 _this.logger_('dispose');
24507
24508 _this.tech_.off('waiting', waitingHandler);
24509
24510 _this.tech_.off(timerCancelEvents, cancelTimerHandler);
24511
24512 _this.tech_.off('canplay', canPlayHandler);
24513
24514 _this.tech_.off('play', playHandler);
24515
24516 _this.tech_.off('seeking', _this.watchForBadSeeking_);
24517
24518 _this.tech_.off('seeked', _this.clearSeekingAppendCheck_);
24519
24520 loaderTypes.forEach(function (type) {
24521 mpc[type + "SegmentLoader_"].off('appendsdone', loaderChecks[type].updateend);
24522 mpc[type + "SegmentLoader_"].off('playlistupdate', loaderChecks[type].reset);
24523
24524 _this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
24525 });
24526
24527 if (_this.checkCurrentTimeTimeout_) {
24528 window$1.clearTimeout(_this.checkCurrentTimeTimeout_);
24529 }
24530
24531 _this.cancelTimer_();
24532 };
24533 }
24534 /**
24535 * Periodically check current time to see if playback stopped
24536 *
24537 * @private
24538 */
24539
24540
24541 var _proto = PlaybackWatcher.prototype;
24542
24543 _proto.monitorCurrentTime_ = function monitorCurrentTime_() {
24544 this.checkCurrentTime_();
24545
24546 if (this.checkCurrentTimeTimeout_) {
24547 window$1.clearTimeout(this.checkCurrentTimeTimeout_);
 24548 } // polling interval notes: 250ms is what WebKit uses; Firefox uses 15ms, and 42ms would be ~24fps
24549
24550
24551 this.checkCurrentTimeTimeout_ = window$1.setTimeout(this.monitorCurrentTime_.bind(this), 250);
24552 }
24553 /**
24554 * Reset stalled download stats for a specific type of loader
24555 *
24556 * @param {string} type
24557 * The segment loader type to check.
24558 *
24559 * @listens SegmentLoader#playlistupdate
24560 * @listens Tech#seeking
24561 * @listens Tech#seeked
24562 */
24563 ;
24564
24565 _proto.resetSegmentDownloads_ = function resetSegmentDownloads_(type) {
24566 var loader = this.masterPlaylistController_[type + "SegmentLoader_"];
24567
24568 if (this[type + "StalledDownloads_"] > 0) {
24569 this.logger_("resetting possible stalled download count for " + type + " loader");
24570 }
24571
24572 this[type + "StalledDownloads_"] = 0;
24573 this[type + "Buffered_"] = loader.buffered_();
24574 }
24575 /**
24576 * Checks on every segment `appendsdone` to see
 24577 * if segment appends are making progress. If they are not,
 24578 * and we are still downloading bytes, we blacklist the playlist.
24579 *
24580 * @param {string} type
24581 * The segment loader type to check.
24582 *
24583 * @listens SegmentLoader#appendsdone
24584 */
24585 ;
24586
24587 _proto.checkSegmentDownloads_ = function checkSegmentDownloads_(type) {
24588 var mpc = this.masterPlaylistController_;
24589 var loader = mpc[type + "SegmentLoader_"];
24590 var buffered = loader.buffered_();
24591 var isBufferedDifferent = isRangeDifferent(this[type + "Buffered_"], buffered);
24592 this[type + "Buffered_"] = buffered; // if another watcher is going to fix the issue or
24593 // the buffered value for this loader changed
24594 // appends are working
24595
24596 if (isBufferedDifferent) {
24597 this.resetSegmentDownloads_(type);
24598 return;
24599 }
24600
24601 this[type + "StalledDownloads_"]++;
24602 this.logger_("found #" + this[type + "StalledDownloads_"] + " " + type + " appends that did not increase buffer (possible stalled download)", {
24603 playlistId: loader.playlist_ && loader.playlist_.id,
24604 buffered: timeRangesToArray(buffered)
24605 }); // after 10 possibly stalled appends with no reset, exclude
24606
24607 if (this[type + "StalledDownloads_"] < 10) {
24608 return;
24609 }
24610
24611 this.logger_(type + " loader stalled download exclusion");
24612 this.resetSegmentDownloads_(type);
24613 this.tech_.trigger({
24614 type: 'usage',
24615 name: "vhs-" + type + "-download-exclusion"
24616 });
24617
24618 if (type === 'subtitle') {
24619 return;
24620 } // TODO: should we exclude audio tracks rather than main tracks
24621 // when type is audio?
24622
24623
24624 mpc.blacklistCurrentPlaylist({
24625 message: "Excessive " + type + " segment downloading detected."
24626 }, Infinity);
24627 }
24628 /**
24629 * The purpose of this function is to emulate the "waiting" event on
24630 * browsers that do not emit it when they are waiting for more
24631 * data to continue playback
24632 *
24633 * @private
24634 */
24635 ;
24636
24637 _proto.checkCurrentTime_ = function checkCurrentTime_() {
24638 if (this.tech_.paused() || this.tech_.seeking()) {
24639 return;
24640 }
24641
24642 var currentTime = this.tech_.currentTime();
24643 var buffered = this.tech_.buffered();
24644
24645 if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
24646 // If current time is at the end of the final buffered region, then any playback
24647 // stall is most likely caused by buffering in a low bandwidth environment. The tech
24648 // should fire a `waiting` event in this scenario, but browser and tech
24649 // inconsistencies mean it may not. Calling `techWaiting_` here allows us to simulate
24650 // responding to a native `waiting` event when the tech fails to emit one.
24651 return this.techWaiting_();
24652 }
24653
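// a sketch of the timing, given the 250ms monitorCurrentTime_ interval above:
// five consecutive checks at an unchanged currentTime means playback has been
// stuck for roughly 5 * 250ms = 1.25s before waiting_() is invoked below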
24654 if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
24655 this.consecutiveUpdates++;
24656 this.waiting_();
24657 } else if (currentTime === this.lastRecordedTime) {
24658 this.consecutiveUpdates++;
24659 } else {
24660 this.consecutiveUpdates = 0;
24661 this.lastRecordedTime = currentTime;
24662 }
24663 }
24664 /**
24665 * Cancels any pending timers and resets the 'timeupdate' mechanism
24666 * designed to detect that we are stalled
24667 *
24668 * @private
24669 */
24670 ;
24671
24672 _proto.cancelTimer_ = function cancelTimer_() {
24673 this.consecutiveUpdates = 0;
24674
24675 if (this.timer_) {
24676 this.logger_('cancelTimer_');
24677 clearTimeout(this.timer_);
24678 }
24679
24680 this.timer_ = null;
24681 }
24682 /**
24683 * Fixes situations where there's a bad seek
24684 *
24685 * @return {boolean} whether an action was taken to fix the seek
24686 * @private
24687 */
24688 ;
24689
24690 _proto.fixesBadSeeks_ = function fixesBadSeeks_() {
24691 var seeking = this.tech_.seeking();
24692
24693 if (!seeking) {
24694 return false;
24695 } // TODO: It's possible that these seekable checks should be moved out of this function
24696 // and into a function that runs on seekablechange. It's also possible that we only need
24697 // afterSeekableWindow, as the buffered check at the bottom is good enough to handle the
24698 // before-seekable case.
24699
24700
24701 var seekable = this.seekable();
24702 var currentTime = this.tech_.currentTime();
24703 var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
24704 var seekTo;
24705
24706 if (isAfterSeekableRange) {
24707 var seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
24708
24709 seekTo = seekableEnd;
24710 }
24711
24712 if (this.beforeSeekableWindow_(seekable, currentTime)) {
24713 var seekableStart = seekable.start(0); // sync to the beginning of the live window
24714 // provide a buffer of .1 seconds to handle rounding/imprecise numbers
24715
24716 seekTo = seekableStart + ( // if the playlist is too short and the seekable range is an exact time (can
24717 // happen in live with a 3 segment playlist), then don't use a time delta
24718 seekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);
24719 }
24720
24721 if (typeof seekTo !== 'undefined') {
24722 this.logger_("Trying to seek outside of seekable at time " + currentTime + " with " + ("seekable range " + printableRange(seekable) + ". Seeking to ") + (seekTo + "."));
24723 this.tech_.setCurrentTime(seekTo);
24724 return true;
24725 }
24726
24727 var sourceUpdater = this.masterPlaylistController_.sourceUpdater_;
24728 var buffered = this.tech_.buffered();
24729 var audioBuffered = sourceUpdater.audioBuffer ? sourceUpdater.audioBuffered() : null;
24730 var videoBuffered = sourceUpdater.videoBuffer ? sourceUpdater.videoBuffered() : null;
24731 var media = this.media(); // verify that at least two segment durations or one part duration have been
24732 // appended before checking for a gap.
24733
24734 var minAppendedDuration = media.partTargetDuration ? media.partTargetDuration : (media.targetDuration - TIME_FUDGE_FACTOR) * 2;
24735
24736
24737 var bufferedToCheck = [audioBuffered, videoBuffered];
24738
24739 for (var i = 0; i < bufferedToCheck.length; i++) {
24740 // skip null buffered
24741 if (!bufferedToCheck[i]) {
24742 continue;
24743 }
24744
24745 var timeAhead = timeAheadOf(bufferedToCheck[i], currentTime); // if less than two video/audio segment durations (or one part
24746 // duration) are buffered ahead, we haven't appended enough to call this a bad seek.
24747
24748 if (timeAhead < minAppendedDuration) {
24749 return false;
24750 }
24751 }
24752
24753 var nextRange = findNextRange(buffered, currentTime); // we have appended enough content, but we don't have anything buffered
24754 // to seek over the gap
24755
24756 if (nextRange.length === 0) {
24757 return false;
24758 }
24759
24760 seekTo = nextRange.start(0) + SAFE_TIME_DELTA;
24761 this.logger_("Buffered region starts (" + nextRange.start(0) + ") " + (" just beyond seek point (" + currentTime + "). Seeking to " + seekTo + "."));
24762 this.tech_.setCurrentTime(seekTo);
24763 return true;
24764 }
24765 /**
24766 * Handler for situations when we determine the player is waiting.
24767 *
24768 * @private
24769 */
24770 ;
24771
24772 _proto.waiting_ = function waiting_() {
24773 if (this.techWaiting_()) {
24774 return;
24775 } // All tech waiting checks failed. Use last resort correction
24776
24777
24778 var currentTime = this.tech_.currentTime();
24779 var buffered = this.tech_.buffered();
24780 var currentRange = findRange(buffered, currentTime); // Sometimes the player can stall for unknown reasons within a contiguous buffered
24781 // region with no indication that anything is amiss (seen in Firefox). Seeking to
24782 // currentTime is usually enough to kickstart the player. This checks that the player
24783 // is currently within a buffered region before attempting a corrective seek.
24784 // Chrome does not appear to continue `timeupdate` events after a `waiting` event
24785 // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
24786 // make sure there is ~3 seconds of forward buffer before taking any corrective action
24787 // to avoid triggering an `unknownwaiting` event when the network is slow.
24788
24789 if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
24790 this.cancelTimer_();
24791 this.tech_.setCurrentTime(currentTime);
24792 this.logger_("Stopped at " + currentTime + " while inside a buffered region " + ("[" + currentRange.start(0) + " -> " + currentRange.end(0) + "]. Attempting to resume ") + 'playback by seeking to the current time.'); // unknown waiting corrections may be useful for monitoring QoS
24793
24794 this.tech_.trigger({
24795 type: 'usage',
24796 name: 'vhs-unknown-waiting'
24797 });
24798 this.tech_.trigger({
24799 type: 'usage',
24800 name: 'hls-unknown-waiting'
24801 });
24802 return;
24803 }
24804 }
24805 /**
24806 * Handler for situations when the tech fires a `waiting` event
24807 *
24808 * @return {boolean}
24809 * True if an action was taken (or none was needed) to correct the waiting.
24810 * False if no checks passed
24811 * @private
24812 */
24813 ;
24814
24815 _proto.techWaiting_ = function techWaiting_() {
24816 var seekable = this.seekable();
24817 var currentTime = this.tech_.currentTime();
24818
24819 if (this.tech_.seeking() || this.timer_ !== null) {
24820 // Tech is seeking or already waiting on another action, no action needed
24821 return true;
24822 }
24823
24824 if (this.beforeSeekableWindow_(seekable, currentTime)) {
24825 var livePoint = seekable.end(seekable.length - 1);
24826 this.logger_("Fell out of live window at time " + currentTime + ". Seeking to " + ("live point (seekable end) " + livePoint));
24827 this.cancelTimer_();
24828 this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoS
24829
24830 this.tech_.trigger({
24831 type: 'usage',
24832 name: 'vhs-live-resync'
24833 });
24834 this.tech_.trigger({
24835 type: 'usage',
24836 name: 'hls-live-resync'
24837 });
24838 return true;
24839 }
24840
24841 var sourceUpdater = this.tech_.vhs.masterPlaylistController_.sourceUpdater_;
24842 var buffered = this.tech_.buffered();
24843 var videoUnderflow = this.videoUnderflow_({
24844 audioBuffered: sourceUpdater.audioBuffered(),
24845 videoBuffered: sourceUpdater.videoBuffered(),
24846 currentTime: currentTime
24847 });
24848
24849 if (videoUnderflow) {
24850 // Even though the video underflowed and was stuck in a gap, the audio overplayed
24851 // the gap, leading currentTime into a buffered range. Seeking to currentTime
24852 // allows the video to catch up to the audio position without losing any audio
24853 // (only suffering ~3 seconds of frozen video and a pause in audio playback).
24854 this.cancelTimer_();
24855 this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoS
24856
24857 this.tech_.trigger({
24858 type: 'usage',
24859 name: 'vhs-video-underflow'
24860 });
24861 this.tech_.trigger({
24862 type: 'usage',
24863 name: 'hls-video-underflow'
24864 });
24865 return true;
24866 }
24867
24868 var nextRange = findNextRange(buffered, currentTime); // check for gap
24869
24870 if (nextRange.length > 0) {
24871 var difference = nextRange.start(0) - currentTime;
24872 this.logger_("Stopped at " + currentTime + ", setting timer for " + difference + ", seeking " + ("to " + nextRange.start(0)));
24873 this.cancelTimer_();
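// note: the third setTimeout argument is forwarded to skipTheGap_ as
// scheduledCurrentTime, so the deferred seek is abandoned if playback
// has progressed past the recorded time in the meantime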
24874 this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
24875 return true;
24876 } // All checks failed. Returning false to indicate failure to correct waiting
24877
24878
24879 return false;
24880 };
24881
24882 _proto.afterSeekableWindow_ = function afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow) {
24883 if (allowSeeksWithinUnsafeLiveWindow === void 0) {
24884 allowSeeksWithinUnsafeLiveWindow = false;
24885 }
24886
24887 if (!seekable.length) {
24888 // we can't make a solid case if there's no seekable, default to false
24889 return false;
24890 }
24891
24892 var allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
24893 var isLive = !playlist.endList;
24894
24895 if (isLive && allowSeeksWithinUnsafeLiveWindow) {
24896 allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
24897 }
24898
24899 if (currentTime > allowedEnd) {
24900 return true;
24901 }
24902
24903 return false;
24904 };
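// illustrative numbers: with seekable [[0, 100]] and a live playlist whose
// targetDuration is 10, allowSeeksWithinUnsafeLiveWindow extends the allowed
// end to 100 + 10 * 3 = 130, so only a currentTime above 130 is "after" the window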
24905
24906 _proto.beforeSeekableWindow_ = function beforeSeekableWindow_(seekable, currentTime) {
24907 if (seekable.length && // can't fall before 0 and 0 seekable start identifies VOD stream
24908 seekable.start(0) > 0 && currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
24909 return true;
24910 }
24911
24912 return false;
24913 };
24914
24915 _proto.videoUnderflow_ = function videoUnderflow_(_ref) {
24916 var videoBuffered = _ref.videoBuffered,
24917 audioBuffered = _ref.audioBuffered,
24918 currentTime = _ref.currentTime;
24919
24920 // audio only content will not have video underflow :)
24921 if (!videoBuffered) {
24922 return;
24923 }
24924
24925 var gap; // find a gap in demuxed content.
24926
24927 if (videoBuffered.length && audioBuffered.length) {
24928 // in Chrome audio will continue to play for ~3s when we run out of video
24929 // so we have to check that the video buffer did have some buffer in the
24930 // past.
24931 var lastVideoRange = findRange(videoBuffered, currentTime - 3);
24932 var videoRange = findRange(videoBuffered, currentTime);
24933 var audioRange = findRange(audioBuffered, currentTime);
24934
24935 if (audioRange.length && !videoRange.length && lastVideoRange.length) {
24936 gap = {
24937 start: lastVideoRange.end(0),
24938 end: audioRange.end(0)
24939 };
24940 } // find a gap in muxed content.
24941
24942 } else {
24943 var nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are
24944 // stuck in a gap due to video underflow.
24945
24946 if (!nextRange.length) {
24947 gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
24948 }
24949 }
24950
24951 if (gap) {
24952 this.logger_("Encountered a gap in video from " + gap.start + " to " + gap.end + ". " + ("Seeking to current time " + currentTime));
24953 return true;
24954 }
24955
24956 return false;
24957 }
24958 /**
24959 * Timer callback. If playback still has not proceeded, then we seek
24960 * to the start of the next buffered region.
24961 *
24962 * @private
24963 */
24964 ;
24965
24966 _proto.skipTheGap_ = function skipTheGap_(scheduledCurrentTime) {
24967 var buffered = this.tech_.buffered();
24968 var currentTime = this.tech_.currentTime();
24969 var nextRange = findNextRange(buffered, currentTime);
24970 this.cancelTimer_();
24971
24972 if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
24973 return;
24974 }
24975
24976 this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0)); // only seek if we still have not played
24977
24978 this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);
24979 this.tech_.trigger({
24980 type: 'usage',
24981 name: 'vhs-gap-skip'
24982 });
24983 this.tech_.trigger({
24984 type: 'usage',
24985 name: 'hls-gap-skip'
24986 });
24987 };
24988
24989 _proto.gapFromVideoUnderflow_ = function gapFromVideoUnderflow_(buffered, currentTime) {
24990 // At least in Chrome, if there is a gap in the video buffer, the audio will continue
24991 // playing for ~3 seconds after the video gap starts. This is done to account for
24992 // video buffer underflow/underrun (note that this is not done when there is audio
24993 // buffer underflow/underrun -- in that case the video will stop as soon as it
24994 // encounters the gap, as audio stalls are more noticeable/jarring to a user than
24995 // video stalls). The player's time will reflect the playthrough of audio, so the
24996 // time will appear as if we are in a buffered region, even if we are stuck in a
24997 // "gap."
24998 //
24999 // Example:
25000 // video buffer: 0 => 10.1, 10.2 => 20
25001 // audio buffer: 0 => 20
25002 // overall buffer: 0 => 10.1, 10.2 => 20
25003 // current time: 13
25004 //
25005 // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
25006 // however, the audio continued playing until it reached ~3 seconds past the gap
25007 // (13 seconds), at which point it stops as well. Since current time is past the
25008 // gap, findNextRange will return no ranges.
25009 //
25010 // To check for this issue, we see if there is a gap that starts somewhere within
25011 // a 3 second range (3 seconds +/- 1 second) back from our current time.
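// continuing the example above: gaps = [[10.1, 10.2]] with currentTime = 13
// gives currentTime - start = 2.9, which falls inside the (2, 4) window checked
// below, so { start: 10.1, end: 10.2 } is returned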
25012 var gaps = findGaps(buffered);
25013
25014 for (var i = 0; i < gaps.length; i++) {
25015 var start = gaps.start(i);
25016 var end = gaps.end(i); // gap starts no more than 4 seconds (and no less than 2) back from currentTime
25017
25018 if (currentTime - start < 4 && currentTime - start > 2) {
25019 return {
25020 start: start,
25021 end: end
25022 };
25023 }
25024 }
25025
25026 return null;
25027 };
25028
25029 return PlaybackWatcher;
25030}();
25031
25032var defaultOptions = {
25033 errorInterval: 30,
25034 getSource: function getSource(next) {
25035 var tech = this.tech({
25036 IWillNotUseThisInPlugins: true
25037 });
25038 var sourceObj = tech.currentSource_ || this.currentSource();
25039 return next(sourceObj);
25040 }
25041};
25042/**
25043 * Main entry point for the plugin
25044 *
25045 * @param {Player} player a reference to a videojs Player instance
25046 * @param {Object} [options] an object with plugin options
25047 * @private
25048 */
25049
25050var initPlugin = function initPlugin(player, options) {
25051 var lastCalled = 0;
25052 var seekTo = 0;
25053 var localOptions = videojs.mergeOptions(defaultOptions, options);
25054 player.ready(function () {
25055 player.trigger({
25056 type: 'usage',
25057 name: 'vhs-error-reload-initialized'
25058 });
25059 player.trigger({
25060 type: 'usage',
25061 name: 'hls-error-reload-initialized'
25062 });
25063 });
25064 /**
25065 * Player modifications to perform that must wait until `loadedmetadata`
25066 * has been triggered
25067 *
25068 * @private
25069 */
25070
25071 var loadedMetadataHandler = function loadedMetadataHandler() {
25072 if (seekTo) {
25073 player.currentTime(seekTo);
25074 }
25075 };
25076 /**
25077 * Set the source on the player element, play, and seek if necessary
25078 *
25079 * @param {Object} sourceObj An object specifying the source url and mime-type to play
25080 * @private
25081 */
25082
25083
25084 var setSource = function setSource(sourceObj) {
25085 if (sourceObj === null || sourceObj === undefined) {
25086 return;
25087 }
25088
25089 seekTo = player.duration() !== Infinity && player.currentTime() || 0;
25090 player.one('loadedmetadata', loadedMetadataHandler);
25091 player.src(sourceObj);
25092 player.trigger({
25093 type: 'usage',
25094 name: 'vhs-error-reload'
25095 });
25096 player.trigger({
25097 type: 'usage',
25098 name: 'hls-error-reload'
25099 });
25100 player.play();
25101 };
25102 /**
25103 * Attempt to get a source from either the built-in getSource function
25104 * or a custom function provided via the options
25105 *
25106 * @private
25107 */
25108
25109
25110 var errorHandler = function errorHandler() {
25111 // Do not attempt to reload the source if a source-reload occurred before
25112 // 'errorInterval' time has elapsed since the last source-reload
25113 if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
25114 player.trigger({
25115 type: 'usage',
25116 name: 'vhs-error-reload-canceled'
25117 });
25118 player.trigger({
25119 type: 'usage',
25120 name: 'hls-error-reload-canceled'
25121 });
25122 return;
25123 }
25124
25125 if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
25126 videojs.log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
25127 return;
25128 }
25129
25130 lastCalled = Date.now();
25131 return localOptions.getSource.call(player, setSource);
25132 };
25133 /**
25134 * Unbind any event handlers that were bound by the plugin
25135 *
25136 * @private
25137 */
25138
25139
25140 var cleanupEvents = function cleanupEvents() {
25141 player.off('loadedmetadata', loadedMetadataHandler);
25142 player.off('error', errorHandler);
25143 player.off('dispose', cleanupEvents);
25144 };
25145 /**
25146 * Cleanup before re-initializing the plugin
25147 *
25148 * @param {Object} [newOptions] an object with plugin options
25149 * @private
25150 */
25151
25152
25153 var reinitPlugin = function reinitPlugin(newOptions) {
25154 cleanupEvents();
25155 initPlugin(player, newOptions);
25156 };
25157
25158 player.on('error', errorHandler);
25159 player.on('dispose', cleanupEvents); // Overwrite the plugin function so that we can correctly cleanup before
25160 // initializing the plugin
25161
25162 player.reloadSourceOnError = reinitPlugin;
25163};
25164/**
25165 * Reload the source when an error is detected as long as there
25166 * wasn't an error previously within the last 30 seconds
25167 *
25168 * @param {Object} [options] an object with plugin options
25169 */
25170
25171
25172var reloadSourceOnError = function reloadSourceOnError(options) {
25173 initPlugin(this, options);
25174};
25175
25176var version$4 = "2.14.2";
25177
25178var version$3 = "6.0.1";
25179
25180var version$2 = "0.21.1";
25181
25182var version$1 = "4.7.1";
25183
25184var version = "3.1.3";
25185
25186var Vhs = {
25187 PlaylistLoader: PlaylistLoader,
25188 Playlist: Playlist,
25189 utils: utils,
25190 STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
25191 INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
25192 lastBandwidthSelector: lastBandwidthSelector,
25193 movingAverageBandwidthSelector: movingAverageBandwidthSelector,
25194 comparePlaylistBandwidth: comparePlaylistBandwidth,
25195 comparePlaylistResolution: comparePlaylistResolution,
25196 xhr: xhrFactory()
25197}; // Define getter/setters for config properties
25198
25199Object.keys(Config).forEach(function (prop) {
25200 Object.defineProperty(Vhs, prop, {
25201 get: function get() {
25202 videojs.log.warn("using Vhs." + prop + " is UNSAFE be sure you know what you are doing");
25203 return Config[prop];
25204 },
25205 set: function set(value) {
25206 videojs.log.warn("using Vhs." + prop + " is UNSAFE be sure you know what you are doing");
25207
25208 if (typeof value !== 'number' || value < 0) {
25209 videojs.log.warn("value of Vhs." + prop + " must be greater than or equal to 0");
25210 return;
25211 }
25212
25213 Config[prop] = value;
25214 }
25215 });
25216});
25217var LOCAL_STORAGE_KEY = 'videojs-vhs';
25218/**
25219 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.
25220 *
25221 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
25222 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
25223 * @function handleVhsMediaChange
25224 */
25225
25226var handleVhsMediaChange = function handleVhsMediaChange(qualityLevels, playlistLoader) {
25227 var newPlaylist = playlistLoader.media();
25228 var selectedIndex = -1;
25229
25230 for (var i = 0; i < qualityLevels.length; i++) {
25231 if (qualityLevels[i].id === newPlaylist.id) {
25232 selectedIndex = i;
25233 break;
25234 }
25235 }
25236
25237 qualityLevels.selectedIndex_ = selectedIndex;
25238 qualityLevels.trigger({
25239 selectedIndex: selectedIndex,
25240 type: 'change'
25241 });
25242};
25243/**
25244 * Adds quality levels to list once playlist metadata is available
25245 *
25246 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
25247 * @param {Object} vhs Vhs object to listen to for media events.
25248 * @function handleVhsLoadedMetadata
25249 */
25250
25251
25252var handleVhsLoadedMetadata = function handleVhsLoadedMetadata(qualityLevels, vhs) {
25253 vhs.representations().forEach(function (rep) {
25254 qualityLevels.addQualityLevel(rep);
25255 });
25256 handleVhsMediaChange(qualityLevels, vhs.playlists);
25257}; // HLS is a source handler, not a tech. Make sure attempts to use it
25258// as one do not cause exceptions.
25259
25260
25261Vhs.canPlaySource = function () {
25262 return videojs.log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
25263};
25264
25265var emeKeySystems = function emeKeySystems(keySystemOptions, mainPlaylist, audioPlaylist) {
25266 if (!keySystemOptions) {
25267 return keySystemOptions;
25268 }
25269
25270 var codecs = {};
25271
25272 if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {
25273 codecs = unwrapCodecList(parseCodecs(mainPlaylist.attributes.CODECS));
25274 }
25275
25276 if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {
25277 codecs.audio = audioPlaylist.attributes.CODECS;
25278 }
25279
25280 var videoContentType = getMimeForCodec(codecs.video);
25281 var audioContentType = getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlist
25282
25283 var keySystemContentTypes = {};
25284
25285 for (var keySystem in keySystemOptions) {
25286 keySystemContentTypes[keySystem] = {};
25287
25288 if (audioContentType) {
25289 keySystemContentTypes[keySystem].audioContentType = audioContentType;
25290 }
25291
25292 if (videoContentType) {
25293 keySystemContentTypes[keySystem].videoContentType = videoContentType;
25294 } // Default to using the video playlist's PSSH even though they may be different, as
25295 // videojs-contrib-eme will only accept one in the options.
25296 //
25297 // This shouldn't be an issue for most cases, as early initialization will handle all
25298 // unique PSSH values; if it doesn't, then encrypted events should have the
25299 // specific information needed for the unique license.
25300
25301
25302 if (mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {
25303 keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;
25304 } // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
25305 // so we need to prevent overwriting the URL entirely
25306
25307
25308 if (typeof keySystemOptions[keySystem] === 'string') {
25309 keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
25310 }
25311 }
25312
25313 return videojs.mergeOptions(keySystemOptions, keySystemContentTypes);
25314};
25315/**
25316 * @typedef {Object} KeySystems
25317 *
25318 * keySystems configuration for https://github.com/videojs/videojs-contrib-eme
25319 * Note: not all options are listed here.
25320 *
25321 * @property {Uint8Array} [pssh]
25322 * Protection System Specific Header
25323 */
25324
25325/**
25326 * Goes through all the playlists and collects an array of KeySystems options objects
25327 * containing each playlist's keySystems and their pssh values, if available.
25328 *
25329 * @param {Object[]} playlists
25330 * The playlists to look through
25331 * @param {string[]} keySystems
25332 * The keySystems to collect pssh values for
25333 *
25334 * @return {KeySystems[]}
25335 * An array of KeySystems objects containing available key systems and their
25336 * pssh values
25337 */
25338
25339
25340var getAllPsshKeySystemsOptions = function getAllPsshKeySystemsOptions(playlists, keySystems) {
25341 return playlists.reduce(function (keySystemsArr, playlist) {
25342 if (!playlist.contentProtection) {
25343 return keySystemsArr;
25344 }
25345
25346 var keySystemsOptions = keySystems.reduce(function (keySystemsObj, keySystem) {
25347 var keySystemOptions = playlist.contentProtection[keySystem];
25348
25349 if (keySystemOptions && keySystemOptions.pssh) {
25350 keySystemsObj[keySystem] = {
25351 pssh: keySystemOptions.pssh
25352 };
25353 }
25354
25355 return keySystemsObj;
25356 }, {});
25357
25358 if (Object.keys(keySystemsOptions).length) {
25359 keySystemsArr.push(keySystemsOptions);
25360 }
25361
25362 return keySystemsArr;
25363 }, []);
25364};
25365/**
25366 * Returns a promise that waits for the
25367 * [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
25368 *
25369 * Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
25370 * browsers.
25371 *
25372 * As per the above ticket, this is particularly important for Chrome, where, if
25373 * unencrypted content is appended before encrypted content and the key session has not
25374 * been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
25375 * during playback.
25376 *
25377 * @param {Object} player
25378 * The player instance
25379 * @param {Object[]} sourceKeySystems
25380 * The key systems options from the player source
25381 * @param {Object} [audioMedia]
25382 * The active audio media playlist (optional)
25383 * @param {Object[]} mainPlaylists
25384 * The playlists found on the master playlist object
25385 *
25386 * @return {Object}
25387 * Promise that resolves when the key session has been created
25388 */
25389
25390
25391var waitForKeySessionCreation = function waitForKeySessionCreation(_ref) {
25392 var player = _ref.player,
25393 sourceKeySystems = _ref.sourceKeySystems,
25394 audioMedia = _ref.audioMedia,
25395 mainPlaylists = _ref.mainPlaylists;
25396
25397 if (!player.eme.initializeMediaKeys) {
25398 return Promise.resolve();
25399 } // TODO should all audio PSSH values be initialized for DRM?
25400 //
25401 // All unique video rendition pssh values are initialized for DRM, but here only
25402 // the initial audio playlist license is initialized. In theory, an encrypted
25403 // event should be fired if the user switches to an alternative audio playlist
25404 // where a license is required, but this case hasn't yet been tested. In addition, there
25405 // may be many alternate audio playlists unlikely to be used (e.g., multiple different
25406 // languages).
25407
25408
25409 var playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;
25410 var keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
25411 var initializationFinishedPromises = [];
25412 var keySessionCreatedPromises = []; // Since PSSH values are interpreted as initData, EME will dedupe any duplicates. The
25413 // only place where it should not be deduped is for ms-prefixed APIs, but the early
25414 // return for IE11 above, and the existence of modern EME APIs in addition to
25415 // ms-prefixed APIs on Edge should prevent this from being a concern.
25416 // initializeMediaKeys also won't use the webkit-prefixed APIs.
25417
25418 keySystemsOptionsArr.forEach(function (keySystemsOptions) {
25419 keySessionCreatedPromises.push(new Promise(function (resolve, reject) {
25420 player.tech_.one('keysessioncreated', resolve);
25421 }));
25422 initializationFinishedPromises.push(new Promise(function (resolve, reject) {
25423 player.eme.initializeMediaKeys({
25424 keySystems: keySystemsOptions
25425 }, function (err) {
25426 if (err) {
25427 reject(err);
25428 return;
25429 }
25430
25431 resolve();
25432 });
25433 }));
25434 }); // The reasons Promise.race is chosen over Promise.any:
25435 //
25436 // * Promise.any is only available in Safari 14+.
25437 // * None of these promises are expected to reject. If they do reject, it might be
25438 // better here for the race to surface the rejection, rather than mask it by using
25439 // Promise.any.
25440
25441 return Promise.race([// If a session was previously created, these will all finish resolving without
25442 // creating a new session, otherwise it will take until the end of all license
25443 // requests, which is why the key session check is used (to make setup much faster).
25444 Promise.all(initializationFinishedPromises), // Once a single session is created, the browser knows DRM will be used.
25445 Promise.race(keySessionCreatedPromises)]);
25446};
25447/**
25448 * If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
25449 * there are keySystems on the source, sets up source options to prepare the source for
25450 * eme.
25451 *
25452 * @param {Object} player
25453 * The player instance
25454 * @param {Object[]} sourceKeySystems
25455 * The key systems options from the player source
25456 * @param {Object} media
25457 * The active media playlist
25458 * @param {Object} [audioMedia]
25459 * The active audio media playlist (optional)
25460 *
25461 * @return {boolean}
25462 * Whether or not options were configured and EME is available
25463 */
25464
25465var setupEmeOptions = function setupEmeOptions(_ref2) {
25466 var player = _ref2.player,
25467 sourceKeySystems = _ref2.sourceKeySystems,
25468 media = _ref2.media,
25469 audioMedia = _ref2.audioMedia;
25470 var sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);
25471
25472 if (!sourceOptions) {
25473 return false;
25474 }
25475
25476 player.currentSource().keySystems = sourceOptions; // eme handles the rest of the setup, so if it is missing
25477 // do nothing.
25478
25479 if (sourceOptions && !player.eme) {
25480 videojs.log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
25481 return false;
25482 }
25483
25484 return true;
25485};
25486
25487var getVhsLocalStorage = function getVhsLocalStorage() {
25488 if (!window$1.localStorage) {
25489 return null;
25490 }
25491
25492 var storedObject = window$1.localStorage.getItem(LOCAL_STORAGE_KEY);
25493
25494 if (!storedObject) {
25495 return null;
25496 }
25497
25498 try {
25499 return JSON.parse(storedObject);
25500 } catch (e) {
25501 // someone may have tampered with the value
25502 return null;
25503 }
25504};
25505
25506var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
25507 if (!window$1.localStorage) {
25508 return false;
25509 }
25510
25511 var objectToStore = getVhsLocalStorage();
25512 objectToStore = objectToStore ? videojs.mergeOptions(objectToStore, options) : options;
25513
25514 try {
25515 window$1.localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
25516 } catch (e) {
25517 // Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
25518 // storage is set to 0).
25519 // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
25520 // No need to perform any operation.
25521 return false;
25522 }
25523
25524 return objectToStore;
25525};
25526/**
25527 * Parses VHS-supported media types from data URIs. See
25528 * https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
25529 * for information on data URIs.
25530 *
25531 * @param {string} dataUri
25532 * The data URI
25533 *
25534 * @return {string|Object}
25535 * The parsed object/string, or the original string if no supported media type
25536 * was found
25537 */
25538
25539
25540var expandDataUri = function expandDataUri(dataUri) {
25541 if (dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0) {
25542 return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
25543 } // no known case for this data URI, return the string as-is
25544
25545
25546 return dataUri;
25547};
25548/**
25549 * Whether the browser has built-in HLS support.
25550 */
25551
25552
25553Vhs.supportsNativeHls = function () {
25554 if (!document || !document.createElement) {
25555 return false;
25556 }
25557
25558 var video = document.createElement('video'); // native HLS is definitely not supported if HTML5 video isn't
25559
25560 if (!videojs.getTech('Html5').isSupported()) {
25561 return false;
25562 } // HLS manifests can go by many mime-types
25563
25564
25565 var canPlay = [// Apple sanctioned
25566 'application/vnd.apple.mpegurl', // Apple sanctioned for backwards compatibility
25567 'audio/mpegurl', // Very common
25568 'audio/x-mpegurl', // Very common
25569 'application/x-mpegurl', // Included for completeness
25570 'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];
25571 return canPlay.some(function (canItPlay) {
25572 return /maybe|probably/i.test(video.canPlayType(canItPlay));
25573 });
25574}();
25575
25576Vhs.supportsNativeDash = function () {
25577 if (!document || !document.createElement || !videojs.getTech('Html5').isSupported()) {
25578 return false;
25579 }
25580
25581 return /maybe|probably/i.test(document.createElement('video').canPlayType('application/dash+xml'));
25582}();
25583
25584Vhs.supportsTypeNatively = function (type) {
25585 if (type === 'hls') {
25586 return Vhs.supportsNativeHls;
25587 }
25588
25589 if (type === 'dash') {
25590 return Vhs.supportsNativeDash;
25591 }
25592
25593 return false;
25594};
25595/**
25596 * HLS is a source handler, not a tech. Make sure attempts to use it
25597 * as one do not cause exceptions.
25598 */
25599
25600
25601Vhs.isSupported = function () {
25602 return videojs.log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
25603};
25604
25605var Component = videojs.getComponent('Component');
25606/**
25607 * The Vhs Handler object, where we orchestrate all of the parts
25608 * of HLS to interact with video.js
25609 *
25610 * @class VhsHandler
25611 * @extends videojs.Component
25612 * @param {Object} source the source object
25613 * @param {Tech} tech the parent tech object
25614 * @param {Object} options optional and required options
25615 */
25616
25617var VhsHandler = /*#__PURE__*/function (_Component) {
25618 _inheritsLoose(VhsHandler, _Component);
25619
25620 function VhsHandler(source, tech, options) {
25621 var _this;
25622
25623 _this = _Component.call(this, tech, videojs.mergeOptions(options.hls, options.vhs)) || this;
25624
25625 if (options.hls && Object.keys(options.hls).length) {
25626 videojs.log.warn('Using hls options is deprecated. Please rename `hls` to `vhs` in your options object.');
25627 } // if a tech level `initialBandwidth` option was passed
25628 // use that over the VHS level `bandwidth` option
25629
25630
25631 if (typeof options.initialBandwidth === 'number') {
25632 _this.options_.bandwidth = options.initialBandwidth;
25633 }
25634
25635 _this.logger_ = logger('VhsHandler'); // tech.player() is deprecated, but set up a reference to HLS for
25636 // backwards-compatibility
25637
25638 if (tech.options_ && tech.options_.playerId) {
25639 var _player = videojs(tech.options_.playerId);
25640
25641 if (!_player.hasOwnProperty('hls')) {
25642 Object.defineProperty(_player, 'hls', {
25643 get: function get() {
25644 videojs.log.warn('player.hls is deprecated. Use player.tech().vhs instead.');
25645 tech.trigger({
25646 type: 'usage',
25647 name: 'hls-player-access'
25648 });
25649 return _assertThisInitialized(_this);
25650 },
25651 configurable: true
25652 });
25653 }
25654
25655 if (!_player.hasOwnProperty('vhs')) {
25656 Object.defineProperty(_player, 'vhs', {
25657 get: function get() {
25658 videojs.log.warn('player.vhs is deprecated. Use player.tech().vhs instead.');
25659 tech.trigger({
25660 type: 'usage',
25661 name: 'vhs-player-access'
25662 });
25663 return _assertThisInitialized(_this);
25664 },
25665 configurable: true
25666 });
25667 }
25668
25669 if (!_player.hasOwnProperty('dash')) {
25670 Object.defineProperty(_player, 'dash', {
25671 get: function get() {
25672 videojs.log.warn('player.dash is deprecated. Use player.tech().vhs instead.');
25673 return _assertThisInitialized(_this);
25674 },
25675 configurable: true
25676 });
25677 }
25678
25679 _this.player_ = _player;
25680 }
25681
25682 _this.tech_ = tech;
25683 _this.source_ = source;
25684 _this.stats = {};
25685 _this.ignoreNextSeekingEvent_ = false;
25686
25687 _this.setOptions_();
25688
25689 if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
25690 tech.overrideNativeAudioTracks(true);
25691 tech.overrideNativeVideoTracks(true);
25692 } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
25693 // overriding native HLS only works if audio tracks have been emulated
25694 // error early if we're misconfigured
25695 throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
25696 } // listen for fullscreenchange events for this player so that we
25697 // can adjust our quality selection quickly
25698
25699
25700 _this.on(document, ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
25701 var fullscreenElement = document.fullscreenElement || document.webkitFullscreenElement || document.mozFullScreenElement || document.msFullscreenElement;
25702
25703 if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
25704 _this.masterPlaylistController_.fastQualityChange_();
25705 } else {
25706 // When leaving fullscreen, since the in page pixel dimensions should be smaller
25707 // than full screen, see if there should be a rendition switch down to preserve
25708 // bandwidth.
25709 _this.masterPlaylistController_.checkABR_();
25710 }
25711 });
25712
25713 _this.on(_this.tech_, 'seeking', function () {
25714 if (this.ignoreNextSeekingEvent_) {
25715 this.ignoreNextSeekingEvent_ = false;
25716 return;
25717 }
25718
25719 this.setCurrentTime(this.tech_.currentTime());
25720 });
25721
25722 _this.on(_this.tech_, 'error', function () {
25723 // verify that the error was real and we are loaded
25724 // enough to have mpc loaded.
25725 if (this.tech_.error() && this.masterPlaylistController_) {
25726 this.masterPlaylistController_.pauseLoading();
25727 }
25728 });
25729
25730 _this.on(_this.tech_, 'play', _this.play);
25731
25732 return _this;
25733 }
25734
25735 var _proto = VhsHandler.prototype;
25736
25737 _proto.setOptions_ = function setOptions_() {
25738 var _this2 = this;
25739
25740 // defaults
25741 this.options_.withCredentials = this.options_.withCredentials || false;
25742 this.options_.handleManifestRedirects = this.options_.handleManifestRedirects === false ? false : true;
25743 this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
25744 this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
25745 this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
25746 this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
25747 this.options_.useNetworkInformationApi = this.options_.useNetworkInformationApi || false;
25748 this.options_.useDtsForTimestampOffset = this.options_.useDtsForTimestampOffset || false;
25749 this.options_.customTagParsers = this.options_.customTagParsers || [];
25750 this.options_.customTagMappers = this.options_.customTagMappers || [];
25751 this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;
25752
25753 if (typeof this.options_.blacklistDuration !== 'number') {
25754 this.options_.blacklistDuration = 5 * 60;
25755 }
25756
25757 if (typeof this.options_.bandwidth !== 'number') {
25758 if (this.options_.useBandwidthFromLocalStorage) {
25759 var storedObject = getVhsLocalStorage();
25760
25761 if (storedObject && storedObject.bandwidth) {
25762 this.options_.bandwidth = storedObject.bandwidth;
25763 this.tech_.trigger({
25764 type: 'usage',
25765 name: 'vhs-bandwidth-from-local-storage'
25766 });
25767 this.tech_.trigger({
25768 type: 'usage',
25769 name: 'hls-bandwidth-from-local-storage'
25770 });
25771 }
25772
25773 if (storedObject && storedObject.throughput) {
25774 this.options_.throughput = storedObject.throughput;
25775 this.tech_.trigger({
25776 type: 'usage',
25777 name: 'vhs-throughput-from-local-storage'
25778 });
25779 this.tech_.trigger({
25780 type: 'usage',
25781 name: 'hls-throughput-from-local-storage'
25782 });
25783 }
25784 }
25785 } // if bandwidth was not set by options or pulled from local storage, start playlist
25786 // selection at a reasonable bandwidth
25787
25788
25789 if (typeof this.options_.bandwidth !== 'number') {
25790 this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
25791 } // If the bandwidth number is unchanged from the initial setting
25792 // then this takes precedence over the enableLowInitialPlaylist option
25793
25794
25795 this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src
25796
25797 ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys', 'playlistSelector', 'initialPlaylistSelector', 'experimentalBufferBasedABR', 'liveRangeSafeTimeDelta', 'experimentalLLHLS', 'useNetworkInformationApi', 'useDtsForTimestampOffset', 'experimentalExactManifestTimings', 'experimentalLeastPixelDiffSelector'].forEach(function (option) {
25798 if (typeof _this2.source_[option] !== 'undefined') {
25799 _this2.options_[option] = _this2.source_[option];
25800 }
25801 });
25802 this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
25803 this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
25804 }
25805 /**
25806 * called when player.src gets called, handle a new source
25807 *
25808 * @param {Object} src the source object to handle
25809 */
25810 ;
25811
25812 _proto.src = function src(_src, type) {
25813 var _this3 = this;
25814
25815 // do nothing if the src is falsy
25816 if (!_src) {
25817 return;
25818 }
25819
25820 this.setOptions_(); // add master playlist controller options
25821
25822 this.options_.src = expandDataUri(this.source_.src);
25823 this.options_.tech = this.tech_;
25824 this.options_.externVhs = Vhs;
25825 this.options_.sourceType = simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech
25826
25827 this.options_.seekTo = function (time) {
25828 _this3.tech_.setCurrentTime(time);
25829 };
25830
25831 if (this.options_.smoothQualityChange) {
25832 videojs.log.warn('smoothQualityChange is deprecated and will be removed in the next major version');
25833 }
25834
25835 this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
25836 var playbackWatcherOptions = videojs.mergeOptions({
25837 liveRangeSafeTimeDelta: SAFE_TIME_DELTA
25838 }, this.options_, {
25839 seekable: function seekable() {
25840 return _this3.seekable();
25841 },
25842 media: function media() {
25843 return _this3.masterPlaylistController_.media();
25844 },
25845 masterPlaylistController: this.masterPlaylistController_
25846 });
25847 this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
25848 this.masterPlaylistController_.on('error', function () {
25849 var player = videojs.players[_this3.tech_.options_.playerId];
25850 var error = _this3.masterPlaylistController_.error;
25851
25852 if (typeof error === 'object' && !error.code) {
25853 error.code = 3;
25854 } else if (typeof error === 'string') {
25855 error = {
25856 message: error,
25857 code: 3
25858 };
25859 }
25860
25861 player.error(error);
25862 });
25863 var defaultSelector = this.options_.experimentalBufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
25864 // compatibility with < v2
25865
25866 this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
25867 this.masterPlaylistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2
25868
25869 this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
25870 this.mediaSource = this.masterPlaylistController_.mediaSource; // Proxy assignment of some properties to the master playlist
25871 // controller. Using a custom property for backwards compatibility
25872 // with < v2
25873
25874 Object.defineProperties(this, {
25875 selectPlaylist: {
25876 get: function get() {
25877 return this.masterPlaylistController_.selectPlaylist;
25878 },
25879 set: function set(selectPlaylist) {
25880 this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
25881 }
25882 },
25883 throughput: {
25884 get: function get() {
25885 return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
25886 },
25887 set: function set(throughput) {
25888 this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
25889 // for the cumulative average
25890
25891 this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
25892 }
25893 },
25894 bandwidth: {
25895 get: function get() {
25896 var playerBandwidthEst = this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
25897 var networkInformation = window$1.navigator.connection || window$1.navigator.mozConnection || window$1.navigator.webkitConnection;
25898 var tenMbpsAsBitsPerSecond = 10e6;
25899
25900 if (this.options_.useNetworkInformationApi && networkInformation) {
25901 // downlink returns Mbps
25902 // https://developer.mozilla.org/en-US/docs/Web/API/NetworkInformation/downlink
25903 var networkInfoBandwidthEstBitsPerSec = networkInformation.downlink * 1000 * 1000; // downlink maxes out at 10 Mbps. In the event that both networkInformationApi and the player
25904 // estimate a bandwidth greater than 10 Mbps, use the larger of the two estimates to ensure that
25905 // high quality streams are not filtered out.
25906
25907 if (networkInfoBandwidthEstBitsPerSec >= tenMbpsAsBitsPerSecond && playerBandwidthEst >= tenMbpsAsBitsPerSecond) {
25908 playerBandwidthEst = Math.max(playerBandwidthEst, networkInfoBandwidthEstBitsPerSec);
25909 } else {
25910 playerBandwidthEst = networkInfoBandwidthEstBitsPerSec;
25911 }
25912 }
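// illustrative: a 12 Mbps player estimate with downlink capped at 10 Mbps
// keeps max(12, 10) = 12 Mbps; a 12 Mbps player estimate with a 4 Mbps
// downlink defers to the API's 4 Mbps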
25913
25914 return playerBandwidthEst;
25915 },
25916 set: function set(bandwidth) {
25917 this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
25918 // `count` is set to zero so that the current value of `rate` isn't included
25919 // in the cumulative average
25920
25921 this.masterPlaylistController_.mainSegmentLoader_.throughput = {
25922 rate: 0,
25923 count: 0
25924 };
25925 }
25926 },
25927
25928 /**
25929 * `systemBandwidth` is a combination of two serial processes bit-rates. The first
25930 * is the network bitrate provided by `bandwidth` and the second is the bitrate of
25931 * the entire process after that - decryption, transmuxing, and appending - provided
25932 * by `throughput`.
25933 *
25934 * Since the two processes are serial, the overall system bandwidth is given by:
25935 * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
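 *
 * Worked example (illustrative numbers): with bandwidth = 8e6 bits/s and
 * throughput = 24e6 bits/s, sysBandwidth = 1 / (1/8e6 + 1/24e6) = 6e6 bits/s.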
25936 */
25937 systemBandwidth: {
25938 get: function get() {
25939 var invBandwidth = 1 / (this.bandwidth || 1);
25940 var invThroughput;
25941
25942 if (this.throughput > 0) {
25943 invThroughput = 1 / this.throughput;
25944 } else {
25945 invThroughput = 0;
25946 }
25947
25948 var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
25949 return systemBitrate;
25950 },
25951 set: function set() {
25952 videojs.log.error('The "systemBandwidth" property is read-only');
25953 }
25954 }
25955 });
25956
25957 if (this.options_.bandwidth) {
25958 this.bandwidth = this.options_.bandwidth;
25959 }
25960
25961 if (this.options_.throughput) {
25962 this.throughput = this.options_.throughput;
25963 }
25964
25965 Object.defineProperties(this.stats, {
25966 bandwidth: {
25967 get: function get() {
25968 return _this3.bandwidth || 0;
25969 },
25970 enumerable: true
25971 },
25972 mediaRequests: {
25973 get: function get() {
25974 return _this3.masterPlaylistController_.mediaRequests_() || 0;
25975 },
25976 enumerable: true
25977 },
25978 mediaRequestsAborted: {
25979 get: function get() {
25980 return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
25981 },
25982 enumerable: true
25983 },
25984 mediaRequestsTimedout: {
25985 get: function get() {
25986 return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
25987 },
25988 enumerable: true
25989 },
25990 mediaRequestsErrored: {
25991 get: function get() {
25992 return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
25993 },
25994 enumerable: true
25995 },
25996 mediaTransferDuration: {
25997 get: function get() {
25998 return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
25999 },
26000 enumerable: true
26001 },
26002 mediaBytesTransferred: {
26003 get: function get() {
26004 return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
26005 },
26006 enumerable: true
26007 },
26008 mediaSecondsLoaded: {
26009 get: function get() {
26010 return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
26011 },
26012 enumerable: true
26013 },
26014 mediaAppends: {
26015 get: function get() {
26016 return _this3.masterPlaylistController_.mediaAppends_() || 0;
26017 },
26018 enumerable: true
26019 },
26020 mainAppendsToLoadedData: {
26021 get: function get() {
26022 return _this3.masterPlaylistController_.mainAppendsToLoadedData_() || 0;
26023 },
26024 enumerable: true
26025 },
26026 audioAppendsToLoadedData: {
26027 get: function get() {
26028 return _this3.masterPlaylistController_.audioAppendsToLoadedData_() || 0;
26029 },
26030 enumerable: true
26031 },
26032 appendsToLoadedData: {
26033 get: function get() {
26034 return _this3.masterPlaylistController_.appendsToLoadedData_() || 0;
26035 },
26036 enumerable: true
26037 },
26038 timeToLoadedData: {
26039 get: function get() {
26040 return _this3.masterPlaylistController_.timeToLoadedData_() || 0;
26041 },
26042 enumerable: true
26043 },
26044 buffered: {
26045 get: function get() {
26046 return timeRangesToArray(_this3.tech_.buffered());
26047 },
26048 enumerable: true
26049 },
26050 currentTime: {
26051 get: function get() {
26052 return _this3.tech_.currentTime();
26053 },
26054 enumerable: true
26055 },
26056 currentSource: {
26057 get: function get() {
26058 return _this3.tech_.currentSource_;
26059 },
26060 enumerable: true
26061 },
26062 currentTech: {
26063 get: function get() {
26064 return _this3.tech_.name_;
26065 },
26066 enumerable: true
26067 },
26068 duration: {
26069 get: function get() {
26070 return _this3.tech_.duration();
26071 },
26072 enumerable: true
26073 },
26074 master: {
26075 get: function get() {
26076 return _this3.playlists.master;
26077 },
26078 enumerable: true
26079 },
26080 playerDimensions: {
26081 get: function get() {
26082 return _this3.tech_.currentDimensions();
26083 },
26084 enumerable: true
26085 },
26086 seekable: {
26087 get: function get() {
26088 return timeRangesToArray(_this3.tech_.seekable());
26089 },
26090 enumerable: true
26091 },
26092 timestamp: {
26093 get: function get() {
26094 return Date.now();
26095 },
26096 enumerable: true
26097 },
26098 videoPlaybackQuality: {
26099 get: function get() {
26100 return _this3.tech_.getVideoPlaybackQuality();
26101 },
26102 enumerable: true
26103 }
26104 });
26105 this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
26106 this.tech_.on('bandwidthupdate', function () {
26107 if (_this3.options_.useBandwidthFromLocalStorage) {
26108 updateVhsLocalStorage({
26109 bandwidth: _this3.bandwidth,
26110 throughput: Math.round(_this3.throughput)
26111 });
26112 }
26113 });
26114 this.masterPlaylistController_.on('selectedinitialmedia', function () {
26115 // Add the manual rendition mix-in to VhsHandler
26116 renditionSelectionMixin(_this3);
26117 });
26118 this.masterPlaylistController_.sourceUpdater_.on('createdsourcebuffers', function () {
26119 _this3.setupEme_();
26120 }); // the bandwidth of the primary segment loader is our best
26121 // estimate of overall bandwidth
26122
26123 this.on(this.masterPlaylistController_, 'progress', function () {
26124 this.tech_.trigger('progress');
26125 }); // In the live case, we need to ignore the very first `seeking` event since
26126 // that will be the result of the seek-to-live behavior
26127
26128 this.on(this.masterPlaylistController_, 'firstplay', function () {
26129 this.ignoreNextSeekingEvent_ = true;
26130 });
26131 this.setupQualityLevels_(); // do nothing if the tech has been disposed already
26132 // this can occur if someone sets the src in player.ready(), for instance
26133
26134 if (!this.tech_.el()) {
26135 return;
26136 }
26137
26138 this.mediaSourceUrl_ = window$1.URL.createObjectURL(this.masterPlaylistController_.mediaSource);
26139 this.tech_.src(this.mediaSourceUrl_);
26140 };

  _proto.createKeySessions_ = function createKeySessions_() {
    var _this4 = this;

    var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
    this.logger_('waiting for EME key session creation');
    waitForKeySessionCreation({
      player: this.player_,
      sourceKeySystems: this.source_.keySystems,
      audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),
      mainPlaylists: this.playlists.master.playlists
    }).then(function () {
      _this4.logger_('created EME key session');

      _this4.masterPlaylistController_.sourceUpdater_.initializedEme();
    }).catch(function (err) {
      _this4.logger_('error while creating EME key session', err);

      _this4.player_.error({
        message: 'Failed to initialize media keys for EME',
        code: 3
      });
    });
  };
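
  // Key sessions are created from the `keySystems` configuration on the source
  // object, so DRM-protected sources need it supplied up front. A sketch,
  // assuming videojs-contrib-eme is registered; the URLs are hypothetical:
  //
  //   player.src({
  //     src: 'https://example.com/manifest.mpd',
  //     type: 'application/dash+xml',
  //     keySystems: {
  //       'com.widevine.alpha': 'https://example.com/license'
  //     }
  //   });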

  _proto.handleWaitingForKey_ = function handleWaitingForKey_() {
    // If waitingforkey is fired, it's possible that the data that's necessary to retrieve
    // the key is in the manifest. While this should've happened on initial source load, it
    // may happen again in live streams where the keys change, and the manifest info
    // reflects the update.
    //
    // Because videojs-contrib-eme compares the PSSH data we send to that of PSSH data it's
    // already requested keys for, we don't have to worry about this generating extraneous
    // requests.
    this.logger_('waitingforkey fired, attempting to create any new key sessions');
    this.createKeySessions_();
  }
  /**
   * If necessary and EME is available, sets up EME options and waits for key session
   * creation.
   *
   * This function also updates the source updater so that it can be used, since for
   * some browsers EME must be configured before content is appended (if appending
   * unencrypted content before encrypted content).
   */
  ;

  _proto.setupEme_ = function setupEme_() {
    var _this5 = this;

    var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
    var didSetupEmeOptions = setupEmeOptions({
      player: this.player_,
      sourceKeySystems: this.source_.keySystems,
      media: this.playlists.media(),
      audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
    });
    this.player_.tech_.on('keystatuschange', function (e) {
      if (e.status === 'output-restricted') {
        _this5.masterPlaylistController_.blacklistCurrentPlaylist({
          playlist: _this5.masterPlaylistController_.media(),
          message: "DRM keystatus changed to " + e.status + ". Playlist will fail to play. Check for HDCP content.",
          blacklistDuration: Infinity
        });
      }
    });
    this.handleWaitingForKey_ = this.handleWaitingForKey_.bind(this);
    this.player_.tech_.on('waitingforkey', this.handleWaitingForKey_); // In IE11 this is too early to initialize media keys, and IE11 does not support
    // promises.

    if (videojs.browser.IE_VERSION === 11 || !didSetupEmeOptions) {
      // If EME options were not set up, we've done all we could to initialize EME.
      this.masterPlaylistController_.sourceUpdater_.initializedEme();
      return;
    }

    this.createKeySessions_();
  }
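
  // When EME setup fails, createKeySessions_ above surfaces a MediaError with
  // code 3 (MEDIA_ERR_DECODE). A sketch of observing that failure from an
  // existing `player`:
  //
  //   player.on('error', function () {
  //     var err = player.error();
  //     if (err && err.code === 3) {
  //       // EME/key session initialization failed; inspect err.message
  //     }
  //   });
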
  /**
   * Initializes the quality levels and sets listeners to update them.
   *
   * @method setupQualityLevels_
   * @private
   */
  ;

  _proto.setupQualityLevels_ = function setupQualityLevels_() {
    var _this6 = this;

    var player = videojs.players[this.tech_.options_.playerId]; // if there isn't a player or there isn't a qualityLevels plugin
    // or qualityLevels_ listeners have already been set up, do nothing.

    if (!player || !player.qualityLevels || this.qualityLevels_) {
      return;
    }

    this.qualityLevels_ = player.qualityLevels();
    this.masterPlaylistController_.on('selectedinitialmedia', function () {
      handleVhsLoadedMetadata(_this6.qualityLevels_, _this6);
    });
    this.playlists.on('mediachange', function () {
      handleVhsMediaChange(_this6.qualityLevels_, _this6.playlists);
    });
  }
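
  // With the videojs-contrib-quality-levels plugin present, the listeners above
  // keep the quality level list in sync with the active playlists. A sketch of
  // consuming it, assuming an existing `player` with the plugin registered:
  //
  //   var levels = player.qualityLevels();
  //
  //   levels.on('addqualitylevel', function (event) {
  //     // e.g. only allow renditions 720p and taller
  //     event.qualityLevel.enabled = event.qualityLevel.height >= 720;
  //   });
  //
  //   levels.on('change', function () {
  //     console.log('now playing level', levels[levels.selectedIndex].id);
  //   });
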
  /**
   * Return the versions of VHS and its bundled dependencies.
   */
  ;

  VhsHandler.version = function version$5() {
    return {
      '@videojs/http-streaming': version$4,
      'mux.js': version$3,
      'mpd-parser': version$2,
      'm3u8-parser': version$1,
      'aes-decrypter': version
    };
  }
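
  // For example (dependency values are whatever was bundled at build time):
  //
  //   videojs.VhsHandler.version();
  //   // => { '@videojs/http-streaming': '2.14.2',
  //   //      'mux.js': '...', 'mpd-parser': '...',
  //   //      'm3u8-parser': '...', 'aes-decrypter': '...' }
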
  /**
   * Return the version map from the static version() method.
   */
  ;

  _proto.version = function version() {
    return this.constructor.version();
  };

  _proto.canChangeType = function canChangeType() {
    return SourceUpdater.canChangeType();
  }
  /**
   * Begin playing the video.
   */
  ;

  _proto.play = function play() {
    this.masterPlaylistController_.play();
  }
  /**
   * A wrapper around the function of the same name in MasterPlaylistController.
   */
  ;

  _proto.setCurrentTime = function setCurrentTime(currentTime) {
    this.masterPlaylistController_.setCurrentTime(currentTime);
  }
  /**
   * A wrapper around the function of the same name in MasterPlaylistController.
   */
  ;

  _proto.duration = function duration() {
    return this.masterPlaylistController_.duration();
  }
  /**
   * A wrapper around the function of the same name in MasterPlaylistController.
   */
  ;

  _proto.seekable = function seekable() {
    return this.masterPlaylistController_.seekable();
  }
  /**
   * Abort all outstanding work and clean up.
   */
  ;

  _proto.dispose = function dispose() {
    if (this.playbackWatcher_) {
      this.playbackWatcher_.dispose();
    }

    if (this.masterPlaylistController_) {
      this.masterPlaylistController_.dispose();
    }

    if (this.qualityLevels_) {
      this.qualityLevels_.dispose();
    }

    if (this.player_) {
      delete this.player_.vhs;
      delete this.player_.dash;
      delete this.player_.hls;
    }

    if (this.tech_ && this.tech_.vhs) {
      delete this.tech_.vhs;
    } // don't check this.tech_.hls as it will log a deprecated warning


    if (this.tech_) {
      delete this.tech_.hls;
    }

    if (this.mediaSourceUrl_ && window$1.URL.revokeObjectURL) {
      window$1.URL.revokeObjectURL(this.mediaSourceUrl_);
      this.mediaSourceUrl_ = null;
    }

    if (this.tech_) {
      this.tech_.off('waitingforkey', this.handleWaitingForKey_);
    }

    _Component.prototype.dispose.call(this);
  };

  _proto.convertToProgramTime = function convertToProgramTime(time, callback) {
    return getProgramTime({
      playlist: this.masterPlaylistController_.media(),
      time: time,
      callback: callback
    });
  } // the player must be playing before calling this
  ;
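
  // A sketch of converting a player time to program time, assuming playback has
  // started and the playlist carries EXT-X-PROGRAM-DATE-TIME tags:
  //
  //   player.tech().vhs.convertToProgramTime(3, function (err, programTime) {
  //     if (err) {
  //       return;
  //     }
  //     console.log(programTime.mediaSeconds, programTime.programDateTime);
  //   });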

  _proto.seekToProgramTime = function seekToProgramTime$1(programTime, callback, pauseAfterSeek, retryCount) {
    if (pauseAfterSeek === void 0) {
      pauseAfterSeek = true;
    }

    if (retryCount === void 0) {
      retryCount = 2;
    }

    return seekToProgramTime({
      programTime: programTime,
      playlist: this.masterPlaylistController_.media(),
      retryCount: retryCount,
      pauseAfterSeek: pauseAfterSeek,
      seekTo: this.options_.seekTo,
      tech: this.options_.tech,
      callback: callback
    });
  };
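
  // A sketch of seeking by program time, assuming playback has begun (see the
  // comment above) and the stream includes EXT-X-PROGRAM-DATE-TIME tags:
  //
  //   player.tech().vhs.seekToProgramTime('2018-04-27T12:00:00.007Z', function (err, newTime) {
  //     if (err) {
  //       return;
  //     }
  //     console.log('seeked to', newTime);
  //   });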

  return VhsHandler;
}(Component);
/**
 * The Source Handler object, which informs video.js what additional
 * MIME types are supported and sets up playback. It is registered
 * automatically to the appropriate tech based on the capabilities of
 * the browser it is running in. It is not necessary to use or modify
 * this object in normal usage.
 */


var VhsSourceHandler = {
  name: 'videojs-http-streaming',
  VERSION: version$4,
  canHandleSource: function canHandleSource(srcObj, options) {
    if (options === void 0) {
      options = {};
    }

    var localOptions = videojs.mergeOptions(videojs.options, options);
    return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
  },
  handleSource: function handleSource(source, tech, options) {
    if (options === void 0) {
      options = {};
    }

    var localOptions = videojs.mergeOptions(videojs.options, options);
    tech.vhs = new VhsHandler(source, tech, localOptions);

    if (!videojs.hasOwnProperty('hls')) {
      Object.defineProperty(tech, 'hls', {
        get: function get() {
          videojs.log.warn('player.tech().hls is deprecated. Use player.tech().vhs instead.');
          return tech.vhs;
        },
        configurable: true
      });
    }

    tech.vhs.xhr = xhrFactory();
    tech.vhs.src(source.src, source.type);
    return tech.vhs;
  },
  canPlayType: function canPlayType(type, options) {
    if (options === void 0) {
      options = {};
    }

    var _videojs$mergeOptions = videojs.mergeOptions(videojs.options, options),
        _videojs$mergeOptions2 = _videojs$mergeOptions.vhs;

    _videojs$mergeOptions2 = _videojs$mergeOptions2 === void 0 ? {} : _videojs$mergeOptions2;
    var _videojs$mergeOptions3 = _videojs$mergeOptions2.overrideNative,
        overrideNative = _videojs$mergeOptions3 === void 0 ? !videojs.browser.IS_ANY_SAFARI : _videojs$mergeOptions3,
        _videojs$mergeOptions4 = _videojs$mergeOptions.hls;
    _videojs$mergeOptions4 = _videojs$mergeOptions4 === void 0 ? {} : _videojs$mergeOptions4;
    var _videojs$mergeOptions5 = _videojs$mergeOptions4.overrideNative,
        legacyOverrideNative = _videojs$mergeOptions5 === void 0 ? false : _videojs$mergeOptions5;
    var supportedType = simpleTypeFromSourceType(type);
    var canUseMsePlayback = supportedType && (!Vhs.supportsTypeNatively(supportedType) || legacyOverrideNative || overrideNative);
    return canUseMsePlayback ? 'maybe' : '';
  }
};
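
// A sketch of querying the source handler directly; 'maybe' means MSE playback
// would be used, while '' leaves the source to native playback or rejects it:
//
//   videojs.VhsSourceHandler.canPlayType(
//     'application/x-mpegURL',
//     { vhs: { overrideNative: true } }
//   );
//   // => 'maybe' (in browsers with Media Source Extensions)
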
/**
 * Check to see if the native MediaSource object exists and supports
 * an MP4 container with both H.264 video and AAC-LC audio.
 *
 * @return {boolean} if native media sources are supported
 */

var supportsNativeMediaSources = function supportsNativeMediaSources() {
  return browserSupportsCodec('avc1.4d400d,mp4a.40.2');
}; // register source handlers with the appropriate techs
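
// browserSupportsCodec resolves to roughly the following native check, shown
// here as an illustrative sketch rather than the exact implementation:
//
//   window.MediaSource &&
//     window.MediaSource.isTypeSupported('video/mp4; codecs="avc1.4d400d,mp4a.40.2"');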


if (supportsNativeMediaSources()) {
  videojs.getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
}

videojs.VhsHandler = VhsHandler;
Object.defineProperty(videojs, 'HlsHandler', {
  get: function get() {
    videojs.log.warn('videojs.HlsHandler is deprecated. Use videojs.VhsHandler instead.');
    return VhsHandler;
  },
  configurable: true
});
videojs.VhsSourceHandler = VhsSourceHandler;
Object.defineProperty(videojs, 'HlsSourceHandler', {
  get: function get() {
    videojs.log.warn('videojs.HlsSourceHandler is deprecated. ' + 'Use videojs.VhsSourceHandler instead.');
    return VhsSourceHandler;
  },
  configurable: true
});
videojs.Vhs = Vhs;
Object.defineProperty(videojs, 'Hls', {
  get: function get() {
    videojs.log.warn('videojs.Hls is deprecated. Use videojs.Vhs instead.');
    return Vhs;
  },
  configurable: true
});

if (!videojs.use) {
  videojs.registerComponent('Hls', Vhs);
  videojs.registerComponent('Vhs', Vhs);
}

videojs.options.vhs = videojs.options.vhs || {};
videojs.options.hls = videojs.options.hls || {};
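
// These global objects let integrators set defaults before any player is
// created. A sketch using documented options:
//
//   videojs.options.vhs.overrideNative = true;               // prefer MSE over native HLS
//   videojs.options.vhs.useBandwidthFromLocalStorage = true; // seed ABR from the last session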

if (!videojs.getPlugin || !videojs.getPlugin('reloadSourceOnError')) {
  var registerPlugin = videojs.registerPlugin || videojs.plugin;
  registerPlugin('reloadSourceOnError', reloadSourceOnError);
}
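
// The plugin must be initialized per player before it takes effect. A sketch,
// where errorInterval (seconds) throttles how often the source may be reloaded:
//
//   player.reloadSourceOnError({
//     errorInterval: 10
//   });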

export { LOCAL_STORAGE_KEY, Vhs, VhsHandler, VhsSourceHandler, emeKeySystems, expandDataUri, getAllPsshKeySystemsOptions, setupEmeOptions, waitForKeySessionCreation };