/*! @name @videojs/http-streaming @version 2.11.1 @license Apache-2.0 */
import _assertThisInitialized from '@babel/runtime/helpers/assertThisInitialized';
import _inheritsLoose from '@babel/runtime/helpers/inheritsLoose';
import document from 'global/document';
import window from 'global/window';
import _resolveUrl from '@videojs/vhs-utils/es/resolve-url.js';
import videojs from 'video.js';
import _extends from '@babel/runtime/helpers/extends';
import { Parser } from 'm3u8-parser';
import { isAudioCodec, translateLegacyCodec, codecsFromDefault, parseCodecs, getMimeForCodec, DEFAULT_VIDEO_CODEC, DEFAULT_AUDIO_CODEC, browserSupportsCodec, muxerSupportsCodec } from '@videojs/vhs-utils/es/codecs.js';
import { simpleTypeFromSourceType } from '@videojs/vhs-utils/es/media-types.js';
export { simpleTypeFromSourceType } from '@videojs/vhs-utils/es/media-types.js';
import { generateSidxKey, parseUTCTiming, parse, addSidxSegmentsToPlaylist } from 'mpd-parser';
import parseSidx from 'mux.js/lib/tools/parse-sidx';
import { getId3Offset } from '@videojs/vhs-utils/es/id3-helpers';
import { detectContainerForBytes, isLikelyFmp4MediaSegment } from '@videojs/vhs-utils/es/containers';
import { concatTypedArrays, stringToBytes, toUint8 } from '@videojs/vhs-utils/es/byte-helpers';
import { ONE_SECOND_IN_TS } from 'mux.js/lib/utils/clock';

/**
 * @file resolve-url.js - Handling how URLs are resolved and manipulated
 */
var resolveUrl = _resolveUrl;
/**
 * Checks whether an xhr request was redirected and returns the correct URL depending
 * on the `handleManifestRedirects` option
 *
 * @api private
 *
 * @param {string} url - the URL being requested
 * @param {XMLHttpRequest} req - the xhr request result
 *
 * @return {string}
 */

var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
  // To understand how the responseURL below is set and generated:
  // - https://fetch.spec.whatwg.org/#concept-response-url
  // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
  if (handleManifestRedirect && req && req.responseURL && url !== req.responseURL) {
    return req.responseURL;
  }

  return url;
};
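
// Illustrative usage (not part of the library): with redirect handling
// enabled, a redirected request resolves to the final URL. The `req`
// object below is a hypothetical stand-in for an xhr result.
//
//   resolveManifestRedirect(true, 'https://a.example/main.m3u8', {
//     responseURL: 'https://b.example/main.m3u8'
//   }); // => 'https://b.example/main.m3u8'
//
//   resolveManifestRedirect(false, 'https://a.example/main.m3u8', {
//     responseURL: 'https://b.example/main.m3u8'
//   }); // => 'https://a.example/main.m3u8'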

var logger = function logger(source) {
  if (videojs.log.debug) {
    return videojs.log.debug.bind(videojs, 'VHS:', source + " >");
  }

  return function () {};
};

/**
 * ranges
 *
 * Utilities for working with TimeRanges.
 *
 */

// Comparisons between time values such as current time and the end of the buffered range
// can be misleading because of precision differences or when the current media has poorly
// aligned audio and video, which can cause values to be slightly off from what you would
// expect. This value is what we consider to be safe to use in such comparisons to account
// for these scenarios.
var TIME_FUDGE_FACTOR = 1 / 30;
var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;

var filterRanges = function filterRanges(timeRanges, predicate) {
  var results = [];
  var i;

  if (timeRanges && timeRanges.length) {
    // Search for ranges that match the predicate
    for (i = 0; i < timeRanges.length; i++) {
      if (predicate(timeRanges.start(i), timeRanges.end(i))) {
        results.push([timeRanges.start(i), timeRanges.end(i)]);
      }
    }
  }

  return videojs.createTimeRanges(results);
};
/**
 * Attempts to find the buffered TimeRange that contains the specified
 * time.
 *
 * @param {TimeRanges} buffered - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @return {TimeRanges} a new TimeRanges object
 */

var findRange = function findRange(buffered, time) {
  return filterRanges(buffered, function (start, end) {
    return start - SAFE_TIME_DELTA <= time && end + SAFE_TIME_DELTA >= time;
  });
};
/**
 * Returns the TimeRanges that begin later than the specified time.
 *
 * @param {TimeRanges} timeRanges - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @return {TimeRanges} a new TimeRanges object.
 */

var findNextRange = function findNextRange(timeRanges, time) {
  return filterRanges(timeRanges, function (start) {
    return start - TIME_FUDGE_FACTOR >= time;
  });
};
/**
 * Returns gaps within a list of TimeRanges
 *
 * @param {TimeRanges} buffered - the TimeRanges object
 * @return {TimeRanges} a TimeRanges object of gaps
 */

var findGaps = function findGaps(buffered) {
  if (buffered.length < 2) {
    return videojs.createTimeRanges();
  }

  var ranges = [];

  for (var i = 1; i < buffered.length; i++) {
    var start = buffered.end(i - 1);
    var end = buffered.start(i);
    ranges.push([start, end]);
  }

  return videojs.createTimeRanges(ranges);
};
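
// Illustrative usage (not part of the library): for buffered ranges
// [[0, 10], [15, 20]], findGaps reports the unbuffered span between them.
//
//   var gaps = findGaps(videojs.createTimeRanges([[0, 10], [15, 20]]));
//   gaps.start(0); // => 10
//   gaps.end(0);   // => 15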
/**
 * Calculate the intersection of two TimeRanges
 *
 * @param {TimeRanges} bufferA
 * @param {TimeRanges} bufferB
 * @return {TimeRanges} The intersection of `bufferA` with `bufferB`
 */

var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
  var start = null;
  var end = null;
  var arity = 0;
  var extents = [];
  var ranges = [];

  if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
    return videojs.createTimeRange();
  }

  // Handle the case where we have both buffers and create an
  // intersection of the two
  var count = bufferA.length;

  // A) Gather up all start and end times
  while (count--) {
    extents.push({
      time: bufferA.start(count),
      type: 'start'
    });
    extents.push({
      time: bufferA.end(count),
      type: 'end'
    });
  }

  count = bufferB.length;

  while (count--) {
    extents.push({
      time: bufferB.start(count),
      type: 'start'
    });
    extents.push({
      time: bufferB.end(count),
      type: 'end'
    });
  }

  // B) Sort them by time
  extents.sort(function (a, b) {
    return a.time - b.time;
  });

  // C) Go along one by one incrementing arity for starts and decrementing
  // arity for ends
  for (count = 0; count < extents.length; count++) {
    if (extents[count].type === 'start') {
      arity++;

      // D) If arity is ever incremented to 2 we are entering an
      // overlapping range
      if (arity === 2) {
        start = extents[count].time;
      }
    } else if (extents[count].type === 'end') {
      arity--;

      // E) If arity is ever decremented to 1 we are leaving an
      // overlapping range
      if (arity === 1) {
        end = extents[count].time;
      }
    }

    // F) Record overlapping ranges
    if (start !== null && end !== null) {
      ranges.push([start, end]);
      start = null;
      end = null;
    }
  }

  return videojs.createTimeRanges(ranges);
};
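
// Illustrative usage (not part of the library): the sweep above yields
// only the spans where both buffers overlap.
//
//   var a = videojs.createTimeRanges([[0, 10], [20, 30]]);
//   var b = videojs.createTimeRanges([[5, 25]]);
//   bufferIntersection(a, b); // => two ranges: [5, 10] and [20, 25]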
/**
 * Gets a human readable string for a TimeRanges object
 *
 * @param {TimeRanges} range
 * @return {string} a human readable string
 */

var printableRange = function printableRange(range) {
  var strArr = [];

  if (!range || !range.length) {
    return '';
  }

  for (var i = 0; i < range.length; i++) {
    strArr.push(range.start(i) + ' => ' + range.end(i));
  }

  return strArr.join(', ');
};
/**
 * Calculates the amount of time left in seconds until the player hits the end of the
 * buffer and causes a rebuffer
 *
 * @param {TimeRanges} buffered
 *        The state of the buffer
 * @param {number} currentTime
 *        The current time of the player
 * @param {number} playbackRate
 *        The current playback rate of the player. Defaults to 1.
 * @return {number}
 *         Time until the player has to start rebuffering in seconds.
 * @function timeUntilRebuffer
 */

var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime, playbackRate) {
  if (playbackRate === void 0) {
    playbackRate = 1;
  }

  var bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
  return (bufferedEnd - currentTime) / playbackRate;
};
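
// Illustrative usage (not part of the library): 8 seconds of buffer ahead
// of the playhead at 2x playback is only 4 seconds of wall-clock time.
//
//   var buffered = videojs.createTimeRanges([[0, 18]]);
//   timeUntilRebuffer(buffered, 10);    // => 8
//   timeUntilRebuffer(buffered, 10, 2); // => 4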
/**
 * Converts a TimeRanges object into an array representation
 *
 * @param {TimeRanges} timeRanges
 * @return {Array}
 */

var timeRangesToArray = function timeRangesToArray(timeRanges) {
  var timeRangesList = [];

  for (var i = 0; i < timeRanges.length; i++) {
    timeRangesList.push({
      start: timeRanges.start(i),
      end: timeRanges.end(i)
    });
  }

  return timeRangesList;
};
/**
 * Determines if two time range objects are different.
 *
 * @param {TimeRange} a
 *        the first time range object to check
 *
 * @param {TimeRange} b
 *        the second time range object to check
 *
 * @return {boolean}
 *         Whether the time range objects differ
 */

var isRangeDifferent = function isRangeDifferent(a, b) {
  // same object
  if (a === b) {
    return false;
  }

  // one or the other is undefined
  if (!a && b || !b && a) {
    return true;
  }

  // length is different
  if (a.length !== b.length) {
    return true;
  }

  // see if any start/end pair is different
  for (var i = 0; i < a.length; i++) {
    if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
      return true;
    }
  }

  // if the length and every pair is the same
  // this is the same time range
  return false;
};

var lastBufferedEnd = function lastBufferedEnd(a) {
  if (!a || !a.length || !a.end) {
    return;
  }

  return a.end(a.length - 1);
};
/**
 * A utility function to add up the amount of time in a timeRange
 * after a specified startTime,
 * e.g. [[0, 10], [20, 40], [50, 60]] with a startTime of 0
 * would return 40, as there are 40 seconds after 0 in the timeRange
 *
 * @param {TimeRanges} range
 *        The range to check against
 * @param {number} startTime
 *        The time in the time range that you should start counting from
 *
 * @return {number}
 *          The number of seconds in the buffer past the specified time.
 */

var timeAheadOf = function timeAheadOf(range, startTime) {
  var time = 0;

  if (!range || !range.length) {
    return time;
  }

  for (var i = 0; i < range.length; i++) {
    var start = range.start(i);
    var end = range.end(i);

    // startTime is after this range entirely
    if (startTime > end) {
      continue;
    }

    // startTime is within this range
    if (startTime > start && startTime <= end) {
      time += end - startTime;
      continue;
    }

    // startTime is before this range.
    time += end - start;
  }

  return time;
};
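
// Illustrative usage (not part of the library): with the doc comment's
// ranges and a startTime inside the second range, only the remainder of
// that range and everything after it is counted.
//
//   var r = videojs.createTimeRanges([[0, 10], [20, 40], [50, 60]]);
//   timeAheadOf(r, 0);  // => 40
//   timeAheadOf(r, 30); // => 20 (10 left in [20, 40] plus 10 in [50, 60])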

/**
 * @file playlist.js
 *
 * Playlist related utilities.
 */
var createTimeRange = videojs.createTimeRange;
/**
 * A function to get a combined list of parts and segments with durations
 * and indexes.
 *
 * @param {Playlist} playlist the playlist to get the list for.
 *
 * @return {Array} The part/segment list.
 */

var getPartsAndSegments = function getPartsAndSegments(playlist) {
  return (playlist.segments || []).reduce(function (acc, segment, si) {
    if (segment.parts) {
      segment.parts.forEach(function (part, pi) {
        acc.push({
          duration: part.duration,
          segmentIndex: si,
          partIndex: pi,
          part: part,
          segment: segment
        });
      });
    } else {
      acc.push({
        duration: segment.duration,
        segmentIndex: si,
        partIndex: null,
        segment: segment,
        part: null
      });
    }

    return acc;
  }, []);
};
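
// Illustrative usage (not part of the library): a hypothetical playlist
// with one full segment and one segment split into two LL-HLS parts
// flattens into three entries.
//
//   getPartsAndSegments({
//     segments: [
//       {duration: 6},
//       {parts: [{duration: 2}, {duration: 2}]}
//     ]
//   });
//   // => [
//   //   {duration: 6, segmentIndex: 0, partIndex: null, ...},
//   //   {duration: 2, segmentIndex: 1, partIndex: 0, ...},
//   //   {duration: 2, segmentIndex: 1, partIndex: 1, ...}
//   // ]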
var getLastParts = function getLastParts(media) {
  var lastSegment = media.segments && media.segments.length && media.segments[media.segments.length - 1];
  return lastSegment && lastSegment.parts || [];
};

var getKnownPartCount = function getKnownPartCount(_ref) {
  var preloadSegment = _ref.preloadSegment;

  if (!preloadSegment) {
    return;
  }

  var parts = preloadSegment.parts,
      preloadHints = preloadSegment.preloadHints;
  var partCount = (preloadHints || []).reduce(function (count, hint) {
    return count + (hint.type === 'PART' ? 1 : 0);
  }, 0);
  partCount += parts && parts.length ? parts.length : 0;
  return partCount;
};
/**
 * Get the number of seconds to delay from the end of a
 * live playlist.
 *
 * @param {Playlist} master the master playlist
 * @param {Playlist} media the media playlist
 * @return {number} the hold back in seconds.
 */

var liveEdgeDelay = function liveEdgeDelay(master, media) {
  if (media.endList) {
    return 0;
  }

  // DASH suggestedPresentationDelay trumps everything
  if (master && master.suggestedPresentationDelay) {
    return master.suggestedPresentationDelay;
  }

  var hasParts = getLastParts(media).length > 0;

  // look for "part" delays from LL-HLS first
  if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
    return media.serverControl.partHoldBack;
  } else if (hasParts && media.partTargetDuration) {
    return media.partTargetDuration * 3;
  // finally look for full segment delays
  } else if (media.serverControl && media.serverControl.holdBack) {
    return media.serverControl.holdBack;
  } else if (media.targetDuration) {
    return media.targetDuration * 3;
  }

  return 0;
};
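
// Illustrative usage (not part of the library): a live playlist with no
// server control falls back to three target durations of hold back.
//
//   liveEdgeDelay(null, {targetDuration: 6});            // => 18
//   liveEdgeDelay({suggestedPresentationDelay: 10}, {}); // => 10
//   liveEdgeDelay(null, {endList: true});                // => 0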
/**
 * Walk backward until we find a duration we can use
 * or return a failure
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {number} endSequence the mediaSequence to stop walking on
 */

var backwardDuration = function backwardDuration(playlist, endSequence) {
  var result = 0;
  var i = endSequence - playlist.mediaSequence;

  // if a start time is available for the segment immediately following
  // the interval, use it
  var segment = playlist.segments[i];

  // Walk backward until we find the latest segment with timeline
  // information that is earlier than endSequence
  if (segment) {
    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start,
        precise: true
      };
    }

    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - segment.duration,
        precise: true
      };
    }
  }

  while (i--) {
    segment = playlist.segments[i];

    if (typeof segment.end !== 'undefined') {
      return {
        result: result + segment.end,
        precise: true
      };
    }

    result += segment.duration;

    if (typeof segment.start !== 'undefined') {
      return {
        result: result + segment.start,
        precise: true
      };
    }
  }

  return {
    result: result,
    precise: false
  };
};
/**
 * Walk forward until we find a duration we can use
 * or return a failure
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {number} endSequence the mediaSequence to stop walking on
 */

var forwardDuration = function forwardDuration(playlist, endSequence) {
  var result = 0;
  var segment;
  var i = endSequence - playlist.mediaSequence;

  // Walk forward until we find the earliest segment with timeline
  // information
  for (; i < playlist.segments.length; i++) {
    segment = playlist.segments[i];

    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start - result,
        precise: true
      };
    }

    result += segment.duration;

    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - result,
        precise: true
      };
    }
  }

  // indicate we didn't find a useful duration estimate
  return {
    result: -1,
    precise: false
  };
};
/**
 * Calculate the media duration from the segments associated with a
 * playlist. The duration of a subinterval of the available segments
 * may be calculated by specifying an end index.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper boundary
 * for the playlist. Defaults to playlist length.
 * @param {number} expired the amount of time that has dropped
 * off the front of the playlist in a live scenario
 * @return {number} the duration between the first available segment
 * and end index.
 */

var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
  if (typeof endSequence === 'undefined') {
    endSequence = playlist.mediaSequence + playlist.segments.length;
  }

  if (endSequence < playlist.mediaSequence) {
    return 0;
  }

  // do a backward walk to estimate the duration
  var backward = backwardDuration(playlist, endSequence);

  if (backward.precise) {
    // if we were able to base our duration estimate on timing
    // information provided directly from the Media Source, return
    // it
    return backward.result;
  }

  // walk forward to see if a precise duration estimate can be made
  // that way
  var forward = forwardDuration(playlist, endSequence);

  if (forward.precise) {
    // we found a segment that has been buffered and so its
    // position is known precisely
    return forward.result;
  }

  // return the less-precise, playlist-based duration estimate
  return backward.result + expired;
};
/**
 * Calculates the duration of a playlist. If a start and end index
 * are specified, the duration will be for the subset of the media
 * timeline between those two indices. The total duration for live
 * playlists is always Infinity.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper
 * boundary for the playlist. Defaults to the playlist media
 * sequence number plus its length.
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @return {number} the duration between the start index and end
 * index.
 */

var duration = function duration(playlist, endSequence, expired) {
  if (!playlist) {
    return 0;
  }

  if (typeof expired !== 'number') {
    expired = 0;
  }

  // if a slice of the total duration is not requested, use
  // playlist-level duration indicators when they're present
  if (typeof endSequence === 'undefined') {
    // if present, use the duration specified in the playlist
    if (playlist.totalDuration) {
      return playlist.totalDuration;
    }

    // duration should be Infinity for live playlists
    if (!playlist.endList) {
      return window.Infinity;
    }
  }

  // calculate the total duration based on the segment durations
  return intervalDuration(playlist, endSequence, expired);
};
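
// Illustrative usage (not part of the library): a live playlist (no
// endList) reports Infinity, while a VOD playlist sums its segments.
//
//   duration({mediaSequence: 0, segments: [{duration: 10}]});
//   // => Infinity (live)
//   duration({endList: true, mediaSequence: 0, segments: [{duration: 10}]});
//   // => 10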
/**
 * Calculate the time between two indexes in the current playlist.
 * Neither the start index nor the end index needs to be within the
 * current playlist, in which case defaultDuration is used to
 * approximate the durations of the out-of-range elements.
 *
 * @param {Array} options.durationList list to iterate over for durations.
 * @param {number} options.defaultDuration duration to use for elements before or after the durationList
 * @param {number} options.startIndex partsAndSegments index to start
 * @param {number} options.endIndex partsAndSegments index to end.
 * @return {number} the number of seconds between startIndex and endIndex
 */

var sumDurations = function sumDurations(_ref2) {
  var defaultDuration = _ref2.defaultDuration,
      durationList = _ref2.durationList,
      startIndex = _ref2.startIndex,
      endIndex = _ref2.endIndex;
  var durations = 0;

  if (startIndex > endIndex) {
    var _ref3 = [endIndex, startIndex];
    startIndex = _ref3[0];
    endIndex = _ref3[1];
  }

  if (startIndex < 0) {
    for (var i = startIndex; i < Math.min(0, endIndex); i++) {
      durations += defaultDuration;
    }

    startIndex = 0;
  }

  for (var _i = startIndex; _i < endIndex; _i++) {
    durations += durationList[_i].duration;
  }

  return durations;
};
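
// Illustrative usage (not part of the library): indexes below zero fall
// outside the list and are approximated with defaultDuration.
//
//   sumDurations({
//     defaultDuration: 6,
//     durationList: [{duration: 4}, {duration: 5}],
//     startIndex: -1,
//     endIndex: 2
//   }); // => 6 + 4 + 5 = 15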
/**
 * Calculates the playlist end time
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @param {boolean} useSafeLiveEnd a boolean value indicating whether or not the
 * playlist end calculation should consider the safe live end
 * (truncate the playlist end by three segments). This is normally
 * used for calculating the end of the playlist's seekable range.
 * This takes into account the value of liveEdgePadding.
 * Setting liveEdgePadding to 0 is equivalent to setting this to false.
 * @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
 * If this is provided, it is used in the safe live end calculation.
 * Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
 * Corresponds to suggestedPresentationDelay in DASH manifests.
 * @return {number} the end time of playlist
 * @function playlistEnd
 */

var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
  if (!playlist || !playlist.segments) {
    return null;
  }

  if (playlist.endList) {
    return duration(playlist);
  }

  if (expired === null) {
    return null;
  }

  expired = expired || 0;
  var lastSegmentTime = intervalDuration(playlist, playlist.mediaSequence + playlist.segments.length, expired);

  if (useSafeLiveEnd) {
    liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
    lastSegmentTime -= liveEdgePadding;
  }

  // don't return a time less than zero
  return Math.max(0, lastSegmentTime);
};
/**
 * Calculates the interval of time that is currently seekable in a
 * playlist. The returned time ranges are relative to the earliest
 * moment in the specified playlist that is still available. A full
 * seekable implementation for live streams would need to offset
 * these values by the duration of content that has expired from the
 * stream.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
 * Corresponds to suggestedPresentationDelay in DASH manifests.
 * @return {TimeRanges} the periods of time that are valid targets
 * for seeking
 */

var seekable = function seekable(playlist, expired, liveEdgePadding) {
  var useSafeLiveEnd = true;
  var seekableStart = expired || 0;
  var seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);

  if (seekableEnd === null) {
    return createTimeRange();
  }

  return createTimeRange(seekableStart, seekableEnd);
};
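
// Illustrative usage (not part of the library): for a hypothetical live
// playlist with three 6-second segments, the default safe live end holds
// back three target durations, so nothing is seekable yet; a smaller
// liveEdgePadding opens the window up.
//
//   var live = {mediaSequence: 0, targetDuration: 6, segments: [
//     {duration: 6}, {duration: 6}, {duration: 6}
//   ]};
//   printableRange(seekable(live, 0));    // => '0 => 0'
//   printableRange(seekable(live, 0, 6)); // => '0 => 12'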
/**
 * Determine the index and estimated starting time of the segment that
 * contains a specified playback position in a media playlist.
 *
 * @param {Object} options.playlist the media playlist to query
 * @param {number} options.currentTime The number of seconds since the earliest
 * possible position to determine the containing segment for
 * @param {number} options.startTime the time when the segment/part starts
 * @param {number} options.startingSegmentIndex the segment index to start looking at.
 * @param {number?} [options.startingPartIndex] the part index to look at within the segment.
 *
 * @return {Object} an object with partIndex, segmentIndex, and startTime.
 */

var getMediaInfoForTime = function getMediaInfoForTime(_ref4) {
  var playlist = _ref4.playlist,
      currentTime = _ref4.currentTime,
      startingSegmentIndex = _ref4.startingSegmentIndex,
      startingPartIndex = _ref4.startingPartIndex,
      startTime = _ref4.startTime,
      experimentalExactManifestTimings = _ref4.experimentalExactManifestTimings;
  var time = currentTime - startTime;
  var partsAndSegments = getPartsAndSegments(playlist);
  var startIndex = 0;

  for (var i = 0; i < partsAndSegments.length; i++) {
    var partAndSegment = partsAndSegments[i];

    if (startingSegmentIndex !== partAndSegment.segmentIndex) {
      continue;
    }

    // skip this if the part index does not match.
    if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {
      continue;
    }

    startIndex = i;
    break;
  }

  if (time < 0) {
    // Walk backward from startIndex in the playlist, adding durations
    // until we find a segment that contains `time` and return it
    if (startIndex > 0) {
      for (var _i2 = startIndex - 1; _i2 >= 0; _i2--) {
        var _partAndSegment = partsAndSegments[_i2];
        time += _partAndSegment.duration;

        if (experimentalExactManifestTimings) {
          if (time < 0) {
            continue;
          }
        } else if (time + TIME_FUDGE_FACTOR <= 0) {
          continue;
        }

        return {
          partIndex: _partAndSegment.partIndex,
          segmentIndex: _partAndSegment.segmentIndex,
          startTime: startTime - sumDurations({
            defaultDuration: playlist.targetDuration,
            durationList: partsAndSegments,
            startIndex: startIndex,
            endIndex: _i2
          })
        };
      }
    }

    // We were unable to find a good segment within the playlist
    // so select the first segment
    return {
      partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
      segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
      startTime: currentTime
    };
  }

  // When startIndex is negative, we first walk forward to the first segment
  // adding target durations. If we "run out of time" before getting to
  // the first segment, return the first segment
  if (startIndex < 0) {
    for (var _i3 = startIndex; _i3 < 0; _i3++) {
      time -= playlist.targetDuration;

      if (time < 0) {
        return {
          partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
          segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
          startTime: currentTime
        };
      }
    }

    startIndex = 0;
  }

  // Walk forward from startIndex in the playlist, subtracting durations
  // until we find a segment that contains `time` and return it
  for (var _i4 = startIndex; _i4 < partsAndSegments.length; _i4++) {
    var _partAndSegment2 = partsAndSegments[_i4];
    time -= _partAndSegment2.duration;

    if (experimentalExactManifestTimings) {
      if (time > 0) {
        continue;
      }
    } else if (time - TIME_FUDGE_FACTOR >= 0) {
      continue;
    }

    return {
      partIndex: _partAndSegment2.partIndex,
      segmentIndex: _partAndSegment2.segmentIndex,
      startTime: startTime + sumDurations({
        defaultDuration: playlist.targetDuration,
        durationList: partsAndSegments,
        startIndex: startIndex,
        endIndex: _i4
      })
    };
  }

  // We are out of possible candidates so load the last one...
  return {
    segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
    partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
    startTime: currentTime
  };
};
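
// Illustrative usage (not part of the library): ask which segment of a
// hypothetical two-segment playlist contains 15 seconds, starting the
// search from the head of the playlist.
//
//   getMediaInfoForTime({
//     playlist: {targetDuration: 10, segments: [{duration: 10}, {duration: 10}]},
//     currentTime: 15,
//     startingSegmentIndex: 0,
//     startTime: 0
//   });
//   // => {partIndex: null, segmentIndex: 1, startTime: 10}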
/**
 * Check whether the playlist is blacklisted or not.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is blacklisted or not
 * @function isBlacklisted
 */

var isBlacklisted = function isBlacklisted(playlist) {
  return playlist.excludeUntil && playlist.excludeUntil > Date.now();
};
/**
 * Check whether the playlist is compatible with current playback configuration or has
 * been blacklisted permanently for being incompatible.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is incompatible or not
 * @function isIncompatible
 */

var isIncompatible = function isIncompatible(playlist) {
  return playlist.excludeUntil && playlist.excludeUntil === Infinity;
};
/**
 * Check whether the playlist is enabled or not.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is enabled or not
 * @function isEnabled
 */

var isEnabled = function isEnabled(playlist) {
  var blacklisted = isBlacklisted(playlist);
  return !playlist.disabled && !blacklisted;
};
/**
 * Check whether the playlist has been manually disabled through the representations api.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is disabled manually or not
 * @function isDisabled
 */

var isDisabled = function isDisabled(playlist) {
  return playlist.disabled;
};
/**
 * Returns whether the current playlist is an AES encrypted HLS stream
 *
 * @return {boolean} true if it's an AES encrypted HLS stream
 */

var isAes = function isAes(media) {
  for (var i = 0; i < media.segments.length; i++) {
    if (media.segments[i].key) {
      return true;
    }
  }

  return false;
};
/**
 * Checks if the playlist has a value for the specified attribute
 *
 * @param {string} attr
 *        Attribute to check for
 * @param {Object} playlist
 *        The media playlist object
 * @return {boolean}
 *         Whether the playlist contains a value for the attribute or not
 * @function hasAttribute
 */

var hasAttribute = function hasAttribute(attr, playlist) {
  return playlist.attributes && playlist.attributes[attr];
};
/**
 * Estimates the time required to complete a segment download from the specified playlist
 *
 * @param {number} segmentDuration
 *        Duration of requested segment
 * @param {number} bandwidth
 *        Current measured bandwidth of the player
 * @param {Object} playlist
 *        The media playlist object
 * @param {number=} bytesReceived
 *        Number of bytes already received for the request. Defaults to 0
 * @return {number|NaN}
 *         The estimated time to request the segment. NaN if bandwidth information for
 *         the given playlist is unavailable
 * @function estimateSegmentRequestTime
 */

var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist, bytesReceived) {
  if (bytesReceived === void 0) {
    bytesReceived = 0;
  }

  if (!hasAttribute('BANDWIDTH', playlist)) {
    return NaN;
  }

  var size = segmentDuration * playlist.attributes.BANDWIDTH;
  return (size - bytesReceived * 8) / bandwidth;
};
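
// Illustrative usage (not part of the library): a 6 second segment from a
// 2 Mbps rendition is roughly 12 Mbits; over a measured 4 Mbps connection
// that's about 3 seconds, less any bits already received.
//
//   var rendition = {attributes: {BANDWIDTH: 2e6}};
//   estimateSegmentRequestTime(6, 4e6, rendition);        // => 3
//   estimateSegmentRequestTime(6, 4e6, rendition, 5e5);   // => 2 (4 Mbits in)
//   estimateSegmentRequestTime(6, 4e6, {attributes: {}}); // => NaN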
/*
 * Returns whether the current playlist is the lowest rendition
 *
 * @return {Boolean} true if on lowest rendition
 */

var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
  if (master.playlists.length === 1) {
    return true;
  }

  var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
  return master.playlists.filter(function (playlist) {
    if (!isEnabled(playlist)) {
      return false;
    }

    return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
  }).length === 0;
};

var playlistMatch = function playlistMatch(a, b) {
  // both playlists are null
  // or only one playlist is non-null
  // no match
  if (!a && !b || !a && b || a && !b) {
    return false;
  }

  // playlist objects are the same, match
  if (a === b) {
    return true;
  }

  // first try to use id as it should be the most
  // accurate
  if (a.id && b.id && a.id === b.id) {
    return true;
  }

  // next try to use resolvedUri as it should be the
  // second most accurate.
  if (a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri) {
    return true;
  }

  // finally try to use uri as it should be accurate
  // but might miss a few cases for relative uris
  if (a.uri && b.uri && a.uri === b.uri) {
    return true;
  }

  return false;
};

var someAudioVariant = function someAudioVariant(master, callback) {
  var AUDIO = master && master.mediaGroups && master.mediaGroups.AUDIO || {};
  var found = false;

  for (var groupName in AUDIO) {
    for (var label in AUDIO[groupName]) {
      found = callback(AUDIO[groupName][label]);

      if (found) {
        break;
      }
    }

    if (found) {
      break;
    }
  }

  return !!found;
};

var isAudioOnly = function isAudioOnly(master) {
  // if there are no main playlists, we are audio only when
  // at least one media group playlist or uri exists.
  if (!master || !master.playlists || !master.playlists.length) {
    var found = someAudioVariant(master, function (variant) {
      return variant.playlists && variant.playlists.length || variant.uri;
    });
    return found;
  }

  // if every playlist has only an audio codec it is audio only
  var _loop = function _loop(i) {
    var playlist = master.playlists[i];
    var CODECS = playlist.attributes && playlist.attributes.CODECS;

    // all codecs are audio, this is an audio playlist.
    if (CODECS && CODECS.split(',').every(function (c) {
      return isAudioCodec(c);
    })) {
      return "continue";
    }

    // if the playlist is in an audio group it is audio only
    var found = someAudioVariant(master, function (variant) {
      return playlistMatch(playlist, variant);
    });

    if (found) {
      return "continue";
    }

    // if we make it here this playlist isn't audio and we
    // are not audio only
    return {
      v: false
    };
  };

  for (var i = 0; i < master.playlists.length; i++) {
    var _ret = _loop(i);

    if (_ret === "continue") continue;
    if (typeof _ret === "object") return _ret.v;
  }

  // if we make it past every playlist without returning, then
  // this is an audio only playlist.
  return true;
};
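
// Illustrative usage (not part of the library): a master whose only
// playlist advertises an audio-only codec is treated as audio only.
//
//   isAudioOnly({playlists: [{attributes: {CODECS: 'mp4a.40.2'}}]});
//   // => true
//   isAudioOnly({playlists: [{attributes: {CODECS: 'avc1.4d401f,mp4a.40.2'}}]});
//   // => false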

// exports
var Playlist = {
  liveEdgeDelay: liveEdgeDelay,
  duration: duration,
  seekable: seekable,
  getMediaInfoForTime: getMediaInfoForTime,
  isEnabled: isEnabled,
  isDisabled: isDisabled,
  isBlacklisted: isBlacklisted,
  isIncompatible: isIncompatible,
  playlistEnd: playlistEnd,
  isAes: isAes,
  hasAttribute: hasAttribute,
  estimateSegmentRequestTime: estimateSegmentRequestTime,
  isLowestEnabledRendition: isLowestEnabledRendition,
  isAudioOnly: isAudioOnly,
  playlistMatch: playlistMatch
};

var log = videojs.log;

var createPlaylistID = function createPlaylistID(index, uri) {
  return index + "-" + uri;
};
/**
 * Parses a given m3u8 playlist
 *
 * @param {Function} [onwarn]
 *        a function to call when the parser triggers a warning event.
 * @param {Function} [oninfo]
 *        a function to call when the parser triggers an info event.
 * @param {string} manifestString
 *        The downloaded manifest string
 * @param {Object[]} [customTagParsers]
 *        An array of custom tag parsers for the m3u8-parser instance
 * @param {Object[]} [customTagMappers]
 *        An array of custom tag mappers for the m3u8-parser instance
 * @param {boolean} [experimentalLLHLS=false]
 *        Whether to keep LL-HLS features in the manifest after parsing.
 * @return {Object}
 *         The manifest object
 */

var parseManifest = function parseManifest(_ref) {
  var onwarn = _ref.onwarn,
      oninfo = _ref.oninfo,
      manifestString = _ref.manifestString,
      _ref$customTagParsers = _ref.customTagParsers,
      customTagParsers = _ref$customTagParsers === void 0 ? [] : _ref$customTagParsers,
      _ref$customTagMappers = _ref.customTagMappers,
      customTagMappers = _ref$customTagMappers === void 0 ? [] : _ref$customTagMappers,
      experimentalLLHLS = _ref.experimentalLLHLS;
  var parser = new Parser();

  if (onwarn) {
    parser.on('warn', onwarn);
  }

  if (oninfo) {
    parser.on('info', oninfo);
  }

  customTagParsers.forEach(function (customParser) {
    return parser.addParser(customParser);
  });
  customTagMappers.forEach(function (mapper) {
    return parser.addTagMapper(mapper);
  });
  parser.push(manifestString);
  parser.end();
  var manifest = parser.manifest;

  // remove LL-HLS features from the parsed manifest
  // if we don't want LL-HLS support.
  if (!experimentalLLHLS) {
    ['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration'].forEach(function (k) {
      if (manifest.hasOwnProperty(k)) {
        delete manifest[k];
      }
    });

    if (manifest.segments) {
      manifest.segments.forEach(function (segment) {
        ['parts', 'preloadHints'].forEach(function (k) {
          if (segment.hasOwnProperty(k)) {
            delete segment[k];
          }
        });
      });
    }
  }

  if (!manifest.targetDuration) {
    var targetDuration = 10;

    if (manifest.segments && manifest.segments.length) {
      targetDuration = manifest.segments.reduce(function (acc, s) {
        return Math.max(acc, s.duration);
      }, 0);
    }

    if (onwarn) {
      onwarn("manifest has no targetDuration defaulting to " + targetDuration);
    }

    manifest.targetDuration = targetDuration;
  }

  var parts = getLastParts(manifest);

  if (parts.length && !manifest.partTargetDuration) {
    var partTargetDuration = parts.reduce(function (acc, p) {
      return Math.max(acc, p.duration);
    }, 0);

    if (onwarn) {
      onwarn("manifest has no partTargetDuration defaulting to " + partTargetDuration);
      log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. Playback is not guaranteed.');
    }

    manifest.partTargetDuration = partTargetDuration;
  }

  return manifest;
};
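
// Illustrative usage (not part of the library): parse a minimal media
// playlist string; the missing target duration is defaulted from the
// longest segment and reported through the hypothetical onwarn callback.
//
//   var manifest = parseManifest({
//     onwarn: function (msg) { console.log('warn:', msg); },
//     manifestString: [
//       '#EXTM3U',
//       '#EXTINF:6,',
//       'segment0.ts',
//       '#EXT-X-ENDLIST'
//     ].join('\n')
//   });
//   manifest.targetDuration; // => 6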
/**
 * Loops through all supported media groups in master and calls the provided
 * callback for each group
 *
 * @param {Object} master
 *        The parsed master manifest object
 * @param {Function} callback
 *        Callback to call for each media group
 */

var forEachMediaGroup = function forEachMediaGroup(master, callback) {
  if (!master.mediaGroups) {
    return;
  }

  ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
    if (!master.mediaGroups[mediaType]) {
      return;
    }

    for (var groupKey in master.mediaGroups[mediaType]) {
      for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
        var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
        callback(mediaProperties, mediaType, groupKey, labelKey);
      }
    }
  });
};
/**
 * Adds properties and attributes to the playlist to keep consistent functionality for
 * playlists throughout VHS.
 *
 * @param {Object} config
 *        Arguments object
 * @param {Object} config.playlist
 *        The media playlist
 * @param {string} [config.uri]
 *        The uri to the media playlist (if media playlist is not from within a master
 *        playlist)
 * @param {string} config.id
 *        ID to use for the playlist
 */

var setupMediaPlaylist = function setupMediaPlaylist(_ref2) {
  var playlist = _ref2.playlist,
      uri = _ref2.uri,
      id = _ref2.id;
  playlist.id = id;
  playlist.playlistErrors_ = 0;

  if (uri) {
    // For media playlists, m3u8-parser does not have access to a URI, as HLS media
    // playlists do not contain their own source URI, but one is needed for consistency in
    // VHS.
    playlist.uri = uri;
  }

  // For HLS master playlists, even though certain attributes MUST be defined, the
  // stream may still be played without them.
  // For HLS media playlists, m3u8-parser does not attach an attributes object to the
  // manifest.
  //
  // To avoid undefined reference errors throughout the project, and to make the code
  // easier to write/read, add an empty attributes object for these cases.
  playlist.attributes = playlist.attributes || {};
};
/**
 * Adds ID, resolvedUri, and attributes properties to each playlist of the master, where
 * necessary. In addition, creates playlist IDs for each playlist and adds both ID and
 * URI references for each playlist to the playlists array.
 *
 * @param {Object} master
 *        The master playlist
 */

var setupMediaPlaylists = function setupMediaPlaylists(master) {
  var i = master.playlists.length;

  while (i--) {
    var playlist = master.playlists[i];
    setupMediaPlaylist({
      playlist: playlist,
      id: createPlaylistID(i, playlist.uri)
    });
    playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
    master.playlists[playlist.id] = playlist;

    // URI reference added for backwards compatibility
    master.playlists[playlist.uri] = playlist;

    // Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,
    // the stream can be played without it. Although an attributes property may have been
    // added to the playlist to prevent undefined references, issue a warning to fix the
    // manifest.
    if (!playlist.attributes.BANDWIDTH) {
      log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
    }
  }
};
/**
 * Adds resolvedUri properties to each media group.
 *
 * @param {Object} master
 *        The master playlist
 */

var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
  forEachMediaGroup(master, function (properties) {
    if (properties.uri) {
      properties.resolvedUri = resolveUrl(master.uri, properties.uri);
    }
  });
};
/**
 * Creates a master playlist wrapper to insert a sole media playlist into.
 *
 * @param {Object} media
 *        Media playlist
 * @param {string} uri
 *        The media URI
 *
 * @return {Object}
 *         Master playlist
 */

var masterForMedia = function masterForMedia(media, uri) {
  var id = createPlaylistID(0, uri);
  var master = {
    mediaGroups: {
      'AUDIO': {},
      'VIDEO': {},
      'CLOSED-CAPTIONS': {},
      'SUBTITLES': {}
    },
    uri: window.location.href,
    resolvedUri: window.location.href,
    playlists: [{
      uri: uri,
      id: id,
      resolvedUri: uri,
      // m3u8-parser does not attach an attributes property to media playlists so make
      // sure that the property is attached to avoid undefined reference errors
      attributes: {}
    }]
  };

  // set up ID reference
  master.playlists[id] = master.playlists[0];

  // URI reference added for backwards compatibility
  master.playlists[uri] = master.playlists[0];
  return master;
};
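
// Illustrative usage (not part of the library): wrapping a standalone
// media playlist URI (the name below is hypothetical) produces a master
// whose single playlist entry is reachable by index, by ID, and by URI.
//
//   var master = masterForMedia({}, 'media.m3u8');
//   master.playlists[0] === master.playlists['0-media.m3u8']; // => true
//   master.playlists[0] === master.playlists['media.m3u8'];   // => true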
/**
 * Does an in-place update of the master manifest to add updated playlist URI references
 * as well as other properties needed by VHS that aren't included by the parser.
 *
 * @param {Object} master
 *        Master manifest object
 * @param {string} uri
 *        The source URI
 */

var addPropertiesToMaster = function addPropertiesToMaster(master, uri) {
  master.uri = uri;

  for (var i = 0; i < master.playlists.length; i++) {
    if (!master.playlists[i].uri) {
      // Set up phony URIs for the playlists since playlists are referenced by their URIs
      // throughout VHS, but some formats (e.g., DASH) don't have external URIs
      // TODO: consider adding dummy URIs in mpd-parser
      var phonyUri = "placeholder-uri-" + i;
      master.playlists[i].uri = phonyUri;
    }
  }

  var audioOnlyMaster = isAudioOnly(master);
  forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
    var groupId = "placeholder-uri-" + mediaType + "-" + groupKey + "-" + labelKey;

    // add a playlist array under properties
    if (!properties.playlists || !properties.playlists.length) {
      // If the manifest is audio only and this media group does not have a uri, check
      // if the media group is located in the main list of playlists. If it is, don't add
      // placeholder properties as it shouldn't be considered an alternate audio track.
      if (audioOnlyMaster && mediaType === 'AUDIO' && !properties.uri) {
        for (var _i = 0; _i < master.playlists.length; _i++) {
          var p = master.playlists[_i];

          if (p.attributes && p.attributes.AUDIO && p.attributes.AUDIO === groupKey) {
            return;
          }
        }
      }

      properties.playlists = [_extends({}, properties)];
    }

    properties.playlists.forEach(function (p, i) {
      var id = createPlaylistID(i, groupId);

      if (p.uri) {
        p.resolvedUri = p.resolvedUri || resolveUrl(master.uri, p.uri);
      } else {
        // DEPRECATED: this fallback has been kept to prevent a breaking change.
        // Previously we only ever had a single media group playlist, so the first
        // playlist's uri is marked without prepending the index, as we used to do.
        // Ideally we would handle all of the playlists the same way.
        p.uri = i === 0 ? groupId : id;

        // don't resolve a placeholder uri to an absolute url, just use
        // the placeholder again
        p.resolvedUri = p.uri;
      }

      p.id = p.id || id;

      // add an empty attributes object, all playlists are
      // expected to have this.
      p.attributes = p.attributes || {};

      // set up ID and URI references (URI for backwards compatibility)
      master.playlists[p.id] = p;
      master.playlists[p.uri] = p;
    });
  });
  setupMediaPlaylists(master);
  resolveMediaGroupUris(master);
};
1465
1466var mergeOptions$2 = videojs.mergeOptions,
1467 EventTarget$1 = videojs.EventTarget;
1468
1469var addLLHLSQueryDirectives = function addLLHLSQueryDirectives(uri, media) {
1470 if (media.endList || !media.serverControl) {
1471 return uri;
1472 }
1473
1474 var parameters = {};
1475
1476 if (media.serverControl.canBlockReload) {
1477 var preloadSegment = media.preloadSegment; // next msn is a zero based value, length is not.
1478
1479 var nextMSN = media.mediaSequence + media.segments.length; // If preload segment has parts then it is likely
1480 // that we are going to request a part of that preload segment.
1481 // the logic below is used to determine that.
1482
1483 if (preloadSegment) {
1484 var parts = preloadSegment.parts || []; // _HLS_part is a zero based index
1485
1486 var nextPart = getKnownPartCount(media) - 1; // if nextPart is > -1 and not equal to just the
1487 // length of parts, then we know we had part preload hints
1488 // and we need to add the _HLS_part= query
1489
1490 if (nextPart > -1 && nextPart !== parts.length - 1) {
1491 // add existing parts to our preload hints
1492 // eslint-disable-next-line
1493 parameters._HLS_part = nextPart;
1494 } // this if statement makes sure that we request the msn
1495 // of the preload segment if:
1496 // 1. the preload segment had parts (and was not yet a full segment)
1497 // but was added to our segments array
1498 // 2. the preload segment had preload hints for parts that are not in
1499 // the manifest yet.
1500 // in all other cases we want the segment after the preload segment
1501 // which will be given by using media.segments.length because it is 1 based
1502 // rather than 0 based.
1503
1504
1505 if (nextPart > -1 || parts.length) {
1506 nextMSN--;
1507 }
1508 } // add _HLS_msn= in front of any _HLS_part query
1509 // eslint-disable-next-line
1510
1511
1512 parameters._HLS_msn = nextMSN;
1513 }
1514
1515 if (media.serverControl && media.serverControl.canSkipUntil) {
1516 // add _HLS_skip= infront of all other queries.
1517 // eslint-disable-next-line
1518 parameters._HLS_skip = media.serverControl.canSkipDateranges ? 'v2' : 'YES';
1519 }
1520
1521 if (Object.keys(parameters).length) {
1522 var parsedUri = new window.URL(uri);
1523 ['_HLS_skip', '_HLS_msn', '_HLS_part'].forEach(function (name) {
1524 if (!parameters.hasOwnProperty(name)) {
1525 return;
1526 }
1527
1528 parsedUri.searchParams.set(name, parameters[name]);
1529 });
1530 uri = parsedUri.toString();
1531 }
1532
1533 return uri;
1534};
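
// Illustrative usage (not part of the library): a hypothetical blocking
// reload playlist with two full segments and a two-part preload segment
// (already pushed onto segments, with one more part hinted) asks the
// server to block until the next part of that segment exists.
//
//   var preload = {parts: [{}, {}], preloadHints: [{type: 'PART'}]};
//   addLLHLSQueryDirectives('https://example.com/media.m3u8', {
//     mediaSequence: 100,
//     segments: [{}, {}, preload],
//     serverControl: {canBlockReload: true},
//     preloadSegment: preload
//   });
//   // => 'https://example.com/media.m3u8?_HLS_msn=102&_HLS_part=2'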
/**
 * Returns a new segment object with properties and
 * the parts array merged.
 *
 * @param {Object} a the old segment
 * @param {Object} b the new segment
 *
 * @return {Object} the merged segment
 */

var updateSegment = function updateSegment(a, b) {
  if (!a) {
    return b;
  }

  var result = mergeOptions$2(a, b);

  // if only the old segment has preload hints
  // and the new one does not, remove preload hints.
  if (a.preloadHints && !b.preloadHints) {
    delete result.preloadHints;
  }

  // if only the old segment has parts
  // then the parts are no longer valid
  if (a.parts && !b.parts) {
    delete result.parts;
  // if both segments have parts
  // copy part properties from the old segment
  // to the new one.
  } else if (a.parts && b.parts) {
    for (var i = 0; i < b.parts.length; i++) {
      if (a.parts && a.parts[i]) {
        result.parts[i] = mergeOptions$2(a.parts[i], b.parts[i]);
      }
    }
  }

  // set skipped to false for segments that have
  // had information merged from the old segment.
  if (!a.skipped && b.skipped) {
    result.skipped = false;
  }

  // set preload to false for segments that have
  // had information added in the new segment.
  if (a.preload && !b.preload) {
    result.preload = false;
  }

  return result;
};
/**
 * Returns a new array of segments that is the result of merging
 * properties from an older list of segments onto an updated
 * list. No properties on the updated playlist will be overwritten.
 *
 * @param {Array} original the outdated list of segments
 * @param {Array} update the updated list of segments
 * @param {number=} offset the index of the first update
 * segment in the original segment list. For non-live playlists,
 * this should always be zero and does not need to be
 * specified. For live playlists, it should be the difference
 * between the media sequence numbers in the original and updated
 * playlists.
 * @return {Array} a list of merged segment objects
 */

var updateSegments = function updateSegments(original, update, offset) {
  var oldSegments = original.slice();
  var newSegments = update.slice();
  offset = offset || 0;
  var result = [];
  var currentMap;

  for (var newIndex = 0; newIndex < newSegments.length; newIndex++) {
    var oldSegment = oldSegments[newIndex + offset];
    var newSegment = newSegments[newIndex];

    if (oldSegment) {
      currentMap = oldSegment.map || currentMap;
      result.push(updateSegment(oldSegment, newSegment));
    } else {
      // carry over map to new segment if it is missing
      if (currentMap && !newSegment.map) {
        newSegment.map = currentMap;
      }

      result.push(newSegment);
    }
  }

  return result;
};
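
// Illustrative usage (not part of the library): a live refresh drops the
// oldest segment and appends a new one; timing info captured on the old
// segment objects (hypothetical names below) is merged onto the
// overlapping entries.
//
//   var old = [{uri: 's0.ts'}, {uri: 's1.ts', start: 6}];
//   var next = [{uri: 's1.ts'}, {uri: 's2.ts'}];
//   updateSegments(old, next, 1);
//   // => [{uri: 's1.ts', start: 6}, {uri: 's2.ts'}]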
var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
  // preloadSegment will not have a uri at all
  // as the segment isn't actually in the manifest yet, only parts
  if (!segment.resolvedUri && segment.uri) {
    segment.resolvedUri = resolveUrl(baseUri, segment.uri);
  }

  if (segment.key && !segment.key.resolvedUri) {
    segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);
  }

  if (segment.map && !segment.map.resolvedUri) {
    segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);
  }

  if (segment.map && segment.map.key && !segment.map.key.resolvedUri) {
    segment.map.key.resolvedUri = resolveUrl(baseUri, segment.map.key.uri);
  }

  if (segment.parts && segment.parts.length) {
    segment.parts.forEach(function (p) {
      if (p.resolvedUri) {
        return;
      }

      p.resolvedUri = resolveUrl(baseUri, p.uri);
    });
  }

  if (segment.preloadHints && segment.preloadHints.length) {
    segment.preloadHints.forEach(function (p) {
      if (p.resolvedUri) {
        return;
      }

      p.resolvedUri = resolveUrl(baseUri, p.uri);
    });
  }
};

var getAllSegments = function getAllSegments(media) {
  var segments = media.segments || [];
  var preloadSegment = media.preloadSegment;

  // a preloadSegment with only preloadHints is not currently
  // a usable segment, only include a preloadSegment that has
  // parts.
  if (preloadSegment && preloadSegment.parts && preloadSegment.parts.length) {
    // if preloadHints has a MAP that means that the
    // init segment is going to change. We cannot use any of the parts
    // from this preload segment.
    if (preloadSegment.preloadHints) {
      for (var i = 0; i < preloadSegment.preloadHints.length; i++) {
        if (preloadSegment.preloadHints[i].type === 'MAP') {
          return segments;
        }
      }
    }

    // set the duration for our preload segment to target duration.
    preloadSegment.duration = media.targetDuration;
    preloadSegment.preload = true;
    segments.push(preloadSegment);
  }

  return segments;
};

// consider the playlist unchanged if the playlist object is the same or
// the number of segments is equal, the media sequence number is unchanged,
// and this playlist hasn't become the end of the playlist
var isPlaylistUnchanged = function isPlaylistUnchanged(a, b) {
  return a === b || a.segments && b.segments && a.segments.length === b.segments.length && a.endList === b.endList && a.mediaSequence === b.mediaSequence;
};
1701/**
1702 * Returns a new master playlist that is the result of merging an
1703 * updated media playlist into the original version. If the
1704 * updated media playlist does not match any of the playlist
1705 * entries in the original master playlist, null is returned.
1706 *
1707 * @param {Object} master a parsed master M3U8 object
1708 * @param {Object} media a parsed media M3U8 object
1709 * @return {Object} a new object that represents the original
1710 * master playlist with the updated media playlist merged in, or
1711 * null if the merge produced no change.
1712 */
1713
1714var updateMaster$1 = function updateMaster(master, newMedia, unchangedCheck) {
1715 if (unchangedCheck === void 0) {
1716 unchangedCheck = isPlaylistUnchanged;
1717 }
1718
1719 var result = mergeOptions$2(master, {});
1720 var oldMedia = result.playlists[newMedia.id];
1721
1722 if (!oldMedia) {
1723 return null;
1724 }
1725
1726 if (unchangedCheck(oldMedia, newMedia)) {
1727 return null;
1728 }
1729
1730 newMedia.segments = getAllSegments(newMedia);
1731 var mergedPlaylist = mergeOptions$2(oldMedia, newMedia); // always use the new media's preload segment
1732
1733 if (mergedPlaylist.preloadSegment && !newMedia.preloadSegment) {
1734 delete mergedPlaylist.preloadSegment;
1735 } // if the update could overlap existing segment information, merge the two segment lists
1736
1737
1738 if (oldMedia.segments) {
1739 if (newMedia.skip) {
1740 newMedia.segments = newMedia.segments || []; // add back in objects for skipped segments, so that we merge
1741 // old properties into the new segments
1742
1743 for (var i = 0; i < newMedia.skip.skippedSegments; i++) {
1744 newMedia.segments.unshift({
1745 skipped: true
1746 });
1747 }
1748 }
1749
1750 mergedPlaylist.segments = updateSegments(oldMedia.segments, newMedia.segments, newMedia.mediaSequence - oldMedia.mediaSequence);
1751 } // resolve any segment URIs to prevent us from having to do it later
1752
1753
1754 mergedPlaylist.segments.forEach(function (segment) {
1755 resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
1756 }); // TODO Right now in the playlists array there are two references to each playlist, one
1757 // that is referenced by index, and one by URI. The index reference may no longer be
1758 // necessary.
1759
1760 for (var _i = 0; _i < result.playlists.length; _i++) {
1761 if (result.playlists[_i].id === newMedia.id) {
1762 result.playlists[_i] = mergedPlaylist;
1763 }
1764 }
1765
1766 result.playlists[newMedia.id] = mergedPlaylist; // URI reference added for backwards compatibility
1767
1768 result.playlists[newMedia.uri] = mergedPlaylist; // update media group playlist references.
1769
1770 forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
1771 if (!properties.playlists) {
1772 return;
1773 }
1774
1775 for (var _i2 = 0; _i2 < properties.playlists.length; _i2++) {
1776 if (newMedia.id === properties.playlists[_i2].id) {
1777 properties.playlists[_i2] = newMedia;
1778 }
1779 }
1780 });
1781 return result;
1782};
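// Example (illustrative only; variable names are hypothetical): merging a
// freshly parsed live rendition refresh into the current master. A null
// result means the refresh produced no change:
//
//   var updated = updateMaster$1(currentMaster, refreshedMediaPlaylist);
//   if (updated) {
//     currentMaster = updated;
//   }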
1783/**
1784 * Calculates the time to wait before refreshing a live playlist
1785 *
1786 * @param {Object} media
1787 * The current media
1788 * @param {boolean} update
1789 * True if there were any updates from the last refresh, false otherwise
1790 * @return {number}
1791 * The time in ms to wait before refreshing the live playlist
1792 */
1793
1794var refreshDelay = function refreshDelay(media, update) {
1795 var segments = media.segments || [];
1796 var lastSegment = segments[segments.length - 1];
1797 var lastPart = lastSegment && lastSegment.parts && lastSegment.parts[lastSegment.parts.length - 1];
1798 var lastDuration = lastPart && lastPart.duration || lastSegment && lastSegment.duration;
1799
1800 if (update && lastDuration) {
1801 return lastDuration * 1000;
1802 } // if the playlist is unchanged since the last reload or last segment duration
1803 // cannot be determined, try again after half the target duration
1804
1805
1806 return (media.partTargetDuration || media.targetDuration || 10) * 500;
1807};
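// Example: an unchanged live playlist with targetDuration 6 and no
// low-latency parts is retried after half the target duration:
//
//   refreshDelay({ targetDuration: 6, segments: [] }, false); // -> 3000 (ms)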
1808/**
1809 * Load a playlist from a remote location
1810 *
1811 * @class PlaylistLoader
1812 * @extends EventTarget
1813 * @param {string|Object} src url of, or object representing, the manifest
1814 * @param {Object} vhs the VHS instance used to make requests
1815 * @param {Object} [options] options, including withCredentials and handleManifestRedirects
1816 */
1817
1818var PlaylistLoader = /*#__PURE__*/function (_EventTarget) {
1819 _inheritsLoose(PlaylistLoader, _EventTarget);
1820
1821 function PlaylistLoader(src, vhs, options) {
1822 var _this;
1823
1824 if (options === void 0) {
1825 options = {};
1826 }
1827
1828 _this = _EventTarget.call(this) || this;
1829
1830 if (!src) {
1831 throw new Error('A non-empty playlist URL or object is required');
1832 }
1833
1834 _this.logger_ = logger('PlaylistLoader');
1835 var _options = options,
1836 _options$withCredenti = _options.withCredentials,
1837 withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
1838 _options$handleManife = _options.handleManifestRedirects,
1839 handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
1840 _this.src = src;
1841 _this.vhs_ = vhs;
1842 _this.withCredentials = withCredentials;
1843 _this.handleManifestRedirects = handleManifestRedirects;
1844 var vhsOptions = vhs.options_;
1845 _this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];
1846 _this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || [];
1847 _this.experimentalLLHLS = vhsOptions && vhsOptions.experimentalLLHLS || false; // force experimentalLLHLS off for IE 11
1848
1849 if (videojs.browser.IE_VERSION) {
1850 _this.experimentalLLHLS = false;
1851 } // initialize the loader state
1852
1853
1854 _this.state = 'HAVE_NOTHING'; // live playlist staleness timeout
1855
1856 _this.handleMediaupdatetimeout_ = _this.handleMediaupdatetimeout_.bind(_assertThisInitialized(_this));
1857
1858 _this.on('mediaupdatetimeout', _this.handleMediaupdatetimeout_);
1859
1860 return _this;
1861 }
1862
1863 var _proto = PlaylistLoader.prototype;
1864
1865 _proto.handleMediaupdatetimeout_ = function handleMediaupdatetimeout_() {
1866 var _this2 = this;
1867
1868 if (this.state !== 'HAVE_METADATA') {
1869 // only refresh the media playlist if no other activity is going on
1870 return;
1871 }
1872
1873 var media = this.media();
1874 var uri = resolveUrl(this.master.uri, media.uri);
1875
1876 if (this.experimentalLLHLS) {
1877 uri = addLLHLSQueryDirectives(uri, media);
1878 }
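      // note (based on the LL-HLS spec; addLLHLSQueryDirectives is defined
      // earlier in this module): the appended delivery directives are query
      // params such as _HLS_msn and _HLS_part, which let the server hold this
      // request until the requested segment or part becomes available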
1879
1880 this.state = 'HAVE_CURRENT_METADATA';
1881 this.request = this.vhs_.xhr({
1882 uri: uri,
1883 withCredentials: this.withCredentials
1884 }, function (error, req) {
1885 // disposed
1886 if (!_this2.request) {
1887 return;
1888 }
1889
1890 if (error) {
1891 return _this2.playlistRequestError(_this2.request, _this2.media(), 'HAVE_METADATA');
1892 }
1893
1894 _this2.haveMetadata({
1895 playlistString: _this2.request.responseText,
1896 url: _this2.media().uri,
1897 id: _this2.media().id
1898 });
1899 });
1900 };
1901
1902 _proto.playlistRequestError = function playlistRequestError(xhr, playlist, startingState) {
1903 var uri = playlist.uri,
1904 id = playlist.id; // any in-flight request is now finished
1905
1906 this.request = null;
1907
1908 if (startingState) {
1909 this.state = startingState;
1910 }
1911
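    // the code below mirrors HTML MediaError values: 2 is MEDIA_ERR_NETWORK,
    // and 4 (used for 5xx responses) is MEDIA_ERR_SRC_NOT_SUPPORTED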
1912 this.error = {
1913 playlist: this.master.playlists[id],
1914 status: xhr.status,
1915 message: "HLS playlist request error at URL: " + uri + ".",
1916 responseText: xhr.responseText,
1917 code: xhr.status >= 500 ? 4 : 2
1918 };
1919 this.trigger('error');
1920 };
1921
1922 _proto.parseManifest_ = function parseManifest_(_ref) {
1923 var _this3 = this;
1924
1925 var url = _ref.url,
1926 manifestString = _ref.manifestString;
1927 return parseManifest({
1928 onwarn: function onwarn(_ref2) {
1929 var message = _ref2.message;
1930 return _this3.logger_("m3u8-parser warn for " + url + ": " + message);
1931 },
1932 oninfo: function oninfo(_ref3) {
1933 var message = _ref3.message;
1934 return _this3.logger_("m3u8-parser info for " + url + ": " + message);
1935 },
1936 manifestString: manifestString,
1937 customTagParsers: this.customTagParsers,
1938 customTagMappers: this.customTagMappers,
1939 experimentalLLHLS: this.experimentalLLHLS
1940 });
1941 }
1942 /**
1943 * Update the playlist loader's state in response to a new or updated playlist.
1944 *
1945 * @param {string} [playlistString]
1946 * Playlist string (if playlistObject is not provided)
1947 * @param {Object} [playlistObject]
1948 * Playlist object (if playlistString is not provided)
1949 * @param {string} url
1950 * URL of playlist
1951 * @param {string} id
1952 * ID to use for playlist
1953 */
1954 ;
1955
1956 _proto.haveMetadata = function haveMetadata(_ref4) {
1957 var playlistString = _ref4.playlistString,
1958 playlistObject = _ref4.playlistObject,
1959 url = _ref4.url,
1960 id = _ref4.id;
1961 // any in-flight request is now finished
1962 this.request = null;
1963 this.state = 'HAVE_METADATA';
1964 var playlist = playlistObject || this.parseManifest_({
1965 url: url,
1966 manifestString: playlistString
1967 });
1968 playlist.lastRequest = Date.now();
1969 setupMediaPlaylist({
1970 playlist: playlist,
1971 uri: url,
1972 id: id
1973 }); // merge this playlist into the master
1974
1975 var update = updateMaster$1(this.master, playlist);
1976 this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;
1977
1978 if (update) {
1979 this.master = update;
1980 this.media_ = this.master.playlists[id];
1981 } else {
1982 this.trigger('playlistunchanged');
1983 }
1984
1985 this.updateMediaUpdateTimeout_(refreshDelay(this.media(), !!update));
1986 this.trigger('loadedplaylist');
1987 }
1988 /**
1989 * Abort any outstanding work and clean up.
1990 */
1991 ;
1992
1993 _proto.dispose = function dispose() {
1994 this.trigger('dispose');
1995 this.stopRequest();
1996 window.clearTimeout(this.mediaUpdateTimeout);
1997 window.clearTimeout(this.finalRenditionTimeout);
1998 this.off();
1999 };
2000
2001 _proto.stopRequest = function stopRequest() {
2002 if (this.request) {
2003 var oldRequest = this.request;
2004 this.request = null;
2005 oldRequest.onreadystatechange = null;
2006 oldRequest.abort();
2007 }
2008 }
2009 /**
2010 * When called without any arguments, returns the currently
2011 * active media playlist. When called with a single argument,
2012 * triggers the playlist loader to asynchronously switch to the
2013 * specified media playlist. Calling this method while the
2014 * loader is in the HAVE_NOTHING state causes an error to be thrown
2015 * but otherwise has no effect.
2016 *
2017 * @param {Object=} playlist the parsed media playlist
2018 * object to switch to
2019 * @param {boolean=} shouldDelay whether we should delay the request by half target duration
2020 *
2021 * @return {Playlist} the current loaded media
2022 */
2023 ;
2024
2025 _proto.media = function media(playlist, shouldDelay) {
2026 var _this4 = this;
2027
2028 // getter
2029 if (!playlist) {
2030 return this.media_;
2031 } // setter
2032
2033
2034 if (this.state === 'HAVE_NOTHING') {
2035 throw new Error('Cannot switch media playlist from ' + this.state);
2036 } // find the playlist object if the target playlist has been
2037 // specified by URI
2038
2039
2040 if (typeof playlist === 'string') {
2041 if (!this.master.playlists[playlist]) {
2042 throw new Error('Unknown playlist URI: ' + playlist);
2043 }
2044
2045 playlist = this.master.playlists[playlist];
2046 }
2047
2048 window.clearTimeout(this.finalRenditionTimeout);
2049
2050 if (shouldDelay) {
2051 var delay = (playlist.partTargetDuration || playlist.targetDuration) / 2 * 1000 || 5 * 1000;
2052 this.finalRenditionTimeout = window.setTimeout(this.media.bind(this, playlist, false), delay);
2053 return;
2054 }
2055
2056 var startingState = this.state;
2057 var mediaChange = !this.media_ || playlist.id !== this.media_.id;
2058 var masterPlaylistRef = this.master.playlists[playlist.id]; // switch to fully loaded playlists immediately
2059
2060 if (masterPlaylistRef && masterPlaylistRef.endList || // handle the case of a playlist object (e.g., if using vhs-json with a resolved
2061 // media playlist or, for the case of demuxed audio, a resolved audio media group)
2062 playlist.endList && playlist.segments.length) {
2063 // abort outstanding playlist requests
2064 if (this.request) {
2065 this.request.onreadystatechange = null;
2066 this.request.abort();
2067 this.request = null;
2068 }
2069
2070 this.state = 'HAVE_METADATA';
2071 this.media_ = playlist; // trigger media change if the active media has been updated
2072
2073 if (mediaChange) {
2074 this.trigger('mediachanging');
2075
2076 if (startingState === 'HAVE_MASTER') {
2077 // The initial playlist was a master manifest, and the first media selected was
2078 // also provided (in the form of a resolved playlist object) as part of the
2079 // source object (rather than just a URL). Therefore, since the media playlist
2080 // doesn't need to be requested, loadedmetadata won't trigger as part of the
2081 // normal flow, and needs an explicit trigger here.
2082 this.trigger('loadedmetadata');
2083 } else {
2084 this.trigger('mediachange');
2085 }
2086 }
2087
2088 return;
2089 } // We update/set the timeout here so that live playlists
2090 // that are not a media change will "start" the loader as expected.
2091 // We expect that this function will start the media update timeout
2092 // cycle again. This also prevents a playlist switch failure from
2093 // causing us to stall during live.
2094
2095
2096 this.updateMediaUpdateTimeout_(refreshDelay(playlist, true)); // switching to the active playlist is a no-op
2097
2098 if (!mediaChange) {
2099 return;
2100 }
2101
2102 this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist request
2103
2104 if (this.request) {
2105 if (playlist.resolvedUri === this.request.url) {
2106 // requesting to switch to the same playlist multiple times
2107 // has no effect after the first
2108 return;
2109 }
2110
2111 this.request.onreadystatechange = null;
2112 this.request.abort();
2113 this.request = null;
2114 } // request the new playlist
2115
2116
2117 if (this.media_) {
2118 this.trigger('mediachanging');
2119 }
2120
2121 this.request = this.vhs_.xhr({
2122 uri: playlist.resolvedUri,
2123 withCredentials: this.withCredentials
2124 }, function (error, req) {
2125 // disposed
2126 if (!_this4.request) {
2127 return;
2128 }
2129
2130 playlist.lastRequest = Date.now();
2131 playlist.resolvedUri = resolveManifestRedirect(_this4.handleManifestRedirects, playlist.resolvedUri, req);
2132
2133 if (error) {
2134 return _this4.playlistRequestError(_this4.request, playlist, startingState);
2135 }
2136
2137 _this4.haveMetadata({
2138 playlistString: req.responseText,
2139 url: playlist.uri,
2140 id: playlist.id
2141 }); // fire loadedmetadata the first time a media playlist is loaded
2142
2143
2144 if (startingState === 'HAVE_MASTER') {
2145 _this4.trigger('loadedmetadata');
2146 } else {
2147 _this4.trigger('mediachange');
2148 }
2149 });
2150 }
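  // Example (illustrative only; the URI is hypothetical): switching renditions
  // on a loader that has already loaded a master playlist:
  //
  //   loader.media('playlist-720p.m3u8'); // async switch; fires 'mediachanging'
  //                                       // and then 'mediachange' on success
  //   var active = loader.media();        // getter form returns the active playlist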
2151 /**
2152 * pause loading of the playlist
2153 */
2154 ;
2155
2156 _proto.pause = function pause() {
2157 if (this.mediaUpdateTimeout) {
2158 window.clearTimeout(this.mediaUpdateTimeout);
2159 this.mediaUpdateTimeout = null;
2160 }
2161
2162 this.stopRequest();
2163
2164 if (this.state === 'HAVE_NOTHING') {
2165 // If we pause the loader before any data has been retrieved, it's as if we never
2166 // started, so reset to an unstarted state.
2167 this.started = false;
2168 } // Need to restore state now that no activity is happening
2169
2170
2171 if (this.state === 'SWITCHING_MEDIA') {
2172 // if the loader was in the process of switching media, it should either return to
2173 // HAVE_MASTER or HAVE_METADATA depending on if the loader has loaded a media
2174 // playlist yet. This is determined by the existence of loader.media_
2175 if (this.media_) {
2176 this.state = 'HAVE_METADATA';
2177 } else {
2178 this.state = 'HAVE_MASTER';
2179 }
2180 } else if (this.state === 'HAVE_CURRENT_METADATA') {
2181 this.state = 'HAVE_METADATA';
2182 }
2183 }
2184 /**
2185 * start or resume loading of the playlist
2186 */
2187 ;
2188
2189 _proto.load = function load(shouldDelay) {
2190 var _this5 = this;
2191
2192 if (this.mediaUpdateTimeout) {
2193 window.clearTimeout(this.mediaUpdateTimeout);
2194 this.mediaUpdateTimeout = null;
2195 }
2196
2197 var media = this.media();
2198
2199 if (shouldDelay) {
2200 var delay = media ? (media.partTargetDuration || media.targetDuration) / 2 * 1000 : 5 * 1000;
2201 this.mediaUpdateTimeout = window.setTimeout(function () {
2202 _this5.mediaUpdateTimeout = null;
2203
2204 _this5.load();
2205 }, delay);
2206 return;
2207 }
2208
2209 if (!this.started) {
2210 this.start();
2211 return;
2212 }
2213
2214 if (media && !media.endList) {
2215 this.trigger('mediaupdatetimeout');
2216 } else {
2217 this.trigger('loadedplaylist');
2218 }
2219 };
2220
2221 _proto.updateMediaUpdateTimeout_ = function updateMediaUpdateTimeout_(delay) {
2222 var _this6 = this;
2223
2224 if (this.mediaUpdateTimeout) {
2225 window.clearTimeout(this.mediaUpdateTimeout);
2226 this.mediaUpdateTimeout = null;
2227 } // we only use mediaupdatetimeout for live playlists.
2228
2229
2230 if (!this.media() || this.media().endList) {
2231 return;
2232 }
2233
2234 this.mediaUpdateTimeout = window.setTimeout(function () {
2235 _this6.mediaUpdateTimeout = null;
2236
2237 _this6.trigger('mediaupdatetimeout');
2238
2239 _this6.updateMediaUpdateTimeout_(delay);
2240 }, delay);
2241 }
2242 /**
2243 * start loading of the playlist
2244 */
2245 ;
2246
2247 _proto.start = function start() {
2248 var _this7 = this;
2249
2250 this.started = true;
2251
2252 if (typeof this.src === 'object') {
2253 // in the case of an entirely constructed manifest object (meaning there's no actual
2254 // manifest on a server), default the uri to the page's href
2255 if (!this.src.uri) {
2256 this.src.uri = window.location.href;
2257 } // resolvedUri is added on internally after the initial request. Since there's no
2258 // request for pre-resolved manifests, add on resolvedUri here.
2259
2260
2261 this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first
2262 // request can be skipped (since the top level of the manifest, at a minimum, is
2263 // already available as a parsed manifest object). However, if the manifest object
2264 // represents a master playlist, some media playlists may need to be resolved before
2265 // the starting segment list is available. Therefore, go directly to setup of the
2266 // initial playlist, and let the normal flow continue from there.
2267 //
2268 // Note that the call to setup is asynchronous, as other sections of VHS may assume
2269 // that the first request is asynchronous.
2270
2271 setTimeout(function () {
2272 _this7.setupInitialPlaylist(_this7.src);
2273 }, 0);
2274 return;
2275 } // request the specified URL
2276
2277
2278 this.request = this.vhs_.xhr({
2279 uri: this.src,
2280 withCredentials: this.withCredentials
2281 }, function (error, req) {
2282 // disposed
2283 if (!_this7.request) {
2284 return;
2285 } // clear the loader's request reference
2286
2287
2288 _this7.request = null;
2289
2290 if (error) {
2291 _this7.error = {
2292 status: req.status,
2293 message: "HLS playlist request error at URL: " + _this7.src + ".",
2294 responseText: req.responseText,
2295 // MEDIA_ERR_NETWORK
2296 code: 2
2297 };
2298
2299 if (_this7.state === 'HAVE_NOTHING') {
2300 _this7.started = false;
2301 }
2302
2303 return _this7.trigger('error');
2304 }
2305
2306 _this7.src = resolveManifestRedirect(_this7.handleManifestRedirects, _this7.src, req);
2307
2308 var manifest = _this7.parseManifest_({
2309 manifestString: req.responseText,
2310 url: _this7.src
2311 });
2312
2313 _this7.setupInitialPlaylist(manifest);
2314 });
2315 };
2316
2317 _proto.srcUri = function srcUri() {
2318 return typeof this.src === 'string' ? this.src : this.src.uri;
2319 }
2320 /**
2321 * Given a manifest object that's either a master or media playlist, trigger the proper
2322 * events and set the state of the playlist loader.
2323 *
2324 * If the manifest object represents a master playlist, `loadedplaylist` will be
2325 * triggered to allow listeners to select a playlist. If none is selected, the loader
2326 * will default to the first one in the playlists array.
2327 *
2328 * If the manifest object represents a media playlist, `loadedplaylist` will be
2329 * triggered followed by `loadedmetadata`, as the only available playlist is loaded.
2330 *
2331 * In the case of a media playlist, a master playlist object wrapper with one playlist
2332 * will be created so that all logic can handle playlists in the same fashion (as an
2333 * assumed manifest object schema).
2334 *
2335 * @param {Object} manifest
2336 * The parsed manifest object
2337 */
2338 ;
2339
2340 _proto.setupInitialPlaylist = function setupInitialPlaylist(manifest) {
2341 this.state = 'HAVE_MASTER';
2342
2343 if (manifest.playlists) {
2344 this.master = manifest;
2345 addPropertiesToMaster(this.master, this.srcUri()); // If the initial master playlist has playlists with segments already resolved,
2346 // then resolve URIs in advance, as that is usually done after a playlist request,
2347 // which may never happen if the playlist was provided already resolved.
2348
2349 manifest.playlists.forEach(function (playlist) {
2350 playlist.segments = getAllSegments(playlist);
2351 playlist.segments.forEach(function (segment) {
2352 resolveSegmentUris(segment, playlist.resolvedUri);
2353 });
2354 });
2355 this.trigger('loadedplaylist');
2356
2357 if (!this.request) {
2358 // no media playlist was specifically selected so start
2359 // from the first listed one
2360 this.media(this.master.playlists[0]);
2361 }
2362
2363 return;
2364 } // In order to support media playlists passed in as vhs-json, the case where the uri
2365 // is not provided as part of the manifest should be considered, and an appropriate
2366 // default used.
2367
2368
2369 var uri = this.srcUri() || window.location.href;
2370 this.master = masterForMedia(manifest, uri);
2371 this.haveMetadata({
2372 playlistObject: manifest,
2373 url: uri,
2374 id: this.master.playlists[0].id
2375 });
2376 this.trigger('loadedmetadata');
2377 };
2378
2379 return PlaylistLoader;
2380}(EventTarget$1);
2381
2382/**
2383 * @file xhr.js
2384 */
2385var videojsXHR = videojs.xhr,
2386 mergeOptions$1 = videojs.mergeOptions;
2387
2388var callbackWrapper = function callbackWrapper(request, error, response, callback) {
2389 var reqResponse = request.responseType === 'arraybuffer' ? request.response : request.responseText;
2390
2391 if (!error && reqResponse) {
2392 request.responseTime = Date.now();
2393 request.roundTripTime = request.responseTime - request.requestTime;
2394 request.bytesReceived = reqResponse.byteLength || reqResponse.length;
2395
2396 if (!request.bandwidth) {
2397 request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
2398 }
2399 }
2400
2401 if (response.headers) {
2402 request.responseHeaders = response.headers;
2403 } // videojs.xhr now uses a specific code on the error
2404 // object to signal that a request has timed out instead
2405 // of setting a boolean on the request object
2406
2407
2408 if (error && error.code === 'ETIMEDOUT') {
2409 request.timedout = true;
2410 } // videojs.xhr no longer considers status codes outside of 200 and 0
2411 // (for file uris) to be errors, but the old XHR did, so emulate that
2412 // behavior. Status 206 may be used in response to byterange requests.
2413
2414
2415 if (!error && !request.aborted && response.statusCode !== 200 && response.statusCode !== 206 && response.statusCode !== 0) {
2416 error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
2417 }
2418
2419 callback(error, request);
2420};
2421
2422var xhrFactory = function xhrFactory() {
2423 var xhr = function XhrFunction(options, callback) {
2424 // Add a default timeout
2425 options = mergeOptions$1({
2426 timeout: 45e3
2427 }, options); // Allow an optional user-specified function to modify the option
2428 // object before we construct the xhr request
2429
2430 var beforeRequest = XhrFunction.beforeRequest || videojs.Vhs.xhr.beforeRequest;
2431
2432 if (beforeRequest && typeof beforeRequest === 'function') {
2433 var newOptions = beforeRequest(options);
2434
2435 if (newOptions) {
2436 options = newOptions;
2437 }
2438 } // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overridden
2439 // TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11
2440
2441
2442 var xhrMethod = videojs.Vhs.xhr.original === true ? videojsXHR : videojs.Vhs.xhr;
2443 var request = xhrMethod(options, function (error, response) {
2444 return callbackWrapper(request, error, response, callback);
2445 });
2446 var originalAbort = request.abort;
2447
2448 request.abort = function () {
2449 request.aborted = true;
2450 return originalAbort.apply(request, arguments);
2451 };
2452
2453 request.uri = options.uri;
2454 request.requestTime = Date.now();
2455 return request;
2456 };
2457
2458 xhr.original = true;
2459 return xhr;
2460};
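// Example (illustrative only): the beforeRequest hook above lets consumers
// modify every request made through this factory, e.g. to add an auth header:
//
//   videojs.Vhs.xhr.beforeRequest = function (options) {
//     options.headers = options.headers || {};
//     options.headers.Authorization = 'Bearer <token>';
//     return options;
//   };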
2461/**
2462 * Turns segment byterange into a string suitable for use in
2463 * HTTP Range requests
2464 *
2465 * @param {Object} byterange - an object with two values defining the start and end
2466 * of a byte-range
2467 */
2468
2469
2470var byterangeStr = function byterangeStr(byterange) {
2471 // `byterangeEnd` is one less than `offset + length` because the HTTP range
2472 // header uses inclusive ranges
2473 var byterangeEnd = byterange.offset + byterange.length - 1;
2474 var byterangeStart = byterange.offset;
2475 return 'bytes=' + byterangeStart + '-' + byterangeEnd;
2476};
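// Example: a 400 byte range starting at offset 600 yields an inclusive
// HTTP Range header value:
//
//   byterangeStr({ offset: 600, length: 400 }); // -> 'bytes=600-999'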
2477/**
2478 * Defines headers for use in the xhr request for a particular segment.
2479 *
2480 * @param {Object} segment - a simplified copy of the segmentInfo object
2481 * from SegmentLoader
2482 */
2483
2484
2485var segmentXhrHeaders = function segmentXhrHeaders(segment) {
2486 var headers = {};
2487
2488 if (segment.byterange) {
2489 headers.Range = byterangeStr(segment.byterange);
2490 }
2491
2492 return headers;
2493};
2494
2495/**
2496 * @file bin-utils.js
2497 */
2498
2499/**
2500 * convert a TimeRange to text
2501 *
2502 * @param {TimeRange} range the timerange to use for conversion
2503 * @param {number} i the iterator on the range to convert
2504 * @return {string} the range in string format
2505 */
2506var textRange = function textRange(range, i) {
2507 return range.start(i) + '-' + range.end(i);
2508};
2509/**
2510 * format a number as hex string
2511 *
2512 * @param {number} e The number
2513 * @param {number} i the iterator
2514 * @return {string} the hex formatted number as a string
2515 */
2516
2517
2518var formatHexString = function formatHexString(e, i) {
2519 var value = e.toString(16);
2520 return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
2521};
2522
2523var formatAsciiString = function formatAsciiString(e) {
2524 if (e >= 0x20 && e < 0x7e) {
2525 return String.fromCharCode(e);
2526 }
2527
2528 return '.';
2529};
2530/**
2531 * Creates an object for sending to a web worker modifying properties that are TypedArrays
2532 * into a new object with separated properties for the buffer, byteOffset, and byteLength.
2533 *
2534 * @param {Object} message
2535 * Object of properties and values to send to the web worker
2536 * @return {Object}
2537 * Modified message with TypedArray values expanded
2538 * @function createTransferableMessage
2539 */
2540
2541
2542var createTransferableMessage = function createTransferableMessage(message) {
2543 var transferable = {};
2544 Object.keys(message).forEach(function (key) {
2545 var value = message[key];
2546
2547 if (ArrayBuffer.isView(value)) {
2548 transferable[key] = {
2549 bytes: value.buffer,
2550 byteOffset: value.byteOffset,
2551 byteLength: value.byteLength
2552 };
2553 } else {
2554 transferable[key] = value;
2555 }
2556 });
2557 return transferable;
2558};
2559/**
2560 * Returns a unique string identifier for a media initialization
2561 * segment.
2562 *
2563 * @param {Object} initSegment
2564 * the init segment object.
2565 *
2566 * @return {string} the generated init segment id
2567 */
2568
2569var initSegmentId = function initSegmentId(initSegment) {
2570 var byterange = initSegment.byterange || {
2571 length: Infinity,
2572 offset: 0
2573 };
2574 return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
2575};
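// Example: the id is the byterange length, byterange offset and resolved URI
// joined with commas:
//
//   initSegmentId({
//     byterange: { length: 100, offset: 0 },
//     resolvedUri: 'https://example.com/init.mp4'
//   }); // -> '100,0,https://example.com/init.mp4'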
2576/**
2577 * Returns a unique string identifier for a media segment key.
2578 *
2579 * @param {Object} key the encryption key
2580 * @return {string} the unique id for the media segment key.
2581 */
2582
2583var segmentKeyId = function segmentKeyId(key) {
2584 return key.resolvedUri;
2585};
2586/**
2587 * utils to help dump binary data to the console
2588 *
2589 * @param {Array|TypedArray} data
2590 * data to dump to a string
2591 *
2592 * @return {string} the data as a hex string.
2593 */
2594
2595var hexDump = function hexDump(data) {
2596 var bytes = Array.prototype.slice.call(data);
2597 var step = 16;
2598 var result = '';
2599 var hex;
2600 var ascii;
2601
2602 for (var j = 0; j < bytes.length / step; j++) {
2603 hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
2604 ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
2605 result += hex + ' ' + ascii + '\n';
2606 }
2607
2608 return result;
2609};
2610var tagDump = function tagDump(_ref) {
2611 var bytes = _ref.bytes;
2612 return hexDump(bytes);
2613};
2614var textRanges = function textRanges(ranges) {
2615 var result = '';
2616 var i;
2617
2618 for (i = 0; i < ranges.length; i++) {
2619 result += textRange(ranges, i) + ' ';
2620 }
2621
2622 return result;
2623};
2624
2625var utils = /*#__PURE__*/Object.freeze({
2626 __proto__: null,
2627 createTransferableMessage: createTransferableMessage,
2628 initSegmentId: initSegmentId,
2629 segmentKeyId: segmentKeyId,
2630 hexDump: hexDump,
2631 tagDump: tagDump,
2632 textRanges: textRanges
2633});
2634
2635// TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
2636// 25% was arbitrarily chosen, and may need to be refined over time.
2637
2638var SEGMENT_END_FUDGE_PERCENT = 0.25;
2639/**
2640 * Converts a player time (any time that can be gotten/set from player.currentTime(),
2641 * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
2642 * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
2643 *
2644 * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
2645 * point" (a point where we have a mapping from program time to player time, with player
2646 * time being the post transmux start of the segment).
2647 *
2648 * For more details, see [this doc](../../docs/program-time-from-player-time.md).
2649 *
2650 * @param {number} playerTime the player time
2651 * @param {Object} segment the segment which contains the player time
2652 * @return {Date} program time
2653 */
2654
2655var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
2656 if (!segment.dateTimeObject) {
2657 // Can't convert without an "anchor point" for the program time (i.e., a time that can
2658 // be used to map the start of a segment with a real world time).
2659 return null;
2660 }
2661
2662 var transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
2663 var transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart; // get the start of the content from before old content is prepended
2664
2665 var startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
2666 var offsetFromSegmentStart = playerTime - startOfSegment;
2667 return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
2668};
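// Worked example: for a segment with dateTimeObject 2021-01-01T00:00:00.000Z,
// transmuxedPresentationStart 10 and transmuxerPrependedSeconds 2, the
// segment's own content starts at player time 12, so playerTime 15 maps to
// the program time 2021-01-01T00:00:03.000Z.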
2669var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
2670 return videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart - videoTimingInfo.transmuxerPrependedSeconds;
2671};
2672/**
2673 * Finds a segment that contains the time requested given as an ISO-8601 string. The
2674 * returned segment might be an estimate or an accurate match.
2675 *
2676 * @param {string} programTime The ISO-8601 programTime to find a match for
2677 * @param {Object} playlist A playlist object to search within
2678 */
2679
2680var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
2681 // Assumptions:
2682 // - verifyProgramDateTimeTags has already been run
2683 // - live streams have been started
2684 var dateTimeObject;
2685
2686 try {
2687 dateTimeObject = new Date(programTime);
2688 } catch (e) {
2689 return null;
2690 }
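  // note: new Date() does not throw on malformed strings; it produces an
  // Invalid Date, so the catch above is only a defensive fallback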
2691
2692 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
2693 return null;
2694 }
2695
2696 var segment = playlist.segments[0];
2697
2698 if (dateTimeObject < segment.dateTimeObject) {
2699 // Requested time is before stream start.
2700 return null;
2701 }
2702
2703 for (var i = 0; i < playlist.segments.length - 1; i++) {
2704 segment = playlist.segments[i];
2705 var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;
2706
2707 if (dateTimeObject < nextSegmentStart) {
2708 break;
2709 }
2710 }
2711
2712 var lastSegment = playlist.segments[playlist.segments.length - 1];
2713 var lastSegmentStart = lastSegment.dateTimeObject;
2714 var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
2715 var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
2716
2717 if (dateTimeObject > lastSegmentEnd) {
2718 // Beyond the end of the stream, or our best guess of the end of the stream.
2719 return null;
2720 }
2721
2722 if (dateTimeObject > lastSegmentStart) {
2723 segment = lastSegment;
2724 }
2725
2726 return {
2727 segment: segment,
2728 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
2729 // Although, given that all segments have accurate date time objects, the segment
2730 // selected should be accurate, unless the video has been transmuxed at some point
2731 // (determined by the presence of the videoTimingInfo object), the segment's "player
2732 // time" (the start time in the player) can't be considered accurate.
2733 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
2734 };
2735};
2736/**
2737 * Finds a segment that contains the given player time (in seconds).
2738 *
2739 * @param {number} time The player time to find a match for
2740 * @param {Object} playlist A playlist object to search within
2741 */
2742
2743var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
2744 // Assumptions:
2745 // - there will always be a segment.duration
2746 // - we can start from zero
2747 // - segments are in time order
2748 if (!playlist || !playlist.segments || playlist.segments.length === 0) {
2749 return null;
2750 }
2751
2752 var segmentEnd = 0;
2753 var segment;
2754
2755 for (var i = 0; i < playlist.segments.length; i++) {
2756 segment = playlist.segments[i]; // videoTimingInfo is set after the segment is downloaded and transmuxed, and
2757 // should contain the most accurate values we have for the segment's player times.
2758 //
2759 // Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
2760 // back to an estimate based on the manifest derived (inaccurate) segment.duration, to
2761 // calculate an end value.
2762
2763 segmentEnd = segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
2764
2765 if (time <= segmentEnd) {
2766 break;
2767 }
2768 }
2769
2770 var lastSegment = playlist.segments[playlist.segments.length - 1];
2771
2772 if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
2773 // The time requested is beyond the stream end.
2774 return null;
2775 }
2776
2777 if (time > segmentEnd) {
2778 // The time is within or beyond the last segment.
2779 //
2780 // Check to see if the time is beyond a reasonable guess of the end of the stream.
2781 if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
2782 // Technically, because the duration value is only an estimate, the time may still
2783 // exist in the last segment, however, there isn't enough information to make even
2784 // a reasonable estimate.
2785 return null;
2786 }
2787
2788 segment = lastSegment;
2789 }
2790
2791 return {
2792 segment: segment,
2793 estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
2794 // Because videoTimingInfo is only set after transmux, it is the only way to get
2795 // accurate timing values.
2796 type: segment.videoTimingInfo ? 'accurate' : 'estimate'
2797 };
2798};
2799/**
2800 * Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
2801 * If the offset returned is positive, the programTime occurs after the
2802 * comparisonTimestamp.
2803 * If the offset is negative, the programTime occurs before the comparisonTimestamp.
2804 *
2805 * @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
2806 * @param {string} programTime The programTime as an ISO-8601 string
2807 * @return {number} offset
2808 */
2809
2810var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
2811 var segmentDateTime;
2812 var programDateTime;
2813
2814 try {
2815 segmentDateTime = new Date(comparisonTimeStamp);
2816 programDateTime = new Date(programTime);
2817 } catch (e) { // TODO handle error
2818 }
2819
2820 var segmentTimeEpoch = segmentDateTime.getTime();
2821 var programTimeEpoch = programDateTime.getTime();
2822 return (programTimeEpoch - segmentTimeEpoch) / 1000;
2823};
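// Example: a programTime five seconds after the comparison timestamp yields
// a positive offset of 5:
//
//   getOffsetFromTimestamp('2021-01-01T00:00:00Z', '2021-01-01T00:00:05Z'); // -> 5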
2824/**
2825 * Checks that all segments in this playlist have programDateTime tags.
2826 *
2827 * @param {Object} playlist A playlist object
2828 */
2829
2830var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
2831 if (!playlist.segments || playlist.segments.length === 0) {
2832 return false;
2833 }
2834
2835 for (var i = 0; i < playlist.segments.length; i++) {
2836 var segment = playlist.segments[i];
2837
2838 if (!segment.dateTimeObject) {
2839 return false;
2840 }
2841 }
2842
2843 return true;
2844};
2845/**
2846 * Returns the programTime of the media given a playlist and a playerTime.
2847 * The playlist must have programDateTime tags for a programDateTime tag to be returned.
2848 * If the segments containing the time requested have not been buffered yet, an estimate
2849 * may be returned to the callback.
2850 *
2851 * @param {Object} args
2852 * @param {Object} args.playlist A playlist object to search within
2853 * @param {number} args.time A playerTime in seconds
2854 * @param {Function} args.callback callback(err, programTime), where on failure
2855 * err.message is a detailed error message, and on success programTime is
2856 * an object with:
2857 * programTime.mediaSeconds The streamTime in seconds
2858 * programTime.programDateTime The programTime as an ISO-8601 string
2859 */
2860
2861var getProgramTime = function getProgramTime(_ref) {
2862 var playlist = _ref.playlist,
2863 _ref$time = _ref.time,
2864 time = _ref$time === void 0 ? undefined : _ref$time,
2865 callback = _ref.callback;
2866
2867 if (!callback) {
2868 throw new Error('getProgramTime: callback must be provided');
2869 }
2870
2871 if (!playlist || time === undefined) {
2872 return callback({
2873 message: 'getProgramTime: playlist and time must be provided'
2874 });
2875 }
2876
2877 var matchedSegment = findSegmentForPlayerTime(time, playlist);
2878
2879 if (!matchedSegment) {
2880 return callback({
2881 message: 'valid programTime was not found'
2882 });
2883 }
2884
2885 if (matchedSegment.type === 'estimate') {
2886 return callback({
2887 message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
2888 seekTime: matchedSegment.estimatedStart
2889 });
2890 }
2891
2892 var programTimeObject = {
2893 mediaSeconds: time
2894 };
2895 var programTime = playerTimeToProgramTime(time, matchedSegment.segment);
2896
2897 if (programTime) {
2898 programTimeObject.programDateTime = programTime.toISOString();
2899 }
2900
2901 return callback(null, programTimeObject);
2902};
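// Example (illustrative only; `loader` is hypothetical): mapping a buffered
// player time to a program time. When only an estimate is available, the
// callback receives an error with a seekTime to try first:
//
//   getProgramTime({
//     playlist: loader.media(),
//     time: 42,
//     callback: function (err, programTime) {
//       if (err) {
//         return console.log(err.message, err.seekTime);
//       }
//       console.log(programTime.programDateTime);
//     }
//   });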
2903/**
2904 * Seeks in the player to a time that matches the given programTime ISO-8601 string.
2905 *
2906 * @param {Object} args
2907 * @param {string} args.programTime A programTime to seek to as an ISO-8601 String
2908 * @param {Object} args.playlist A playlist to look within
2909 * @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
2910 * @param {Function} args.seekTo A method to perform a seek
2911 * @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
2912 * @param {Object} args.tech The tech to seek on
2913 * @param {Function} args.callback callback(err, newTime) invoked with either
2914 * err.message, a detailed error message, or
2915 * newTime, the exact time that was seeked to, in seconds
2916 */
2917
2918var seekToProgramTime = function seekToProgramTime(_ref2) {
2919 var programTime = _ref2.programTime,
2920 playlist = _ref2.playlist,
2921 _ref2$retryCount = _ref2.retryCount,
2922 retryCount = _ref2$retryCount === void 0 ? 2 : _ref2$retryCount,
2923 seekTo = _ref2.seekTo,
2924 _ref2$pauseAfterSeek = _ref2.pauseAfterSeek,
2925 pauseAfterSeek = _ref2$pauseAfterSeek === void 0 ? true : _ref2$pauseAfterSeek,
2926 tech = _ref2.tech,
2927 callback = _ref2.callback;
2928
2929 if (!callback) {
2930 throw new Error('seekToProgramTime: callback must be provided');
2931 }
2932
2933 if (typeof programTime === 'undefined' || !playlist || !seekTo) {
2934 return callback({
2935 message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
2936 });
2937 }
2938
2939 if (!playlist.endList && !tech.hasStarted_) {
2940 return callback({
2941 message: 'player must be playing a live stream to start buffering'
2942 });
2943 }
2944
2945 if (!verifyProgramDateTimeTags(playlist)) {
2946 return callback({
2947 message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
2948 });
2949 }
2950
2951 var matchedSegment = findSegmentForProgramTime(programTime, playlist); // no match
2952
2953 if (!matchedSegment) {
2954 return callback({
2955 message: programTime + " was not found in the stream"
2956 });
2957 }
2958
2959 var segment = matchedSegment.segment;
2960 var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);
2961
2962 if (matchedSegment.type === 'estimate') {
2963 // we've run out of retries
2964 if (retryCount === 0) {
2965 return callback({
2966 message: programTime + " is not buffered yet. Try again"
2967 });
2968 }
2969
2970 seekTo(matchedSegment.estimatedStart + mediaOffset);
2971 tech.one('seeked', function () {
2972 seekToProgramTime({
2973 programTime: programTime,
2974 playlist: playlist,
2975 retryCount: retryCount - 1,
2976 seekTo: seekTo,
2977 pauseAfterSeek: pauseAfterSeek,
2978 tech: tech,
2979 callback: callback
2980 });
2981 });
2982 return;
2983 } // Since the segment.start value is determined from the buffered end or ending time
2984 // of the prior segment, the seekToTime doesn't need to account for any transmuxer
2985 // modifications.
2986
2987
2988 var seekToTime = segment.start + mediaOffset;
2989
2990 var seekedCallback = function seekedCallback() {
2991 return callback(null, tech.currentTime());
2992 }; // listen for seeked event
2993
2994
2995 tech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this state
2996
2997 if (pauseAfterSeek) {
2998 tech.pause();
2999 }
3000
3001 seekTo(seekToTime);
3002};
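// Example (illustrative only; `loader`, `player` and `tech` are hypothetical):
// seeking to a wall-clock time in a stream with EXT-X-PROGRAM-DATE-TIME tags,
// retrying up to two times while only estimates are available:
//
//   seekToProgramTime({
//     programTime: '2021-01-01T00:00:30Z',
//     playlist: loader.media(),
//     seekTo: function (time) { player.currentTime(time); },
//     tech: tech,
//     callback: function (err, newTime) {
//       if (!err) {
//         console.log('seeked to', newTime);
//       }
//     }
//   });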
3003
3004// invoke the callback only once the request has finished (readyState 4, i.e. the request is complete).
3005
3006var callbackOnCompleted = function callbackOnCompleted(request, cb) {
3007 if (request.readyState === 4) {
3008 return cb();
3009 }
3010
3011 return;
3012};
3013
3014var containerRequest = function containerRequest(uri, xhr, cb) {
3015 var bytes = [];
3016 var id3Offset;
3017 var finished = false;
3018
3019 var endRequestAndCallback = function endRequestAndCallback(err, req, type, _bytes) {
3020 req.abort();
3021 finished = true;
3022 return cb(err, req, type, _bytes);
3023 };
3024
3025 var progressListener = function progressListener(error, request) {
3026 if (finished) {
3027 return;
3028 }
3029
3030 if (error) {
3031 return endRequestAndCallback(error, request, '', bytes);
3032 } // grab the new part of content that was just downloaded
3033
3034
3035 var newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytes
3036
3037 bytes = concatTypedArrays(bytes, stringToBytes(newPart, true));
3038 id3Offset = id3Offset || getId3Offset(bytes); // we need at least 10 bytes to determine a type
3039 // or we need at least two bytes after an id3Offset
3040
3041 if (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {
3042 return callbackOnCompleted(request, function () {
3043 return endRequestAndCallback(error, request, '', bytes);
3044 });
3045 }
3046
3047 var type = detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data
3048 // to see the second sync byte, wait until we have enough data
3049 // before declaring it ts
3050
3051 if (type === 'ts' && bytes.length < 188) {
3052 return callbackOnCompleted(request, function () {
3053 return endRequestAndCallback(error, request, '', bytes);
3054 });
3055 } // this may be an unsynced ts segment
3056 // wait for 376 bytes (two ts packets) before deciding there is no container
3057
3058
3059 if (!type && bytes.length < 376) {
3060 return callbackOnCompleted(request, function () {
3061 return endRequestAndCallback(error, request, '', bytes);
3062 });
3063 }
3064
3065 return endRequestAndCallback(null, request, type, bytes);
3066 };
3067
3068 var options = {
3069 uri: uri,
3070 beforeSend: function beforeSend(request) {
3071 // this forces the browser to pass the bytes to us unprocessed
3072 request.overrideMimeType('text/plain; charset=x-user-defined');
3073 request.addEventListener('progress', function (_ref) {
3074 _ref.total; // unused; left over from the
3075 _ref.loaded; // transpiled destructuring of the progress event
3076 return callbackWrapper(request, null, {
3077 statusCode: request.status
3078 }, progressListener);
3079 });
3080 }
3081 };
3082 var request = xhr(options, function (error, response) {
3083 return callbackWrapper(request, error, response, progressListener);
3084 });
3085 return request;
3086};
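// Example (illustrative only; `segmentUrl` and `vhs` are hypothetical):
// probing a URL for its container format while downloading as little data as
// possible; the request is aborted as soon as a container can be inferred:
//
//   containerRequest(segmentUrl, vhs.xhr, function (err, req, container, bytes) {
//     console.log(container); // e.g. 'mp4' or 'ts'
//   });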
3087
3088var EventTarget = videojs.EventTarget,
3089 mergeOptions = videojs.mergeOptions;
3090
3091var dashPlaylistUnchanged = function dashPlaylistUnchanged(a, b) {
3092 if (!isPlaylistUnchanged(a, b)) {
3093 return false;
3094 } // for dash the above check will often return true in scenarios where
3095 // the playlist actually has changed, because mediaSequence isn't a
3096 // dash concept and we often set it to 1, so playlists with the same
3097 // number of segments compare as unchanged.
3098 // For dash we therefore need to make sure the underlying segments are the same;
3099 // if the sidx changed, the playlists are different.
3100
3101
3102 if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
3103 return false;
3104 } else if (!a.sidx && b.sidx || a.sidx && !b.sidx) {
3105 return false;
3106 } // if one or the other does not have segments,
3107 // there was a change.
3108
3109
3110 if (a.segments && !b.segments || !a.segments && b.segments) {
3111 return false;
3112 } // neither has segments, so nothing changed
3113
3114
3115 if (!a.segments && !b.segments) {
3116 return true;
3117 } // check segments themselves
3118
3119
3120 for (var i = 0; i < a.segments.length; i++) {
3121 var aSegment = a.segments[i];
3122 var bSegment = b.segments[i]; // if uris are different between segments there was a change
3123
3124 if (aSegment.uri !== bSegment.uri) {
3125 return false;
3126 } // neither segment has a byterange, there will be no byterange change.
3127
3128
3129 if (!aSegment.byterange && !bSegment.byterange) {
3130 continue;
3131 }
3132
3133 var aByterange = aSegment.byterange;
3134 var bByterange = bSegment.byterange; // if byterange only exists on one of the segments, there was a change.
3135
3136 if (aByterange && !bByterange || !aByterange && bByterange) {
3137 return false;
3138 } // if both segments have byterange with different offsets, there was a change.
3139
3140
3141 if (aByterange.offset !== bByterange.offset || aByterange.length !== bByterange.length) {
3142 return false;
3143 }
3144 } // if everything was the same with segments, this is the same playlist.
3145
3146
3147 return true;
3148};
3149/**
3150 * Parses the master XML string and updates playlist URI references.
3151 *
3152 * @param {Object} config
3153 * Object of arguments
3154 * @param {string} config.masterXml
3155 * The mpd XML
3156 * @param {string} config.srcUrl
3157 * The mpd URL
3158 * @param {number} config.clientOffset
3159 * The time difference between server and client, in milliseconds
3160 * @param {Object} config.sidxMapping
3161 * SIDX mappings for moof/mdat URIs and byte ranges
3162 * @return {Object}
3163 * The parsed mpd manifest object
3164 */
3165
3166
3167var parseMasterXml = function parseMasterXml(_ref) {
3168 var masterXml = _ref.masterXml,
3169 srcUrl = _ref.srcUrl,
3170 clientOffset = _ref.clientOffset,
3171 sidxMapping = _ref.sidxMapping;
3172 var master = parse(masterXml, {
3173 manifestUri: srcUrl,
3174 clientOffset: clientOffset,
3175 sidxMapping: sidxMapping
3176 });
3177 addPropertiesToMaster(master, srcUrl);
3178 return master;
3179};
3180/**
3181 * Returns a new master manifest that is the result of merging an updated master manifest
3182 * into the original version.
3183 * @param {Object} oldMaster
3184 * The old parsed mpd object
3185 * @param {Object} newMaster
3186 * The updated parsed mpd object
3187 * @param {Object} sidxMapping
3188 * SIDX mappings of already-requested sidx info, keyed by generateSidxKey
3189 * @return {Object}
3190 * A new object representing the original master manifest with the updated media
3191 * playlists merged in, or null if nothing changed
3192 */
3192
3193var updateMaster = function updateMaster(oldMaster, newMaster, sidxMapping) {
3194 var noChanges = true;
3195 var update = mergeOptions(oldMaster, {
3196 // These are top level properties that can be updated
3197 duration: newMaster.duration,
3198 minimumUpdatePeriod: newMaster.minimumUpdatePeriod
3199 }); // First update the playlists in playlist list
3200
3201 for (var i = 0; i < newMaster.playlists.length; i++) {
3202 var playlist = newMaster.playlists[i];
3203
3204 if (playlist.sidx) {
3205 var sidxKey = generateSidxKey(playlist.sidx); // add sidx segments to the playlist if we have all the sidx info already
3206
3207 if (sidxMapping && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx) {
3208 addSidxSegmentsToPlaylist(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
3209 }
3210 }
3211
3212 var playlistUpdate = updateMaster$1(update, playlist, dashPlaylistUnchanged);
3213
3214 if (playlistUpdate) {
3215 update = playlistUpdate;
3216 noChanges = false;
3217 }
3218 } // Then update media group playlists
3219
3220
3221 forEachMediaGroup(newMaster, function (properties, type, group, label) {
3222 if (properties.playlists && properties.playlists.length) {
3223 var id = properties.playlists[0].id;
3224
3225 var _playlistUpdate = updateMaster$1(update, properties.playlists[0], dashPlaylistUnchanged);
3226
3227 if (_playlistUpdate) {
3228 update = _playlistUpdate; // update the playlist reference within media groups
3229
3230 update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
3231 noChanges = false;
3232 }
3233 }
3234 });
3235
3236 if (newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
3237 noChanges = false;
3238 }
3239
3240 if (noChanges) {
3241 return null;
3242 }
3243
3244 return update;
3245}; // SIDX should be equivalent if the URI and byteranges of the SIDX match.
3246// If the SIDXs have maps, the two maps should match;
3247// both `a` and `b` missing maps is considered matching.
3248// If `a` or `b` but not both have a map, they aren't matching.
3249
3250var equivalentSidx = function equivalentSidx(a, b) {
3251 var neitherMap = Boolean(!a.map && !b.map);
3252 var equivalentMap = neitherMap || Boolean(a.map && b.map && a.map.byterange.offset === b.map.byterange.offset && a.map.byterange.length === b.map.byterange.length);
3253 return equivalentMap && a.uri === b.uri && a.byterange.offset === b.byterange.offset && a.byterange.length === b.byterange.length;
3254}; // exported for testing
3255
3256
3257var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
3258 var newSidxMapping = {};
3259
3260 for (var id in playlists) {
3261 var playlist = playlists[id];
3262 var currentSidxInfo = playlist.sidx;
3263
3264 if (currentSidxInfo) {
3265 var key = generateSidxKey(currentSidxInfo);
3266
3267 if (!oldSidxMapping[key]) {
3268 break;
3269 }
3270
3271 var savedSidxInfo = oldSidxMapping[key].sidxInfo;
3272
3273 if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
3274 newSidxMapping[key] = oldSidxMapping[key];
3275 }
3276 }
3277 }
3278
3279 return newSidxMapping;
3280};
3281/**
3282 * A function that filters out changed items as they need to be requested separately.
3283 *
3284 * The method is exported for testing
3285 *
3286 * @param {Object} master the parsed mpd object returned via mpd-parser
3287 * @param {Object} oldSidxMapping the SIDX to compare against
3288 */
3289
3290var filterChangedSidxMappings = function filterChangedSidxMappings(master, oldSidxMapping) {
3291 var videoSidx = compareSidxEntry(master.playlists, oldSidxMapping);
3292 var mediaGroupSidx = videoSidx;
3293 forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
3294 if (properties.playlists && properties.playlists.length) {
3295 var playlists = properties.playlists;
3296 mediaGroupSidx = mergeOptions(mediaGroupSidx, compareSidxEntry(playlists, oldSidxMapping));
3297 }
3298 });
3299 return mediaGroupSidx;
3300};
3301
3302var DashPlaylistLoader = /*#__PURE__*/function (_EventTarget) {
3303 _inheritsLoose(DashPlaylistLoader, _EventTarget);
3304
3305 // DashPlaylistLoader must accept either a src url or a playlist because subsequent
3306 // playlist loader setups from media groups will expect to be able to pass a playlist
3307 // (since there aren't external URLs to media playlists with DASH)
3308 function DashPlaylistLoader(srcUrlOrPlaylist, vhs, options, masterPlaylistLoader) {
3309 var _this;
3310
3311 if (options === void 0) {
3312 options = {};
3313 }
3314
3315 _this = _EventTarget.call(this) || this;
3316 _this.masterPlaylistLoader_ = masterPlaylistLoader || _assertThisInitialized(_this);
3317
3318 if (!masterPlaylistLoader) {
3319 _this.isMaster_ = true;
3320 }
3321
3322 var _options = options,
3323 _options$withCredenti = _options.withCredentials,
3324 withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
3325 _options$handleManife = _options.handleManifestRedirects,
3326 handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
3327 _this.vhs_ = vhs;
3328 _this.withCredentials = withCredentials;
3329 _this.handleManifestRedirects = handleManifestRedirects;
3330
3331 if (!srcUrlOrPlaylist) {
3332 throw new Error('A non-empty playlist URL or object is required');
3333 } // event naming?
3334
3335
3336 _this.on('minimumUpdatePeriod', function () {
3337 _this.refreshXml_();
3338 }); // live playlist staleness timeout
3339
3340
3341 _this.on('mediaupdatetimeout', function () {
3342 _this.refreshMedia_(_this.media().id);
3343 });
3344
3345 _this.state = 'HAVE_NOTHING';
3346 _this.loadedPlaylists_ = {};
3347 _this.logger_ = logger('DashPlaylistLoader'); // initialize the loader state
3348 // The masterPlaylistLoader will be created with a string
3349
3350 if (_this.isMaster_) {
3351 _this.masterPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes
3352 // once multi-period is refactored
3353
3354 _this.masterPlaylistLoader_.sidxMapping_ = {};
3355 } else {
3356 _this.childPlaylist_ = srcUrlOrPlaylist;
3357 }
3358
3359 return _this;
3360 }
3361
3362 var _proto = DashPlaylistLoader.prototype;
3363
3364 _proto.requestErrored_ = function requestErrored_(err, request, startingState) {
3365 // disposed
3366 if (!this.request) {
3367 return true;
3368 } // pending request is cleared
3369
3370
3371 this.request = null;
3372
3373 if (err) {
3374 // use the provided error object or create one
3375 // based on the request/response
3376 this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
3377 status: request.status,
3378 message: 'DASH request error at URL: ' + request.uri,
3379 response: request.response,
3380 // MEDIA_ERR_NETWORK
3381 code: 2
3382 };
3383
3384 if (startingState) {
3385 this.state = startingState;
3386 }
3387
3388 this.trigger('error');
3389 return true;
3390 }
3391 }
3392 /**
3393 * Verify that the container of the sidx segment can be parsed
3394 * and if it can, get and parse that segment.
3395 */
3396 ;
3397
3398 _proto.addSidxSegments_ = function addSidxSegments_(playlist, startingState, cb) {
3399 var _this2 = this;
3400
3401 var sidxKey = playlist.sidx && generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.
3402
3403 if (!playlist.sidx || !sidxKey || this.masterPlaylistLoader_.sidxMapping_[sidxKey]) {
3404 // keep this function async
3405 this.mediaRequest_ = window.setTimeout(function () {
3406 return cb(false);
3407 }, 0);
3408 return;
3409 } // resolve the segment URL relative to the playlist
3410
3411
3412 var uri = resolveManifestRedirect(this.handleManifestRedirects, playlist.sidx.resolvedUri);
3413
3414 var fin = function fin(err, request) {
3415 if (_this2.requestErrored_(err, request, startingState)) {
3416 return;
3417 }
3418
3419 var sidxMapping = _this2.masterPlaylistLoader_.sidxMapping_;
3420 var sidx;
3421
3422 try {
3423 sidx = parseSidx(toUint8(request.response).subarray(8));
3424 } catch (e) {
3425 // sidx parsing failed.
3426 _this2.requestErrored_(e, request, startingState);
3427
3428 return;
3429 }
3430
3431 sidxMapping[sidxKey] = {
3432 sidxInfo: playlist.sidx,
3433 sidx: sidx
3434 };
3435 addSidxSegmentsToPlaylist(playlist, sidx, playlist.sidx.resolvedUri);
3436 return cb(true);
3437 };
3438
3439 this.request = containerRequest(uri, this.vhs_.xhr, function (err, request, container, bytes) {
3440 if (err) {
3441 return fin(err, request);
3442 }
3443
3444 if (!container || container !== 'mp4') {
3445 return fin({
3446 status: request.status,
3447 message: "Unsupported " + (container || 'unknown') + " container type for sidx segment at URL: " + uri,
3448 // response is just bytes in this case
3449 // but we really don't want to return that.
3450 response: '',
3451 playlist: playlist,
3452 internal: true,
3453 blacklistDuration: Infinity,
3454 // MEDIA_ERR_NETWORK
3455 code: 2
3456 }, request);
3457 } // if we already downloaded the sidx bytes in the container request, use them
3458
3459
3460 var _playlist$sidx$bytera = playlist.sidx.byterange,
3461 offset = _playlist$sidx$bytera.offset,
3462 length = _playlist$sidx$bytera.length;
3463
3464 if (bytes.length >= length + offset) {
3465 return fin(err, {
3466 response: bytes.subarray(offset, offset + length),
3467 status: request.status,
3468 uri: request.uri
3469 });
3470 } // otherwise request sidx bytes
3471
3472
3473 _this2.request = _this2.vhs_.xhr({
3474 uri: uri,
3475 responseType: 'arraybuffer',
3476 headers: segmentXhrHeaders({
3477 byterange: playlist.sidx.byterange
3478 })
3479 }, fin);
3480 });
3481 };
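
// Example (illustrative sketch, not part of the library): the byterange check
// used above, extracted as a standalone helper. The bytes fetched during
// container detection already cover the sidx box when they extend past
// offset + length.
var exampleCoversByterange = function (bytes, byterange) {
  return bytes.length >= byterange.offset + byterange.length;
};
// exampleCoversByterange(new Uint8Array(500), { offset: 44, length: 100 }); // true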
3482
3483 _proto.dispose = function dispose() {
3484 this.trigger('dispose');
3485 this.stopRequest();
3486 this.loadedPlaylists_ = {};
3487 window.clearTimeout(this.minimumUpdatePeriodTimeout_);
3488 window.clearTimeout(this.mediaRequest_);
3489 window.clearTimeout(this.mediaUpdateTimeout);
3490 this.mediaUpdateTimeout = null;
3491 this.mediaRequest_ = null;
3492 this.minimumUpdatePeriodTimeout_ = null;
3493
3494 if (this.masterPlaylistLoader_.createMupOnMedia_) {
3495 this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
3496 this.masterPlaylistLoader_.createMupOnMedia_ = null;
3497 }
3498
3499 this.off();
3500 };
3501
3502 _proto.hasPendingRequest = function hasPendingRequest() {
3503 return this.request || this.mediaRequest_;
3504 };
3505
3506 _proto.stopRequest = function stopRequest() {
3507 if (this.request) {
3508 var oldRequest = this.request;
3509 this.request = null;
3510 oldRequest.onreadystatechange = null;
3511 oldRequest.abort();
3512 }
3513 };
3514
3515 _proto.media = function media(playlist) {
3516 var _this3 = this;
3517
3518 // getter
3519 if (!playlist) {
3520 return this.media_;
3521 } // setter
3522
3523
3524 if (this.state === 'HAVE_NOTHING') {
3525 throw new Error('Cannot switch media playlist from ' + this.state);
3526 }
3527
3528 var startingState = this.state; // find the playlist object if the target playlist has been specified by URI
3529
3530 if (typeof playlist === 'string') {
3531 if (!this.masterPlaylistLoader_.master.playlists[playlist]) {
3532 throw new Error('Unknown playlist URI: ' + playlist);
3533 }
3534
3535 playlist = this.masterPlaylistLoader_.master.playlists[playlist];
3536 }
3537
3538 var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediately
3539
3540 if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
3541 this.state = 'HAVE_METADATA';
3542 this.media_ = playlist; // trigger media change if the active media has been updated
3543
3544 if (mediaChange) {
3545 this.trigger('mediachanging');
3546 this.trigger('mediachange');
3547 }
3548
3549 return;
3550 } // switching to the active playlist is a no-op
3551
3552
3553 if (!mediaChange) {
3554 return;
3555 } // switching from an already loaded playlist
3556
3557
3558 if (this.media_) {
3559 this.trigger('mediachanging');
3560 }
3561
3562 this.addSidxSegments_(playlist, startingState, function (sidxChanged) {
3563 // everything is ready, just continue to haveMetadata
3564 _this3.haveMetadata({
3565 startingState: startingState,
3566 playlist: playlist
3567 });
3568 });
3569 };
3570
3571 _proto.haveMetadata = function haveMetadata(_ref2) {
3572 var startingState = _ref2.startingState,
3573 playlist = _ref2.playlist;
3574 this.state = 'HAVE_METADATA';
3575 this.loadedPlaylists_[playlist.id] = playlist;
3576 this.mediaRequest_ = null; // This will trigger loadedplaylist
3577
3578 this.refreshMedia_(playlist.id); // fire loadedmetadata the first time a media playlist is loaded
3579 // to resolve setup of media groups
3580
3581 if (startingState === 'HAVE_MASTER') {
3582 this.trigger('loadedmetadata');
3583 } else {
3584 // trigger media change if the active media has been updated
3585 this.trigger('mediachange');
3586 }
3587 };
3588
3589 _proto.pause = function pause() {
3590 if (this.masterPlaylistLoader_.createMupOnMedia_) {
3591 this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
3592 this.masterPlaylistLoader_.createMupOnMedia_ = null;
3593 }
3594
3595 this.stopRequest();
3596 window.clearTimeout(this.mediaUpdateTimeout);
3597 this.mediaUpdateTimeout = null;
3598
3599 if (this.isMaster_) {
3600 window.clearTimeout(this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_);
3601 this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;
3602 }
3603
3604 if (this.state === 'HAVE_NOTHING') {
3605 // If we pause the loader before any data has been retrieved, it's as if we never
3606 // started, so reset to an unstarted state.
3607 this.started = false;
3608 }
3609 };
3610
3611 _proto.load = function load(isFinalRendition) {
3612 var _this4 = this;
3613
3614 window.clearTimeout(this.mediaUpdateTimeout);
3615 this.mediaUpdateTimeout = null;
3616 var media = this.media();
3617
3618 if (isFinalRendition) {
3619 var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
3620 this.mediaUpdateTimeout = window.setTimeout(function () {
3621 return _this4.load();
3622 }, delay);
3623 return;
3624 } // because the playlists are internal to the manifest, load should either load the
3625 // main manifest, or do nothing but trigger an event
3626
3627
3628 if (!this.started) {
3629 this.start();
3630 return;
3631 }
3632
3633 if (media && !media.endList) {
3634 // Check to see if this is the master loader and the MUP was cleared (this happens
3635 // when the loader was paused). `media` should be set at this point since one is always
3636 // set during `start()`.
3637 if (this.isMaster_ && !this.minimumUpdatePeriodTimeout_) {
3638 // Trigger minimumUpdatePeriod to refresh the master manifest
3639 this.trigger('minimumUpdatePeriod'); // Since there was no prior minimumUpdatePeriodTimeout it should be recreated
3640
3641 this.updateMinimumUpdatePeriodTimeout_();
3642 }
3643
3644 this.trigger('mediaupdatetimeout');
3645 } else {
3646 this.trigger('loadedplaylist');
3647 }
3648 };
3649
3650 _proto.start = function start() {
3651 var _this5 = this;
3652
3653 this.started = true; // We don't need to request the master manifest again
3654 // Call this asynchronously to match the xhr request behavior below
3655
3656 if (!this.isMaster_) {
3657 this.mediaRequest_ = window.setTimeout(function () {
3658 return _this5.haveMaster_();
3659 }, 0);
3660 return;
3661 }
3662
3663 this.requestMaster_(function (req, masterChanged) {
3664 _this5.haveMaster_();
3665
3666 if (!_this5.hasPendingRequest() && !_this5.media_) {
3667 _this5.media(_this5.masterPlaylistLoader_.master.playlists[0]);
3668 }
3669 });
3670 };
3671
3672 _proto.requestMaster_ = function requestMaster_(cb) {
3673 var _this6 = this;
3674
3675 this.request = this.vhs_.xhr({
3676 uri: this.masterPlaylistLoader_.srcUrl,
3677 withCredentials: this.withCredentials
3678 }, function (error, req) {
3679 if (_this6.requestErrored_(error, req)) {
3680 if (_this6.state === 'HAVE_NOTHING') {
3681 _this6.started = false;
3682 }
3683
3684 return;
3685 }
3686
3687 var masterChanged = req.responseText !== _this6.masterPlaylistLoader_.masterXml_;
3688 _this6.masterPlaylistLoader_.masterXml_ = req.responseText;
3689
3690 if (req.responseHeaders && req.responseHeaders.date) {
3691 _this6.masterLoaded_ = Date.parse(req.responseHeaders.date);
3692 } else {
3693 _this6.masterLoaded_ = Date.now();
3694 }
3695
3696 _this6.masterPlaylistLoader_.srcUrl = resolveManifestRedirect(_this6.handleManifestRedirects, _this6.masterPlaylistLoader_.srcUrl, req);
3697
3698 if (masterChanged) {
3699 _this6.handleMaster_();
3700
3701 _this6.syncClientServerClock_(function () {
3702 return cb(req, masterChanged);
3703 });
3704
3705 return;
3706 }
3707
3708 return cb(req, masterChanged);
3709 });
3710 }
3711 /**
3712 * Parses the master xml for a UTCTiming node to sync the client clock to the server
3713 * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
3714 *
3715 * @param {Function} done
3716 * Function to call when clock sync has completed
3717 */
3718 ;
3719
3720 _proto.syncClientServerClock_ = function syncClientServerClock_(done) {
3721 var _this7 = this;
3722
3723 var utcTiming = parseUTCTiming(this.masterPlaylistLoader_.masterXml_); // No UTCTiming element found in the mpd. Use Date header from mpd request as the
3724 // server clock
3725
3726 if (utcTiming === null) {
3727 this.masterPlaylistLoader_.clientOffset_ = this.masterLoaded_ - Date.now();
3728 return done();
3729 }
3730
3731 if (utcTiming.method === 'DIRECT') {
3732 this.masterPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
3733 return done();
3734 }
3735
3736 this.request = this.vhs_.xhr({
3737 uri: resolveUrl(this.masterPlaylistLoader_.srcUrl, utcTiming.value),
3738 method: utcTiming.method,
3739 withCredentials: this.withCredentials
3740 }, function (error, req) {
3741 // disposed
3742 if (!_this7.request) {
3743 return;
3744 }
3745
3746 if (error) {
3747 // sync request failed, fall back to using date header from mpd
3748 // TODO: log warning
3749 _this7.masterPlaylistLoader_.clientOffset_ = _this7.masterLoaded_ - Date.now();
3750 return done();
3751 }
3752
3753 var serverTime;
3754
3755 if (utcTiming.method === 'HEAD') {
3756 if (!req.responseHeaders || !req.responseHeaders.date) {
3757 // expected date header not present, fall back to using date header from mpd
3758 // TODO: log warning
3759 serverTime = _this7.masterLoaded_;
3760 } else {
3761 serverTime = Date.parse(req.responseHeaders.date);
3762 }
3763 } else {
3764 serverTime = Date.parse(req.responseText);
3765 }
3766
3767 _this7.masterPlaylistLoader_.clientOffset_ = serverTime - Date.now();
3768 done();
3769 });
3770 };
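
// Example (illustrative sketch, hypothetical helper): once clientOffset_ has
// been computed above, "server now" can be approximated from the local clock
// at any later point.
var exampleServerNow = function (clientOffset) {
  // clientOffset is (serverTime - Date.now()) at sync time
  return Date.now() + clientOffset;
};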
3771
3772 _proto.haveMaster_ = function haveMaster_() {
3773 this.state = 'HAVE_MASTER';
3774
3775 if (this.isMaster_) {
3776 // We have the master playlist at this point, so
3777 // trigger this to allow MasterPlaylistController
3778 // to make an initial playlist selection
3779 this.trigger('loadedplaylist');
3780 } else if (!this.media_) {
3781 // no media playlist was specifically selected so select
3782 // the one the child playlist loader was created with
3783 this.media(this.childPlaylist_);
3784 }
3785 };
3786
3787 _proto.handleMaster_ = function handleMaster_() {
3788 // clear media request
3789 this.mediaRequest_ = null;
3790 var newMaster = parseMasterXml({
3791 masterXml: this.masterPlaylistLoader_.masterXml_,
3792 srcUrl: this.masterPlaylistLoader_.srcUrl,
3793 clientOffset: this.masterPlaylistLoader_.clientOffset_,
3794 sidxMapping: this.masterPlaylistLoader_.sidxMapping_
3795 });
3796 var oldMaster = this.masterPlaylistLoader_.master; // if we have an old master to compare the new master against
3797
3798 if (oldMaster) {
3799 newMaster = updateMaster(oldMaster, newMaster, this.masterPlaylistLoader_.sidxMapping_);
3800 } // only update master if we have a new master
3801
3802
3803 this.masterPlaylistLoader_.master = newMaster ? newMaster : oldMaster;
3804 var location = this.masterPlaylistLoader_.master.locations && this.masterPlaylistLoader_.master.locations[0];
3805
3806 if (location && location !== this.masterPlaylistLoader_.srcUrl) {
3807 this.masterPlaylistLoader_.srcUrl = location;
3808 }
3809
3810 if (!oldMaster || newMaster && newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
3811 this.updateMinimumUpdatePeriodTimeout_();
3812 }
3813
3814 return Boolean(newMaster);
3815 };
3816
3817 _proto.updateMinimumUpdatePeriodTimeout_ = function updateMinimumUpdatePeriodTimeout_() {
3818 var mpl = this.masterPlaylistLoader_; // cancel any pending creation of mup on media
3819 // a new one will be added if needed.
3820
3821 if (mpl.createMupOnMedia_) {
3822 mpl.off('loadedmetadata', mpl.createMupOnMedia_);
3823 mpl.createMupOnMedia_ = null;
3824 } // clear any pending timeouts
3825
3826
3827 if (mpl.minimumUpdatePeriodTimeout_) {
3828 window.clearTimeout(mpl.minimumUpdatePeriodTimeout_);
3829 mpl.minimumUpdatePeriodTimeout_ = null;
3830 }
3831
3832 var mup = mpl.master && mpl.master.minimumUpdatePeriod; // If the minimumUpdatePeriod has a value of 0, that indicates that the current
3833 // MPD has no future validity, so a new one will need to be acquired when new
3834 // media segments are to be made available. Thus, we use the target duration
3835 // in this case
3836
3837 if (mup === 0) {
3838 if (mpl.media()) {
3839 mup = mpl.media().targetDuration * 1000;
3840 } else {
3841 mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;
3842 mpl.one('loadedmetadata', mpl.createMupOnMedia_);
3843 }
3844 } // if minimumUpdatePeriod is invalid or <= zero (which can
3845 // happen when a live video becomes VOD), skip timeout
3846 // creation.
3847
3848
3849 if (typeof mup !== 'number' || mup <= 0) {
3850 if (mup < 0) {
3851 this.logger_("found invalid minimumUpdatePeriod of " + mup + ", not setting a timeout");
3852 }
3853
3854 return;
3855 }
3856
3857 this.createMUPTimeout_(mup);
3858 };
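
// Example (illustrative sketch, hypothetical helper): the zero-mup fallback
// above in isolation. A minimumUpdatePeriod of 0 with a 6 second target
// duration refreshes the manifest on the segment cadence instead.
var exampleEffectiveMup = function (mup, targetDuration) {
  return mup === 0 ? targetDuration * 1000 : mup;
};
// exampleEffectiveMup(0, 6); // 6000 milliseconds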
3859
3860 _proto.createMUPTimeout_ = function createMUPTimeout_(mup) {
3861 var mpl = this.masterPlaylistLoader_;
3862 mpl.minimumUpdatePeriodTimeout_ = window.setTimeout(function () {
3863 mpl.minimumUpdatePeriodTimeout_ = null;
3864 mpl.trigger('minimumUpdatePeriod');
3865 mpl.createMUPTimeout_(mup);
3866 }, mup);
3867 }
3868 /**
3869 * Sends request to refresh the master xml and updates the parsed master manifest
3870 */
3871 ;
3872
3873 _proto.refreshXml_ = function refreshXml_() {
3874 var _this8 = this;
3875
3876 this.requestMaster_(function (req, masterChanged) {
3877 if (!masterChanged) {
3878 return;
3879 }
3880
3881 if (_this8.media_) {
3882 _this8.media_ = _this8.masterPlaylistLoader_.master.playlists[_this8.media_.id];
3883 } // This will filter out updated sidx info from the mapping
3884
3885
3886 _this8.masterPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(_this8.masterPlaylistLoader_.master, _this8.masterPlaylistLoader_.sidxMapping_);
3887
3888 _this8.addSidxSegments_(_this8.media(), _this8.state, function (sidxChanged) {
3889 // TODO: do we need to reload the current playlist?
3890 _this8.refreshMedia_(_this8.media().id);
3891 });
3892 });
3893 }
3894 /**
3895 * Refreshes the media playlist by re-parsing the master xml and updating playlist
3896 * references. If this is an alternate loader, the updated parsed manifest is retrieved
3897 * from the master loader.
3898 */
3899 ;
3900
3901 _proto.refreshMedia_ = function refreshMedia_(mediaID) {
3902 var _this9 = this;
3903
3904 if (!mediaID) {
3905 throw new Error('refreshMedia_ must take a media id');
3906 } // for the master loader we have to reparse the master xml
3907 // to re-create segments based on current timing values,
3908 // which may change media. We only skip updating the master
3909 // if this is the first time this.media_ is being set,
3910 // as the master was just parsed in that case.
3911
3912
3913 if (this.media_ && this.isMaster_) {
3914 this.handleMaster_();
3915 }
3916
3917 var playlists = this.masterPlaylistLoader_.master.playlists;
3918 var mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];
3919
3920 if (mediaChanged) {
3921 this.media_ = playlists[mediaID];
3922 } else {
3923 this.trigger('playlistunchanged');
3924 }
3925
3926 if (!this.mediaUpdateTimeout) {
3927 var createMediaUpdateTimeout = function createMediaUpdateTimeout() {
3928 if (_this9.media().endList) {
3929 return;
3930 }
3931
3932 _this9.mediaUpdateTimeout = window.setTimeout(function () {
3933 _this9.trigger('mediaupdatetimeout');
3934
3935 createMediaUpdateTimeout();
3936 }, refreshDelay(_this9.media(), Boolean(mediaChanged)));
3937 };
3938
3939 createMediaUpdateTimeout();
3940 }
3941
3942 this.trigger('loadedplaylist');
3943 };
3944
3945 return DashPlaylistLoader;
3946}(EventTarget);
3947
3948var Config = {
3949 GOAL_BUFFER_LENGTH: 30,
3950 MAX_GOAL_BUFFER_LENGTH: 60,
3951 BACK_BUFFER_LENGTH: 30,
3952 GOAL_BUFFER_LENGTH_RATE: 1,
3953 // 0.5 MB/s
3954 INITIAL_BANDWIDTH: 4194304,
3955 // A fudge factor to apply to advertised playlist bitrates to account for
3956 // temporary fluctuations in client bandwidth
3957 BANDWIDTH_VARIANCE: 1.2,
3958 // How much of the buffer must be filled before we consider upswitching
3959 BUFFER_LOW_WATER_LINE: 0,
3960 MAX_BUFFER_LOW_WATER_LINE: 30,
3961 // TODO: Remove this when experimentalBufferBasedABR is removed
3962 EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
3963 BUFFER_LOW_WATER_LINE_RATE: 1,
3964 // If the buffer is greater than the high water line, we won't switch down
3965 BUFFER_HIGH_WATER_LINE: 30
3966};
3967
3968var stringToArrayBuffer = function stringToArrayBuffer(string) {
3969 var view = new Uint8Array(new ArrayBuffer(string.length));
3970
3971 for (var i = 0; i < string.length; i++) {
3972 view[i] = string.charCodeAt(i);
3973 }
3974
3975 return view.buffer;
3976};
3977
3978/* global Blob, BlobBuilder, Worker */
3979// unify worker interface
3980var browserWorkerPolyFill = function browserWorkerPolyFill(workerObj) {
3981 // node only supports on/off
3982 workerObj.on = workerObj.addEventListener;
3983 workerObj.off = workerObj.removeEventListener;
3984 return workerObj;
3985};
3986
3987var createObjectURL = function createObjectURL(str) {
3988 try {
3989 return URL.createObjectURL(new Blob([str], {
3990 type: 'application/javascript'
3991 }));
3992 } catch (e) {
3993 var blob = new BlobBuilder();
3994 blob.append(str);
3995 return URL.createObjectURL(blob.getBlob());
3996 }
3997};
3998
3999var factory = function factory(code) {
4000 return function () {
4001 var objectUrl = createObjectURL(code);
4002 var worker = browserWorkerPolyFill(new Worker(objectUrl));
4003 worker.objURL = objectUrl;
4004 var terminate = worker.terminate;
4005 worker.on = worker.addEventListener;
4006 worker.off = worker.removeEventListener;
4007
4008 worker.terminate = function () {
4009 URL.revokeObjectURL(objectUrl);
4010 return terminate.call(this);
4011 };
4012
4013 return worker;
4014 };
4015};
4016var transform = function transform(code) {
4017 return "var browserWorkerPolyFill = " + browserWorkerPolyFill.toString() + ";\n" + 'browserWorkerPolyFill(self);\n' + code;
4018};
4019
4020var getWorkerString = function getWorkerString(fn) {
4021 return fn.toString().replace(/^function.+?{/, '').slice(0, -1);
4022};
4023
4024/* rollup-plugin-worker-factory start for worker!/Users/bcasey/Projects/videojs-http-streaming/src/transmuxer-worker.js */
4025var workerCode$1 = transform(getWorkerString(function () {
4026 /**
4027 * mux.js
4028 *
4029 * Copyright (c) Brightcove
4030 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4031 *
4032 * A lightweight readable stream implementation that handles event dispatching.
4033 * Objects that inherit from streams should call init in their constructors.
4034 */
4035
4036 var Stream = function Stream() {
4037 this.init = function () {
4038 var listeners = {};
4039 /**
4040 * Add a listener for a specified event type.
4041 * @param type {string} the event name
4042 * @param listener {function} the callback to be invoked when an event of
4043 * the specified type occurs
4044 */
4045
4046 this.on = function (type, listener) {
4047 if (!listeners[type]) {
4048 listeners[type] = [];
4049 }
4050
4051 listeners[type] = listeners[type].concat(listener);
4052 };
4053 /**
4054 * Remove a listener for a specified event type.
4055 * @param type {string} the event name
4056 * @param listener {function} a function previously registered for this
4057 * type of event through `on`
4058 */
4059
4060
4061 this.off = function (type, listener) {
4062 var index;
4063
4064 if (!listeners[type]) {
4065 return false;
4066 }
4067
4068 index = listeners[type].indexOf(listener);
4069 listeners[type] = listeners[type].slice();
4070 listeners[type].splice(index, 1);
4071 return index > -1;
4072 };
4073 /**
4074 * Trigger an event of the specified type on this stream. Any additional
4075 * arguments to this function are passed as parameters to event listeners.
4076 * @param type {string} the event name
4077 */
4078
4079
4080 this.trigger = function (type) {
4081 var callbacks, i, length, args;
4082 callbacks = listeners[type];
4083
4084 if (!callbacks) {
4085 return;
4086 } // Slicing the arguments on every invocation of this method
4087 // can add a significant amount of overhead. Avoid the
4088 // intermediate object creation for the common case of a
4089 // single callback argument
4090
4091
4092 if (arguments.length === 2) {
4093 length = callbacks.length;
4094
4095 for (i = 0; i < length; ++i) {
4096 callbacks[i].call(this, arguments[1]);
4097 }
4098 } else {
4099 args = [];
4100 i = arguments.length;
4101
4102 for (i = 1; i < arguments.length; ++i) {
4103 args.push(arguments[i]);
4104 }
4105
4106 length = callbacks.length;
4107
4108 for (i = 0; i < length; ++i) {
4109 callbacks[i].apply(this, args);
4110 }
4111 }
4112 };
4113 /**
4114 * Destroys the stream and cleans up.
4115 */
4116
4117
4118 this.dispose = function () {
4119 listeners = {};
4120 };
4121 };
4122 };
4123 /**
4124 * Forwards all `data` events on this stream to the destination stream. The
4125 * destination stream should provide a method `push` to receive the data
4126 * events as they arrive.
4127 * @param destination {stream} the stream that will receive all `data` events
4130 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
4131 */
4132
4133
4134 Stream.prototype.pipe = function (destination) {
4135 this.on('data', function (data) {
4136 destination.push(data);
4137 });
4138 this.on('done', function (flushSource) {
4139 destination.flush(flushSource);
4140 });
4141 this.on('partialdone', function (flushSource) {
4142 destination.partialFlush(flushSource);
4143 });
4144 this.on('endedtimeline', function (flushSource) {
4145 destination.endTimeline(flushSource);
4146 });
4147 this.on('reset', function (flushSource) {
4148 destination.reset(flushSource);
4149 });
4150 return destination;
4151 }; // Default stream functions that are expected to be overridden to perform
4152 // actual work. These are provided by the prototype as a sort of no-op
4153 // implementation so that we don't have to check for their existence in the
4154 // `pipe` function above.
4155
4156
4157 Stream.prototype.push = function (data) {
4158 this.trigger('data', data);
4159 };
4160
4161 Stream.prototype.flush = function (flushSource) {
4162 this.trigger('done', flushSource);
4163 };
4164
4165 Stream.prototype.partialFlush = function (flushSource) {
4166 this.trigger('partialdone', flushSource);
4167 };
4168
4169 Stream.prototype.endTimeline = function (flushSource) {
4170 this.trigger('endedtimeline', flushSource);
4171 };
4172
4173 Stream.prototype.reset = function (flushSource) {
4174 this.trigger('reset', flushSource);
4175 };
4176
4177 var stream = Stream;
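
// Example (illustrative sketch, not part of mux.js): a minimal Stream
// subclass using the init/push/trigger contract above. Pushed values are
// doubled and re-emitted as 'data' events.
var ExampleDoublerStream = function () {
  this.init();
  this.push = function (data) {
    this.trigger('data', data * 2);
  };
};
ExampleDoublerStream.prototype = new Stream();
// new ExampleDoublerStream() could then be pipe()d to any destination stream.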
4178 /**
4179 * mux.js
4180 *
4181 * Copyright (c) Brightcove
4182 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4183 *
4184 * Functions that generate fragmented MP4s suitable for use with Media
4185 * Source Extensions.
4186 */
4187
4188 var UINT32_MAX = Math.pow(2, 32) - 1;
4189 var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants
4190
4191 (function () {
4192 var i;
4193 types = {
4194 avc1: [],
4195 // codingname
4196 avcC: [],
4197 btrt: [],
4198 dinf: [],
4199 dref: [],
4200 esds: [],
4201 ftyp: [],
4202 hdlr: [],
4203 mdat: [],
4204 mdhd: [],
4205 mdia: [],
4206 mfhd: [],
4207 minf: [],
4208 moof: [],
4209 moov: [],
4210 mp4a: [],
4211 // codingname
4212 mvex: [],
4213 mvhd: [],
4214 pasp: [],
4215 sdtp: [],
4216 smhd: [],
4217 stbl: [],
4218 stco: [],
4219 stsc: [],
4220 stsd: [],
4221 stsz: [],
4222 stts: [],
4223 styp: [],
4224 tfdt: [],
4225 tfhd: [],
4226 traf: [],
4227 trak: [],
4228 trun: [],
4229 trex: [],
4230 tkhd: [],
4231 vmhd: []
4232 }; // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we
4233 // don't throw an error
4234
4235 if (typeof Uint8Array === 'undefined') {
4236 return;
4237 }
4238
4239 for (i in types) {
4240 if (types.hasOwnProperty(i)) {
4241 types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
4242 }
4243 }
4244
4245 MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
4246 AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
4247 MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
4248 VIDEO_HDLR = new Uint8Array([0x00, // version 0
4249 0x00, 0x00, 0x00, // flags
4250 0x00, 0x00, 0x00, 0x00, // pre_defined
4251 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
4252 0x00, 0x00, 0x00, 0x00, // reserved
4253 0x00, 0x00, 0x00, 0x00, // reserved
4254 0x00, 0x00, 0x00, 0x00, // reserved
4255 0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
4256 ]);
4257 AUDIO_HDLR = new Uint8Array([0x00, // version 0
4258 0x00, 0x00, 0x00, // flags
4259 0x00, 0x00, 0x00, 0x00, // pre_defined
4260 0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
4261 0x00, 0x00, 0x00, 0x00, // reserved
4262 0x00, 0x00, 0x00, 0x00, // reserved
4263 0x00, 0x00, 0x00, 0x00, // reserved
4264 0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
4265 ]);
4266 HDLR_TYPES = {
4267 video: VIDEO_HDLR,
4268 audio: AUDIO_HDLR
4269 };
4270 DREF = new Uint8Array([0x00, // version 0
4271 0x00, 0x00, 0x00, // flags
4272 0x00, 0x00, 0x00, 0x01, // entry_count
4273 0x00, 0x00, 0x00, 0x0c, // entry_size
4274 0x75, 0x72, 0x6c, 0x20, // 'url' type
4275 0x00, // version 0
4276 0x00, 0x00, 0x01 // entry_flags
4277 ]);
4278 SMHD = new Uint8Array([0x00, // version
4279 0x00, 0x00, 0x00, // flags
4280 0x00, 0x00, // balance, 0 means centered
4281 0x00, 0x00 // reserved
4282 ]);
4283 STCO = new Uint8Array([0x00, // version
4284 0x00, 0x00, 0x00, // flags
4285 0x00, 0x00, 0x00, 0x00 // entry_count
4286 ]);
4287 STSC = STCO;
4288 STSZ = new Uint8Array([0x00, // version
4289 0x00, 0x00, 0x00, // flags
4290 0x00, 0x00, 0x00, 0x00, // sample_size
4291 0x00, 0x00, 0x00, 0x00 // sample_count
4292 ]);
4293 STTS = STCO;
4294 VMHD = new Uint8Array([0x00, // version
4295 0x00, 0x00, 0x01, // flags
4296 0x00, 0x00, // graphicsmode
4297 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
4298 ]);
4299 })();
4300
4301 box = function box(type) {
4302 var payload = [],
4303 size = 0,
4304 i,
4305 result,
4306 view;
4307
4308 for (i = 1; i < arguments.length; i++) {
4309 payload.push(arguments[i]);
4310 }
4311
4312 i = payload.length; // calculate the total size we need to allocate
4313
4314 while (i--) {
4315 size += payload[i].byteLength;
4316 }
4317
4318 result = new Uint8Array(size + 8);
4319 view = new DataView(result.buffer, result.byteOffset, result.byteLength);
4320 view.setUint32(0, result.byteLength);
4321 result.set(type, 4); // copy the payload into the result
4322
4323 for (i = 0, size = 8; i < payload.length; i++) {
4324 result.set(payload[i], size);
4325 size += payload[i].byteLength;
4326 }
4327
4328 return result;
4329 };
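
// Example (illustrative sketch, hypothetical helper): reading back the 8 byte
// header written by box(): a 32-bit big-endian size followed by the type.
var exampleParseBoxHeader = function (bytes) {
  var view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
  return {
    size: view.getUint32(0),
    type: String.fromCharCode(bytes[4], bytes[5], bytes[6], bytes[7])
  };
};
// exampleParseBoxHeader(dinf()); // { size: 36, type: 'dinf' }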
4330
4331 dinf = function dinf() {
4332 return box(types.dinf, box(types.dref, DREF));
4333 };
4334
4335 esds = function esds(track) {
4336 return box(types.esds, new Uint8Array([0x00, // version
4337 0x00, 0x00, 0x00, // flags
4338 // ES_Descriptor
4339 0x03, // tag, ES_DescrTag
4340 0x19, // length
4341 0x00, 0x00, // ES_ID
4342 0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
4343 // DecoderConfigDescriptor
4344 0x04, // tag, DecoderConfigDescrTag
4345 0x11, // length
4346 0x40, // object type
4347 0x15, // streamType
4348 0x00, 0x06, 0x00, // bufferSizeDB
4349 0x00, 0x00, 0xda, 0xc0, // maxBitrate
4350 0x00, 0x00, 0xda, 0xc0, // avgBitrate
4351 // DecoderSpecificInfo
4352 0x05, // tag, DecoderSpecificInfoTag
4353 0x02, // length
4354 // ISO/IEC 14496-3, AudioSpecificConfig
4355 // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
4356 track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
4357 ]));
4358 };
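
// Example (illustrative sketch, hypothetical helper): unpacking the two
// AudioSpecificConfig bytes written above back into their fields.
var exampleParseAudioSpecificConfig = function (b0, b1) {
  return {
    audioObjectType: b0 >>> 3,
    samplingFrequencyIndex: (b0 & 0x07) << 1 | b1 >>> 7,
    channelCount: b1 >>> 3 & 0x0f
  };
};
// AAC-LC, 44.1kHz, stereo: exampleParseAudioSpecificConfig(0x12, 0x10)
// yields { audioObjectType: 2, samplingFrequencyIndex: 4, channelCount: 2 }.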
4359
4360 ftyp = function ftyp() {
4361 return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
4362 };
4363
4364 hdlr = function hdlr(type) {
4365 return box(types.hdlr, HDLR_TYPES[type]);
4366 };
4367
4368 mdat = function mdat(data) {
4369 return box(types.mdat, data);
4370 };
4371
4372 mdhd = function mdhd(track) {
4373 var result = new Uint8Array([0x00, // version 0
4374 0x00, 0x00, 0x00, // flags
4375 0x00, 0x00, 0x00, 0x02, // creation_time
4376 0x00, 0x00, 0x00, 0x03, // modification_time
4377 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
4378 track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
4379 0x55, 0xc4, // 'und' language (undetermined)
4380 0x00, 0x00]); // Use the sample rate from the track metadata, when it is
4381 // defined. The sample rate can be parsed out of an ADTS header, for
4382 // instance.
4383
4384 if (track.samplerate) {
4385 result[12] = track.samplerate >>> 24 & 0xFF;
4386 result[13] = track.samplerate >>> 16 & 0xFF;
4387 result[14] = track.samplerate >>> 8 & 0xFF;
4388 result[15] = track.samplerate & 0xFF;
4389 }
4390
4391 return box(types.mdhd, result);
4392 };
4393
4394 mdia = function mdia(track) {
4395 return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
4396 };
4397
4398 mfhd = function mfhd(sequenceNumber) {
4399 return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
4400 (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
4401 ]));
4402 };
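
// Example (illustrative sketch, hypothetical helper): the big-endian byte
// packing repeated throughout these box builders, extracted for clarity.
var exampleBe32 = function (value) {
  return [value >>> 24 & 0xff, value >>> 16 & 0xff, value >>> 8 & 0xff, value & 0xff];
};
// exampleBe32(0x01020304); // [1, 2, 3, 4]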
4403
4404 minf = function minf(track) {
4405 return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
4406 };
4407
4408 moof = function moof(sequenceNumber, tracks) {
4409 var trackFragments = [],
4410 i = tracks.length; // build traf boxes for each track fragment
4411
4412 while (i--) {
4413 trackFragments[i] = traf(tracks[i]);
4414 }
4415
4416 return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
4417 };
4418 /**
4419 * Returns a movie box.
4420 * @param tracks {array} the tracks associated with this movie
4421 * @see ISO/IEC 14496-12:2012(E), section 8.2.1
4422 */
4423
4424
4425 moov = function moov(tracks) {
4426 var i = tracks.length,
4427 boxes = [];
4428
4429 while (i--) {
4430 boxes[i] = trak(tracks[i]);
4431 }
4432
4433 return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
4434 };
4435
4436 mvex = function mvex(tracks) {
4437 var i = tracks.length,
4438 boxes = [];
4439
4440 while (i--) {
4441 boxes[i] = trex(tracks[i]);
4442 }
4443
4444 return box.apply(null, [types.mvex].concat(boxes));
4445 };
4446
4447 mvhd = function mvhd(duration) {
4448 var bytes = new Uint8Array([0x00, // version 0
4449 0x00, 0x00, 0x00, // flags
4450 0x00, 0x00, 0x00, 0x01, // creation_time
4451 0x00, 0x00, 0x00, 0x02, // modification_time
4452 0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
4453 (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
4454 0x00, 0x01, 0x00, 0x00, // 1.0 rate
4455 0x01, 0x00, // 1.0 volume
4456 0x00, 0x00, // reserved
4457 0x00, 0x00, 0x00, 0x00, // reserved
4458 0x00, 0x00, 0x00, 0x00, // reserved
4459 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
4460 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
4461 0xff, 0xff, 0xff, 0xff // next_track_ID
4462 ]);
4463 return box(types.mvhd, bytes);
4464 };
4465
4466 sdtp = function sdtp(track) {
4467 var samples = track.samples || [],
4468 bytes = new Uint8Array(4 + samples.length),
4469 flags,
4470 i; // leave the full box header (4 bytes) all zero
4471 // write the sample table
4472
4473 for (i = 0; i < samples.length; i++) {
4474 flags = samples[i].flags;
4475 bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
4476 }
4477
4478 return box(types.sdtp, bytes);
4479 };
4480
4481 stbl = function stbl(track) {
4482 return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
4483 };
4484
4485 (function () {
4486 var videoSample, audioSample;
4487
4488 stsd = function stsd(track) {
4489 return box(types.stsd, new Uint8Array([0x00, // version 0
4490 0x00, 0x00, 0x00, // flags
4491 0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
4492 };
4493
4494 videoSample = function videoSample(track) {
4495 var sps = track.sps || [],
4496 pps = track.pps || [],
4497 sequenceParameterSets = [],
4498 pictureParameterSets = [],
4499 i,
4500 avc1Box; // assemble the SPSs
4501
4502 for (i = 0; i < sps.length; i++) {
4503 sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
4504 sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength
4505
4506 sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
4507 } // assemble the PPSs
4508
4509
4510 for (i = 0; i < pps.length; i++) {
4511 pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
4512 pictureParameterSets.push(pps[i].byteLength & 0xFF);
4513 pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
4514 }
4515
4516 avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
4517 0x00, 0x01, // data_reference_index
4518 0x00, 0x00, // pre_defined
4519 0x00, 0x00, // reserved
4520 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
4521 (track.width & 0xff00) >> 8, track.width & 0xff, // width
4522 (track.height & 0xff00) >> 8, track.height & 0xff, // height
4523 0x00, 0x48, 0x00, 0x00, // horizresolution
4524 0x00, 0x48, 0x00, 0x00, // vertresolution
4525 0x00, 0x00, 0x00, 0x00, // reserved
4526 0x00, 0x01, // frame_count
4527 0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
4528 0x00, 0x18, // depth = 24
4529 0xff, 0xff // pre_defined = -1
4530 ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
4531 track.profileIdc, // AVCProfileIndication
4532 track.profileCompatibility, // profile_compatibility
4533 track.levelIdc, // AVCLevelIndication
4534 0xff // lengthSizeMinusOne, hard-coded to 4 bytes
4535 ].concat([sps.length], // numOfSequenceParameterSets
4536 sequenceParameterSets, // "SPS"
4537 [pps.length], // numOfPictureParameterSets
4538 pictureParameterSets // "PPS"
4539 ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
4540 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
4541 0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
4542 ]))];
4543
4544 if (track.sarRatio) {
4545 var hSpacing = track.sarRatio[0],
4546 vSpacing = track.sarRatio[1];
4547 avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
4548 }
4549
4550 return box.apply(null, avc1Box);
4551 };
4552
4553 audioSample = function audioSample(track) {
4554 return box(types.mp4a, new Uint8Array([// SampleEntry, ISO/IEC 14496-12
4555 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
4556 0x00, 0x01, // data_reference_index
4557 // AudioSampleEntry, ISO/IEC 14496-12
4558 0x00, 0x00, 0x00, 0x00, // reserved
4559 0x00, 0x00, 0x00, 0x00, // reserved
4560 (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount
4561 (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
4562 0x00, 0x00, // pre_defined
4563 0x00, 0x00, // reserved
4564 (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16
4565 // MP4AudioSampleEntry, ISO/IEC 14496-14
4566 ]), esds(track));
4567 };
4568 })();
4569
4570 tkhd = function tkhd(track) {
4571 var result = new Uint8Array([0x00, // version 0
4572 0x00, 0x00, 0x07, // flags
4573 0x00, 0x00, 0x00, 0x00, // creation_time
4574 0x00, 0x00, 0x00, 0x00, // modification_time
4575 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
4576 0x00, 0x00, 0x00, 0x00, // reserved
4577 (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
4578 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
4579 0x00, 0x00, // layer
4580 0x00, 0x00, // alternate_group
4581 0x01, 0x00, // non-audio track volume
4582 0x00, 0x00, // reserved
4583 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
4584 (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
4585 (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
4586 ]);
4587 return box(types.tkhd, result);
4588 };
4589 /**
4590 * Generate a track fragment (traf) box. A traf box collects metadata
4591 * about tracks in a movie fragment (moof) box.
4592 */
4593
4594
4595 traf = function traf(track) {
4596 var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
4597 trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
4598 0x00, 0x00, 0x3a, // flags
4599 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
4600 0x00, 0x00, 0x00, 0x01, // sample_description_index
4601 0x00, 0x00, 0x00, 0x00, // default_sample_duration
4602 0x00, 0x00, 0x00, 0x00, // default_sample_size
4603 0x00, 0x00, 0x00, 0x00 // default_sample_flags
4604 ]));
4605 upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / (UINT32_MAX + 1));
4606 lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % (UINT32_MAX + 1));
4607 trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
4608 0x00, 0x00, 0x00, // flags
4609 // baseMediaDecodeTime
4610 upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of
4611 // the containing moof to the first payload byte of the associated
4612 // mdat
4613
4614 dataOffset = 32 + // tfhd
4615 20 + // tfdt
4616 8 + // traf header
4617 16 + // mfhd
4618 8 + // moof header
4619 8; // mdat header
4620 // audio tracks require less metadata
4621
4622 if (track.type === 'audio') {
4623 trackFragmentRun = trun$1(track, dataOffset);
4624 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
4625 } // video tracks should contain an independent and disposable samples
4626 // box (sdtp);
4627 // generate one and adjust offsets to match
4628
4629
4630 sampleDependencyTable = sdtp(track);
4631 trackFragmentRun = trun$1(track, sampleDependencyTable.length + dataOffset);
4632 return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
4633 };
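
// Example (illustrative): the 64-bit split used above for baseMediaDecodeTime,
// shown on a value that overflows 32 bits.
// var exampleTime = 5000000000; // > 2^32
// Math.floor(exampleTime / (UINT32_MAX + 1)); // 1 (upper word)
// Math.floor(exampleTime % (UINT32_MAX + 1)); // 705032704 (lower word)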
4634 /**
4635 * Generate a track box.
4636 * @param track {object} a track definition
4637 * @return {Uint8Array} the track box
4638 */
4639
4640
4641 trak = function trak(track) {
4642 track.duration = track.duration || 0xffffffff;
4643 return box(types.trak, tkhd(track), mdia(track));
4644 };
4645
4646 trex = function trex(track) {
4647 var result = new Uint8Array([0x00, // version 0
4648 0x00, 0x00, 0x00, // flags
4649 (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
4650 0x00, 0x00, 0x00, 0x01, // default_sample_description_index
4651 0x00, 0x00, 0x00, 0x00, // default_sample_duration
4652 0x00, 0x00, 0x00, 0x00, // default_sample_size
4653 0x00, 0x01, 0x00, 0x01 // default_sample_flags
4654 ]); // the last two bytes of default_sample_flags is the sample
4655 // degradation priority, a hint about the importance of this sample
4656 // relative to others. Lower the degradation priority for all sample
4657 // types other than video.
4658
4659 if (track.type !== 'video') {
4660 result[result.length - 1] = 0x00;
4661 }
4662
4663 return box(types.trex, result);
4664 };
4665
4666 (function () {
4667 var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a
4668 // duration is present for the first sample, it will be present for
4669 // all subsequent samples.
4670 // see ISO/IEC 14496-12:2012, Section 8.8.8.1
4671
4672 trunHeader = function trunHeader(samples, offset) {
4673 var durationPresent = 0,
4674 sizePresent = 0,
4675 flagsPresent = 0,
4676 compositionTimeOffset = 0; // trun flag constants
4677
4678 if (samples.length) {
4679 if (samples[0].duration !== undefined) {
4680 durationPresent = 0x1;
4681 }
4682
4683 if (samples[0].size !== undefined) {
4684 sizePresent = 0x2;
4685 }
4686
4687 if (samples[0].flags !== undefined) {
4688 flagsPresent = 0x4;
4689 }
4690
4691 if (samples[0].compositionTimeOffset !== undefined) {
4692 compositionTimeOffset = 0x8;
4693 }
4694 }
4695
4696 return [0x00, // version 0
4697 0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags
4698 (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
4699 (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
4700 ];
4701 };
4702
4703 videoTrun = function videoTrun(track, offset) {
4704 var bytesOffset, bytes, header, samples, sample, i;
4705 samples = track.samples || [];
4706 offset += 8 + 12 + 16 * samples.length;
4707 header = trunHeader(samples, offset);
4708 bytes = new Uint8Array(header.length + samples.length * 16);
4709 bytes.set(header);
4710 bytesOffset = header.length;
4711
4712 for (i = 0; i < samples.length; i++) {
4713 sample = samples[i];
4714 bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
4715 bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
4716 bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
4717 bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
4718
4719 bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
4720 bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
4721 bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
4722 bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
4723
4724 bytes[bytesOffset++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
4725 bytes[bytesOffset++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
4726 bytes[bytesOffset++] = (sample.flags.degradationPriority & 0xFF00) >>> 8; // high byte of the 16-bit degradation_priority
4727 bytes[bytesOffset++] = sample.flags.degradationPriority & 0xFF; // sample_flags
4728
4729 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
4730 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
4731 bytes[bytesOffset++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
4732 bytes[bytesOffset++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
4733 }
4734
4735 return box(types.trun, bytes);
4736 };
4737
4738 audioTrun = function audioTrun(track, offset) {
4739 var bytes, bytesOffset, header, samples, sample, i;
4740 samples = track.samples || [];
4741 offset += 8 + 12 + 8 * samples.length;
4742 header = trunHeader(samples, offset);
4743 bytes = new Uint8Array(header.length + samples.length * 8);
4744 bytes.set(header);
4745 bytesOffset = header.length;
4746
4747 for (i = 0; i < samples.length; i++) {
4748 sample = samples[i];
4749 bytes[bytesOffset++] = (sample.duration & 0xFF000000) >>> 24;
4750 bytes[bytesOffset++] = (sample.duration & 0xFF0000) >>> 16;
4751 bytes[bytesOffset++] = (sample.duration & 0xFF00) >>> 8;
4752 bytes[bytesOffset++] = sample.duration & 0xFF; // sample_duration
4753
4754 bytes[bytesOffset++] = (sample.size & 0xFF000000) >>> 24;
4755 bytes[bytesOffset++] = (sample.size & 0xFF0000) >>> 16;
4756 bytes[bytesOffset++] = (sample.size & 0xFF00) >>> 8;
4757 bytes[bytesOffset++] = sample.size & 0xFF; // sample_size
4758 }
4759
4760 return box(types.trun, bytes);
4761 };
4762
4763 trun$1 = function trun(track, offset) {
4764 if (track.type === 'audio') {
4765 return audioTrun(track, offset);
4766 }
4767
4768 return videoTrun(track, offset);
4769 };
4770 })();
4771
4772 var mp4Generator = {
4773 ftyp: ftyp,
4774 mdat: mdat,
4775 moof: moof,
4776 moov: moov,
4777 initSegment: function initSegment(tracks) {
4778 var fileType = ftyp(),
4779 movie = moov(tracks),
4780 result;
4781 result = new Uint8Array(fileType.byteLength + movie.byteLength);
4782 result.set(fileType);
4783 result.set(movie, fileType.byteLength);
4784 return result;
4785 }
4786 };
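
// Example (illustrative sketch with hypothetical track values): generating an
// init segment for a single stereo AAC-LC track using the helpers above.
// mp4Generator.initSegment([{
//   id: 1,
//   type: 'audio',
//   audioobjecttype: 2, // AAC-LC
//   samplingfrequencyindex: 4, // 44.1kHz
//   samplerate: 44100,
//   channelcount: 2,
//   samplesize: 16
// }]); // Uint8Array: ftyp followed by moov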
4787 /**
4788 * mux.js
4789 *
4790 * Copyright (c) Brightcove
4791 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4792 */
4793 // Convert an array of nal units into an array of frames with each frame being
4794 // composed of the nal units that make up that frame
4795 // Also keep track of cumulative data about the frame from the nal units such
4796 // as the frame duration, starting pts, etc.
4797
4798 var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
4799 var i,
4800 currentNal,
4801 currentFrame = [],
4802 frames = []; // TODO added for LHLS, make sure this is OK
4803
4804 frames.byteLength = 0;
4805 frames.nalCount = 0;
4806 frames.duration = 0;
4807 currentFrame.byteLength = 0;
4808
4809 for (i = 0; i < nalUnits.length; i++) {
4810 currentNal = nalUnits[i]; // Split on 'aud'-type nal units
4811
4812 if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
4813 // Since the very first nal unit is expected to be an AUD
4814 // only push to the frames array when currentFrame is not empty
4815 if (currentFrame.length) {
4816 currentFrame.duration = currentNal.dts - currentFrame.dts; // TODO added for LHLS, make sure this is OK
4817
4818 frames.byteLength += currentFrame.byteLength;
4819 frames.nalCount += currentFrame.length;
4820 frames.duration += currentFrame.duration;
4821 frames.push(currentFrame);
4822 }
4823
4824 currentFrame = [currentNal];
4825 currentFrame.byteLength = currentNal.data.byteLength;
4826 currentFrame.pts = currentNal.pts;
4827 currentFrame.dts = currentNal.dts;
4828 } else {
4829 // Specifically flag key frames for ease of use later
4830 if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
4831 currentFrame.keyFrame = true;
4832 }
4833
4834 currentFrame.duration = currentNal.dts - currentFrame.dts;
4835 currentFrame.byteLength += currentNal.data.byteLength;
4836 currentFrame.push(currentNal);
4837 }
4838 } // For the last frame, use the duration of the previous frame if we
4839 // have nothing better to go on
4840
4841
4842 if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
4843 currentFrame.duration = frames[frames.length - 1].duration;
4844 } // Push the final frame
4845 // TODO added for LHLS, make sure this is OK
4846
4847
4848 frames.byteLength += currentFrame.byteLength;
4849 frames.nalCount += currentFrame.length;
4850 frames.duration += currentFrame.duration;
4851 frames.push(currentFrame);
4852 return frames;
4853 }; // Convert an array of frames into an array of Gop with each Gop being composed
4854 // of the frames that make up that Gop
4855 // Also keep track of cumulative data about the Gop from the frames such as the
4856 // Gop duration, starting pts, etc.
4857
4858
4859 var groupFramesIntoGops = function groupFramesIntoGops(frames) {
4860 var i,
4861 currentFrame,
4862 currentGop = [],
4863 gops = []; // We must pre-set some of the values on the Gop since we
4864 // keep running totals of these values
4865
4866 currentGop.byteLength = 0;
4867 currentGop.nalCount = 0;
4868 currentGop.duration = 0;
4869 currentGop.pts = frames[0].pts;
4870 currentGop.dts = frames[0].dts; // store some metadata about all the Gops
4871
4872 gops.byteLength = 0;
4873 gops.nalCount = 0;
4874 gops.duration = 0;
4875 gops.pts = frames[0].pts;
4876 gops.dts = frames[0].dts;
4877
4878 for (i = 0; i < frames.length; i++) {
4879 currentFrame = frames[i];
4880
4881 if (currentFrame.keyFrame) {
4882 // Since the very first frame is expected to be a keyframe
4883 // only push to the gops array when currentGop is not empty
4884 if (currentGop.length) {
4885 gops.push(currentGop);
4886 gops.byteLength += currentGop.byteLength;
4887 gops.nalCount += currentGop.nalCount;
4888 gops.duration += currentGop.duration;
4889 }
4890
4891 currentGop = [currentFrame];
4892 currentGop.nalCount = currentFrame.length;
4893 currentGop.byteLength = currentFrame.byteLength;
4894 currentGop.pts = currentFrame.pts;
4895 currentGop.dts = currentFrame.dts;
4896 currentGop.duration = currentFrame.duration;
4897 } else {
4898 currentGop.duration += currentFrame.duration;
4899 currentGop.nalCount += currentFrame.length;
4900 currentGop.byteLength += currentFrame.byteLength;
4901 currentGop.push(currentFrame);
4902 }
4903 }
4904
4905 if (gops.length && currentGop.duration <= 0) {
4906 currentGop.duration = gops[gops.length - 1].duration;
4907 }
4908
4909 gops.byteLength += currentGop.byteLength;
4910 gops.nalCount += currentGop.nalCount;
4911 gops.duration += currentGop.duration; // push the final Gop
4912
4913 gops.push(currentGop);
4914 return gops;
4915 };
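
// Example (illustrative): groupFramesIntoGops returns an array of GOP arrays
// with aggregate metadata attached as non-index properties.
// var exampleGops = groupFramesIntoGops(frames);
// exampleGops[0][0].keyFrame; // each GOP begins at a keyframe
// exampleGops.duration; // running total of all GOP durations
// exampleGops.pts; // pts of the very first frame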
4916 /*
4917 * Search for the first keyframe in the GOPs and throw away all frames
4918 * until that keyframe. Then extend the duration of the pulled keyframe
4919 * and pull the PTS and DTS of the keyframe so that it covers the time
4920 * range of the frames that were disposed.
4921 *
4922 * @param {Array} gops video GOPs
4923 * @returns {Array} modified video GOPs
4924 */
4925
4926
4927 var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
4928 var currentGop;
4929
4930 if (!gops[0][0].keyFrame && gops.length > 1) {
4931 // Remove the first GOP
4932 currentGop = gops.shift();
4933 gops.byteLength -= currentGop.byteLength;
4934 gops.nalCount -= currentGop.nalCount; // Extend the first frame of what is now the
4935 // first gop to cover the time period of the
4936 // frames we just removed
4937
4938 gops[0][0].dts = currentGop.dts;
4939 gops[0][0].pts = currentGop.pts;
4940 gops[0][0].duration += currentGop.duration;
4941 }
4942
4943 return gops;
4944 };
4945 /**
4946 * Default sample object
4947 * see ISO/IEC 14496-12:2012, section 8.6.4.3
4948 */
4949
4950
4951 var createDefaultSample = function createDefaultSample() {
4952 return {
4953 size: 0,
4954 flags: {
4955 isLeading: 0,
4956 dependsOn: 1,
4957 isDependedOn: 0,
4958 hasRedundancy: 0,
4959 degradationPriority: 0,
4960 isNonSyncSample: 1
4961 }
4962 };
4963 };
4964 /*
4965 * Collates information from a video frame into an object for eventual
4966 * entry into an MP4 sample table.
4967 *
4968 * @param {Object} frame the video frame
4969 * @param {Number} dataOffset the byte offset to position the sample
4970 * @return {Object} object containing sample table info for a frame
4971 */
4972
4973
4974 var sampleForFrame = function sampleForFrame(frame, dataOffset) {
4975 var sample = createDefaultSample();
4976 sample.dataOffset = dataOffset;
4977 sample.compositionTimeOffset = frame.pts - frame.dts;
4978 sample.duration = frame.duration;
4979 sample.size = 4 * frame.length; // Space for nal unit size
4980
4981 sample.size += frame.byteLength;
4982
4983 if (frame.keyFrame) {
4984 sample.flags.dependsOn = 2;
4985 sample.flags.isNonSyncSample = 0;
4986 }
4987
4988 return sample;
4989 }; // generate the track's sample table from an array of gops
4990
4991
4992 var generateSampleTable$1 = function generateSampleTable(gops, baseDataOffset) {
4993 var h,
4994 i,
4995 sample,
4996 currentGop,
4997 currentFrame,
4998 dataOffset = baseDataOffset || 0,
4999 samples = [];
5000
5001 for (h = 0; h < gops.length; h++) {
5002 currentGop = gops[h];
5003
5004 for (i = 0; i < currentGop.length; i++) {
5005 currentFrame = currentGop[i];
5006 sample = sampleForFrame(currentFrame, dataOffset);
5007 dataOffset += sample.size;
5008 samples.push(sample);
5009 }
5010 }
5011
5012 return samples;
5013 }; // generate the track's raw mdat data from an array of gops
5014
5015
5016 var concatenateNalData = function concatenateNalData(gops) {
5017 var h,
5018 i,
5019 j,
5020 currentGop,
5021 currentFrame,
5022 currentNal,
5023 dataOffset = 0,
5024 nalsByteLength = gops.byteLength,
5025 numberOfNals = gops.nalCount,
5026 totalByteLength = nalsByteLength + 4 * numberOfNals,
5027 data = new Uint8Array(totalByteLength),
5028 view = new DataView(data.buffer); // For each Gop..
5029
5030 for (h = 0; h < gops.length; h++) {
5031 currentGop = gops[h]; // For each Frame..
5032
5033 for (i = 0; i < currentGop.length; i++) {
5034 currentFrame = currentGop[i]; // For each NAL..
5035
5036 for (j = 0; j < currentFrame.length; j++) {
5037 currentNal = currentFrame[j];
5038 view.setUint32(dataOffset, currentNal.data.byteLength);
5039 dataOffset += 4;
5040 data.set(currentNal.data, dataOffset);
5041 dataOffset += currentNal.data.byteLength;
5042 }
5043 }
5044 }
5045
5046 return data;
5047 }; // generate the track's sample table from a frame
5048
5049
5050 var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
5051 var sample,
5052 dataOffset = baseDataOffset || 0,
5053 samples = [];
5054 sample = sampleForFrame(frame, dataOffset);
5055 samples.push(sample);
5056 return samples;
5057 }; // generate the track's raw mdat data from a frame
5058
5059
5060 var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
5061 var i,
5062 currentNal,
5063 dataOffset = 0,
5064 nalsByteLength = frame.byteLength,
5065 numberOfNals = frame.length,
5066 totalByteLength = nalsByteLength + 4 * numberOfNals,
5067 data = new Uint8Array(totalByteLength),
5068 view = new DataView(data.buffer); // For each NAL..
5069
5070 for (i = 0; i < frame.length; i++) {
5071 currentNal = frame[i];
5072 view.setUint32(dataOffset, currentNal.data.byteLength);
5073 dataOffset += 4;
5074 data.set(currentNal.data, dataOffset);
5075 dataOffset += currentNal.data.byteLength;
5076 }
5077
5078 return data;
5079 };
5080
5081 var frameUtils = {
5082 groupNalsIntoFrames: groupNalsIntoFrames,
5083 groupFramesIntoGops: groupFramesIntoGops,
5084 extendFirstKeyFrame: extendFirstKeyFrame,
5085 generateSampleTable: generateSampleTable$1,
5086 concatenateNalData: concatenateNalData,
5087 generateSampleTableForFrame: generateSampleTableForFrame,
5088 concatenateNalDataForFrame: concatenateNalDataForFrame
5089 };
5090 /**
5091 * mux.js
5092 *
5093 * Copyright (c) Brightcove
5094 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5095 */
5096
5097 var highPrefix = [33, 16, 5, 32, 164, 27];
5098 var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];
5099
5100 var zeroFill = function zeroFill(count) {
5101 var a = [];
5102
5103 while (count--) {
5104 a.push(0);
5105 }
5106
5107 return a;
5108 };
5109
5110 var makeTable = function makeTable(metaTable) {
5111 return Object.keys(metaTable).reduce(function (obj, key) {
5112 obj[key] = new Uint8Array(metaTable[key].reduce(function (arr, part) {
5113 return arr.concat(part);
5114 }, []));
5115 return obj;
5116 }, {});
5117 };
5118
5119 var silence;
5120
5121 var silence_1 = function silence_1() {
5122 if (!silence) {
5123 // Frames-of-silence to use for filling in missing AAC frames
5124 var coneOfSilence = {
5125 96000: [highPrefix, [227, 64], zeroFill(154), [56]],
5126 88200: [highPrefix, [231], zeroFill(170), [56]],
5127 64000: [highPrefix, [248, 192], zeroFill(240), [56]],
5128 48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
5129 44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
5130 32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
5131 24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
5132 16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
5133 12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
5134 11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
5135 8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
5136 };
5137 silence = makeTable(coneOfSilence);
5138 }
5139
5140 return silence;
5141 };
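// Illustrative sketch (not part of the original bundle): the table built above
// is keyed by AAC sample rate, so fetching a silent frame is a plain property
// lookup; rates without a pregenerated frame yield undefined, which is why
// prefixWithSilence (later in this file) falls back to a frame from the content.
var exampleSilenceLookup = function () {
  var table = silence_1();
  var silent44k = table[44100]; // Uint8Array holding one silent AAC frame
  var silent22k = table[22050]; // undefined -- no pregenerated frame
  return { silent44k: silent44k, silent22k: silent22k };
};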
5142 /**
5143 * mux.js
5144 *
5145 * Copyright (c) Brightcove
5146 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5147 */
5148
5149
5150 var ONE_SECOND_IN_TS$4 = 90000,
5151 // 90kHz clock
5152 secondsToVideoTs,
5153 secondsToAudioTs,
5154 videoTsToSeconds,
5155 audioTsToSeconds,
5156 audioTsToVideoTs,
5157 videoTsToAudioTs,
5158 metadataTsToSeconds;
5159
5160 secondsToVideoTs = function secondsToVideoTs(seconds) {
5161 return seconds * ONE_SECOND_IN_TS$4;
5162 };
5163
5164 secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
5165 return seconds * sampleRate;
5166 };
5167
5168 videoTsToSeconds = function videoTsToSeconds(timestamp) {
5169 return timestamp / ONE_SECOND_IN_TS$4;
5170 };
5171
5172 audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
5173 return timestamp / sampleRate;
5174 };
5175
5176 audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
5177 return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
5178 };
5179
5180 videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
5181 return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
5182 };
5183 /**
5184 * Adjust ID3 tag or caption timing information by the timeline pts values
5185 * (if keepOriginalTimestamps is false) and convert to seconds
5186 */
5187
5188
5189 metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
5190 return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
5191 };
5192
5193 var clock = {
5194 ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$4,
5195 secondsToVideoTs: secondsToVideoTs,
5196 secondsToAudioTs: secondsToAudioTs,
5197 videoTsToSeconds: videoTsToSeconds,
5198 audioTsToSeconds: audioTsToSeconds,
5199 audioTsToVideoTs: audioTsToVideoTs,
5200 videoTsToAudioTs: videoTsToAudioTs,
5201 metadataTsToSeconds: metadataTsToSeconds
5202 };
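// Illustrative sketch (not part of the original bundle): worked conversions
// through the clock helpers above. Video timestamps tick on the 90kHz MPEG-TS
// clock; audio timestamps tick at the track's sample rate.
var exampleClockConversions = function () {
  var videoTs = clock.secondsToVideoTs(2);            // 2s -> 180000 ticks
  var audioTs = clock.secondsToAudioTs(2, 44100);     // 2s -> 88200 samples
  var seconds = clock.videoTsToSeconds(45000);        // 45000 ticks -> 0.5s
  var crossed = clock.audioTsToVideoTs(44100, 44100); // 1s of audio -> 90000 ticks
  return { videoTs: videoTs, audioTs: audioTs, seconds: seconds, crossed: crossed };
};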
5203 /**
5204 * mux.js
5205 *
5206 * Copyright (c) Brightcove
5207 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5208 */
5209
5210 /**
5211 * Sum the `byteLength` properties of the data in each AAC frame
5212 */
5213
5214 var sumFrameByteLengths = function sumFrameByteLengths(array) {
5215 var i,
5216 currentObj,
5217 sum = 0; // sum the byteLength of each frame's data
5218
5219 for (i = 0; i < array.length; i++) {
5220 currentObj = array[i];
5221 sum += currentObj.data.byteLength;
5222 }
5223
5224 return sum;
5225 }; // Possibly pad (prefix) the audio track with silence if appending this track
5226 // would lead to the introduction of a gap in the audio buffer
5227
5228
5229 var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
5230 var baseMediaDecodeTimeTs,
5231 frameDuration = 0,
5232 audioGapDuration = 0,
5233 audioFillFrameCount = 0,
5234 audioFillDuration = 0,
5235 silentFrame,
5236 i,
5237 firstFrame;
5238
5239 if (!frames.length) {
5240 return;
5241 }
5242
5243 baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfills
5244
5245 frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));
5246
5247 if (audioAppendStartTs && videoBaseMediaDecodeTime) {
5248 // insert the shortest possible amount (audio gap or audio to video gap)
5249 audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gap
5250
5251 audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
5252 audioFillDuration = audioFillFrameCount * frameDuration;
5253 } // don't attempt to fill gaps smaller than a single frame or larger
5254 // than a half second
5255
5256
5257 if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
5258 return;
5259 }
5260
5261 silentFrame = silence_1()[track.samplerate];
5262
5263 if (!silentFrame) {
5264 // we don't have a silent frame pregenerated for the sample rate, so use a frame
5265 // from the content instead
5266 silentFrame = frames[0].data;
5267 }
5268
5269 for (i = 0; i < audioFillFrameCount; i++) {
5270 firstFrame = frames[0];
5271 frames.splice(0, 0, {
5272 data: silentFrame,
5273 dts: firstFrame.dts - frameDuration,
5274 pts: firstFrame.pts - frameDuration
5275 });
5276 }
5277
5278 track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
5279 return audioFillDuration;
5280 }; // If the audio segment extends before the earliest allowed dts
5281 // value, remove AAC frames until the segment starts at or after the
5282 // earliest allowed DTS so that we don't end up with a negative
5283 // baseMediaDecodeTime for the audio track
5284
5285
5286 var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
5287 if (track.minSegmentDts >= earliestAllowedDts) {
5288 return adtsFrames;
5289 } // We will need to recalculate the earliest segment Dts
5290
5291
5292 track.minSegmentDts = Infinity;
5293 return adtsFrames.filter(function (currentFrame) {
5294 // If this is an allowed frame, keep it and record its DTS
5295 if (currentFrame.dts >= earliestAllowedDts) {
5296 track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
5297 track.minSegmentPts = track.minSegmentDts;
5298 return true;
5299 } // Otherwise, discard it
5300
5301
5302 return false;
5303 });
5304 }; // generate the track's sample table from an array of frames
5305
5306
5307 var generateSampleTable = function generateSampleTable(frames) {
5308 var i,
5309 currentFrame,
5310 samples = [];
5311
5312 for (i = 0; i < frames.length; i++) {
5313 currentFrame = frames[i];
5314 samples.push({
5315 size: currentFrame.data.byteLength,
5316 duration: 1024 // every AAC frame contains 1024 audio samples
5317
5318 });
5319 }
5320
5321 return samples;
5322 }; // generate the track's raw mdat data from an array of frames
5323
5324
5325 var concatenateFrameData = function concatenateFrameData(frames) {
5326 var i,
5327 currentFrame,
5328 dataOffset = 0,
5329 data = new Uint8Array(sumFrameByteLengths(frames));
5330
5331 for (i = 0; i < frames.length; i++) {
5332 currentFrame = frames[i];
5333 data.set(currentFrame.data, dataOffset);
5334 dataOffset += currentFrame.data.byteLength;
5335 }
5336
5337 return data;
5338 };
5339
5340 var audioFrameUtils = {
5341 prefixWithSilence: prefixWithSilence,
5342 trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
5343 generateSampleTable: generateSampleTable,
5344 concatenateFrameData: concatenateFrameData
5345 };
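// Illustrative sketch (not part of the original bundle): the gap-fill
// arithmetic from prefixWithSilence worked for a 44.1kHz track. Each AAC frame
// covers 1024 samples, so on the 90kHz clock one frame lasts
// ceil(90000 / (44100 / 1024)) = 2090 ticks; only whole frames are inserted,
// and never more than half a second's worth.
var exampleGapFillMath = function () {
  var samplerate = 44100;
  var frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (samplerate / 1024)); // 2090
  var audioGapDuration = 9000; // an assumed 0.1s gap on the 90kHz clock
  var audioFillFrameCount = Math.floor(audioGapDuration / frameDuration); // 4
  var audioFillDuration = audioFillFrameCount * frameDuration; // 8360 ticks
  return { frameDuration: frameDuration, fillFrames: audioFillFrameCount, fillDuration: audioFillDuration };
};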
5346 /**
5347 * mux.js
5348 *
5349 * Copyright (c) Brightcove
5350 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5351 */
5352
5353 var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS;
5354 /**
5355 * Store information about the start and end of the track and the
5356 * duration for each frame/sample we process in order to calculate
5357 * the baseMediaDecodeTime
5358 */
5359
5360 var collectDtsInfo = function collectDtsInfo(track, data) {
5361 if (typeof data.pts === 'number') {
5362 if (track.timelineStartInfo.pts === undefined) {
5363 track.timelineStartInfo.pts = data.pts;
5364 }
5365
5366 if (track.minSegmentPts === undefined) {
5367 track.minSegmentPts = data.pts;
5368 } else {
5369 track.minSegmentPts = Math.min(track.minSegmentPts, data.pts);
5370 }
5371
5372 if (track.maxSegmentPts === undefined) {
5373 track.maxSegmentPts = data.pts;
5374 } else {
5375 track.maxSegmentPts = Math.max(track.maxSegmentPts, data.pts);
5376 }
5377 }
5378
5379 if (typeof data.dts === 'number') {
5380 if (track.timelineStartInfo.dts === undefined) {
5381 track.timelineStartInfo.dts = data.dts;
5382 }
5383
5384 if (track.minSegmentDts === undefined) {
5385 track.minSegmentDts = data.dts;
5386 } else {
5387 track.minSegmentDts = Math.min(track.minSegmentDts, data.dts);
5388 }
5389
5390 if (track.maxSegmentDts === undefined) {
5391 track.maxSegmentDts = data.dts;
5392 } else {
5393 track.maxSegmentDts = Math.max(track.maxSegmentDts, data.dts);
5394 }
5395 }
5396 };
5397 /**
5398 * Clear values used to calculate the baseMediaDecodeTime between
5399 * tracks
5400 */
5401
5402
5403 var clearDtsInfo = function clearDtsInfo(track) {
5404 delete track.minSegmentDts;
5405 delete track.maxSegmentDts;
5406 delete track.minSegmentPts;
5407 delete track.maxSegmentPts;
5408 };
5409 /**
5410 * Calculate the track's baseMediaDecodeTime based on the earliest
5411 * DTS the transmuxer has ever seen and the minimum DTS for the
5412 * current track
5413 * @param track {object} track metadata configuration
5414 * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
5415 * in the source; false to adjust the first segment to start at 0.
5416 */
5417
5418
5419 var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
5420 var baseMediaDecodeTime,
5421 scale,
5422 minSegmentDts = track.minSegmentDts; // Optionally adjust the time so the first segment starts at zero.
5423
5424 if (!keepOriginalTimestamps) {
5425 minSegmentDts -= track.timelineStartInfo.dts;
5426 } // track.timelineStartInfo.baseMediaDecodeTime is the location, in time, where
5427 // we want the start of the first segment to be placed
5428
5429
5430 baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime; // Add to that the distance this segment is from the very first
5431
5432 baseMediaDecodeTime += minSegmentDts; // baseMediaDecodeTime must not become negative
5433
5434 baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);
5435
5436 if (track.type === 'audio') {
5437 // Audio has a different clock equal to the sampling_rate so we need to
5438 // scale the PTS values into the clock rate of the track
5439 scale = track.samplerate / ONE_SECOND_IN_TS$3;
5440 baseMediaDecodeTime *= scale;
5441 baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
5442 }
5443
5444 return baseMediaDecodeTime;
5445 };
5446
5447 var trackDecodeInfo = {
5448 clearDtsInfo: clearDtsInfo,
5449 calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
5450 collectDtsInfo: collectDtsInfo
5451 };
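// Illustrative sketch (not part of the original bundle): how
// calculateTrackBaseMediaDecodeTime plays out for an audio track whose segment
// begins one second after the timeline start. The 90kHz distance is rescaled
// into the audio clock (the sample rate) and floored.
var exampleBaseMediaDecodeTime = function () {
  var track = {
    type: 'audio',
    samplerate: 48000,
    minSegmentDts: 180000, // 2s on the 90kHz clock
    timelineStartInfo: { dts: 90000, baseMediaDecodeTime: 0 } // timeline starts at 1s
  };
  // (180000 - 90000) * (48000 / 90000) = 48000, i.e. exactly 1s in audio samples
  return trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, false);
};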
5452 /**
5453 * mux.js
5454 *
5455 * Copyright (c) Brightcove
5456 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5457 *
5458 * Reads in-band caption information from a video elementary
5459 * stream. Captions must follow the CEA-708 standard for injection
5460 * into an MPEG-2 transport streams.
5461 * @see https://en.wikipedia.org/wiki/CEA-708
5462 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
5463 */
5464 // SEI NAL units contain a payload type field to indicate how they are to be
5465 // interpreted. CEA-708 caption content is always transmitted with
5466 // payload type 0x04.
5467
5468 var USER_DATA_REGISTERED_ITU_T_T35 = 4,
5469 RBSP_TRAILING_BITS = 128;
5470 /**
5471 * Parse a supplemental enhancement information (SEI) NAL unit.
5472 * Stops parsing once a message of type ITU T T35 has been found.
5473 *
5474 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
5475 * @return {object} the parsed SEI payload
5476 * @see Rec. ITU-T H.264, 7.3.2.3.1
5477 */
5478
5479 var parseSei = function parseSei(bytes) {
5480 var i = 0,
5481 result = {
5482 payloadType: -1,
5483 payloadSize: 0
5484 },
5485 payloadType = 0,
5486 payloadSize = 0; // go through the sei_rbsp parsing each individual sei_message
5487
5488 while (i < bytes.byteLength) {
5489 // stop once we have hit the end of the sei_rbsp
5490 if (bytes[i] === RBSP_TRAILING_BITS) {
5491 break;
5492 } // Parse payload type
5493
5494
5495 while (bytes[i] === 0xFF) {
5496 payloadType += 255;
5497 i++;
5498 }
5499
5500 payloadType += bytes[i++]; // Parse payload size
5501
5502 while (bytes[i] === 0xFF) {
5503 payloadSize += 255;
5504 i++;
5505 }
5506
5507 payloadSize += bytes[i++]; // this sei_message is a 608/708 caption so save it and break
5508 // there can only ever be one caption message in a frame's sei
5509
5510 if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
5511 var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);
5512
5513 if (userIdentifier === 'GA94') {
5514 result.payloadType = payloadType;
5515 result.payloadSize = payloadSize;
5516 result.payload = bytes.subarray(i, i + payloadSize);
5517 break;
5518 } else {
5519 result.payload = void 0;
5520 }
5521 } // skip the payload and parse the next message
5522
5523
5524 i += payloadSize;
5525 payloadType = 0;
5526 payloadSize = 0;
5527 }
5528
5529 return result;
5530 }; // see ANSI/SCTE 128-1 (2013), section 8.1
5531
5532
5533 var parseUserData = function parseUserData(sei) {
5534 // itu_t_t35_country_code must be 181 (United States) for
5535 // captions
5536 if (sei.payload[0] !== 181) {
5537 return null;
5538 } // itu_t_t35_provider_code should be 49 (ATSC) for captions
5539
5540
5541 if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
5542 return null;
5543 } // the user_identifier should be "GA94" to indicate ATSC1 data
5544
5545
5546 if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
5547 return null;
5548 } // finally, user_data_type_code should be 0x03 for caption data
5549
5550
5551 if (sei.payload[7] !== 0x03) {
5552 return null;
5553 } // return the user_data_type_structure and strip the trailing
5554 // marker bits
5555
5556
5557 return sei.payload.subarray(8, sei.payload.length - 1);
5558 }; // see CEA-708-D, section 4.4
5559
5560
5561 var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
5562 var results = [],
5563 i,
5564 count,
5565 offset,
5566 data; // if this is just filler, return immediately
5567
5568 if (!(userData[0] & 0x40)) {
5569 return results;
5570 } // parse out the cc_data_1 and cc_data_2 fields
5571
5572
5573 count = userData[0] & 0x1f;
5574
5575 for (i = 0; i < count; i++) {
5576 offset = i * 3;
5577 data = {
5578 type: userData[offset + 2] & 0x03,
5579 pts: pts
5580 }; // capture cc data when cc_valid is 1
5581
5582 if (userData[offset + 2] & 0x04) {
5583 data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
5584 results.push(data);
5585 }
5586 }
5587
5588 return results;
5589 };
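// Illustrative sketch (not part of the original bundle): hand-rolled cc_data
// bytes run through parseCaptionPackets. The first byte carries
// process_cc_data_flag (0x40) and cc_count (low five bits); after one reserved
// byte, each triplet is a cc_valid/cc_type byte followed by two data bytes.
var exampleParseCaptionPackets = function () {
  var userData = new Uint8Array([
    0x41,      // process_cc_data_flag set, cc_count = 1
    0xff,      // reserved / em_data byte, never read by the parser
    0xfc,      // cc_valid = 1, cc_type = 0 (608 data, field 1)
    0x94, 0x2c // the byte pair itself (an EDM control code)
  ]);
  // -> [{ type: 0, pts: 90000, ccData: 0x942c }]
  return parseCaptionPackets(90000, userData);
};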
5590
5591 var discardEmulationPreventionBytes$1 = function discardEmulationPreventionBytes(data) {
5592 var length = data.byteLength,
5593 emulationPreventionBytesPositions = [],
5594 i = 1,
5595 newLength,
5596 newData; // Find all `Emulation Prevention Bytes`
5597
5598 while (i < length - 2) {
5599 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
5600 emulationPreventionBytesPositions.push(i + 2);
5601 i += 2;
5602 } else {
5603 i++;
5604 }
5605 } // If no Emulation Prevention Bytes were found just return the original
5606 // array
5607
5608
5609 if (emulationPreventionBytesPositions.length === 0) {
5610 return data;
5611 } // Create a new array to hold the NAL unit data
5612
5613
5614 newLength = length - emulationPreventionBytesPositions.length;
5615 newData = new Uint8Array(newLength);
5616 var sourceIndex = 0;
5617
5618 for (i = 0; i < newLength; sourceIndex++, i++) {
5619 if (sourceIndex === emulationPreventionBytesPositions[0]) {
5620 // Skip this byte
5621 sourceIndex++; // Remove this position index
5622
5623 emulationPreventionBytesPositions.shift();
5624 }
5625
5626 newData[i] = data[sourceIndex];
5627 }
5628
5629 return newData;
5630 }; // exports
5631
5632
5633 var captionPacketParser = {
5634 parseSei: parseSei,
5635 parseUserData: parseUserData,
5636 parseCaptionPackets: parseCaptionPackets,
5637 discardEmulationPreventionBytes: discardEmulationPreventionBytes$1,
5638 USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
5639 }; // Link To Transport
5640 // -----------------
5641
5642 var CaptionStream$1 = function CaptionStream(options) {
5643 options = options || {};
5644 CaptionStream.prototype.init.call(this); // parse708captions flag, default to true
5645
5646 this.parse708captions_ = typeof options.parse708captions === 'boolean' ? options.parse708captions : true;
5647 this.captionPackets_ = [];
5648 this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
5649 new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
5650 new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
5651 new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
5652 ];
5653
5654 if (this.parse708captions_) {
5655 this.cc708Stream_ = new Cea708Stream({
5656 captionServices: options.captionServices
5657 }); // eslint-disable-line no-use-before-define
5658 }
5659
5660 this.reset(); // forward data and done events from CCs to this CaptionStream
5661
5662 this.ccStreams_.forEach(function (cc) {
5663 cc.on('data', this.trigger.bind(this, 'data'));
5664 cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
5665 cc.on('done', this.trigger.bind(this, 'done'));
5666 }, this);
5667
5668 if (this.parse708captions_) {
5669 this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));
5670 this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));
5671 this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));
5672 }
5673 };
5674
5675 CaptionStream$1.prototype = new stream();
5676
5677 CaptionStream$1.prototype.push = function (event) {
5678 var sei, userData, newCaptionPackets; // only examine SEI NALs
5679
5680 if (event.nalUnitType !== 'sei_rbsp') {
5681 return;
5682 } // parse the sei
5683
5684
5685 sei = captionPacketParser.parseSei(event.escapedRBSP); // no payload data, skip
5686
5687 if (!sei.payload) {
5688 return;
5689 } // ignore everything but user_data_registered_itu_t_t35
5690
5691
5692 if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
5693 return;
5694 } // parse out the user data payload
5695
5696
5697 userData = captionPacketParser.parseUserData(sei); // ignore unrecognized userData
5698
5699 if (!userData) {
5700 return;
5701 } // Sometimes, the same segment # will be downloaded twice. To stop the
5702 // caption data from being processed twice, we track the latest dts we've
5703 // received and ignore everything with a dts before that. However, since
5704 // data for a specific dts can be split across packets on either side of
5705 // a segment boundary, we need to make sure we *don't* ignore the packets
5706 // from the *next* segment that have dts === this.latestDts_. By constantly
5707 // tracking the number of packets received with dts === this.latestDts_, we
5708 // know how many should be ignored once we start receiving duplicates.
5709
5710
5711 if (event.dts < this.latestDts_) {
5712 // We've started getting older data, so set the flag.
5713 this.ignoreNextEqualDts_ = true;
5714 return;
5715 } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
5716 this.numSameDts_--;
5717
5718 if (!this.numSameDts_) {
5719 // We've received the last duplicate packet, time to start processing again
5720 this.ignoreNextEqualDts_ = false;
5721 }
5722
5723 return;
5724 } // parse out CC data packets and save them for later
5725
5726
5727 newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
5728 this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
5729
5730 if (this.latestDts_ !== event.dts) {
5731 this.numSameDts_ = 0;
5732 }
5733
5734 this.numSameDts_++;
5735 this.latestDts_ = event.dts;
5736 };
5737
5738 CaptionStream$1.prototype.flushCCStreams = function (flushType) {
5739 this.ccStreams_.forEach(function (cc) {
5740 return flushType === 'flush' ? cc.flush() : cc.partialFlush();
5741 }, this);
5742 };
5743
5744 CaptionStream$1.prototype.flushStream = function (flushType) {
5745 // make sure we actually parsed captions before proceeding
5746 if (!this.captionPackets_.length) {
5747 this.flushCCStreams(flushType);
5748 return;
5749 } // In Chrome, the Array#sort function is not stable, so add a
5750 // presortIndex that we can use to ensure we get a stable sort
5751
5752
5753 this.captionPackets_.forEach(function (elem, idx) {
5754 elem.presortIndex = idx;
5755 }); // sort caption byte-pairs based on their PTS values
5756
5757 this.captionPackets_.sort(function (a, b) {
5758 if (a.pts === b.pts) {
5759 return a.presortIndex - b.presortIndex;
5760 }
5761
5762 return a.pts - b.pts;
5763 });
5764 this.captionPackets_.forEach(function (packet) {
5765 if (packet.type < 2) {
5766 // Dispatch packet to the right Cea608Stream
5767 this.dispatchCea608Packet(packet);
5768 } else {
5769 // Dispatch packet to the Cea708Stream
5770 this.dispatchCea708Packet(packet);
5771 }
5772 }, this);
5773 this.captionPackets_.length = 0;
5774 this.flushCCStreams(flushType);
5775 };
5776
5777 CaptionStream$1.prototype.flush = function () {
5778 return this.flushStream('flush');
5779 }; // Only called if handling partial data
5780
5781
5782 CaptionStream$1.prototype.partialFlush = function () {
5783 return this.flushStream('partialFlush');
5784 };
5785
5786 CaptionStream$1.prototype.reset = function () {
5787 this.latestDts_ = null;
5788 this.ignoreNextEqualDts_ = false;
5789 this.numSameDts_ = 0;
5790 this.activeCea608Channel_ = [null, null];
5791 this.ccStreams_.forEach(function (ccStream) {
5792 ccStream.reset();
5793 });
5794 }; // From the CEA-608 spec:
5795
5796 /*
5797 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
5798 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
5799 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
5800 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
5801 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
5802 * to switch to captioning or Text.
5803 */
5804 // With that in mind, we ignore any data between an XDS control code and a
5805 // subsequent closed-captioning control code.
5806
5807
5808 CaptionStream$1.prototype.dispatchCea608Packet = function (packet) {
5809 // NOTE: packet.type is the CEA608 field
5810 if (this.setsTextOrXDSActive(packet)) {
5811 this.activeCea608Channel_[packet.type] = null;
5812 } else if (this.setsChannel1Active(packet)) {
5813 this.activeCea608Channel_[packet.type] = 0;
5814 } else if (this.setsChannel2Active(packet)) {
5815 this.activeCea608Channel_[packet.type] = 1;
5816 }
5817
5818 if (this.activeCea608Channel_[packet.type] === null) {
5819 // If we haven't received anything to set the active channel, or the
5820 // packets are Text/XDS data, discard the data; we don't want jumbled
5821 // captions
5822 return;
5823 }
5824
5825 this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
5826 };
5827
5828 CaptionStream$1.prototype.setsChannel1Active = function (packet) {
5829 return (packet.ccData & 0x7800) === 0x1000;
5830 };
5831
5832 CaptionStream$1.prototype.setsChannel2Active = function (packet) {
5833 return (packet.ccData & 0x7800) === 0x1800;
5834 };
5835
5836 CaptionStream$1.prototype.setsTextOrXDSActive = function (packet) {
5837 return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
5838 };
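// Illustrative sketch (not part of the original bundle): the masks above test
// the initial-channel bits of 608 control code pairs. 0x1420 (RCL on data
// channel 1) matches the channel-1 pattern, while 0x1c20 (RCL on data
// channel 2) has the 0x0800 bit set and matches the channel-2 pattern.
var exampleChannelMasks = function () {
  var rclChannel1 = (0x1420 & 0x7800) === 0x1000; // true
  var rclChannel2 = (0x1c20 & 0x7800) === 0x1800; // true
  return { rclChannel1: rclChannel1, rclChannel2: rclChannel2 };
};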
5839
5840 CaptionStream$1.prototype.dispatchCea708Packet = function (packet) {
5841 if (this.parse708captions_) {
5842 this.cc708Stream_.push(packet);
5843 }
5844 }; // ----------------------
5845 // Session to Application
5846 // ----------------------
5847 // This hash maps special and extended character codes to their
5848 // proper Unicode equivalent. The first one-byte key is just a
5849 // non-standard character code. The two-byte keys that follow are
5850 // the extended CEA708 character codes, along with the preceding
5851 // 0x10 extended character byte to distinguish these codes from
5852 // non-extended character codes. Every CEA708 character code that
5853 // is not in this object maps directly to a standard unicode
5854 // character code.
5855 // The transparent space and non-breaking transparent space are
5856 // technically not fully supported since there is no code to
5857 // make them transparent, so they have normal non-transparent
5858 // stand-ins.
5859 // The special closed caption (CC) character isn't a standard
5860 // unicode character, so a fairly similar unicode character was
5861 // chosen in its place.
5862
5863
5864 var CHARACTER_TRANSLATION_708 = {
5865 0x7f: 0x266a,
5866 // ♪
5867 0x1020: 0x20,
5868 // Transparent Space
5869 0x1021: 0xa0,
5870 // Non-breaking Transparent Space
5871 0x1025: 0x2026,
5872 // …
5873 0x102a: 0x0160,
5874 // Š
5875 0x102c: 0x0152,
5876 // Œ
5877 0x1030: 0x2588,
5878 // █
5879 0x1031: 0x2018,
5880 // ‘
5881 0x1032: 0x2019,
5882 // ’
5883 0x1033: 0x201c,
5884 // “
5885 0x1034: 0x201d,
5886 // ”
5887 0x1035: 0x2022,
5888 // •
5889 0x1039: 0x2122,
5890 // ™
5891 0x103a: 0x0161,
5892 // š
5893 0x103c: 0x0153,
5894 // œ
5895 0x103d: 0x2120,
5896 // ℠
5897 0x103f: 0x0178,
5898 // Ÿ
5899 0x1076: 0x215b,
5900 // ⅛
5901 0x1077: 0x215c,
5902 // ⅜
5903 0x1078: 0x215d,
5904 // ⅝
5905 0x1079: 0x215e,
5906 // ⅞
5907 0x107a: 0x23d0,
5908 // ⏐
5909 0x107b: 0x23a4,
5910 // ⎤
5911 0x107c: 0x23a3,
5912 // ⎣
5913 0x107d: 0x23af,
5914 // ⎯
5915 0x107e: 0x23a6,
5916 // ⎦
5917 0x107f: 0x23a1,
5918 // ⎡
5919 0x10a0: 0x3138 // ㄸ (CC char)
5920
5921 };
5922
5923 var get708CharFromCode = function get708CharFromCode(code) {
5924 var newCode = CHARACTER_TRANSLATION_708[code] || code;
5925
5926 if (code & 0x1000 && code === newCode) {
5927 // Invalid extended code
5928 return '';
5929 }
5930
5931 return String.fromCharCode(newCode);
5932 };
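// Illustrative sketch (not part of the original bundle): standard codes pass
// through the translation table untouched, mapped codes become their Unicode
// equivalents, and extended (0x10xx) codes with no mapping are rejected.
var example708Chars = function () {
  var plain = get708CharFromCode(0x41);     // 'A' -- no translation needed
  var note = get708CharFromCode(0x7f);      // '♪' -- translated
  var invalid = get708CharFromCode(0x1022); // ''  -- unmapped extended code
  return plain + note + invalid;
};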
5933
5934 var within708TextBlock = function within708TextBlock(b) {
5935 return 0x20 <= b && b <= 0x7f || 0xa0 <= b && b <= 0xff;
5936 };
5937
5938 var Cea708Window = function Cea708Window(windowNum) {
5939 this.windowNum = windowNum;
5940 this.reset();
5941 };
5942
5943 Cea708Window.prototype.reset = function () {
5944 this.clearText();
5945 this.pendingNewLine = false;
5946 this.winAttr = {};
5947 this.penAttr = {};
5948 this.penLoc = {};
5949 this.penColor = {}; // These default values are arbitrary,
5950 // defineWindow will usually override them
5951
5952 this.visible = 0;
5953 this.rowLock = 0;
5954 this.columnLock = 0;
5955 this.priority = 0;
5956 this.relativePositioning = 0;
5957 this.anchorVertical = 0;
5958 this.anchorHorizontal = 0;
5959 this.anchorPoint = 0;
5960 this.rowCount = 1;
5961 this.virtualRowCount = this.rowCount + 1;
5962 this.columnCount = 41;
5963 this.windowStyle = 0;
5964 this.penStyle = 0;
5965 };
5966
5967 Cea708Window.prototype.getText = function () {
5968 return this.rows.join('\n');
5969 };
5970
5971 Cea708Window.prototype.clearText = function () {
5972 this.rows = [''];
5973 this.rowIdx = 0;
5974 };
5975
5976 Cea708Window.prototype.newLine = function (pts) {
5977 if (this.rows.length >= this.virtualRowCount && typeof this.beforeRowOverflow === 'function') {
5978 this.beforeRowOverflow(pts);
5979 }
5980
5981 if (this.rows.length > 0) {
5982 this.rows.push('');
5983 this.rowIdx++;
5984 } // Show all virtual rows since there's no visible scrolling
5985
5986
5987 while (this.rows.length > this.virtualRowCount) {
5988 this.rows.shift();
5989 this.rowIdx--;
5990 }
5991 };
5992
5993 Cea708Window.prototype.isEmpty = function () {
5994 if (this.rows.length === 0) {
5995 return true;
5996 } else if (this.rows.length === 1) {
5997 return this.rows[0] === '';
5998 }
5999
6000 return false;
6001 };
6002
6003 Cea708Window.prototype.addText = function (text) {
6004 this.rows[this.rowIdx] += text;
6005 };
6006
6007 Cea708Window.prototype.backspace = function () {
6008 if (!this.isEmpty()) {
6009 var row = this.rows[this.rowIdx];
6010 this.rows[this.rowIdx] = row.substr(0, row.length - 1);
6011 }
6012 };
6013
6014 var Cea708Service = function Cea708Service(serviceNum, encoding, stream) {
6015 this.serviceNum = serviceNum;
6016 this.text = '';
6017 this.currentWindow = new Cea708Window(-1);
6018 this.windows = [];
6019 this.stream = stream; // Try to setup a TextDecoder if an `encoding` value was provided
6020
6021 if (typeof encoding === 'string') {
6022 this.createTextDecoder(encoding);
6023 }
6024 };
6025 /**
6026 * Initialize service windows
6027 * Must be run before service use
6028 *
6029 * @param {Integer} pts PTS value
6030 * @param {Function} beforeRowOverflow Function to execute before row overflow of a window
6031 */
6032
6033
6034 Cea708Service.prototype.init = function (pts, beforeRowOverflow) {
6035 this.startPts = pts;
6036
6037 for (var win = 0; win < 8; win++) {
6038 this.windows[win] = new Cea708Window(win);
6039
6040 if (typeof beforeRowOverflow === 'function') {
6041 this.windows[win].beforeRowOverflow = beforeRowOverflow;
6042 }
6043 }
6044 };
6045 /**
6046 * Set current window of service to be affected by commands
6047 *
6048 * @param {Integer} windowNum Window number
6049 */
6050
6051
6052 Cea708Service.prototype.setCurrentWindow = function (windowNum) {
6053 this.currentWindow = this.windows[windowNum];
6054 };
6055 /**
6056 * Try to create a TextDecoder if it is natively supported
6057 */
6058
6059
6060 Cea708Service.prototype.createTextDecoder = function (encoding) {
6061 if (typeof TextDecoder === 'undefined') {
6062 this.stream.trigger('log', {
6063 level: 'warn',
6064 message: 'The `encoding` option is unsupported without TextDecoder support'
6065 });
6066 } else {
6067 try {
6068 this.textDecoder_ = new TextDecoder(encoding);
6069 } catch (error) {
6070 this.stream.trigger('log', {
6071 level: 'warn',
6072 message: 'TextDecoder could not be created with ' + encoding + ' encoding. ' + error
6073 });
6074 }
6075 }
6076 };
6077
6078 var Cea708Stream = function Cea708Stream(options) {
6079 options = options || {};
6080 Cea708Stream.prototype.init.call(this);
6081 var self = this;
6082 var captionServices = options.captionServices || {};
6083 var captionServiceEncodings = {};
6084 var serviceProps; // Get service encodings from captionServices option block
6085
6086 Object.keys(captionServices).forEach(function (serviceName) {
6087 serviceProps = captionServices[serviceName];
6088
6089 if (/^SERVICE/.test(serviceName)) {
6090 captionServiceEncodings[serviceName] = serviceProps.encoding;
6091 }
6092 });
6093 this.serviceEncodings = captionServiceEncodings;
6094 this.current708Packet = null;
6095 this.services = {};
6096
6097 this.push = function (packet) {
6098 if (packet.type === 3) {
6099 // 708 packet start
6100 self.new708Packet();
6101 self.add708Bytes(packet);
6102 } else {
6103 if (self.current708Packet === null) {
6104 // This should only happen at the start of a file if there's no packet start.
6105 self.new708Packet();
6106 }
6107
6108 self.add708Bytes(packet);
6109 }
6110 };
6111 };
6112
6113 Cea708Stream.prototype = new stream();
6114 /**
6115 * Push current 708 packet, create new 708 packet.
6116 */
6117
6118 Cea708Stream.prototype.new708Packet = function () {
6119 if (this.current708Packet !== null) {
6120 this.push708Packet();
6121 }
6122
6123 this.current708Packet = {
6124 data: [],
6125 ptsVals: []
6126 };
6127 };
6128 /**
6129 * Add pts and both bytes from packet into current 708 packet.
6130 */
6131
6132
6133 Cea708Stream.prototype.add708Bytes = function (packet) {
6134 var data = packet.ccData;
6135 var byte0 = data >>> 8;
6136 var byte1 = data & 0xff; // I would just keep a list of packets instead of bytes, but it isn't clear in the spec
6137 // that service blocks will always line up with byte pairs.
6138
6139 this.current708Packet.ptsVals.push(packet.pts);
6140 this.current708Packet.data.push(byte0);
6141 this.current708Packet.data.push(byte1);
6142 };
6143 /**
6144 * Parse completed 708 packet into service blocks and push each service block.
6145 */
6146
6147
6148 Cea708Stream.prototype.push708Packet = function () {
6149 var packet708 = this.current708Packet;
6150 var packetData = packet708.data;
6151 var serviceNum = null;
6152 var blockSize = null;
6153 var i = 0;
6154 var b = packetData[i++];
6155 packet708.seq = b >> 6;
6156 packet708.sizeCode = b & 0x3f; // 0b00111111;
6157
6158 for (; i < packetData.length; i++) {
6159 b = packetData[i++];
6160 serviceNum = b >> 5;
6161 blockSize = b & 0x1f; // 0b00011111
6162
6163 if (serviceNum === 7 && blockSize > 0) {
6164 // Extended service num
6165 b = packetData[i++];
6166 serviceNum = b;
6167 }
6168
6169 this.pushServiceBlock(serviceNum, i, blockSize);
6170
6171 if (blockSize > 0) {
6172 i += blockSize - 1;
6173 }
6174 }
6175 };
6176 /**
6177 * Parse service block, execute commands, read text.
6178 *
6179 * Note: While many of these commands serve important purposes,
6180 * many others just parse out the parameters or attributes, but
6181 * nothing is done with them because this is not a full and complete
6182 * implementation of the entire 708 spec.
6183 *
6184 * @param {Integer} serviceNum Service number
6185 * @param {Integer} start Start index of the 708 packet data
6186 * @param {Integer} size Block size
6187 */
6188
6189
6190 Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {
6191 var b;
6192 var i = start;
6193 var packetData = this.current708Packet.data;
6194 var service = this.services[serviceNum];
6195
6196 if (!service) {
6197 service = this.initService(serviceNum, i);
6198 }
6199
6200 for (; i < start + size && i < packetData.length; i++) {
6201 b = packetData[i];
6202
6203 if (within708TextBlock(b)) {
6204 i = this.handleText(i, service);
6205 } else if (b === 0x18) {
6206 i = this.multiByteCharacter(i, service);
6207 } else if (b === 0x10) {
6208 i = this.extendedCommands(i, service);
6209 } else if (0x80 <= b && b <= 0x87) {
6210 i = this.setCurrentWindow(i, service);
6211 } else if (0x98 <= b && b <= 0x9f) {
6212 i = this.defineWindow(i, service);
6213 } else if (b === 0x88) {
6214 i = this.clearWindows(i, service);
6215 } else if (b === 0x8c) {
6216 i = this.deleteWindows(i, service);
6217 } else if (b === 0x89) {
6218 i = this.displayWindows(i, service);
6219 } else if (b === 0x8a) {
6220 i = this.hideWindows(i, service);
6221 } else if (b === 0x8b) {
6222 i = this.toggleWindows(i, service);
6223 } else if (b === 0x97) {
6224 i = this.setWindowAttributes(i, service);
6225 } else if (b === 0x90) {
6226 i = this.setPenAttributes(i, service);
6227 } else if (b === 0x91) {
6228 i = this.setPenColor(i, service);
6229 } else if (b === 0x92) {
6230 i = this.setPenLocation(i, service);
6231 } else if (b === 0x8f) {
6232 service = this.reset(i, service);
6233 } else if (b === 0x08) {
6234 // BS: Backspace
6235 service.currentWindow.backspace();
6236 } else if (b === 0x0c) {
6237 // FF: Form feed
6238 service.currentWindow.clearText();
6239 } else if (b === 0x0d) {
6240 // CR: Carriage return
6241 service.currentWindow.pendingNewLine = true;
6242 } else if (b === 0x0e) {
6243 // HCR: Horizontal carriage return
6244 service.currentWindow.clearText();
6245 } else if (b === 0x8d) {
6246 // DLY: Delay, nothing to do
6247 i++;
6248 } else ; // any other byte is a command this implementation does not support; ignore it
6249 }
6250 };
6251 /**
6252 * Execute an extended command
6253 *
6254 * @param {Integer} i Current index in the 708 packet
6255 * @param {Service} service The service object to be affected
6256 * @return {Integer} New index after parsing
6257 */
6258
6259
6260 Cea708Stream.prototype.extendedCommands = function (i, service) {
6261 var packetData = this.current708Packet.data;
6262 var b = packetData[++i];
6263
6264 if (within708TextBlock(b)) {
6265 i = this.handleText(i, service, {
6266 isExtended: true
6267 });
6268 }
6269
6270 return i;
6271 };
6272 /**
6273 * Get PTS value of a given byte index
6274 *
6275 * @param {Integer} byteIndex Index of the byte
6276 * @return {Integer} PTS
6277 */
6278
6279
6280 Cea708Stream.prototype.getPts = function (byteIndex) {
6281 // There's 1 pts value per 2 bytes
6282 return this.current708Packet.ptsVals[Math.floor(byteIndex / 2)];
6283 };
6284 /**
6285 * Initializes a service
6286 *
6287 * @param {Integer} serviceNum Service number
6288 * @return {Service} Initialized service object
6289 */
6290
6291
6292 Cea708Stream.prototype.initService = function (serviceNum, i) {
6293 var serviceName = 'SERVICE' + serviceNum;
6294 var self = this;
6296 var encoding;
6297
6298 if (serviceName in this.serviceEncodings) {
6299 encoding = this.serviceEncodings[serviceName];
6300 }
6301
6302 this.services[serviceNum] = new Cea708Service(serviceNum, encoding, self);
6303 this.services[serviceNum].init(this.getPts(i), function (pts) {
6304 self.flushDisplayed(pts, self.services[serviceNum]);
6305 });
6306 return this.services[serviceNum];
6307 };
6308 /**
6309 * Execute text writing to current window
6310 *
6311 * @param {Integer} i Current index in the 708 packet
6312 * @param {Service} service The service object to be affected
6313 * @return {Integer} New index after parsing
6314 */
6315
6316
6317 Cea708Stream.prototype.handleText = function (i, service, options) {
6318 var isExtended = options && options.isExtended;
6319 var isMultiByte = options && options.isMultiByte;
6320 var packetData = this.current708Packet.data;
6321 var extended = isExtended ? 0x1000 : 0x0000;
6322 var currentByte = packetData[i];
6323 var nextByte = packetData[i + 1];
6324 var win = service.currentWindow;
6325 var char;
6326 var charCodeArray; // Use the TextDecoder if one was created for this service
6327
6328 if (service.textDecoder_ && !isExtended) {
6329 if (isMultiByte) {
6330 charCodeArray = [currentByte, nextByte];
6331 i++;
6332 } else {
6333 charCodeArray = [currentByte];
6334 }
6335
6336 char = service.textDecoder_.decode(new Uint8Array(charCodeArray));
6337 } else {
6338 char = get708CharFromCode(extended | currentByte);
6339 }
6340
6341 if (win.pendingNewLine && !win.isEmpty()) {
6342 win.newLine(this.getPts(i));
6343 }
6344
6345 win.pendingNewLine = false;
6346 win.addText(char);
6347 return i;
6348 };
6349 /**
6350 * Handle decoding of multibyte character
6351 *
6352 * @param {Integer} i Current index in the 708 packet
6353 * @param {Service} service The service object to be affected
6354 * @return {Integer} New index after parsing
6355 */
6356
6357
6358 Cea708Stream.prototype.multiByteCharacter = function (i, service) {
6359 var packetData = this.current708Packet.data;
6360 var firstByte = packetData[i + 1];
6361 var secondByte = packetData[i + 2];
6362
6363 if (within708TextBlock(firstByte) && within708TextBlock(secondByte)) {
6364 i = this.handleText(++i, service, {
6365 isMultiByte: true
6366 });
6367 }
6368
6369 return i;
6370 };
6371 /**
6372 * Parse and execute the CW# command.
6373 *
6374 * Set the current window.
6375 *
6376 * @param {Integer} i Current index in the 708 packet
6377 * @param {Service} service The service object to be affected
6378 * @return {Integer} New index after parsing
6379 */
6380
6381
6382 Cea708Stream.prototype.setCurrentWindow = function (i, service) {
6383 var packetData = this.current708Packet.data;
6384 var b = packetData[i];
6385 var windowNum = b & 0x07;
6386 service.setCurrentWindow(windowNum);
6387 return i;
6388 };
6389 /**
6390 * Parse and execute the DF# command.
6391 *
6392 * Define a window and set it as the current window.
6393 *
6394 * @param {Integer} i Current index in the 708 packet
6395 * @param {Service} service The service object to be affected
6396 * @return {Integer} New index after parsing
6397 */
6398
6399
6400 Cea708Stream.prototype.defineWindow = function (i, service) {
6401 var packetData = this.current708Packet.data;
6402 var b = packetData[i];
6403 var windowNum = b & 0x07;
6404 service.setCurrentWindow(windowNum);
6405 var win = service.currentWindow;
6406 b = packetData[++i];
6407 win.visible = (b & 0x20) >> 5; // v
6408
6409 win.rowLock = (b & 0x10) >> 4; // rl
6410
6411 win.columnLock = (b & 0x08) >> 3; // cl
6412
6413 win.priority = b & 0x07; // p
6414
6415 b = packetData[++i];
6416 win.relativePositioning = (b & 0x80) >> 7; // rp
6417
6418 win.anchorVertical = b & 0x7f; // av
6419
6420 b = packetData[++i];
6421 win.anchorHorizontal = b; // ah
6422
6423 b = packetData[++i];
6424 win.anchorPoint = (b & 0xf0) >> 4; // ap
6425
6426 win.rowCount = b & 0x0f; // rc
6427
6428 b = packetData[++i];
6429 win.columnCount = b & 0x3f; // cc
6430
6431 b = packetData[++i];
6432 win.windowStyle = (b & 0x38) >> 3; // ws
6433
6434 win.penStyle = b & 0x07; // ps
6435 // The spec says there are (rowCount+1) "virtual rows"
6436
6437 win.virtualRowCount = win.rowCount + 1;
6438 return i;
6439 };
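// Illustrative sketch (not part of the original bundle): the DF# parameter
// bytes are packed bitfields. Decoding an assumed first parameter byte of
// 0x2b (0b00101011) by hand, using the same shifts and masks as defineWindow:
var exampleDefineWindowByte = function () {
  var b = 0x2b;
  return {
    visible: (b & 0x20) >> 5,    // 1
    rowLock: (b & 0x10) >> 4,    // 0
    columnLock: (b & 0x08) >> 3, // 1
    priority: b & 0x07           // 3
  };
};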
6440 /**
6441 * Parse and execute the SWA command.
6442 *
6443 * Set attributes of the current window.
6444 *
6445 * @param {Integer} i Current index in the 708 packet
6446 * @param {Service} service The service object to be affected
6447 * @return {Integer} New index after parsing
6448 */
6449
6450
6451 Cea708Stream.prototype.setWindowAttributes = function (i, service) {
6452 var packetData = this.current708Packet.data;
6453 var b = packetData[i];
6454 var winAttr = service.currentWindow.winAttr;
6455 b = packetData[++i];
6456 winAttr.fillOpacity = (b & 0xc0) >> 6; // fo
6457
6458 winAttr.fillRed = (b & 0x30) >> 4; // fr
6459
6460 winAttr.fillGreen = (b & 0x0c) >> 2; // fg
6461
6462 winAttr.fillBlue = b & 0x03; // fb
6463
6464 b = packetData[++i];
6465 winAttr.borderType = (b & 0xc0) >> 6; // bt
6466
6467 winAttr.borderRed = (b & 0x30) >> 4; // br
6468
6469 winAttr.borderGreen = (b & 0x0c) >> 2; // bg
6470
6471 winAttr.borderBlue = b & 0x03; // bb
6472
6473 b = packetData[++i];
6474 winAttr.borderType += (b & 0x80) >> 5; // bt
6475
6476 winAttr.wordWrap = (b & 0x40) >> 6; // ww
6477
6478 winAttr.printDirection = (b & 0x30) >> 4; // pd
6479
6480 winAttr.scrollDirection = (b & 0x0c) >> 2; // sd
6481
6482 winAttr.justify = b & 0x03; // j
6483
6484 b = packetData[++i];
6485 winAttr.effectSpeed = (b & 0xf0) >> 4; // es
6486
6487 winAttr.effectDirection = (b & 0x0c) >> 2; // ed
6488
6489 winAttr.displayEffect = b & 0x03; // de
6490
6491 return i;
6492 };
6493 /**
6494 * Gather text from all displayed windows and push a caption to output.
6495 *
6496 * @param {Integer} i Current index in the 708 packet
6497 * @param {Service} service The service object to be affected
6498 */
6499
6500
6501 Cea708Stream.prototype.flushDisplayed = function (pts, service) {
6502 var displayedText = []; // TODO: Positioning not supported, displaying multiple windows will not necessarily
6503 // display text in the correct order, but sample files so far have not shown any issue.
6504
6505 for (var winId = 0; winId < 8; winId++) {
6506 if (service.windows[winId].visible && !service.windows[winId].isEmpty()) {
6507 displayedText.push(service.windows[winId].getText());
6508 }
6509 }
6510
6511 service.endPts = pts;
6512 service.text = displayedText.join('\n\n');
6513 this.pushCaption(service);
6514 service.startPts = pts;
6515 };
6516 /**
6517 * Push a caption to output if the caption contains text.
6518 *
6519 * @param {Service} service The service object to be affected
6520 */
6521
6522
6523 Cea708Stream.prototype.pushCaption = function (service) {
6524 if (service.text !== '') {
6525 this.trigger('data', {
6526 startPts: service.startPts,
6527 endPts: service.endPts,
6528 text: service.text,
6529 stream: 'cc708_' + service.serviceNum
6530 });
6531 service.text = '';
6532 service.startPts = service.endPts;
6533 }
6534 };
6535 /**
6536 * Parse and execute the DSW command.
6537 *
6538 * Set visible property of windows based on the parsed bitmask.
6539 *
6540 * @param {Integer} i Current index in the 708 packet
6541 * @param {Service} service The service object to be affected
6542 * @return {Integer} New index after parsing
6543 */
6544
6545
6546 Cea708Stream.prototype.displayWindows = function (i, service) {
6547 var packetData = this.current708Packet.data;
6548 var b = packetData[++i];
6549 var pts = this.getPts(i);
6550 this.flushDisplayed(pts, service);
6551
6552 for (var winId = 0; winId < 8; winId++) {
6553 if (b & 0x01 << winId) {
6554 service.windows[winId].visible = 1;
6555 }
6556 }
6557
6558 return i;
6559 };
6560 /**
6561 * Parse and execute the HDW command.
6562 *
6563 * Set visible property of windows based on the parsed bitmask.
6564 *
6565 * @param {Integer} i Current index in the 708 packet
6566 * @param {Service} service The service object to be affected
6567 * @return {Integer} New index after parsing
6568 */
6569
6570
6571 Cea708Stream.prototype.hideWindows = function (i, service) {
6572 var packetData = this.current708Packet.data;
6573 var b = packetData[++i];
6574 var pts = this.getPts(i);
6575 this.flushDisplayed(pts, service);
6576
6577 for (var winId = 0; winId < 8; winId++) {
6578 if (b & 0x01 << winId) {
6579 service.windows[winId].visible = 0;
6580 }
6581 }
6582
6583 return i;
6584 };
6585 /**
6586 * Parse and execute the TGW command.
6587 *
6588 * Set visible property of windows based on the parsed bitmask.
6589 *
6590 * @param {Integer} i Current index in the 708 packet
6591 * @param {Service} service The service object to be affected
6592 * @return {Integer} New index after parsing
6593 */
6594
6595
6596 Cea708Stream.prototype.toggleWindows = function (i, service) {
6597 var packetData = this.current708Packet.data;
6598 var b = packetData[++i];
6599 var pts = this.getPts(i);
6600 this.flushDisplayed(pts, service);
6601
6602 for (var winId = 0; winId < 8; winId++) {
6603 if (b & 0x01 << winId) {
6604 service.windows[winId].visible ^= 1;
6605 }
6606 }
6607
6608 return i;
6609 };
6610 /**
6611 * Parse and execute the CLW command.
6612 *
6613 * Clear text of windows based on the parsed bitmask.
6614 *
6615 * @param {Integer} i Current index in the 708 packet
6616 * @param {Service} service The service object to be affected
6617 * @return {Integer} New index after parsing
6618 */
6619
6620
6621 Cea708Stream.prototype.clearWindows = function (i, service) {
6622 var packetData = this.current708Packet.data;
6623 var b = packetData[++i];
6624 var pts = this.getPts(i);
6625 this.flushDisplayed(pts, service);
6626
6627 for (var winId = 0; winId < 8; winId++) {
6628 if (b & 0x01 << winId) {
6629 service.windows[winId].clearText();
6630 }
6631 }
6632
6633 return i;
6634 };
6635 /**
6636 * Parse and execute the DLW command.
6637 *
6638 * Re-initialize windows based on the parsed bitmask.
6639 *
6640 * @param {Integer} i Current index in the 708 packet
6641 * @param {Service} service The service object to be affected
6642 * @return {Integer} New index after parsing
6643 */
6644
6645
6646 Cea708Stream.prototype.deleteWindows = function (i, service) {
6647 var packetData = this.current708Packet.data;
6648 var b = packetData[++i];
6649 var pts = this.getPts(i);
6650 this.flushDisplayed(pts, service);
6651
6652 for (var winId = 0; winId < 8; winId++) {
6653 if (b & 0x01 << winId) {
6654 service.windows[winId].reset();
6655 }
6656 }
6657
6658 return i;
6659 };
6660 /**
6661 * Parse and execute the SPA command.
6662 *
6663 * Set pen attributes of the current window.
6664 *
6665 * @param {Integer} i Current index in the 708 packet
6666 * @param {Service} service The service object to be affected
6667 * @return {Integer} New index after parsing
6668 */
6669
6670
6671 Cea708Stream.prototype.setPenAttributes = function (i, service) {
6672 var packetData = this.current708Packet.data;
6673 var b = packetData[i];
6674 var penAttr = service.currentWindow.penAttr;
6675 b = packetData[++i];
6676 penAttr.textTag = (b & 0xf0) >> 4; // tt
6677
6678 penAttr.offset = (b & 0x0c) >> 2; // o
6679
6680 penAttr.penSize = b & 0x03; // s
6681
6682 b = packetData[++i];
6683 penAttr.italics = (b & 0x80) >> 7; // i
6684
6685 penAttr.underline = (b & 0x40) >> 6; // u
6686
6687 penAttr.edgeType = (b & 0x38) >> 3; // et
6688
6689 penAttr.fontStyle = b & 0x07; // fs
6690
6691 return i;
6692 };
6693 /**
6694 * Parse and execute the SPC command.
6695 *
6696 * Set pen color of the current window.
6697 *
6698 * @param {Integer} i Current index in the 708 packet
6699 * @param {Service} service The service object to be affected
6700 * @return {Integer} New index after parsing
6701 */
6702
6703
6704 Cea708Stream.prototype.setPenColor = function (i, service) {
6705 var packetData = this.current708Packet.data;
6706 var b = packetData[i];
6707 var penColor = service.currentWindow.penColor;
6708 b = packetData[++i];
6709 penColor.fgOpacity = (b & 0xc0) >> 6; // fo
6710
6711 penColor.fgRed = (b & 0x30) >> 4; // fr
6712
6713 penColor.fgGreen = (b & 0x0c) >> 2; // fg
6714
6715 penColor.fgBlue = b & 0x03; // fb
6716
6717 b = packetData[++i];
6718 penColor.bgOpacity = (b & 0xc0) >> 6; // bo
6719
6720 penColor.bgRed = (b & 0x30) >> 4; // br
6721
6722 penColor.bgGreen = (b & 0x0c) >> 2; // bg
6723
6724 penColor.bgBlue = b & 0x03; // bb
6725
6726 b = packetData[++i];
6727 penColor.edgeRed = (b & 0x30) >> 4; // er
6728
6729 penColor.edgeGreen = (b & 0x0c) >> 2; // eg
6730
6731 penColor.edgeBlue = b & 0x03; // eb
6732
6733 return i;
6734 };
6735 /**
6736 * Parse and execute the SPL command.
6737 *
6738 * Set pen location of the current window.
6739 *
6740 * @param {Integer} i Current index in the 708 packet
6741 * @param {Service} service The service object to be affected
6742 * @return {Integer} New index after parsing
6743 */
6744
6745
6746 Cea708Stream.prototype.setPenLocation = function (i, service) {
6747 var packetData = this.current708Packet.data;
6748 var b = packetData[i];
6749 var penLoc = service.currentWindow.penLoc; // Positioning isn't really supported at the moment, so this essentially just inserts a linebreak
6750
6751 service.currentWindow.pendingNewLine = true;
6752 b = packetData[++i];
6753 penLoc.row = b & 0x0f; // r
6754
6755 b = packetData[++i];
6756 penLoc.column = b & 0x3f; // c
6757
6758 return i;
6759 };
6760 /**
6761 * Execute the RST command.
6762 *
6763 * Reset service to a clean slate. Re-initialize.
6764 *
6765 * @param {Integer} i Current index in the 708 packet
6766 * @param {Service} service The service object to be affected
6767 * @return {Service} Re-initialized service
6768 */
6769
6770
6771 Cea708Stream.prototype.reset = function (i, service) {
6772 var pts = this.getPts(i);
6773 this.flushDisplayed(pts, service);
6774 return this.initService(service.serviceNum, i);
6775 }; // This hash maps non-ASCII, special, and extended character codes to their
6776 // proper Unicode equivalent. The first keys that are only a single byte
6777 // are the non-standard ASCII characters, which simply map the CEA608 byte
6778 // to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
6779 // character codes, but have their MSB bitmasked with 0x03 so that a lookup
6780 // can be performed regardless of the field and data channel on which the
6781 // character code was received.
6782
6783
6784 var CHARACTER_TRANSLATION = {
6785 0x2a: 0xe1,
6786 // á
6787 0x5c: 0xe9,
6788 // é
6789 0x5e: 0xed,
6790 // í
6791 0x5f: 0xf3,
6792 // ó
6793 0x60: 0xfa,
6794 // ú
6795 0x7b: 0xe7,
6796 // ç
6797 0x7c: 0xf7,
6798 // ÷
6799 0x7d: 0xd1,
6800 // Ñ
6801 0x7e: 0xf1,
6802 // ñ
6803 0x7f: 0x2588,
6804 // █
6805 0x0130: 0xae,
6806 // ®
6807 0x0131: 0xb0,
6808 // °
6809 0x0132: 0xbd,
6810 // ½
6811 0x0133: 0xbf,
6812 // ¿
6813 0x0134: 0x2122,
6814 // ™
6815 0x0135: 0xa2,
6816 // ¢
6817 0x0136: 0xa3,
6818 // £
6819 0x0137: 0x266a,
6820 // ♪
6821 0x0138: 0xe0,
6822 // à
6823 0x0139: 0xa0,
6824 // (non-breaking space)
6825 0x013a: 0xe8,
6826 // è
6827 0x013b: 0xe2,
6828 // â
6829 0x013c: 0xea,
6830 // ê
6831 0x013d: 0xee,
6832 // î
6833 0x013e: 0xf4,
6834 // ô
6835 0x013f: 0xfb,
6836 // û
6837 0x0220: 0xc1,
6838 // Á
6839 0x0221: 0xc9,
6840 // É
6841 0x0222: 0xd3,
6842 // Ó
6843 0x0223: 0xda,
6844 // Ú
6845 0x0224: 0xdc,
6846 // Ü
6847 0x0225: 0xfc,
6848 // ü
6849 0x0226: 0x2018,
6850 // ‘
6851 0x0227: 0xa1,
6852 // ¡
6853 0x0228: 0x2a,
6854 // *
6855 0x0229: 0x27,
6856 // '
6857 0x022a: 0x2014,
6858 // —
6859 0x022b: 0xa9,
6860 // ©
6861 0x022c: 0x2120,
6862 // ℠
6863 0x022d: 0x2022,
6864 // •
6865 0x022e: 0x201c,
6866 // “
6867 0x022f: 0x201d,
6868 // ”
6869 0x0230: 0xc0,
6870 // À
6871 0x0231: 0xc2,
6872 // Â
6873 0x0232: 0xc7,
6874 // Ç
6875 0x0233: 0xc8,
6876 // È
6877 0x0234: 0xca,
6878 // Ê
6879 0x0235: 0xcb,
6880 // Ë
6881 0x0236: 0xeb,
6882 // ë
6883 0x0237: 0xce,
6884 // Î
6885 0x0238: 0xcf,
6886 // Ï
6887 0x0239: 0xef,
6888 // ï
6889 0x023a: 0xd4,
6890 // Ô
6891 0x023b: 0xd9,
6892 // Ù
6893 0x023c: 0xf9,
6894 // ù
6895 0x023d: 0xdb,
6896 // Û
6897 0x023e: 0xab,
6898 // «
6899 0x023f: 0xbb,
6900 // »
6901 0x0320: 0xc3,
6902 // Ã
6903 0x0321: 0xe3,
6904 // ã
6905 0x0322: 0xcd,
6906 // Í
6907 0x0323: 0xcc,
6908 // Ì
6909 0x0324: 0xec,
6910 // ì
6911 0x0325: 0xd2,
6912 // Ò
6913 0x0326: 0xf2,
6914 // ò
6915 0x0327: 0xd5,
6916 // Õ
6917 0x0328: 0xf5,
6918 // õ
6919 0x0329: 0x7b,
6920 // {
6921 0x032a: 0x7d,
6922 // }
6923 0x032b: 0x5c,
6924 // \
6925 0x032c: 0x5e,
6926 // ^
6927 0x032d: 0x5f,
6928 // _
6929 0x032e: 0x7c,
6930 // |
6931 0x032f: 0x7e,
6932 // ~
6933 0x0330: 0xc4,
6934 // Ä
6935 0x0331: 0xe4,
6936 // ä
6937 0x0332: 0xd6,
6938 // Ö
6939 0x0333: 0xf6,
6940 // ö
6941 0x0334: 0xdf,
6942 // ß
6943 0x0335: 0xa5,
6944 // ¥
6945 0x0336: 0xa4,
6946 // ¤
6947 0x0337: 0x2502,
6948 // │
6949 0x0338: 0xc5,
6950 // Å
6951 0x0339: 0xe5,
6952 // å
6953 0x033a: 0xd8,
6954 // Ø
6955 0x033b: 0xf8,
6956 // ø
6957 0x033c: 0x250c,
6958 // ┌
6959 0x033d: 0x2510,
6960 // ┐
6961 0x033e: 0x2514,
6962 // └
6963 0x033f: 0x2518 // ┘
6964
6965 };
6966
6967 var getCharFromCode = function getCharFromCode(code) {
6968 if (code === null) {
6969 return '';
6970 }
6971
6972 code = CHARACTER_TRANSLATION[code] || code;
6973 return String.fromCharCode(code);
6974 }; // the index of the last row in a CEA-608 display buffer
6975
6976
6977 var BOTTOM_ROW = 14; // This array is used for mapping PACs -> row #, since there's no way of
6978 // getting it through bit logic.
6979
6980 var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420]; // CEA-608 captions are rendered onto a 34x15 matrix of character
6981 // cells. The "bottom" row is the last element in the outer array.
6982
6983 var createDisplayBuffer = function createDisplayBuffer() {
6984 var result = [],
6985 i = BOTTOM_ROW + 1;
6986
6987 while (i--) {
6988 result.push('');
6989 }
6990
6991 return result;
6992 };
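// Illustrative sketch (not part of the original bundle): the ROWS table above
// maps a PAC's masked code to its row number by array index, and the display
// buffer holds one string per row with BOTTOM_ROW as the last index.
var exampleRowLookup = function () {
  var row = ROWS.indexOf(0x1500);     // 4
  var buffer = createDisplayBuffer(); // 15 empty row strings
  return { row: row, rowCount: buffer.length }; // rowCount === BOTTOM_ROW + 1
};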
6993
6994 var Cea608Stream = function Cea608Stream(field, dataChannel) {
6995 Cea608Stream.prototype.init.call(this);
6996 this.field_ = field || 0;
6997 this.dataChannel_ = dataChannel || 0;
6998 this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);
6999 this.setConstants();
7000 this.reset();
7001
7002 this.push = function (packet) {
7003 var data, swap, char0, char1, text; // remove the parity bits
7004
7005 data = packet.ccData & 0x7f7f; // ignore duplicate control codes; the spec demands they're sent twice
7006
7007 if (data === this.lastControlCode_) {
7008 this.lastControlCode_ = null;
7009 return;
7010 } // Store control codes
7011
7012
7013 if ((data & 0xf000) === 0x1000) {
7014 this.lastControlCode_ = data;
7015 } else if (data !== this.PADDING_) {
7016 this.lastControlCode_ = null;
7017 }
7018
7019 char0 = data >>> 8;
7020 char1 = data & 0xff;
7021
7022 if (data === this.PADDING_) {
7023 return;
7024 } else if (data === this.RESUME_CAPTION_LOADING_) {
7025 this.mode_ = 'popOn';
7026 } else if (data === this.END_OF_CAPTION_) {
7027 // If an EOC is received while in paint-on mode, the displayed caption
7028 // text should be swapped to non-displayed memory as if it was a pop-on
7029 // caption. Because of that, we should explicitly switch back to pop-on
7030 // mode
7031 this.mode_ = 'popOn';
7032 this.clearFormatting(packet.pts); // if a caption was being displayed, it's gone now
7033
7034 this.flushDisplayed(packet.pts); // flip memory
7035
7036 swap = this.displayed_;
7037 this.displayed_ = this.nonDisplayed_;
7038 this.nonDisplayed_ = swap; // start measuring the time to display the caption
7039
7040 this.startPts_ = packet.pts;
7041 } else if (data === this.ROLL_UP_2_ROWS_) {
7042 this.rollUpRows_ = 2;
7043 this.setRollUp(packet.pts);
7044 } else if (data === this.ROLL_UP_3_ROWS_) {
7045 this.rollUpRows_ = 3;
7046 this.setRollUp(packet.pts);
7047 } else if (data === this.ROLL_UP_4_ROWS_) {
7048 this.rollUpRows_ = 4;
7049 this.setRollUp(packet.pts);
7050 } else if (data === this.CARRIAGE_RETURN_) {
7051 this.clearFormatting(packet.pts);
7052 this.flushDisplayed(packet.pts);
7053 this.shiftRowsUp_();
7054 this.startPts_ = packet.pts;
7055 } else if (data === this.BACKSPACE_) {
7056 if (this.mode_ === 'popOn') {
7057 this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
7058 } else {
7059 this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
7060 }
7061 } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
7062 this.flushDisplayed(packet.pts);
7063 this.displayed_ = createDisplayBuffer();
7064 } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
7065 this.nonDisplayed_ = createDisplayBuffer();
7066 } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
7067 if (this.mode_ !== 'paintOn') {
7068 // NOTE: This should be removed when proper caption positioning is
7069 // implemented
7070 this.flushDisplayed(packet.pts);
7071 this.displayed_ = createDisplayBuffer();
7072 }
7073
7074 this.mode_ = 'paintOn';
7075 this.startPts_ = packet.pts; // Append special characters to caption text
7076 } else if (this.isSpecialCharacter(char0, char1)) {
7077 // Bitmask char0 so that we can apply character transformations
7078 // regardless of field and data channel.
7079 // Then byte-shift to the left and OR with char1 so we can pass the
7080 // entire character code to `getCharFromCode`.
7081 char0 = (char0 & 0x03) << 8;
7082 text = getCharFromCode(char0 | char1);
7083 this[this.mode_](packet.pts, text);
7084 this.column_++; // Append extended characters to caption text
7085 } else if (this.isExtCharacter(char0, char1)) {
7086 // Extended characters always follow their "non-extended" equivalents.
7087 // I.e., if a "è" is desired, you'll always receive "eè"; non-compliant
7088 // decoders are supposed to drop the "è", while compliant decoders
7089 // backspace the "e" and insert "è".
7090 // Delete the previous character
7091 if (this.mode_ === 'popOn') {
7092 this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
7093 } else {
7094 this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
7095 } // Bitmask char0 so that we can apply character transformations
7096 // regardless of field and data channel.
7097 // Then byte-shift to the left and OR with char1 so we can pass the
7098 // entire character code to `getCharFromCode`.
7099
7100
7101 char0 = (char0 & 0x03) << 8;
7102 text = getCharFromCode(char0 | char1);
7103 this[this.mode_](packet.pts, text);
7104 this.column_++; // Process mid-row codes
7105 } else if (this.isMidRowCode(char0, char1)) {
7106 // Attributes are not additive, so clear all formatting
7107 this.clearFormatting(packet.pts); // According to the standard, mid-row codes
7108 // should be replaced with spaces, so add one now
7109
7110 this[this.mode_](packet.pts, ' ');
7111 this.column_++;
7112
7113 if ((char1 & 0xe) === 0xe) {
7114 this.addFormatting(packet.pts, ['i']);
7115 }
7116
7117 if ((char1 & 0x1) === 0x1) {
7118 this.addFormatting(packet.pts, ['u']);
7119 } // Detect offset control codes and adjust cursor
7120
7121 } else if (this.isOffsetControlCode(char0, char1)) {
7122 // Cursor position is set by indent PAC (see below) in 4-column
7123 // increments, with an additional offset code of 1-3 to reach any
7124 // of the 32 columns specified by CEA-608. So all we need to do
7125 // here is increment the column cursor by the given offset.
7126 this.column_ += char1 & 0x03; // Detect PACs (Preamble Address Codes)
7127 } else if (this.isPAC(char0, char1)) {
7128 // There's no logic for PAC -> row mapping, so we have to just
7129 // find the row code in an array and use its index :(
7130 var row = ROWS.indexOf(data & 0x1f20); // Configure the caption window if we're in roll-up mode
7131
7132 if (this.mode_ === 'rollUp') {
7133 // This implies that the base row is incorrectly set.
7134 // As per the recommendation in CEA-608 (Base Row Implementation), defer to the number
7135 // of roll-up rows set.
7136 if (row - this.rollUpRows_ + 1 < 0) {
7137 row = this.rollUpRows_ - 1;
7138 }
7139
7140 this.setRollUp(packet.pts, row);
7141 }
7142
7143 if (row !== this.row_) {
7144 // formatting is only persistent for current row
7145 this.clearFormatting(packet.pts);
7146 this.row_ = row;
7147 } // All PACs can apply underline, so detect and apply
7148 // (All odd-numbered second bytes set underline)
7149
7150
7151 if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
7152 this.addFormatting(packet.pts, ['u']);
7153 }
7154
7155 if ((data & 0x10) === 0x10) {
7156 // We've got an indent level code. Each successive even number
7157 // increments the column cursor by 4, so we can get the desired
7158 // column position by bit-shifting to the right (to get n/2)
7159 // and multiplying by 4.
7160 this.column_ = ((data & 0xe) >> 1) * 4;
7161 }
7162
7163 if (this.isColorPAC(char1)) {
7164 // it's a color code, though we only support white, which
7165 // can be either normal or italicized. white italics can be
7166 // either 0x4e or 0x6e depending on the row, so we just
7167 // bitwise-and with 0xe to see if italics should be turned on
7168 if ((char1 & 0xe) === 0xe) {
7169 this.addFormatting(packet.pts, ['i']);
7170 }
7171 } // We have a normal character in char0, and possibly one in char1
7172
7173 } else if (this.isNormalChar(char0)) {
7174 if (char1 === 0x00) {
7175 char1 = null;
7176 }
7177
7178 text = getCharFromCode(char0);
7179 text += getCharFromCode(char1);
7180 this[this.mode_](packet.pts, text);
7181 this.column_ += text.length;
7182 } // finish data processing
7183
7184 };
7185 };
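// Editor's illustrative sketch (not part of the original bundle): decoding
// row and indent from a hypothetical PAC word, mirroring the isPAC branch
// above. For 0x1552, `data & 0x1f20` is 0x1500, found at index 4 of ROWS,
// and the indent bits place the cursor at column 4.
var examplePac = 0x1552;
var examplePacRow = ROWS.indexOf(examplePac & 0x1f20); // 4
var examplePacColumn = (examplePac & 0x10) ? ((examplePac & 0xe) >> 1) * 4 : 0; // 4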
7186
7187 Cea608Stream.prototype = new stream(); // Trigger a cue point that captures the current state of the
7188 // display buffer
7189
7190 Cea608Stream.prototype.flushDisplayed = function (pts) {
7191 var content = this.displayed_ // remove spaces from the start and end of the string
7192 .map(function (row, index) {
7193 try {
7194 return row.trim();
7195 } catch (e) {
7196 // Ordinarily, this shouldn't happen. However, caption
7197 // parsing errors should not throw exceptions and
7198 // break playback.
7199 this.trigger('log', {
7200 level: 'warn',
7201 message: 'Skipping a malformed 608 caption at index ' + index + '.'
7202 });
7203 return '';
7204 }
7205 }, this) // combine all text rows to display in one cue
7206 .join('\n') // and remove blank rows from the start and end, but not the middle
7207 .replace(/^\n+|\n+$/g, '');
7208
7209 if (content.length) {
7210 this.trigger('data', {
7211 startPts: this.startPts_,
7212 endPts: pts,
7213 text: content,
7214 stream: this.name_
7215 });
7216 }
7217 };
7218 /**
7219 * Zero out the data, used for startup and on seek
7220 */
7221
7222
7223 Cea608Stream.prototype.reset = function () {
7224 this.mode_ = 'popOn'; // When in roll-up mode, the index of the last row that will
7225 // actually display captions. If a caption is shifted to a row
7226 // with a lower index than this, it is cleared from the display
7227 // buffer
7228
7229 this.topRow_ = 0;
7230 this.startPts_ = 0;
7231 this.displayed_ = createDisplayBuffer();
7232 this.nonDisplayed_ = createDisplayBuffer();
7233 this.lastControlCode_ = null; // Track row and column for proper line-breaking and spacing
7234
7235 this.column_ = 0;
7236 this.row_ = BOTTOM_ROW;
7237 this.rollUpRows_ = 2; // This variable holds currently-applied formatting
7238
7239 this.formatting_ = [];
7240 };
7241 /**
7242 * Sets up control code and related constants for this instance
7243 */
7244
7245
7246 Cea608Stream.prototype.setConstants = function () {
7247 // The following attributes have these uses:
7248 // ext_ : char0 for mid-row codes, and the base for extended
7249 // chars (ext_+0, ext_+1, and ext_+2 are char0s for
7250 // extended codes)
7251 // control_: char0 for control codes, except byte-shifted to the
7252 // left so that we can do this.control_ | CONTROL_CODE
7253 // offset_: char0 for tab offset codes
7254 //
7255 // It's also worth noting that control codes, and _only_ control codes,
7256 // differ between field 1 and field 2. Field 2 control codes are always
7257 // their field 1 value plus 1. That's why there's the "| field" on the
7258 // control value.
7259 if (this.dataChannel_ === 0) {
7260 this.BASE_ = 0x10;
7261 this.EXT_ = 0x11;
7262 this.CONTROL_ = (0x14 | this.field_) << 8;
7263 this.OFFSET_ = 0x17;
7264 } else if (this.dataChannel_ === 1) {
7265 this.BASE_ = 0x18;
7266 this.EXT_ = 0x19;
7267 this.CONTROL_ = (0x1c | this.field_) << 8;
7268 this.OFFSET_ = 0x1f;
7269 } // Constants for the LSByte command codes recognized by Cea608Stream. This
7270 // list is not exhaustive. For a more comprehensive listing and semantics see
7271 // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
7272 // Padding
7273
7274
7275 this.PADDING_ = 0x0000; // Pop-on Mode
7276
7277 this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
7278 this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f; // Roll-up Mode
7279
7280 this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
7281 this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
7282 this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
7283 this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d; // paint-on mode
7284
7285 this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29; // Erasure
7286
7287 this.BACKSPACE_ = this.CONTROL_ | 0x21;
7288 this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
7289 this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
7290 };
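// Editor's illustrative sketch (not part of the original bundle): the
// constants produced above for CC1 (field 0, data channel 0) versus CC3
// (field 1, data channel 0); after the byte shift, field 2 control values
// are the field 1 value plus 0x100.
var exampleCc1Control = (0x14 | 0) << 8; // 0x1400
var exampleCc1EndOfCaption = exampleCc1Control | 0x2f; // 0x142f
var exampleCc3Control = (0x14 | 1) << 8; // 0x1500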
7291 /**
7292 * Detects if the 2-byte packet data is a special character
7293 *
7294 * Special characters have a second byte in the range 0x30 to 0x3f,
7295 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
7296 * data channel 2).
7297 *
7298 * @param {Integer} char0 The first byte
7299 * @param {Integer} char1 The second byte
7300 * @return {Boolean} Whether the 2 bytes are a special character
7301 */
7302
7303
7304 Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
7305 return char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f;
7306 };
7307 /**
7308 * Detects if the 2-byte packet data is an extended character
7309 *
7310 * Extended characters have a second byte in the range 0x20 to 0x3f,
7311 * with the first byte being 0x12 or 0x13 (for data channel 1) or
7312 * 0x1a or 0x1b (for data channel 2).
7313 *
7314 * @param {Integer} char0 The first byte
7315 * @param {Integer} char1 The second byte
7316 * @return {Boolean} Whether the 2 bytes are an extended character
7317 */
7318
7319
7320 Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
7321 return (char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2) && char1 >= 0x20 && char1 <= 0x3f;
7322 };
7323 /**
7324 * Detects if the 2-byte packet is a mid-row code
7325 *
7326 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
7327 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
7328 * channel 2).
7329 *
7330 * @param {Integer} char0 The first byte
7331 * @param {Integer} char1 The second byte
7332 * @return {Boolean} Whether the 2 bytes are a mid-row code
7333 */
7334
7335
7336 Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
7337 return char0 === this.EXT_ && char1 >= 0x20 && char1 <= 0x2f;
7338 };
7339 /**
7340 * Detects if the 2-byte packet is an offset control code
7341 *
7342 * Offset control codes have a second byte in the range 0x21 to 0x23,
7343 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
7344 * data channel 2).
7345 *
7346 * @param {Integer} char0 The first byte
7347 * @param {Integer} char1 The second byte
7348 * @return {Boolean} Whether the 2 bytes are an offset control code
7349 */
7350
7351
7352 Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
7353 return char0 === this.OFFSET_ && char1 >= 0x21 && char1 <= 0x23;
7354 };
7355 /**
7356 * Detects if the 2-byte packet is a Preamble Address Code
7357 *
7358 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
7359 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
7360 * range 0x40 to 0x7f.
7361 *
7362 * @param {Integer} char0 The first byte
7363 * @param {Integer} char1 The second byte
7364 * @return {Boolean} Whether the 2 bytes are a PAC
7365 */
7366
7367
7368 Cea608Stream.prototype.isPAC = function (char0, char1) {
7369 return char0 >= this.BASE_ && char0 < this.BASE_ + 8 && char1 >= 0x40 && char1 <= 0x7f;
7370 };
7371 /**
7372 * Detects if a packet's second byte is in the range of a PAC color code
7373 *
7374 * PAC color codes have a second byte in the range 0x40 to 0x4f, or
7375 * 0x60 to 0x7f.
7376 *
7377 * @param {Integer} char1 The second byte
7378 * @return {Boolean} Whether the byte is a color PAC
7379 */
7380
7381
7382 Cea608Stream.prototype.isColorPAC = function (char1) {
7383 return char1 >= 0x40 && char1 <= 0x4f || char1 >= 0x60 && char1 <= 0x7f;
7384 };
7385 /**
7386 * Detects if a single byte is in the range of a normal character
7387 *
7388 * Normal text bytes are in the range 0x20 to 0x7f.
7389 *
7390 * @param {Integer} char The byte
7391 * @return {Boolean} Whether the byte is a normal character
7392 */
7393
7394
7395 Cea608Stream.prototype.isNormalChar = function (char) {
7396 return char >= 0x20 && char <= 0x7f;
7397 };
7398 /**
7399 * Configures roll-up
7400 *
7401 * @param {Integer} pts Current PTS
7402 * @param {Integer} newBaseRow Used by PACs to slide the current window to
7403 * a new position
7404 */
7405
7406
7407 Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
7408 // Reset the base row to the bottom row when switching modes
7409 if (this.mode_ !== 'rollUp') {
7410 this.row_ = BOTTOM_ROW;
7411 this.mode_ = 'rollUp'; // Spec says to wipe memories when switching to roll-up
7412
7413 this.flushDisplayed(pts);
7414 this.nonDisplayed_ = createDisplayBuffer();
7415 this.displayed_ = createDisplayBuffer();
7416 }
7417
7418 if (newBaseRow !== undefined && newBaseRow !== this.row_) {
7419 // move currently displayed captions (up or down) to the new base row
7420 for (var i = 0; i < this.rollUpRows_; i++) {
7421 this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
7422 this.displayed_[this.row_ - i] = '';
7423 }
7424 }
7425
7426 if (newBaseRow === undefined) {
7427 newBaseRow = this.row_;
7428 }
7429
7430 this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
7431 }; // Adds the opening HTML tag for the passed character to the caption text,
7432 // and keeps track of it for later closing
7433
7434
7435 Cea608Stream.prototype.addFormatting = function (pts, format) {
7436 this.formatting_ = this.formatting_.concat(format);
7437 var text = format.reduce(function (text, format) {
7438 return text + '<' + format + '>';
7439 }, '');
7440 this[this.mode_](pts, text);
7441 }; // Adds HTML closing tags for current formatting to caption text and
7442 // clears remembered formatting
7443
7444
7445 Cea608Stream.prototype.clearFormatting = function (pts) {
7446 if (!this.formatting_.length) {
7447 return;
7448 }
7449
7450 var text = this.formatting_.reverse().reduce(function (text, format) {
7451 return text + '</' + format + '>';
7452 }, '');
7453 this.formatting_ = [];
7454 this[this.mode_](pts, text);
7455 }; // Mode Implementations
7456
7457
7458 Cea608Stream.prototype.popOn = function (pts, text) {
7459 var baseRow = this.nonDisplayed_[this.row_]; // buffer characters
7460
7461 baseRow += text;
7462 this.nonDisplayed_[this.row_] = baseRow;
7463 };
7464
7465 Cea608Stream.prototype.rollUp = function (pts, text) {
7466 var baseRow = this.displayed_[this.row_];
7467 baseRow += text;
7468 this.displayed_[this.row_] = baseRow;
7469 };
7470
7471 Cea608Stream.prototype.shiftRowsUp_ = function () {
7472 var i; // clear out inactive rows
7473
7474 for (i = 0; i < this.topRow_; i++) {
7475 this.displayed_[i] = '';
7476 }
7477
7478 for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
7479 this.displayed_[i] = '';
7480 } // shift displayed rows up
7481
7482
7483 for (i = this.topRow_; i < this.row_; i++) {
7484 this.displayed_[i] = this.displayed_[i + 1];
7485 } // clear out the bottom row
7486
7487
7488 this.displayed_[this.row_] = '';
7489 };
7490
7491 Cea608Stream.prototype.paintOn = function (pts, text) {
7492 var baseRow = this.displayed_[this.row_];
7493 baseRow += text;
7494 this.displayed_[this.row_] = baseRow;
7495 }; // exports
7496
7497
7498 var captionStream = {
7499 CaptionStream: CaptionStream$1,
7500 Cea608Stream: Cea608Stream,
7501 Cea708Stream: Cea708Stream
7502 };
7503 /**
7504 * mux.js
7505 *
7506 * Copyright (c) Brightcove
7507 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
7508 */
7509
7510 var streamTypes = {
7511 H264_STREAM_TYPE: 0x1B,
7512 ADTS_STREAM_TYPE: 0x0F,
7513 METADATA_STREAM_TYPE: 0x15
7514 };
7515 var MAX_TS = 8589934592;
7516 var RO_THRESH = 4294967296;
7517 var TYPE_SHARED = 'shared';
7518
7519 var handleRollover$1 = function handleRollover(value, reference) {
7520 var direction = 1;
7521
7522 if (value > reference) {
7523 // If the current timestamp value is greater than our reference timestamp and we detect a
7524 // timestamp rollover, this means the roll over is happening in the opposite direction.
7525 // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
7526 // point will be set to a small number, e.g. 1. The user then seeks backwards over the
7527 // rollover point. In loading this segment, the timestamp values will be very large,
7528 // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
7529 // the time stamp to be `value - 2^33`.
7530 direction = -1;
7531 } // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
7532 // cause an incorrect adjustment.
7533
7534
7535 while (Math.abs(reference - value) > RO_THRESH) {
7536 value += direction * MAX_TS;
7537 }
7538
7539 return value;
7540 };
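// Editor's illustrative sketch (not part of the original bundle): with a
// reference DTS of 1 (just after a rollover), a timestamp from just before
// the rollover is shifted down by 2^33 so it sorts before the reference.
var exampleAdjustedDts = handleRollover$1(Math.pow(2, 33) - 10, 1); // -10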
7541
7542 var TimestampRolloverStream$1 = function TimestampRolloverStream(type) {
7543 var lastDTS, referenceDTS;
7544 TimestampRolloverStream.prototype.init.call(this); // The "shared" type is used in cases where a stream will contain muxed
7545 // video and audio. We could use `undefined` here, but having a string
7546 // makes debugging a little clearer.
7547
7548 this.type_ = type || TYPE_SHARED;
7549
7550 this.push = function (data) {
7551 // Any "shared" rollover streams will accept _all_ data. Otherwise,
7552 // streams will only accept data that matches their type.
7553 if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
7554 return;
7555 }
7556
7557 if (referenceDTS === undefined) {
7558 referenceDTS = data.dts;
7559 }
7560
7561 data.dts = handleRollover$1(data.dts, referenceDTS);
7562 data.pts = handleRollover$1(data.pts, referenceDTS);
7563 lastDTS = data.dts;
7564 this.trigger('data', data);
7565 };
7566
7567 this.flush = function () {
7568 referenceDTS = lastDTS;
7569 this.trigger('done');
7570 };
7571
7572 this.endTimeline = function () {
7573 this.flush();
7574 this.trigger('endedtimeline');
7575 };
7576
7577 this.discontinuity = function () {
7578 referenceDTS = void 0;
7579 lastDTS = void 0;
7580 };
7581
7582 this.reset = function () {
7583 this.discontinuity();
7584 this.trigger('reset');
7585 };
7586 };
7587
7588 TimestampRolloverStream$1.prototype = new stream();
7589 var timestampRolloverStream = {
7590 TimestampRolloverStream: TimestampRolloverStream$1,
7591 handleRollover: handleRollover$1
7592 };
7593
7594 var percentEncode$1 = function percentEncode(bytes, start, end) {
7595 var i,
7596 result = '';
7597
7598 for (i = start; i < end; i++) {
7599 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
7600 }
7601
7602 return result;
7603 },
7604 // return the string representation of the specified byte range,
7605 // interpreted as UTF-8.
7606 parseUtf8 = function parseUtf8(bytes, start, end) {
7607 return decodeURIComponent(percentEncode$1(bytes, start, end));
7608 },
7609 // return the string representation of the specified byte range,
7610 // interpreted as ISO-8859-1.
7611 parseIso88591$1 = function parseIso88591(bytes, start, end) {
7612 return unescape(percentEncode$1(bytes, start, end)); // jshint ignore:line
7613 },
7614 parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
7615 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
7616 },
7617 tagParsers = {
7618 TXXX: function TXXX(tag) {
7619 var i;
7620
7621 if (tag.data[0] !== 3) {
7622 // ignore frames with unrecognized character encodings
7623 return;
7624 }
7625
7626 for (i = 1; i < tag.data.length; i++) {
7627 if (tag.data[i] === 0) {
7628 // parse the text fields
7629 tag.description = parseUtf8(tag.data, 1, i); // do not include the null terminator in the tag value
7630
7631 tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
7632 break;
7633 }
7634 }
7635
7636 tag.data = tag.value;
7637 },
7638 WXXX: function WXXX(tag) {
7639 var i;
7640
7641 if (tag.data[0] !== 3) {
7642 // ignore frames with unrecognized character encodings
7643 return;
7644 }
7645
7646 for (i = 1; i < tag.data.length; i++) {
7647 if (tag.data[i] === 0) {
7648 // parse the description and URL fields
7649 tag.description = parseUtf8(tag.data, 1, i);
7650 tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
7651 break;
7652 }
7653 }
7654 },
7655 PRIV: function PRIV(tag) {
7656 var i;
7657
7658 for (i = 0; i < tag.data.length; i++) {
7659 if (tag.data[i] === 0) {
7660 // parse the description and URL fields
7661 tag.owner = parseIso88591$1(tag.data, 0, i);
7662 break;
7663 }
7664 }
7665
7666 tag.privateData = tag.data.subarray(i + 1);
7667 tag.data = tag.privateData;
7668 }
7669 },
7670 _MetadataStream;
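// Editor's illustrative sketch (not part of the original bundle): ID3
// "syncsafe" integers carry 7 bits per byte, so 0x00 0x00 0x02 0x01
// decodes to (2 << 7) | 1, and text payloads round-trip through
// percent-encoding into decodeURIComponent.
var exampleSyncSafeSize = parseSyncSafeInteger$1(new Uint8Array([0x00, 0x00, 0x02, 0x01])); // 257
var exampleUtf8Text = parseUtf8(new Uint8Array([0x68, 0x69]), 0, 2); // 'hi'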
7671
7672 _MetadataStream = function MetadataStream(options) {
7673 var settings = {
7674 // the bytes of the program-level descriptor field in MP2T
7675 // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
7676 // program element descriptors"
7677 descriptor: options && options.descriptor
7678 },
7679 // the total size in bytes of the ID3 tag being parsed
7680 tagSize = 0,
7681 // tag data that is not complete enough to be parsed
7682 buffer = [],
7683 // the total number of bytes currently in the buffer
7684 bufferSize = 0,
7685 i;
7686
7687 _MetadataStream.prototype.init.call(this); // calculate the text track in-band metadata track dispatch type
7688 // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
7689
7690
7691 this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);
7692
7693 if (settings.descriptor) {
7694 for (i = 0; i < settings.descriptor.length; i++) {
7695 this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
7696 }
7697 }
7698
7699 this.push = function (chunk) {
7700 var tag, frameStart, frameSize, frame, i, frameHeader;
7701
7702 if (chunk.type !== 'timed-metadata') {
7703 return;
7704 } // if data_alignment_indicator is set in the PES header,
7705 // we must have the start of a new ID3 tag. Assume anything
7706 // remaining in the buffer was malformed and throw it out
7707
7708
7709 if (chunk.dataAlignmentIndicator) {
7710 bufferSize = 0;
7711 buffer.length = 0;
7712 } // ignore events that don't look like ID3 data
7713
7714
7715 if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
7716 this.trigger('log', {
7717 level: 'warn',
7718 message: 'Skipping unrecognized metadata packet'
7719 });
7720 return;
7721 } // add this chunk to the data we've collected so far
7722
7723
7724 buffer.push(chunk);
7725 bufferSize += chunk.data.byteLength; // grab the size of the entire frame from the ID3 header
7726
7727 if (buffer.length === 1) {
7728 // the frame size is transmitted as a 28-bit integer in the
7729 // last four bytes of the ID3 header.
7730 // The most significant bit of each byte is dropped and the
7731 // results concatenated to recover the actual value.
7732 tagSize = parseSyncSafeInteger$1(chunk.data.subarray(6, 10)); // ID3 reports the tag size excluding the header but it's more
7733 // convenient for our comparisons to include it
7734
7735 tagSize += 10;
7736 } // if the entire frame has not arrived, wait for more data
7737
7738
7739 if (bufferSize < tagSize) {
7740 return;
7741 } // collect the entire frame so it can be parsed
7742
7743
7744 tag = {
7745 data: new Uint8Array(tagSize),
7746 frames: [],
7747 pts: buffer[0].pts,
7748 dts: buffer[0].dts
7749 };
7750
7751 for (i = 0; i < tagSize;) {
7752 tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
7753 i += buffer[0].data.byteLength;
7754 bufferSize -= buffer[0].data.byteLength;
7755 buffer.shift();
7756 } // find the start of the first frame and the end of the tag
7757
7758
7759 frameStart = 10;
7760
7761 if (tag.data[5] & 0x40) {
7762 // advance the frame start past the extended header
7763 frameStart += 4; // header size field
7764
7765 frameStart += parseSyncSafeInteger$1(tag.data.subarray(10, 14)); // clip any padding off the end
7766
7767 tagSize -= parseSyncSafeInteger$1(tag.data.subarray(16, 20));
7768 } // parse one or more ID3 frames
7769 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
7770
7771
7772 do {
7773 // determine the number of bytes in this frame
7774 frameSize = parseSyncSafeInteger$1(tag.data.subarray(frameStart + 4, frameStart + 8));
7775
7776 if (frameSize < 1) {
7777 this.trigger('log', {
7778 level: 'warn',
7779 message: 'Malformed ID3 frame encountered. Skipping metadata parsing.'
7780 });
7781 return;
7782 }
7783
7784 frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);
7785 frame = {
7786 id: frameHeader,
7787 data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
7788 };
7789 frame.key = frame.id;
7790
7791 if (tagParsers[frame.id]) {
7792 tagParsers[frame.id](frame); // handle the special PRIV frame used to indicate the start
7793 // time for raw AAC data
7794
7795 if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
7796 var d = frame.data,
7797 size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
7798 size *= 4;
7799 size += d[7] & 0x03;
7800 frame.timeStamp = size; // in raw AAC, all subsequent data will be timestamped based
7801 // on the value of this frame
7802 // we couldn't have known the appropriate pts and dts before
7803 // parsing this ID3 tag so set those values now
7804
7805 if (tag.pts === undefined && tag.dts === undefined) {
7806 tag.pts = frame.timeStamp;
7807 tag.dts = frame.timeStamp;
7808 }
7809
7810 this.trigger('timestamp', frame);
7811 }
7812 }
7813
7814 tag.frames.push(frame);
7815 frameStart += 10; // advance past the frame header
7816
7817 frameStart += frameSize; // advance past the frame body
7818 } while (frameStart < tagSize);
7819
7820 this.trigger('data', tag);
7821 };
7822 };
7823
7824 _MetadataStream.prototype = new stream();
7825 var metadataStream = _MetadataStream;
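// Editor's illustrative sketch (not part of the original bundle): the
// 33-bit transportStreamTimestamp handled above is rebuilt from the PRIV
// frame's data bytes; the top 31 bits are assembled bitwise, then scaled
// by 4 so the value stays within JavaScript's safe integer math.
var examplePrivBytes = [0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x03]; // hypothetical payload
var examplePrivTimestamp = ((examplePrivBytes[3] & 0x01) << 30 | examplePrivBytes[4] << 22 | examplePrivBytes[5] << 14 | examplePrivBytes[6] << 6 | examplePrivBytes[7] >>> 2) * 4 + (examplePrivBytes[7] & 0x03); // 4294967299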
7826 var TimestampRolloverStream = timestampRolloverStream.TimestampRolloverStream; // object types
7827
7828 var _TransportPacketStream, _TransportParseStream, _ElementaryStream; // constants
7829
7830
7831 var MP2T_PACKET_LENGTH$1 = 188,
7832 // bytes
7833 SYNC_BYTE$1 = 0x47;
7834 /**
7835 * Splits an incoming stream of binary data into MPEG-2 Transport
7836 * Stream packets.
7837 */
7838
7839 _TransportPacketStream = function TransportPacketStream() {
7840 var buffer = new Uint8Array(MP2T_PACKET_LENGTH$1),
7841 bytesInBuffer = 0;
7842
7843 _TransportPacketStream.prototype.init.call(this); // Deliver new bytes to the stream.
7844
7845 /**
7846 * Split a stream of data into M2TS packets
7847 **/
7848
7849
7850 this.push = function (bytes) {
7851 var startIndex = 0,
7852 endIndex = MP2T_PACKET_LENGTH$1,
7853 everything; // If there are bytes remaining from the last segment, prepend them to the
7854 // bytes that were pushed in
7855
7856 if (bytesInBuffer) {
7857 everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
7858 everything.set(buffer.subarray(0, bytesInBuffer));
7859 everything.set(bytes, bytesInBuffer);
7860 bytesInBuffer = 0;
7861 } else {
7862 everything = bytes;
7863 } // While we have enough data for a packet
7864
7865
7866 while (endIndex < everything.byteLength) {
7867 // Look for a pair of start and end sync bytes in the data.
7868 if (everything[startIndex] === SYNC_BYTE$1 && everything[endIndex] === SYNC_BYTE$1) {
7869 // We found a packet so emit it and jump one whole packet forward in
7870 // the stream
7871 this.trigger('data', everything.subarray(startIndex, endIndex));
7872 startIndex += MP2T_PACKET_LENGTH$1;
7873 endIndex += MP2T_PACKET_LENGTH$1;
7874 continue;
7875 } // If we get here, we have somehow become de-synchronized and we need to step
7876 // forward one byte at a time until we find a pair of sync bytes that denote
7877 // a packet
7878
7879
7880 startIndex++;
7881 endIndex++;
7882 } // If there was some data left over at the end of the segment that couldn't
7883 // possibly be a whole packet, keep it because it might be the start of a packet
7884 // that continues in the next segment
7885
7886
7887 if (startIndex < everything.byteLength) {
7888 buffer.set(everything.subarray(startIndex), 0);
7889 bytesInBuffer = everything.byteLength - startIndex;
7890 }
7891 };
7892 /**
7893 * Emits any complete packet left in the buffer and signals completion
7894 **/
7895
7896
7897 this.flush = function () {
7898 // If the buffer contains a whole packet when we are being flushed, emit it
7899 // and empty the buffer. Otherwise hold onto the data because it may be
7900 // important for decoding the next segment
7901 if (bytesInBuffer === MP2T_PACKET_LENGTH$1 && buffer[0] === SYNC_BYTE$1) {
7902 this.trigger('data', buffer);
7903 bytesInBuffer = 0;
7904 }
7905
7906 this.trigger('done');
7907 };
7908
7909 this.endTimeline = function () {
7910 this.flush();
7911 this.trigger('endedtimeline');
7912 };
7913
7914 this.reset = function () {
7915 bytesInBuffer = 0;
7916 this.trigger('reset');
7917 };
7918 };
7919
7920 _TransportPacketStream.prototype = new stream();
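// Editor's illustrative sketch (not part of the original bundle): given
// two concatenated 188-byte packets, the parser emits the first as soon as
// it sees sync bytes at offsets 0 and 188; the second stays buffered until
// flush() confirms it is a whole packet.
var exampleTsBytes = new Uint8Array(188 * 2);
exampleTsBytes[0] = 0x47; // SYNC_BYTE$1
exampleTsBytes[188] = 0x47;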
7921 /**
7922 * Accepts an MP2T TransportPacketStream and emits data events with parsed
7923 * forms of the individual transport stream packets.
7924 */
7925
7926 _TransportParseStream = function TransportParseStream() {
7927 var parsePsi, parsePat, parsePmt, self;
7928
7929 _TransportParseStream.prototype.init.call(this);
7930
7931 self = this;
7932 this.packetsWaitingForPmt = [];
7933 this.programMapTable = undefined;
7934
7935 parsePsi = function parsePsi(payload, psi) {
7936 var offset = 0; // PSI packets may be split into multiple sections and those
7937 // sections may be split into multiple packets. If a PSI
7938 // section starts in this packet, the payload_unit_start_indicator
7939 // will be true and the first byte of the payload will indicate
7940 // the offset from the current position to the start of the
7941 // section.
7942
7943 if (psi.payloadUnitStartIndicator) {
7944 offset += payload[offset] + 1;
7945 }
7946
7947 if (psi.type === 'pat') {
7948 parsePat(payload.subarray(offset), psi);
7949 } else {
7950 parsePmt(payload.subarray(offset), psi);
7951 }
7952 };
7953
7954 parsePat = function parsePat(payload, pat) {
7955 pat.section_number = payload[7]; // eslint-disable-line camelcase
7956
7957 pat.last_section_number = payload[8]; // eslint-disable-line camelcase
7958 // skip the PSI header and parse the first PMT entry
7959
7960 self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
7961 pat.pmtPid = self.pmtPid;
7962 };
7963 /**
7964 * Parse out the relevant fields of a Program Map Table (PMT).
7965 * @param payload {Uint8Array} the PMT-specific portion of an MP2T
7966 * packet. The first byte in this array should be the table_id
7967 * field.
7968 * @param pmt {object} the object that should be decorated with
7969 * fields parsed from the PMT.
7970 */
7971
7972
7973 parsePmt = function parsePmt(payload, pmt) {
7974 var sectionLength, tableEnd, programInfoLength, offset; // PMTs can be sent ahead of the time when they should actually
7975 // take effect. We don't believe this should ever be the case
7976 // for HLS but we'll ignore "forward" PMT declarations if we see
7977 // them. Future PMT declarations have the current_next_indicator
7978 // set to zero.
7979
7980 if (!(payload[5] & 0x01)) {
7981 return;
7982 } // overwrite any existing program map table
7983
7984
7985 self.programMapTable = {
7986 video: null,
7987 audio: null,
7988 'timed-metadata': {}
7989 }; // the mapping table ends at the end of the current section
7990
7991 sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
7992 tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
7993 // long the program info descriptors are
7994
7995 programInfoLength = (payload[10] & 0x0f) << 8 | payload[11]; // advance the offset to the first entry in the mapping table
7996
7997 offset = 12 + programInfoLength;
7998
7999 while (offset < tableEnd) {
8000 var streamType = payload[offset];
8001 var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]; // only map a single elementary_pid for audio and video stream types
8002 // TODO: should this be done for metadata too? for now maintain behavior of
8003 // multiple metadata streams
8004
8005 if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
8006 self.programMapTable.video = pid;
8007 } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
8008 self.programMapTable.audio = pid;
8009 } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
8010 // map pid to stream type for metadata streams
8011 self.programMapTable['timed-metadata'][pid] = streamType;
8012 } // move to the next table entry
8013 // skip past the elementary stream descriptors, if present
8014
8015
8016 offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
8017 } // record the map on the packet as well
8018
8019
8020 pmt.programMapTable = self.programMapTable;
8021 };
8022 /**
8023 * Deliver a new MP2T packet to the next stream in the pipeline.
8024 */
8025
8026
8027 this.push = function (packet) {
8028 var result = {},
8029 offset = 4;
8030 result.payloadUnitStartIndicator = !!(packet[1] & 0x40); // pid is a 13-bit field starting at the last bit of packet[1]
8031
8032 result.pid = packet[1] & 0x1f;
8033 result.pid <<= 8;
8034 result.pid |= packet[2]; // if an adaptation field is present, its length is specified by the
8035 // fifth byte of the TS packet header. The adaptation field is
8036 // used to add stuffing to PES packets that don't fill a complete
8037 // TS packet, and to specify some forms of timing and control data
8038 // that we do not currently use.
8039
8040 if ((packet[3] & 0x30) >>> 4 > 0x01) {
8041 offset += packet[offset] + 1;
8042 } // parse the rest of the packet based on the type
8043
8044
8045 if (result.pid === 0) {
8046 result.type = 'pat';
8047 parsePsi(packet.subarray(offset), result);
8048 this.trigger('data', result);
8049 } else if (result.pid === this.pmtPid) {
8050 result.type = 'pmt';
8051 parsePsi(packet.subarray(offset), result);
8052 this.trigger('data', result); // if there are any packets waiting for a PMT to be found, process them now
8053
8054 while (this.packetsWaitingForPmt.length) {
8055 this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
8056 }
8057 } else if (this.programMapTable === undefined) {
8058 // When we have not seen a PMT yet, defer further processing of
8059 // PES packets until one has been parsed
8060 this.packetsWaitingForPmt.push([packet, offset, result]);
8061 } else {
8062 this.processPes_(packet, offset, result);
8063 }
8064 };
8065
8066 this.processPes_ = function (packet, offset, result) {
8067 // set the appropriate stream type
8068 if (result.pid === this.programMapTable.video) {
8069 result.streamType = streamTypes.H264_STREAM_TYPE;
8070 } else if (result.pid === this.programMapTable.audio) {
8071 result.streamType = streamTypes.ADTS_STREAM_TYPE;
8072 } else {
8073 // if not video or audio, it is timed-metadata or unknown
8074 // if unknown, streamType will be undefined
8075 result.streamType = this.programMapTable['timed-metadata'][result.pid];
8076 }
8077
8078 result.type = 'pes';
8079 result.data = packet.subarray(offset);
8080 this.trigger('data', result);
8081 };
8082 };
8083
8084 _TransportParseStream.prototype = new stream();
8085 _TransportParseStream.STREAM_TYPES = {
8086 h264: 0x1b,
8087 adts: 0x0f
8088 };
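// Editor's illustrative sketch (not part of the original bundle): the
// 13-bit PID spans the low 5 bits of header byte 1 and all of byte 2, and
// bit 0x40 of byte 1 is the payload_unit_start_indicator read above.
var exampleTsHeader = [0x47, 0x41, 0x00, 0x10]; // hypothetical TS header
var examplePid = (exampleTsHeader[1] & 0x1f) << 8 | exampleTsHeader[2]; // 0x100
var examplePusi = !!(exampleTsHeader[1] & 0x40); // true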
8089 /**
8090 * Reconstitutes program elementary stream (PES) packets from parsed
8091 * transport stream packets. That is, if you pipe an
8092 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
8093 * events will be events which capture the bytes for individual PES
8094 * packets plus relevant metadata that has been extracted from the
8095 * container.
8096 */
8097
8098 _ElementaryStream = function ElementaryStream() {
8099 var self = this,
8100 segmentHadPmt = false,
8101 // PES packet fragments
8102 video = {
8103 data: [],
8104 size: 0
8105 },
8106 audio = {
8107 data: [],
8108 size: 0
8109 },
8110 timedMetadata = {
8111 data: [],
8112 size: 0
8113 },
8114 programMapTable,
8115 parsePes = function parsePes(payload, pes) {
8116 var ptsDtsFlags;
8117 var startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2]; // default to an empty array
8118
8119 pes.data = new Uint8Array(); // In certain live streams, the start of a TS fragment has TS packets
8120 // whose frame data continues from the previous fragment. This check
8121 // verifies that the PES data is the start of a new PES payload
8122
8123 if (startPrefix !== 1) {
8124 return;
8125 } // get the packet length, this will be 0 for video
8126
8127
8128 pes.packetLength = 6 + (payload[4] << 8 | payload[5]); // find out if this packet starts a new keyframe
8129
8130 pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0; // PES packets may be annotated with a PTS value, or a PTS value
8131 // and a DTS value. Determine what combination of values is
8132 // available to work with.
8133
8134 ptsDtsFlags = payload[7]; // PTS and DTS are normally stored as a 33-bit number. JavaScript
8135 // performs all bitwise operations on 32-bit integers but JavaScript
8136 // supports a much greater range (52 bits) of integers using standard
8137 // mathematical operations.
8138 // We construct a 31-bit value using bitwise operators over the 31
8139 // most significant bits and then multiply by 4 (equal to a left-shift
8140 // of 2) before we add the final 2 least significant bits of the
8141 // timestamp (equal to an OR.)
8142
8143 if (ptsDtsFlags & 0xC0) {
8144 // the PTS and DTS are not written out directly. For information
8145 // on how they are encoded, see
8146 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
8147 pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
8148 pes.pts *= 4; // Left shift by 2
8149
8150 pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
8151
8152 pes.dts = pes.pts;
8153
8154 if (ptsDtsFlags & 0x40) {
8155 pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
8156 pes.dts *= 4; // Left shift by 2
8157
8158 pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
8159 }
8160 } // the data section starts immediately after the PES header.
8161 // pes_header_data_length specifies the number of header bytes
8162 // that follow the last byte of the field.
8163
8164
8165 pes.data = payload.subarray(9 + payload[8]);
8166 },
8167
8168 /**
8169 * Pass completely parsed PES packets to the next stream in the pipeline
8170 **/
8171 flushStream = function flushStream(stream, type, forceFlush) {
8172 var packetData = new Uint8Array(stream.size),
8173 event = {
8174 type: type
8175 },
8176 i = 0,
8177 offset = 0,
8178 packetFlushable = false,
8179 fragment; // do nothing if there is not enough buffered data for a complete
8180 // PES header
8181
8182 if (!stream.data.length || stream.size < 9) {
8183 return;
8184 }
8185
8186 event.trackId = stream.data[0].pid; // reassemble the packet
8187
8188 for (i = 0; i < stream.data.length; i++) {
8189 fragment = stream.data[i];
8190 packetData.set(fragment.data, offset);
8191 offset += fragment.data.byteLength;
8192 } // parse assembled packet's PES header
8193
8194
8195 parsePes(packetData, event); // non-video PES packets MUST have a non-zero PES_packet_length
8196 // check that there is enough stream data to fill the packet
8197
8198 packetFlushable = type === 'video' || event.packetLength <= stream.size; // flush pending packets if the conditions are right
8199
8200 if (forceFlush || packetFlushable) {
8201 stream.size = 0;
8202 stream.data.length = 0;
8203 } // only emit packets that are complete. this is to avoid assembling
8204 // incomplete PES packets due to poor segmentation
8205
8206
8207 if (packetFlushable) {
8208 self.trigger('data', event);
8209 }
8210 };
8211
8212 _ElementaryStream.prototype.init.call(this);
8213 /**
8214 * Identifies M2TS packet types and parses PES packets using metadata
8215 * parsed from the PMT
8216 **/
8217
8218
8219 this.push = function (data) {
8220 ({
8221 pat: function pat() {// we have to wait for the PMT to arrive as well before we
8222 // have any meaningful metadata
8223 },
8224 pes: function pes() {
8225 var stream, streamType;
8226
8227 switch (data.streamType) {
8228 case streamTypes.H264_STREAM_TYPE:
8229 stream = video;
8230 streamType = 'video';
8231 break;
8232
8233 case streamTypes.ADTS_STREAM_TYPE:
8234 stream = audio;
8235 streamType = 'audio';
8236 break;
8237
8238 case streamTypes.METADATA_STREAM_TYPE:
8239 stream = timedMetadata;
8240 streamType = 'timed-metadata';
8241 break;
8242
8243 default:
8244 // ignore unknown stream types
8245 return;
8246 } // if a new packet is starting, we can flush the completed
8247 // packet
8248
8249
8250 if (data.payloadUnitStartIndicator) {
8251 flushStream(stream, streamType, true);
8252 } // buffer this fragment until we are sure we've received the
8253 // complete payload
8254
8255
8256 stream.data.push(data);
8257 stream.size += data.data.byteLength;
8258 },
8259 pmt: function pmt() {
8260 var event = {
8261 type: 'metadata',
8262 tracks: []
8263 };
8264 programMapTable = data.programMapTable; // translate audio and video streams to tracks
8265
8266 if (programMapTable.video !== null) {
8267 event.tracks.push({
8268 timelineStartInfo: {
8269 baseMediaDecodeTime: 0
8270 },
8271 id: +programMapTable.video,
8272 codec: 'avc',
8273 type: 'video'
8274 });
8275 }
8276
8277 if (programMapTable.audio !== null) {
8278 event.tracks.push({
8279 timelineStartInfo: {
8280 baseMediaDecodeTime: 0
8281 },
8282 id: +programMapTable.audio,
8283 codec: 'adts',
8284 type: 'audio'
8285 });
8286 }
8287
8288 segmentHadPmt = true;
8289 self.trigger('data', event);
8290 }
8291 })[data.type]();
8292 };
8293
8294 this.reset = function () {
8295 video.size = 0;
8296 video.data.length = 0;
8297 audio.size = 0;
8298 audio.data.length = 0;
8299 this.trigger('reset');
8300 };
8301 /**
8302 * Flush any remaining input. Video PES packets may be of variable
8303 * length. Normally, the start of a new video packet can trigger the
8304 * finalization of the previous packet. That is not possible if no
8305 * more video is forthcoming, however. In that case, some other
8306 * mechanism (like the end of the file) has to be employed. When it is
8307 * clear that no additional data is forthcoming, calling this method
8308 * will flush the buffered packets.
8309 */
8310
8311
8312 this.flushStreams_ = function () {
8313 // !!THIS ORDER IS IMPORTANT!!
8314 // video first then audio
8315 flushStream(video, 'video');
8316 flushStream(audio, 'audio');
8317 flushStream(timedMetadata, 'timed-metadata');
8318 };
8319
8320 this.flush = function () {
8321 // if on flush we haven't had a pmt emitted
8322 // and we have a pmt to emit. emit the pmt
8323 // so that we trigger a trackinfo downstream.
8324 if (!segmentHadPmt && programMapTable) {
8325 var pmt = {
8326 type: 'metadata',
8327 tracks: []
8328 }; // translate audio and video streams to tracks
8329
8330 if (programMapTable.video !== null) {
8331 pmt.tracks.push({
8332 timelineStartInfo: {
8333 baseMediaDecodeTime: 0
8334 },
8335 id: +programMapTable.video,
8336 codec: 'avc',
8337 type: 'video'
8338 });
8339 }
8340
8341 if (programMapTable.audio !== null) {
8342 pmt.tracks.push({
8343 timelineStartInfo: {
8344 baseMediaDecodeTime: 0
8345 },
8346 id: +programMapTable.audio,
8347 codec: 'adts',
8348 type: 'audio'
8349 });
8350 }
8351
8352 self.trigger('data', pmt);
8353 }
8354
8355 segmentHadPmt = false;
8356 this.flushStreams_();
8357 this.trigger('done');
8358 };
8359 };
8360
8361 _ElementaryStream.prototype = new stream();
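// Editor's illustrative sketch (not part of the original bundle): the
// 33-bit PTS decode used in parsePes, applied to a hypothetical 5-byte PTS
// field. The top 31 bits are assembled bitwise, then multiplied by 4
// before the last 2 bits are added, since bitwise ops only cover 32 bits.
var examplePtsBytes = [0x21, 0x00, 0x01, 0x00, 0x0f]; // marker bits set
var examplePts = ((examplePtsBytes[0] & 0x0e) << 27 | (examplePtsBytes[1] & 0xff) << 20 | (examplePtsBytes[2] & 0xfe) << 12 | (examplePtsBytes[3] & 0xff) << 5 | (examplePtsBytes[4] & 0xfe) >>> 3) * 4 + ((examplePtsBytes[4] & 0x06) >>> 1); // 7 (90kHz ticks)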
8362 var m2ts = {
8363 PAT_PID: 0x0000,
8364 MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH$1,
8365 TransportPacketStream: _TransportPacketStream,
8366 TransportParseStream: _TransportParseStream,
8367 ElementaryStream: _ElementaryStream,
8368 TimestampRolloverStream: TimestampRolloverStream,
8369 CaptionStream: captionStream.CaptionStream,
8370 Cea608Stream: captionStream.Cea608Stream,
8371 Cea708Stream: captionStream.Cea708Stream,
8372 MetadataStream: metadataStream
8373 };
8374
8375 for (var type in streamTypes) {
8376 if (streamTypes.hasOwnProperty(type)) {
8377 m2ts[type] = streamTypes[type];
8378 }
8379 }
8380
8381 var m2ts_1 = m2ts;
8382 var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;
8383
8384 var _AdtsStream;
8385
8386 var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
8387 /*
8388 * Accepts an ElementaryStream and emits data events with parsed
8389 * AAC Audio Frames of the individual packets. Input audio in ADTS
8390 * format is unpacked and re-emitted as AAC frames.
8391 *
8392 * @see http://wiki.multimedia.cx/index.php?title=ADTS
8393 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
8394 */
8395
8396 _AdtsStream = function AdtsStream(handlePartialSegments) {
8397 var buffer,
8398 frameNum = 0;
8399
8400 _AdtsStream.prototype.init.call(this);
8401
8402 this.skipWarn_ = function (start, end) {
8403 this.trigger('log', {
8404 level: 'warn',
8405 message: "adts skiping bytes " + start + " to " + end + " in frame " + frameNum + " outside syncword"
8406 });
8407 };
8408
8409 this.push = function (packet) {
8410 var i = 0,
8411 frameLength,
8412 protectionSkipBytes,
8413 oldBuffer,
8414 sampleCount,
8415 adtsFrameDuration;
8416
8417 if (!handlePartialSegments) {
8418 frameNum = 0;
8419 }
8420
8421 if (packet.type !== 'audio') {
8422 // ignore non-audio data
8423 return;
8424 } // Prepend any data in the buffer to the input data so that we can parse
8425 // AAC frames that cross a PES packet boundary
8426
8427
8428 if (buffer && buffer.length) {
8429 oldBuffer = buffer;
8430 buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
8431 buffer.set(oldBuffer);
8432 buffer.set(packet.data, oldBuffer.byteLength);
8433 } else {
8434 buffer = packet.data;
8435 } // unpack any ADTS frames which have been fully received
8436 // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
8437
8438
8439 var skip; // We use i + 7 here because we want to be able to parse the entire header.
8440 // If we don't have enough bytes to do that, then we definitely won't have a full frame.
8441
8442 while (i + 7 < buffer.length) {
8443 // Look for the start of an ADTS header..
8444 if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
8445 if (typeof skip !== 'number') {
8446 skip = i;
8447 } // If a valid header was not found, jump one forward and attempt to
8448 // find a valid ADTS header starting at the next byte
8449
8450
8451 i++;
8452 continue;
8453 }
8454
8455 if (typeof skip === 'number') {
8456 this.skipWarn_(skip, i);
8457 skip = null;
8458 } // The protection skip bit tells us if we have 2 bytes of CRC data at the
8459 // end of the ADTS header
8460
8461
8462 protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2; // Frame length is a 13 bit integer starting 16 bits from the
8463 // end of the sync sequence
8464 // NOTE: frame length includes the size of the header
8465
8466 frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
8467 sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
8468 adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2]; // If we don't have enough data to actually finish this ADTS frame,
8469 // then we have to wait for more data
8470
8471 if (buffer.byteLength - i < frameLength) {
8472 break;
8473 } // Otherwise, deliver the complete AAC frame
8474
8475
8476 this.trigger('data', {
8477 pts: packet.pts + frameNum * adtsFrameDuration,
8478 dts: packet.dts + frameNum * adtsFrameDuration,
8479 sampleCount: sampleCount,
8480 audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
8481 channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
8482 samplerate: ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2],
8483 samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
8484 // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
8485 samplesize: 16,
8486 // data is the frame without its header
8487 data: buffer.subarray(i + 7 + protectionSkipBytes, i + frameLength)
8488 });
8489 frameNum++;
8490 i += frameLength;
8491 }
8492
8493 if (typeof skip === 'number') {
8494 this.skipWarn_(skip, i);
8495 skip = null;
8496 } // remove processed bytes from the buffer.
8497
8498
8499 buffer = buffer.subarray(i);
8500 };
8501
8502 this.flush = function () {
8503 frameNum = 0;
8504 this.trigger('done');
8505 };
8506
8507 this.reset = function () {
8508 buffer = void 0;
8509 this.trigger('reset');
8510 };
8511
8512 this.endTimeline = function () {
8513 buffer = void 0;
8514 this.trigger('endedtimeline');
8515 };
8516 };
8517
8518 _AdtsStream.prototype = new stream();
8519 var adts = _AdtsStream;
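// Editor's illustrative sketch (not part of the original bundle): pulling
// the fields the push() handler reads out of a hypothetical 7-byte ADTS
// header (44.1kHz AAC-LC, no CRC).
var exampleAdtsHeader = [0xff, 0xf1, 0x50, 0x80, 0x1c, 0x7f, 0xfc];
var exampleAdtsFrameLength = (exampleAdtsHeader[3] & 0x03) << 11 | exampleAdtsHeader[4] << 3 | (exampleAdtsHeader[5] & 0xe0) >> 5; // 227 bytes, header included
var exampleAdtsSampleRate = ADTS_SAMPLING_FREQUENCIES$1[(exampleAdtsHeader[2] & 0x3c) >>> 2]; // 44100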
8520 /**
8521 * mux.js
8522 *
8523 * Copyright (c) Brightcove
8524 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
8525 */
8526
8527 var ExpGolomb;
8528 /**
8529 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
8530 * scheme used by h264.
8531 */
8532
8533 ExpGolomb = function ExpGolomb(workingData) {
8534 var // the number of bytes left to examine in workingData
8535 workingBytesAvailable = workingData.byteLength,
8536 // the current word being examined
8537 workingWord = 0,
8538 // :uint
8539 // the number of bits left to examine in the current word
8540 workingBitsAvailable = 0; // :uint;
8541 // ():uint
8542
8543 this.length = function () {
8544 return 8 * workingBytesAvailable;
8545 }; // ():uint
8546
8547
8548 this.bitsAvailable = function () {
8549 return 8 * workingBytesAvailable + workingBitsAvailable;
8550 }; // ():void
8551
8552
8553 this.loadWord = function () {
8554 var position = workingData.byteLength - workingBytesAvailable,
8555 workingBytes = new Uint8Array(4),
8556 availableBytes = Math.min(4, workingBytesAvailable);
8557
8558 if (availableBytes === 0) {
8559 throw new Error('no bytes available');
8560 }
8561
8562 workingBytes.set(workingData.subarray(position, position + availableBytes));
8563 workingWord = new DataView(workingBytes.buffer).getUint32(0); // track the amount of workingData that has been processed
8564
8565 workingBitsAvailable = availableBytes * 8;
8566 workingBytesAvailable -= availableBytes;
8567 }; // (count:int):void
8568
8569
8570 this.skipBits = function (count) {
8571 var skipBytes; // :int
8572
8573 if (workingBitsAvailable > count) {
8574 workingWord <<= count;
8575 workingBitsAvailable -= count;
8576 } else {
8577 count -= workingBitsAvailable;
8578 skipBytes = Math.floor(count / 8);
8579 count -= skipBytes * 8;
8580 workingBytesAvailable -= skipBytes;
8581 this.loadWord();
8582 workingWord <<= count;
8583 workingBitsAvailable -= count;
8584 }
8585 }; // (size:int):uint
8586
8587
8588 this.readBits = function (size) {
8589 var bits = Math.min(workingBitsAvailable, size),
8590 // :uint
8591 valu = workingWord >>> 32 - bits; // :uint
8592 // if size > 31, handle error
8593
8594 workingBitsAvailable -= bits;
8595
8596 if (workingBitsAvailable > 0) {
8597 workingWord <<= bits;
8598 } else if (workingBytesAvailable > 0) {
8599 this.loadWord();
8600 }
8601
8602 bits = size - bits;
8603
8604 if (bits > 0) {
8605 return valu << bits | this.readBits(bits);
8606 }
8607
8608 return valu;
8609 }; // ():uint
8610
8611
8612 this.skipLeadingZeros = function () {
8613 var leadingZeroCount; // :uint
8614
8615 for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
8616 if ((workingWord & 0x80000000 >>> leadingZeroCount) !== 0) {
8617 // the first bit of working word is 1
8618 workingWord <<= leadingZeroCount;
8619 workingBitsAvailable -= leadingZeroCount;
8620 return leadingZeroCount;
8621 }
8622 } // we exhausted workingWord and still have not found a 1
8623
8624
8625 this.loadWord();
8626 return leadingZeroCount + this.skipLeadingZeros();
8627 }; // ():void
8628
8629
8630 this.skipUnsignedExpGolomb = function () {
8631 this.skipBits(1 + this.skipLeadingZeros());
8632 }; // ():void
8633
8634
8635 this.skipExpGolomb = function () {
8636 this.skipBits(1 + this.skipLeadingZeros());
8637 }; // ():uint
8638
8639
8640 this.readUnsignedExpGolomb = function () {
8641 var clz = this.skipLeadingZeros(); // :uint
8642
8643 return this.readBits(clz + 1) - 1;
8644 }; // ():int
8645
8646
8647 this.readExpGolomb = function () {
8648 var valu = this.readUnsignedExpGolomb(); // :int
8649
8650 if (0x01 & valu) {
8651 // the number is odd if the low order bit is set
8652 return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
8653 }
8654
8655 return -1 * (valu >>> 1); // divide by two then make it negative
8656 }; // Some convenience functions
8657 // :Boolean
8658
8659
8660 this.readBoolean = function () {
8661 return this.readBits(1) === 1;
8662 }; // ():int
8663
8664
8665 this.readUnsignedByte = function () {
8666 return this.readBits(8);
8667 };
8668
8669 this.loadWord();
8670 };
8671
8672 var expGolomb = ExpGolomb;
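// Editor's illustrative sketch (not part of the original bundle): the byte
// 0x28 is the bit string 00101000; its leading Exp-Golomb code 00101 has
// two leading zeros, so three bits are read (0b101 = 5) and 1 subtracted.
var exampleEg = new ExpGolomb(new Uint8Array([0x28, 0x00, 0x00, 0x00]));
var exampleCodeNum = exampleEg.readUnsignedExpGolomb(); // 4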
8673
8674 var _H264Stream, _NalByteStream;
8675
8676 var PROFILES_WITH_OPTIONAL_SPS_DATA;
8677 /**
8678 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
8679 */
8680
8681 _NalByteStream = function NalByteStream() {
8682 var syncPoint = 0,
8683 i,
8684 buffer;
8685
8686 _NalByteStream.prototype.init.call(this);
8687 /*
8688 * Scans a byte stream and triggers a data event with the NAL units found.
8689 * @param {Object} data Event received from H264Stream
8690 * @param {Uint8Array} data.data The h264 byte stream to be scanned
8691 *
8692 * @see H264Stream.push
8693 */
8694
8695
8696 this.push = function (data) {
8697 var swapBuffer;
8698
8699 if (!buffer) {
8700 buffer = data.data;
8701 } else {
8702 swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
8703 swapBuffer.set(buffer);
8704 swapBuffer.set(data.data, buffer.byteLength);
8705 buffer = swapBuffer;
8706 }
8707
8708 var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B
8709 // scan for NAL unit boundaries
8710 // a match looks like this:
8711 // 0 0 1 .. NAL .. 0 0 1
8712 // ^ sync point ^ i
8713 // or this:
8714 // 0 0 1 .. NAL .. 0 0 0
8715 // ^ sync point ^ i
8716 // advance the sync point to a NAL start, if necessary
8717
8718 for (; syncPoint < len - 3; syncPoint++) {
8719 if (buffer[syncPoint + 2] === 1) {
8720 // the sync point is properly aligned
8721 i = syncPoint + 5;
8722 break;
8723 }
8724 }
8725
8726 while (i < len) {
8727 // look at the current byte to determine if we've hit the end of
8728 // a NAL unit boundary
8729 switch (buffer[i]) {
8730 case 0:
8731 // skip past non-sync sequences
8732 if (buffer[i - 1] !== 0) {
8733 i += 2;
8734 break;
8735 } else if (buffer[i - 2] !== 0) {
8736 i++;
8737 break;
8738 } // deliver the NAL unit if it isn't empty
8739
8740
8741 if (syncPoint + 3 !== i - 2) {
8742 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
8743 } // drop trailing zeroes
8744
8745
8746 do {
8747 i++;
8748 } while (i < len && buffer[i] !== 1); // check the bound before reading
8749
8750 syncPoint = i - 2;
8751 i += 3;
8752 break;
8753
8754 case 1:
8755 // skip past non-sync sequences
8756 if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
8757 i += 3;
8758 break;
8759 } // deliver the NAL unit
8760
8761
8762 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
8763 syncPoint = i - 2;
8764 i += 3;
8765 break;
8766
8767 default:
8768 // the current byte isn't a one or zero, so it cannot be part
8769 // of a sync sequence
8770 i += 3;
8771 break;
8772 }
8773 } // filter out the NAL units that were delivered
8774
8775
8776 buffer = buffer.subarray(syncPoint);
8777 i -= syncPoint;
8778 syncPoint = 0;
8779 };
8780
8781 this.reset = function () {
8782 buffer = null;
8783 syncPoint = 0;
8784 this.trigger('reset');
8785 };
8786
8787 this.flush = function () {
8788 // deliver the last buffered NAL unit
8789 if (buffer && buffer.byteLength > 3) {
8790 this.trigger('data', buffer.subarray(syncPoint + 3));
8791 } // reset the stream state
8792
8793
8794 buffer = null;
8795 syncPoint = 0;
8796 this.trigger('done');
8797 };
8798
8799 this.endTimeline = function () {
8800 this.flush();
8801 this.trigger('endedtimeline');
8802 };
8803 };
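// Illustrative usage, not part of this bundle (left commented out because the
// prototype is only assigned just below). NalByteStream splits an Annex B
// byte stream on 00 00 01 start codes and emits each NAL unit without its
// start code. For the hypothetical input below, the first 'data' event
// carries [0x09, 0xf0] and flush() then emits the final [0x65]:
//
// var demoNalStream = new _NalByteStream();
// demoNalStream.on('data', function(nal) { /* handle one NAL unit */ });
// demoNalStream.push({ data: new Uint8Array([0, 0, 1, 0x09, 0xf0, 0, 0, 1, 0x65]) });
// demoNalStream.flush();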
8804
8805 _NalByteStream.prototype = new stream(); // values of profile_idc that indicate additional fields are included in the SPS
8806 // see Recommendation ITU-T H.264 (4/2013),
8807 // 7.3.2.1.1 Sequence parameter set data syntax
8808
8809 PROFILES_WITH_OPTIONAL_SPS_DATA = {
8810 100: true,
8811 110: true,
8812 122: true,
8813 244: true,
8814 44: true,
8815 83: true,
8816 86: true,
8817 118: true,
8818 128: true,
8819 // TODO: the three profiles below don't
8820 // appear to have sps data in the specification anymore?
8821 138: true,
8822 139: true,
8823 134: true
8824 };
8825 /**
8826 * Accepts input from an ElementaryStream and produces H.264 NAL unit data
8827 * events.
8828 */
8829
8830 _H264Stream = function H264Stream() {
8831 var nalByteStream = new _NalByteStream(),
8832 self,
8833 trackId,
8834 currentPts,
8835 currentDts,
8836 discardEmulationPreventionBytes,
8837 readSequenceParameterSet,
8838 skipScalingList;
8839
8840 _H264Stream.prototype.init.call(this);
8841
8842 self = this;
8843 /*
8844 * Pushes a packet from a stream onto the NalByteStream
8845 *
8846 * @param {Object} packet - A packet received from a stream
8847 * @param {Uint8Array} packet.data - The raw bytes of the packet
8848 * @param {Number} packet.dts - Decode timestamp of the packet
8849 * @param {Number} packet.pts - Presentation timestamp of the packet
8850 * @param {Number} packet.trackId - The id of the h264 track this packet came from
8851 * @param {('video'|'audio')} packet.type - The type of packet
8852 *
8853 */
8854
8855 this.push = function (packet) {
8856 if (packet.type !== 'video') {
8857 return;
8858 }
8859
8860 trackId = packet.trackId;
8861 currentPts = packet.pts;
8862 currentDts = packet.dts;
8863 nalByteStream.push(packet);
8864 };
8865 /*
8866 * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
8867 * for the NALUs to the next stream component.
8868 * Also, preprocess caption and sequence parameter NALUs.
8869 *
8870 * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
8871 * @see NalByteStream.push
8872 */
8873
8874
8875 nalByteStream.on('data', function (data) {
8876 var event = {
8877 trackId: trackId,
8878 pts: currentPts,
8879 dts: currentDts,
8880 data: data,
8881 nalUnitTypeCode: data[0] & 0x1f
8882 };
8883
8884 switch (event.nalUnitTypeCode) {
8885 case 0x05:
8886 event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
8887 break;
8888
8889 case 0x06:
8890 event.nalUnitType = 'sei_rbsp';
8891 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
8892 break;
8893
8894 case 0x07:
8895 event.nalUnitType = 'seq_parameter_set_rbsp';
8896 event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
8897 event.config = readSequenceParameterSet(event.escapedRBSP);
8898 break;
8899
8900 case 0x08:
8901 event.nalUnitType = 'pic_parameter_set_rbsp';
8902 break;
8903
8904 case 0x09:
8905 event.nalUnitType = 'access_unit_delimiter_rbsp';
8906 break;
8907 } // This triggers data on the H264Stream
8908
8909
8910 self.trigger('data', event);
8911 });
8912 nalByteStream.on('done', function () {
8913 self.trigger('done');
8914 });
8915 nalByteStream.on('partialdone', function () {
8916 self.trigger('partialdone');
8917 });
8918 nalByteStream.on('reset', function () {
8919 self.trigger('reset');
8920 });
8921 nalByteStream.on('endedtimeline', function () {
8922 self.trigger('endedtimeline');
8923 });
8924
8925 this.flush = function () {
8926 nalByteStream.flush();
8927 };
8928
8929 this.partialFlush = function () {
8930 nalByteStream.partialFlush();
8931 };
8932
8933 this.reset = function () {
8934 nalByteStream.reset();
8935 };
8936
8937 this.endTimeline = function () {
8938 nalByteStream.endTimeline();
8939 };
8940 /**
8941 * Advance the ExpGolomb decoder past a scaling list. The scaling
8942 * list is optionally transmitted as part of a sequence parameter
8943 * set and is not relevant to transmuxing.
8944 * @param count {number} the number of entries in this scaling list
8945 * @param expGolombDecoder {object} an ExpGolomb pointed to the
8946 * start of a scaling list
8947 * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
8948 */
8949
8950
8951 skipScalingList = function skipScalingList(count, expGolombDecoder) {
8952 var lastScale = 8,
8953 nextScale = 8,
8954 j,
8955 deltaScale;
8956
8957 for (j = 0; j < count; j++) {
8958 if (nextScale !== 0) {
8959 deltaScale = expGolombDecoder.readExpGolomb();
8960 nextScale = (lastScale + deltaScale + 256) % 256;
8961 }
8962
8963 lastScale = nextScale === 0 ? lastScale : nextScale;
8964 }
8965 };
8966 /**
8967 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
8968 * Sequence Payload"
8969 * @param data {Uint8Array} the bytes of a RBSP from a NAL
8970 * unit
8971 * @return {Uint8Array} the RBSP without any Emulation
8972 * Prevention Bytes
8973 */
8974
8975
8976 discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
8977 var length = data.byteLength,
8978 emulationPreventionBytesPositions = [],
8979 i = 1,
8980 newLength,
8981 newData; // Find all `Emulation Prevention Bytes`
8982
8983 while (i < length - 2) {
8984 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
8985 emulationPreventionBytesPositions.push(i + 2);
8986 i += 2;
8987 } else {
8988 i++;
8989 }
8990 } // If no Emulation Prevention Bytes were found just return the original
8991 // array
8992
8993
8994 if (emulationPreventionBytesPositions.length === 0) {
8995 return data;
8996 } // Create a new array to hold the NAL unit data
8997
8998
8999 newLength = length - emulationPreventionBytesPositions.length;
9000 newData = new Uint8Array(newLength);
9001 var sourceIndex = 0;
9002
9003 for (i = 0; i < newLength; sourceIndex++, i++) {
9004 if (sourceIndex === emulationPreventionBytesPositions[0]) {
9005 // Skip this byte
9006 sourceIndex++; // Remove this position index
9007
9008 emulationPreventionBytesPositions.shift();
9009 }
9010
9011 newData[i] = data[sourceIndex];
9012 }
9013
9014 return newData;
9015 };
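// Worked example (illustrative): given the escaped RBSP bytes
// [0x10, 0x00, 0x00, 0x03, 0x01], the 0x03 at index 3 is an emulation
// prevention byte, so discardEmulationPreventionBytes returns
// [0x10, 0x00, 0x00, 0x01]. Note the scan starts at i = 1, so a
// 00 00 03 pattern cannot begin at the very first RBSP byte.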
9016 /**
9017 * Read a sequence parameter set and return some interesting video
9018 * properties. A sequence parameter set is the H264 metadata that
9019 * describes the properties of upcoming video frames.
9020 * @param data {Uint8Array} the bytes of a sequence parameter set
9021 * @return {object} an object with configuration parsed from the
9022 * sequence parameter set, including the dimensions of the
9023 * associated video frames.
9024 */
9025
9026
9027 readSequenceParameterSet = function readSequenceParameterSet(data) {
9028 var frameCropLeftOffset = 0,
9029 frameCropRightOffset = 0,
9030 frameCropTopOffset = 0,
9031 frameCropBottomOffset = 0,
9032 expGolombDecoder,
9033 profileIdc,
9034 levelIdc,
9035 profileCompatibility,
9036 chromaFormatIdc,
9037 picOrderCntType,
9038 numRefFramesInPicOrderCntCycle,
9039 picWidthInMbsMinus1,
9040 picHeightInMapUnitsMinus1,
9041 frameMbsOnlyFlag,
9042 scalingListCount,
9043 sarRatio = [1, 1],
9044 aspectRatioIdc,
9045 i;
9046 expGolombDecoder = new expGolomb(data);
9047 profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
9048
9049 profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
9050
9051 levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
9052
9053 expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
9054 // some profiles have more optional data we don't need
9055
9056 if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
9057 chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
9058
9059 if (chromaFormatIdc === 3) {
9060 expGolombDecoder.skipBits(1); // separate_colour_plane_flag
9061 }
9062
9063 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
9064
9065 expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
9066
9067 expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
9068
9069 if (expGolombDecoder.readBoolean()) {
9070 // seq_scaling_matrix_present_flag
9071 scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
9072
9073 for (i = 0; i < scalingListCount; i++) {
9074 if (expGolombDecoder.readBoolean()) {
9075 // seq_scaling_list_present_flag[ i ]
9076 if (i < 6) {
9077 skipScalingList(16, expGolombDecoder);
9078 } else {
9079 skipScalingList(64, expGolombDecoder);
9080 }
9081 }
9082 }
9083 }
9084 }
9085
9086 expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
9087
9088 picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();
9089
9090 if (picOrderCntType === 0) {
9091 expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
9092 } else if (picOrderCntType === 1) {
9093 expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
9094
9095 expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
9096
9097 expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
9098
9099 numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
9100
9101 for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
9102 expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
9103 }
9104 }
9105
9106 expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
9107
9108 expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag
9109
9110 picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
9111 picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
9112 frameMbsOnlyFlag = expGolombDecoder.readBits(1);
9113
9114 if (frameMbsOnlyFlag === 0) {
9115 expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
9116 }
9117
9118 expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
9119
9120 if (expGolombDecoder.readBoolean()) {
9121 // frame_cropping_flag
9122 frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
9123 frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
9124 frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
9125 frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
9126 }
9127
9128 if (expGolombDecoder.readBoolean()) {
9129 // vui_parameters_present_flag
9130 if (expGolombDecoder.readBoolean()) {
9131 // aspect_ratio_info_present_flag
9132 aspectRatioIdc = expGolombDecoder.readUnsignedByte();
9133
9134 switch (aspectRatioIdc) {
9135 case 1:
9136 sarRatio = [1, 1];
9137 break;
9138
9139 case 2:
9140 sarRatio = [12, 11];
9141 break;
9142
9143 case 3:
9144 sarRatio = [10, 11];
9145 break;
9146
9147 case 4:
9148 sarRatio = [16, 11];
9149 break;
9150
9151 case 5:
9152 sarRatio = [40, 33];
9153 break;
9154
9155 case 6:
9156 sarRatio = [24, 11];
9157 break;
9158
9159 case 7:
9160 sarRatio = [20, 11];
9161 break;
9162
9163 case 8:
9164 sarRatio = [32, 11];
9165 break;
9166
9167 case 9:
9168 sarRatio = [80, 33];
9169 break;
9170
9171 case 10:
9172 sarRatio = [18, 11];
9173 break;
9174
9175 case 11:
9176 sarRatio = [15, 11];
9177 break;
9178
9179 case 12:
9180 sarRatio = [64, 33];
9181 break;
9182
9183 case 13:
9184 sarRatio = [160, 99];
9185 break;
9186
9187 case 14:
9188 sarRatio = [4, 3];
9189 break;
9190
9191 case 15:
9192 sarRatio = [3, 2];
9193 break;
9194
9195 case 16:
9196 sarRatio = [2, 1];
9197 break;
9198
9199 case 255:
9200 {
9201 sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
9202 break;
9203 }
9204 }
9205
9209 }
9210 }
9211
9212 return {
9213 profileIdc: profileIdc,
9214 levelIdc: levelIdc,
9215 profileCompatibility: profileCompatibility,
9216 width: (picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2,
9217 height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
9218 // sar is sample aspect ratio
9219 sarRatio: sarRatio
9220 };
9221 };
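// Worked example (illustrative values): a typical 1920x1080 progressive SPS
// carries pic_width_in_mbs_minus1 = 119, pic_height_in_map_units_minus1 = 67,
// frame_mbs_only_flag = 1, no top/left/right cropping and
// frame_crop_bottom_offset = 4, giving
// width = (119 + 1) * 16 = 1920 and
// height = (2 - 1) * (67 + 1) * 16 - 0 - 4 * 2 = 1080.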
9222 };
9223
9224 _H264Stream.prototype = new stream();
9225 var h264 = {
9226 H264Stream: _H264Stream,
9227 NalByteStream: _NalByteStream
9228 };
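// Illustrative usage, not part of this bundle: H264Stream consumes video PES
// packets and emits one 'data' event per NAL unit, tagging each event with a
// human-readable nalUnitType and, for an SPS, a parsed config object:
var demoH264Stream = new h264.H264Stream();
demoH264Stream.on('data', function(nalUnit) {
  // nalUnit.nalUnitType is e.g. 'seq_parameter_set_rbsp';
  // nalUnit.config (SPS only) holds { width, height, profileIdc, ... }.
});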
9229 /**
9230 * mux.js
9231 *
9232 * Copyright (c) Brightcove
9233 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
9234 *
9235 * Utilities to detect basic properties and metadata about AAC data.
9236 */
9237
9238 var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
9239
9240 var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
9241 var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9],
9242 flags = header[byteIndex + 5],
9243 footerPresent = (flags & 16) >> 4; // if we get a negative returnSize clamp it to 0
9244
9245 returnSize = returnSize >= 0 ? returnSize : 0;
9246
9247 if (footerPresent) {
9248 return returnSize + 20;
9249 }
9250
9251 return returnSize + 10;
9252 };
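// Worked example (illustrative): an ID3v2 header whose four syncsafe size
// bytes are 0x00 0x00 0x02 0x01 encodes (2 << 7) | 1 = 257 payload bytes,
// so parseId3TagSize returns 257 + 10 = 267, or 257 + 20 = 277 when the
// footer flag (bit 4 of the flags byte) is set.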
9253
9254 var getId3Offset = function getId3Offset(data, offset) {
9255 if (data.length - offset < 10 || data[offset] !== 'I'.charCodeAt(0) || data[offset + 1] !== 'D'.charCodeAt(0) || data[offset + 2] !== '3'.charCodeAt(0)) {
9256 return offset;
9257 }
9258
9259 offset += parseId3TagSize(data, offset);
9260 return getId3Offset(data, offset);
9261 }; // TODO: use vhs-utils
9262
9263
9264 var isLikelyAacData$1 = function isLikelyAacData(data) {
9265 var offset = getId3Offset(data, 0);
9266 return data.length >= offset + 2 && (data[offset] & 0xFF) === 0xFF && (data[offset + 1] & 0xF0) === 0xF0 && // verify that the 2 layer bits are 0, aka this
9267 // is not mp3 data but aac data.
9268 (data[offset + 1] & 0x16) === 0x10;
9269 };
9270
9271 var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
9272 return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
9273 }; // return a percent-encoded representation of the specified byte range
9274 // @see http://en.wikipedia.org/wiki/Percent-encoding
9275
9276
9277 var percentEncode = function percentEncode(bytes, start, end) {
9278 var i,
9279 result = '';
9280
9281 for (i = start; i < end; i++) {
9282 result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
9283 }
9284
9285 return result;
9286 }; // return the string representation of the specified byte range,
9287 // interpreted as ISO-8859-1.
9288
9289
9290 var parseIso88591 = function parseIso88591(bytes, start, end) {
9291 return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
9292 };
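// For example (illustrative): percentEncode([0x48, 0x69], 0, 2) returns
// "%48%69", which parseIso88591 unescapes to the string "Hi".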
9293
9294 var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
9295 var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
9296 middle = header[byteIndex + 4] << 3,
9297 highTwo = (header[byteIndex + 3] & 0x3) << 11; // parenthesized: & binds looser than <<, so the mask must be applied first
9298 return highTwo | middle | lowThree;
9299 };
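// Worked example (illustrative): with header bytes 0x01, 0x40 and 0x20 at
// byteIndex + 3/4/5, the 13-bit ADTS frame_length is
// ((0x01 & 0x3) << 11) | (0x40 << 3) | ((0x20 & 0xe0) >> 5)
// = 2048 + 512 + 1 = 2561 bytes.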
9300
9301 var parseType$2 = function parseType(header, byteIndex) {
9302 if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
9303 return 'timed-metadata';
9304 } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) { // the mask must be parenthesized; === binds tighter than &
9305 return 'audio';
9306 }
9307
9308 return null;
9309 };
9310
9311 var parseSampleRate = function parseSampleRate(packet) {
9312 var i = 0;
9313
9314 while (i + 5 < packet.length) {
9315 if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
9316 // If a valid header was not found, jump one forward and attempt to
9317 // find a valid ADTS header starting at the next byte
9318 i++;
9319 continue;
9320 }
9321
9322 return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
9323 }
9324
9325 return null;
9326 };
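// For example (illustrative): if the third byte of an ADTS header is 0x50,
// the sampling_frequency_index is (0x50 & 0x3c) >>> 2 = 4, so parseSampleRate
// returns ADTS_SAMPLING_FREQUENCIES[4] = 44100.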
9327
9328 var parseAacTimestamp = function parseAacTimestamp(packet) {
9329 var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag
9330
9331 frameStart = 10;
9332
9333 if (packet[5] & 0x40) {
9334 // advance the frame start past the extended header
9335 frameStart += 4; // header size field
9336
9337 frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
9338 } // parse one or more ID3 frames
9339 // http://id3.org/id3v2.3.0#ID3v2_frame_overview
9340
9341
9342 do {
9343 // determine the number of bytes in this frame
9344 frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));
9345
9346 if (frameSize < 1) {
9347 return null;
9348 }
9349
9350 frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);
9351
9352 if (frameHeader === 'PRIV') {
9353 frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);
9354
9355 for (var i = 0; i < frame.byteLength; i++) {
9356 if (frame[i] === 0) {
9357 var owner = parseIso88591(frame, 0, i);
9358
9359 if (owner === 'com.apple.streaming.transportStreamTimestamp') {
9360 var d = frame.subarray(i + 1);
9361 var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
9362 size *= 4;
9363 size += d[7] & 0x03;
9364 return size;
9365 }
9366
9367 break;
9368 }
9369 }
9370 }
9371
9372 frameStart += 10; // advance past the frame header
9373
9374 frameStart += frameSize; // advance past the frame body
9375 } while (frameStart < packet.byteLength);
9376
9377 return null;
9378 };
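// Note (illustrative): the PRIV frame payload above carries a 33-bit MPEG-TS
// timestamp. JavaScript bitwise operators truncate to 32 bits, so the code
// assembles the top 31 bits, multiplies by 4, then adds back the low 2 bits
// of d[7] to recover the full value without overflow.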
9379
9380 var utils = {
9381 isLikelyAacData: isLikelyAacData$1,
9382 parseId3TagSize: parseId3TagSize,
9383 parseAdtsSize: parseAdtsSize,
9384 parseType: parseType$2,
9385 parseSampleRate: parseSampleRate,
9386 parseAacTimestamp: parseAacTimestamp
9387 };
9388
9389 var _AacStream;
9390 /**
9391 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
9392 */
9393
9394
9395 _AacStream = function AacStream() {
9396 var everything = new Uint8Array(),
9397 timeStamp = 0;
9398
9399 _AacStream.prototype.init.call(this);
9400
9401 this.setTimestamp = function (timestamp) {
9402 timeStamp = timestamp;
9403 };
9404
9405 this.push = function (bytes) {
9406 var frameSize = 0,
9407 byteIndex = 0,
9408 bytesLeft,
9409 chunk,
9410 packet,
9411 tempLength, swapBuffer; // If there are bytes remaining from the last segment, prepend them to the
9412 // bytes that were pushed in
9413
9414 if (everything.length) {
9415 tempLength = everything.length;
9416 swapBuffer = new Uint8Array(bytes.byteLength + tempLength);
9417 swapBuffer.set(everything.subarray(0, tempLength)); // copy the leftover bytes before adopting the new buffer
9418 swapBuffer.set(bytes, tempLength);
9419 everything = swapBuffer;
9420 } else {
9421 everything = bytes;
9422 }
9423 while (everything.length - byteIndex >= 3) {
9424 if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
9425 // Exit early because we don't have enough to parse
9426 // the ID3 tag header
9427 if (everything.length - byteIndex < 10) {
9428 break;
9429 } // check framesize
9430
9431
9432 frameSize = utils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
9433 // to emit a full packet
9434 // Add to byteIndex to support multiple ID3 tags in sequence
9435
9436 if (byteIndex + frameSize > everything.length) {
9437 break;
9438 }
9439
9440 chunk = {
9441 type: 'timed-metadata',
9442 data: everything.subarray(byteIndex, byteIndex + frameSize)
9443 };
9444 this.trigger('data', chunk);
9445 byteIndex += frameSize;
9446 continue;
9447 } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
9448 // Exit early because we don't have enough to parse
9449 // the ADTS frame header
9450 if (everything.length - byteIndex < 7) {
9451 break;
9452 }
9453
9454 frameSize = utils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
9455 // to emit a full packet
9456
9457 if (byteIndex + frameSize > everything.length) {
9458 break;
9459 }
9460
9461 packet = {
9462 type: 'audio',
9463 data: everything.subarray(byteIndex, byteIndex + frameSize),
9464 pts: timeStamp,
9465 dts: timeStamp
9466 };
9467 this.trigger('data', packet);
9468 byteIndex += frameSize;
9469 continue;
9470 }
9471
9472 byteIndex++;
9473 }
9474
9475 bytesLeft = everything.length - byteIndex;
9476
9477 if (bytesLeft > 0) {
9478 everything = everything.subarray(byteIndex);
9479 } else {
9480 everything = new Uint8Array();
9481 }
9482 };
9483
9484 this.reset = function () {
9485 everything = new Uint8Array();
9486 this.trigger('reset');
9487 };
9488
9489 this.endTimeline = function () {
9490 everything = new Uint8Array();
9491 this.trigger('endedtimeline');
9492 };
9493 };
9494
9495 _AacStream.prototype = new stream();
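// Illustrative usage, not part of this bundle: AacStream splits a raw AAC
// byte stream into ID3 'timed-metadata' chunks and ADTS 'audio' packets,
// stamping audio packets with the timestamp most recently supplied via
// setTimestamp():
//
// var demoAacStream = new _AacStream();
// demoAacStream.on('data', function(chunk) {
//   // chunk.type is 'timed-metadata' or 'audio'
// });
// demoAacStream.push(aacBytes); // aacBytes: a hypothetical Uint8Array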
9496 var aac = _AacStream; // constants
9497
9498 var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];
9499 var audioProperties = AUDIO_PROPERTIES;
9500 var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];
9501 var videoProperties = VIDEO_PROPERTIES;
9502 var H264Stream = h264.H264Stream;
9503 var isLikelyAacData = utils.isLikelyAacData;
9504 var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS; // object types
9505
9506 var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
9507
9508 var retriggerForStream = function retriggerForStream(key, event) {
9509 event.stream = key;
9510 this.trigger('log', event);
9511 };
9512
9513 var addPipelineLogRetriggers = function addPipelineLogRetriggers(transmuxer, pipeline) {
9514 var keys = Object.keys(pipeline);
9515
9516 for (var i = 0; i < keys.length; i++) {
9517 var key = keys[i]; // skip non-stream keys and headOfPipeline
9518 // which is just a duplicate
9519
9520 if (key === 'headOfPipeline' || !pipeline[key].on) {
9521 continue;
9522 }
9523
9524 pipeline[key].on('log', retriggerForStream.bind(transmuxer, key));
9525 }
9526 };
9527 /**
9528 * Compare two arrays (even typed) for same-ness
9529 */
9530
9531
9532 var arrayEquals = function arrayEquals(a, b) {
9533 var i;
9534
9535 if (a.length !== b.length) {
9536 return false;
9537 } // compare the value of each element in the array
9538
9539
9540 for (i = 0; i < a.length; i++) {
9541 if (a[i] !== b[i]) {
9542 return false;
9543 }
9544 }
9545
9546 return true;
9547 };
9548
9549 var generateSegmentTimingInfo = function generateSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
9550 var ptsOffsetFromDts = startPts - startDts,
9551 decodeDuration = endDts - startDts,
9552 presentationDuration = endPts - startPts; // The PTS and DTS values are based on the actual stream times from the segment,
9553 // however, the player time values will reflect a start from the baseMediaDecodeTime.
9554 // In order to provide relevant values for the player times, base timing info on the
9555 // baseMediaDecodeTime and the DTS and PTS durations of the segment.
9556
9557 return {
9558 start: {
9559 dts: baseMediaDecodeTime,
9560 pts: baseMediaDecodeTime + ptsOffsetFromDts
9561 },
9562 end: {
9563 dts: baseMediaDecodeTime + decodeDuration,
9564 pts: baseMediaDecodeTime + presentationDuration
9565 },
9566 prependedContentDuration: prependedContentDuration,
9567 baseMediaDecodeTime: baseMediaDecodeTime
9568 };
9569 };
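// Worked example (illustrative): with baseMediaDecodeTime = 90000,
// startDts = 180000, startPts = 183000, endDts = 270000, endPts = 273000
// and no prepended content, the segment maps onto player time as
// { start: { dts: 90000, pts: 93000 },
//   end: { dts: 180000, pts: 183000 },
//   prependedContentDuration: 0, baseMediaDecodeTime: 90000 }.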
9570 /**
9571 * Constructs a single-track, ISO BMFF media segment from AAC data
9572 * events. The output of this stream can be fed to a SourceBuffer
9573 * configured with a suitable initialization segment.
9574 * @param track {object} track metadata configuration
9575 * @param options {object} transmuxer options object
9576 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
9577 * in the source; false to adjust the first segment to start at 0.
9578 */
9579
9580
9581 _AudioSegmentStream = function AudioSegmentStream(track, options) {
9582 var adtsFrames = [],
9583 sequenceNumber,
9584 earliestAllowedDts = 0,
9585 audioAppendStartTs = 0,
9586 videoBaseMediaDecodeTime = Infinity;
9587 options = options || {};
9588 sequenceNumber = options.firstSequenceNumber || 0;
9589
9590 _AudioSegmentStream.prototype.init.call(this);
9591
9592 this.push = function (data) {
9593 trackDecodeInfo.collectDtsInfo(track, data);
9594
9595 if (track) {
9596 audioProperties.forEach(function (prop) {
9597 track[prop] = data[prop];
9598 });
9599 } // buffer audio data until end() is called
9600
9601
9602 adtsFrames.push(data);
9603 };
9604
9605 this.setEarliestDts = function (earliestDts) {
9606 earliestAllowedDts = earliestDts;
9607 };
9608
9609 this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
9610 videoBaseMediaDecodeTime = baseMediaDecodeTime;
9611 };
9612
9613 this.setAudioAppendStart = function (timestamp) {
9614 audioAppendStartTs = timestamp;
9615 };
9616
9617 this.flush = function () {
9618 var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed; // return early if no audio data has been observed
9619
9620 if (adtsFrames.length === 0) {
9621 this.trigger('done', 'AudioSegmentStream');
9622 return;
9623 }
9624
9625 frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
9626 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps); // the duration of silence prefixed below is measured in video clock cycles rather than audio clock cycles
9627
9628 videoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
9629 // samples (that is, adts frames) in the audio data
9630
9631 track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to construct the mdat
9632
9633 mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
9634 adtsFrames = [];
9635 moof = mp4Generator.moof(sequenceNumber, [track]);
9636 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // bump the sequence number for next time
9637
9638 sequenceNumber++;
9639 boxes.set(moof);
9640 boxes.set(mdat, moof.byteLength);
9641 trackDecodeInfo.clearDtsInfo(track);
9642 frameDuration = Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / track.samplerate); // TODO this check was added to maintain backwards compatibility (particularly with
9643 // tests) on adding the timingInfo event. However, it seems unlikely that there's a
9644 // valid use-case where an init segment/data should be triggered without associated
9645 // frames. Leaving for now, but should be looked into.
9646
9647 if (frames.length) {
9648 segmentDuration = frames.length * frameDuration;
9649 this.trigger('segmentTimingInfo', generateSegmentTimingInfo( // The audio track's baseMediaDecodeTime is in audio clock cycles, but the
9650 // frame info is in video clock cycles. Convert to match expectation of
9651 // listeners (that all timestamps will be based on video clock cycles).
9652 clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate), // frame times are already in video clock, as is segment duration
9653 frames[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));
9654 this.trigger('timingInfo', {
9655 start: frames[0].pts,
9656 end: frames[0].pts + segmentDuration
9657 });
9658 }
9659
9660 this.trigger('data', {
9661 track: track,
9662 boxes: boxes
9663 });
9664 this.trigger('done', 'AudioSegmentStream');
9665 };
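// Worked example (illustrative): each AAC frame holds 1024 samples, so at a
// samplerate of 44100 the frameDuration computed in flush() above is
// Math.ceil(90000 * 1024 / 44100) = 2090 ticks of the 90 kHz clock, and a
// 43-frame segment reports a duration of 43 * 2090 = 89870 ticks (~1 second).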
9666
9667 this.reset = function () {
9668 trackDecodeInfo.clearDtsInfo(track);
9669 adtsFrames = [];
9670 this.trigger('reset');
9671 };
9672 };
9673
9674 _AudioSegmentStream.prototype = new stream();
9675 /**
9676 * Constructs a single-track, ISO BMFF media segment from H264 data
9677 * events. The output of this stream can be fed to a SourceBuffer
9678 * configured with a suitable initialization segment.
9679 * @param track {object} track metadata configuration
9680 * @param options {object} transmuxer options object
9681 * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
9682 * gopsToAlignWith list when attempting to align gop pts
9683 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
9684 * in the source; false to adjust the first segment to start at 0.
9685 */
9686
9687 _VideoSegmentStream = function VideoSegmentStream(track, options) {
9688 var sequenceNumber,
9689 nalUnits = [],
9690 gopsToAlignWith = [],
9691 config,
9692 pps;
9693 options = options || {};
9694 sequenceNumber = options.firstSequenceNumber || 0;
9695
9696 _VideoSegmentStream.prototype.init.call(this);
9697
9698 delete track.minPTS;
9699 this.gopCache_ = [];
9700 /**
9701 * Constructs a ISO BMFF segment given H264 nalUnits
9702 * @param {Object} nalUnit A data event representing a nalUnit
9703 * @param {String} nalUnit.nalUnitType
9704 * @param {Object} nalUnit.config Properties for a mp4 track
9705 * @param {Uint8Array} nalUnit.data The nalUnit bytes
9706 * @see lib/codecs/h264.js
9707 **/
9708
9709 this.push = function (nalUnit) {
9710 trackDecodeInfo.collectDtsInfo(track, nalUnit); // record the track config
9711
9712 if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
9713 config = nalUnit.config;
9714 track.sps = [nalUnit.data];
9715 videoProperties.forEach(function (prop) {
9716 track[prop] = config[prop];
9717 }, this);
9718 }
9719
9720 if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
9721 pps = nalUnit.data;
9722 track.pps = [nalUnit.data];
9723 } // buffer video until flush() is called
9724
9725
9726 nalUnits.push(nalUnit);
9727 };
9728 /**
9729 * Pass constructed ISO BMFF track and boxes on to the
9730 * next stream in the pipeline
9731 **/
9732
9733
9734 this.flush = function () {
9735 var frames,
9736 gopForFusion,
9737 gops,
9738 moof,
9739 mdat,
9740 boxes,
9741 prependedContentDuration = 0,
9742 firstGop,
9743 lastGop; // Throw away nalUnits at the start of the byte stream until
9744 // we find the first AUD
9745
9746 while (nalUnits.length) {
9747 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
9748 break;
9749 }
9750
9751 nalUnits.shift();
9752 } // Return early if no video data has been observed
9753
9754
9755 if (nalUnits.length === 0) {
9756 this.resetStream_();
9757 this.trigger('done', 'VideoSegmentStream');
9758 return;
9759 } // Organize the raw nal-units into arrays that represent
9760 // higher-level constructs such as frames and gops
9761 // (group-of-pictures)
9762
9763
9764 frames = frameUtils.groupNalsIntoFrames(nalUnits);
9765 gops = frameUtils.groupFramesIntoGops(frames); // If the first frame of this fragment is not a keyframe we have
9766 // a problem since MSE (on Chrome) requires a leading keyframe.
9767 //
9768 // We have two approaches to repairing this situation:
9769 // 1) GOP-FUSION:
9770 // This is where we keep track of the GOPS (group-of-pictures)
9771 // from previous fragments and attempt to find one that we can
9772 // prepend to the current fragment in order to create a valid
9773 // fragment.
9774 // 2) KEYFRAME-PULLING:
9775 // Here we search for the first keyframe in the fragment and
9776 // throw away all the frames between the start of the fragment
9777 // and that keyframe. We then extend the duration and pull the
9778 // PTS of the keyframe forward so that it covers the time range
9779 // of the frames that were disposed of.
9780 //
9781 // #1 is far preferable to #2, which can cause "stuttering" but
9782 // requires more things to be just right.
9783
9784 if (!gops[0][0].keyFrame) {
9785 // Search for a gop for fusion from our gopCache
9786 gopForFusion = this.getGopForFusion_(nalUnits[0], track);
9787
9788 if (gopForFusion) {
9789 // in order to provide more accurate timing information about the segment, save
9790 // the number of seconds prepended to the original segment due to GOP fusion
9791 prependedContentDuration = gopForFusion.duration;
9792 gops.unshift(gopForFusion); // Adjust Gops' metadata to account for the inclusion of the
9793 // new gop at the beginning
9794
9795 gops.byteLength += gopForFusion.byteLength;
9796 gops.nalCount += gopForFusion.nalCount;
9797 gops.pts = gopForFusion.pts;
9798 gops.dts = gopForFusion.dts;
9799 gops.duration += gopForFusion.duration;
9800 } else {
9801 // If we didn't find a candidate gop fall back to keyframe-pulling
9802 gops = frameUtils.extendFirstKeyFrame(gops);
9803 }
9804 } // Trim gops to align with gopsToAlignWith
9805
9806
9807 if (gopsToAlignWith.length) {
9808 var alignedGops;
9809
9810 if (options.alignGopsAtEnd) {
9811 alignedGops = this.alignGopsAtEnd_(gops);
9812 } else {
9813 alignedGops = this.alignGopsAtStart_(gops);
9814 }
9815
9816 if (!alignedGops) {
9817 // save all the nals in the last GOP into the gop cache
9818 this.gopCache_.unshift({
9819 gop: gops.pop(),
9820 pps: track.pps,
9821 sps: track.sps
9822 }); // Keep a maximum of 6 GOPs in the cache
9823
9824 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
9825
9826 nalUnits = []; // return early; no gops can be aligned with the desired gopsToAlignWith
9827
9828 this.resetStream_();
9829 this.trigger('done', 'VideoSegmentStream');
9830 return;
9831 } // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
9832 // when recalculated before sending off to CoalesceStream
9833
9834
9835 trackDecodeInfo.clearDtsInfo(track);
9836 gops = alignedGops;
9837 }
9838
9839 trackDecodeInfo.collectDtsInfo(track, gops); // First, we have to build the index from byte locations to
9840 // samples (that is, frames) in the video data
9841
9842 track.samples = frameUtils.generateSampleTable(gops); // Concatenate the video data and construct the mdat
9843
9844 mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));
9845 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
9846 this.trigger('processedGopsInfo', gops.map(function (gop) {
9847 return {
9848 pts: gop.pts,
9849 dts: gop.dts,
9850 byteLength: gop.byteLength
9851 };
9852 }));
9853 firstGop = gops[0];
9854 lastGop = gops[gops.length - 1];
9855 this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));
9856 this.trigger('timingInfo', {
9857 start: gops[0].pts,
9858 end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
9859 }); // save all the nals in the last GOP into the gop cache
9860
9861 this.gopCache_.unshift({
9862 gop: gops.pop(),
9863 pps: track.pps,
9864 sps: track.sps
9865 }); // Keep a maximum of 6 GOPs in the cache
9866
9867 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
9868
9869 nalUnits = [];
9870 this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
9871 this.trigger('timelineStartInfo', track.timelineStartInfo);
9872 moof = mp4Generator.moof(sequenceNumber, [track]); // it would be great to allocate this array up front instead of
9873 // throwing away hundreds of media segment fragments
9874
9875 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // Bump the sequence number for next time
9876
9877 sequenceNumber++;
9878 boxes.set(moof);
9879 boxes.set(mdat, moof.byteLength);
9880 this.trigger('data', {
9881 track: track,
9882 boxes: boxes
9883 });
9884 this.resetStream_(); // Continue with the flush process now
9885
9886 this.trigger('done', 'VideoSegmentStream');
9887 };
9888
9889 this.reset = function () {
9890 this.resetStream_();
9891 nalUnits = [];
9892 this.gopCache_.length = 0;
9893 gopsToAlignWith.length = 0;
9894 this.trigger('reset');
9895 };
9896
9897 this.resetStream_ = function () {
9898 trackDecodeInfo.clearDtsInfo(track); // reset config and pps because they may differ across segments
9899 // for instance, when we are rendition switching
9900
9901 config = undefined;
9902 pps = undefined;
9903 }; // Search for a candidate Gop for gop-fusion from the gop cache and
9904 // return it or return null if no good candidate was found
9905
9906
9907 this.getGopForFusion_ = function (nalUnit) {
9908 var halfSecond = 45000,
9909 // half a second in a 90kHz clock
9910 allowableOverlap = 10000,
9911 // About 3 frames @ 30fps
9912 nearestDistance = Infinity,
9913 dtsDistance,
9914 nearestGopObj,
9915 currentGop,
9916 currentGopObj,
9917 i; // Search for the GOP nearest to the beginning of this nal unit
9918
9919 for (i = 0; i < this.gopCache_.length; i++) {
9920 currentGopObj = this.gopCache_[i];
9921 currentGop = currentGopObj.gop; // Reject Gops with different SPS or PPS
9922
9923 if (!(track.pps && arrayEquals(track.pps[0], currentGopObj.pps[0])) || !(track.sps && arrayEquals(track.sps[0], currentGopObj.sps[0]))) {
9924 continue;
9925 } // Reject Gops that would require a negative baseMediaDecodeTime
9926
9927
9928 if (currentGop.dts < track.timelineStartInfo.dts) {
9929 continue;
9930 } // The distance between the end of the gop and the start of the nalUnit
9931
9932
9933 dtsDistance = nalUnit.dts - currentGop.dts - currentGop.duration; // Only consider GOPS that start before the nal unit and end within
9934 // a half-second of the nal unit
9935
9936 if (dtsDistance >= -allowableOverlap && dtsDistance <= halfSecond) {
9937 // Always use the closest GOP we found if there is more than
9938 // one candidate
9939 if (!nearestGopObj || nearestDistance > dtsDistance) {
9940 nearestGopObj = currentGopObj;
9941 nearestDistance = dtsDistance;
9942 }
9943 }
9944 }
9945
9946 if (nearestGopObj) {
9947 return nearestGopObj.gop;
9948 }
9949
9950 return null;
9951 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
9952 // of gopsToAlignWith starting from the START of the list
9953
9954
9955 this.alignGopsAtStart_ = function (gops) {
9956 var alignIndex, gopIndex, align, gop, byteLength, nalCount, duration, alignedGops;
9957 byteLength = gops.byteLength;
9958 nalCount = gops.nalCount;
9959 duration = gops.duration;
9960 alignIndex = gopIndex = 0;
9961
9962 while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
9963 align = gopsToAlignWith[alignIndex];
9964 gop = gops[gopIndex];
9965
9966 if (align.pts === gop.pts) {
9967 break;
9968 }
9969
9970 if (gop.pts > align.pts) {
9971 // this current gop starts after the current gop we want to align on, so increment
9972 // align index
9973 alignIndex++;
9974 continue;
9975 } // current gop starts before the current gop we want to align on. so increment gop
9976 // index
9977
9978
9979 gopIndex++;
9980 byteLength -= gop.byteLength;
9981 nalCount -= gop.nalCount;
9982 duration -= gop.duration;
9983 }
9984
9985 if (gopIndex === 0) {
9986 // no gops to trim
9987 return gops;
9988 }
9989
9990 if (gopIndex === gops.length) {
9991 // all gops trimmed, skip appending all gops
9992 return null;
9993 }
9994
9995 alignedGops = gops.slice(gopIndex);
9996 alignedGops.byteLength = byteLength;
9997 alignedGops.duration = duration;
9998 alignedGops.nalCount = nalCount;
9999 alignedGops.pts = alignedGops[0].pts;
10000 alignedGops.dts = alignedGops[0].dts;
10001 return alignedGops;
10002 }; // trim gop list to the first gop found that has a matching pts with a gop in the list
10003 // of gopsToAlignWith starting from the END of the list
10004
10005
10006 this.alignGopsAtEnd_ = function (gops) {
10007 var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
10008 alignIndex = gopsToAlignWith.length - 1;
10009 gopIndex = gops.length - 1;
10010 alignEndIndex = null;
10011 matchFound = false;
10012
10013 while (alignIndex >= 0 && gopIndex >= 0) {
10014 align = gopsToAlignWith[alignIndex];
10015 gop = gops[gopIndex];
10016
10017 if (align.pts === gop.pts) {
10018 matchFound = true;
10019 break;
10020 }
10021
10022 if (align.pts > gop.pts) {
10023 alignIndex--;
10024 continue;
10025 }
10026
10027 if (alignIndex === gopsToAlignWith.length - 1) {
10028 // gop.pts is greater than the last alignment candidate. If no match is found
10029 // by the end of this loop, we still want to append gops that come after this
10030 // point
10031 alignEndIndex = gopIndex;
10032 }
10033
10034 gopIndex--;
10035 }
10036
10037 if (!matchFound && alignEndIndex === null) {
10038 return null;
10039 }
10040
10041 var trimIndex;
10042
10043 if (matchFound) {
10044 trimIndex = gopIndex;
10045 } else {
10046 trimIndex = alignEndIndex;
10047 }
10048
10049 if (trimIndex === 0) {
10050 return gops;
10051 }
10052
10053 var alignedGops = gops.slice(trimIndex);
10054 var metadata = alignedGops.reduce(function (total, gop) {
10055 total.byteLength += gop.byteLength;
10056 total.duration += gop.duration;
10057 total.nalCount += gop.nalCount;
10058 return total;
10059 }, {
10060 byteLength: 0,
10061 duration: 0,
10062 nalCount: 0
10063 });
10064 alignedGops.byteLength = metadata.byteLength;
10065 alignedGops.duration = metadata.duration;
10066 alignedGops.nalCount = metadata.nalCount;
10067 alignedGops.pts = alignedGops[0].pts;
10068 alignedGops.dts = alignedGops[0].dts;
10069 return alignedGops;
10070 };
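// Worked example (illustrative): with gopsToAlignWith pts = [10, 20, 30] and
// incoming gops at pts = [5, 20, 30], alignGopsAtStart_ trims the leading
// pts-5 gop and returns the gops at pts 20 and 30, while alignGopsAtEnd_
// matches from the tail and returns only the gops from the latest match
// onward, here the single gop at pts 30.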
10071
10072 this.alignGopsWith = function (newGopsToAlignWith) {
10073 gopsToAlignWith = newGopsToAlignWith;
10074 };
10075 };
10076
10077 _VideoSegmentStream.prototype = new stream();
10078 /**
10079 * A Stream that can combine multiple streams (i.e. audio & video)
10080 * into a single output segment for MSE. Also supports audio-only
10081 * and video-only streams.
10082 * @param options {object} transmuxer options object
10083 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
10084 * in the source; false to adjust the first segment to start at media timeline start.
10085 */
10086
10087 _CoalesceStream = function CoalesceStream(options, metadataStream) {
10088 // Number of Tracks per output segment
10089 // If greater than 1, we combine multiple
10090 // tracks into a single segment
10091 this.numberOfTracks = 0;
10092 this.metadataStream = metadataStream;
10093 options = options || {};
10094
10095 if (typeof options.remux !== 'undefined') {
10096 this.remuxTracks = !!options.remux;
10097 } else {
10098 this.remuxTracks = true;
10099 }
10100
10101 if (typeof options.keepOriginalTimestamps === 'boolean') {
10102 this.keepOriginalTimestamps = options.keepOriginalTimestamps;
10103 } else {
10104 this.keepOriginalTimestamps = false;
10105 }
10106
10107 this.pendingTracks = [];
10108 this.videoTrack = null;
10109 this.pendingBoxes = [];
10110 this.pendingCaptions = [];
10111 this.pendingMetadata = [];
10112 this.pendingBytes = 0;
10113 this.emittedTracks = 0;
10114
10115 _CoalesceStream.prototype.init.call(this); // Take output from multiple
10116
10117
10118 this.push = function (output) {
10119 // buffer incoming captions until the associated video segment
10120 // finishes
10121 if (output.text) {
10122 return this.pendingCaptions.push(output);
10123 } // buffer incoming id3 tags until the final flush
10124
10125
10126 if (output.frames) {
10127 return this.pendingMetadata.push(output);
10128 } // Add this track to the list of pending tracks and store
10129 // important information required for the construction of
10130 // the final segment
10131
10132
10133 this.pendingTracks.push(output.track);
10134 this.pendingBytes += output.boxes.byteLength; // TODO: is there an issue for this against chrome?
10135 // We unshift audio and push video because
10136 // as of Chrome 75 when switching from
10137 // one init segment to another if the video
10138 // mdat does not appear after the audio mdat
10139 // only audio will play for the duration of our transmux.
10140
10141 if (output.track.type === 'video') {
10142 this.videoTrack = output.track;
10143 this.pendingBoxes.push(output.boxes);
10144 }
10145
10146 if (output.track.type === 'audio') {
10147 this.audioTrack = output.track;
10148 this.pendingBoxes.unshift(output.boxes);
10149 }
10150 };
10151 };
10152
10153 _CoalesceStream.prototype = new stream();
10154
10155 _CoalesceStream.prototype.flush = function (flushSource) {
10156 var offset = 0,
10157 event = {
10158 captions: [],
10159 captionStreams: {},
10160 metadata: [],
10161 info: {}
10162 },
10163 caption,
10164 id3,
10165 initSegment,
10166 timelineStartPts = 0,
10167 i;
10168
10169 if (this.pendingTracks.length < this.numberOfTracks) {
10170 if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
10171 // Return because we haven't received a flush from a data-generating
10172 // portion of the segment (meaning that we have only received meta-data
10173 // or captions.)
10174 return;
10175 } else if (this.remuxTracks) {
10176 // Return until we have enough tracks from the pipeline to remux (if we
10177 // are remuxing audio and video into a single MP4)
10178 return;
10179 } else if (this.pendingTracks.length === 0) {
10180 // In the case where we receive a flush without any data having been
10181 // received we consider it an emitted track for the purposes of coalescing
10182 // `done` events.
10183 // We do this for the case where there is an audio and video track in the
10184 // segment but no audio data. (seen in several playlists with alternate
10185 // audio tracks and no audio present in the main TS segments.)
10186 this.emittedTracks++;
10187
10188 if (this.emittedTracks >= this.numberOfTracks) {
10189 this.trigger('done');
10190 this.emittedTracks = 0;
10191 }
10192
10193 return;
10194 }
10195 }
10196
10197 if (this.videoTrack) {
10198 timelineStartPts = this.videoTrack.timelineStartInfo.pts;
10199 videoProperties.forEach(function (prop) {
10200 event.info[prop] = this.videoTrack[prop];
10201 }, this);
10202 } else if (this.audioTrack) {
10203 timelineStartPts = this.audioTrack.timelineStartInfo.pts;
10204 audioProperties.forEach(function (prop) {
10205 event.info[prop] = this.audioTrack[prop];
10206 }, this);
10207 }
10208
10209 if (this.videoTrack || this.audioTrack) {
10210 if (this.pendingTracks.length === 1) {
10211 event.type = this.pendingTracks[0].type;
10212 } else {
10213 event.type = 'combined';
10214 }
10215
10216 this.emittedTracks += this.pendingTracks.length;
10217 initSegment = mp4Generator.initSegment(this.pendingTracks); // Create a new typed array to hold the init segment
10218
10219 event.initSegment = new Uint8Array(initSegment.byteLength); // Create an init segment containing a moov
10220 // and track definitions
10221
10222 event.initSegment.set(initSegment); // Create a new typed array to hold the moof+mdats
10223
10224 event.data = new Uint8Array(this.pendingBytes); // Append each moof+mdat (one per track) together
10225
10226 for (i = 0; i < this.pendingBoxes.length; i++) {
10227 event.data.set(this.pendingBoxes[i], offset);
10228 offset += this.pendingBoxes[i].byteLength;
10229 } // Translate caption PTS times into second offsets to match the
10230 // video timeline for the segment, and add track info
10231
10232
10233 for (i = 0; i < this.pendingCaptions.length; i++) {
10234 caption = this.pendingCaptions[i];
10235 caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
10236 caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);
10237 event.captionStreams[caption.stream] = true;
10238 event.captions.push(caption);
10239 } // Translate ID3 frame PTS times into second offsets to match the
10240 // video timeline for the segment
10241
10242
10243 for (i = 0; i < this.pendingMetadata.length; i++) {
10244 id3 = this.pendingMetadata[i];
10245 id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);
10246 event.metadata.push(id3);
10247 } // We add this to every single emitted segment even though we only need
10248 // it for the first
10249
10250
10251 event.metadata.dispatchType = this.metadataStream.dispatchType; // Reset stream state
10252
10253 this.pendingTracks.length = 0;
10254 this.videoTrack = null;
10255 this.pendingBoxes.length = 0;
10256 this.pendingCaptions.length = 0;
10257 this.pendingBytes = 0;
10258 this.pendingMetadata.length = 0; // Emit the built segment
10259 // We include captions and ID3 tags for backwards compatibility,
10260 // ideally we should send only video and audio in the data event
10261
10262 this.trigger('data', event); // Emit each caption to the outside world
10263 // Ideally, this would happen immediately on parsing captions,
10264 // but we need to ensure that video data is sent back first
10265 // so that caption timing can be adjusted to match video timing
10266
10267 for (i = 0; i < event.captions.length; i++) {
10268 caption = event.captions[i];
10269 this.trigger('caption', caption);
10270 } // Emit each id3 tag to the outside world
10271 // Ideally, this would happen immediately on parsing the tag,
10272 // but we need to ensure that video data is sent back first
10273 // so that ID3 frame timing can be adjusted to match video timing
10274
10275
10276 for (i = 0; i < event.metadata.length; i++) {
10277 id3 = event.metadata[i];
10278 this.trigger('id3Frame', id3);
10279 }
10280 } // Only emit `done` if all tracks have been flushed and emitted
10281
10282
10283 if (this.emittedTracks >= this.numberOfTracks) {
10284 this.trigger('done');
10285 this.emittedTracks = 0;
10286 }
10287 };
10288
10289 _CoalesceStream.prototype.setRemux = function (val) {
10290 this.remuxTracks = val;
10291 };
10292 /**
10293 * A Stream that expects MP2T binary data as input and produces
10294 * corresponding media segments, suitable for use with Media Source
10295 * Extension (MSE) implementations that support the ISO BMFF byte
10296 * stream format, like Chrome.
10297 */
10298
10299
10300 _Transmuxer = function Transmuxer(options) {
10301 var self = this,
10302 hasFlushed = true,
10303 videoTrack,
10304 audioTrack;
10305
10306 _Transmuxer.prototype.init.call(this);
10307
10308 options = options || {};
10309 this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
10310 this.transmuxPipeline_ = {};
10311
10312 this.setupAacPipeline = function () {
10313 var pipeline = {};
10314 this.transmuxPipeline_ = pipeline;
10315 pipeline.type = 'aac';
10316 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
10317
10318 pipeline.aacStream = new aac();
10319 pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
10320 pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
10321 pipeline.adtsStream = new adts();
10322 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
10323 pipeline.headOfPipeline = pipeline.aacStream;
10324 pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
10325 pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
10326 pipeline.metadataStream.on('timestamp', function (frame) {
10327 pipeline.aacStream.setTimestamp(frame.timeStamp);
10328 });
10329 pipeline.aacStream.on('data', function (data) {
10330 if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
10331 return;
10332 }
10333
10334 audioTrack = audioTrack || {
10335 timelineStartInfo: {
10336 baseMediaDecodeTime: self.baseMediaDecodeTime
10337 },
10338 codec: 'adts',
10339 type: 'audio'
10340 }; // hook up the audio segment stream to the first track with aac data
10341
10342 pipeline.coalesceStream.numberOfTracks++;
10343 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
10344 pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
10345 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo')); // Set up the final part of the audio pipeline
10346
10347 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream); // emit pmt info
10348
10349 self.trigger('trackinfo', {
10350 hasAudio: !!audioTrack,
10351 hasVideo: !!videoTrack
10352 });
10353 }); // Re-emit any data coming from the coalesce stream to the outside world
10354
10355 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data')); // Let the consumer know we have finished flushing the entire pipeline
10356
10357 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
10358 addPipelineLogRetriggers(this, pipeline);
10359 };
10360
10361 this.setupTsPipeline = function () {
10362 var pipeline = {};
10363 this.transmuxPipeline_ = pipeline;
10364 pipeline.type = 'ts';
10365 pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline
10366
10367 pipeline.packetStream = new m2ts_1.TransportPacketStream();
10368 pipeline.parseStream = new m2ts_1.TransportParseStream();
10369 pipeline.elementaryStream = new m2ts_1.ElementaryStream();
10370 pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
10371 pipeline.adtsStream = new adts();
10372 pipeline.h264Stream = new H264Stream();
10373 pipeline.captionStream = new m2ts_1.CaptionStream(options);
10374 pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
10375 pipeline.headOfPipeline = pipeline.packetStream; // disassemble MPEG2-TS packets into elementary streams
10376
10377 pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream); // !!THIS ORDER IS IMPORTANT!!
10378 // demux the streams
10379
10380 pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
10381 pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
10382 pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream); // Hook up CEA-608/708 caption stream
10383
10384 pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
10385 pipeline.elementaryStream.on('data', function (data) {
10386 var i;
10387
10388 if (data.type === 'metadata') {
10389 i = data.tracks.length; // scan the tracks listed in the metadata
10390
10391 while (i--) {
10392 if (!videoTrack && data.tracks[i].type === 'video') {
10393 videoTrack = data.tracks[i];
10394 videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
10395 } else if (!audioTrack && data.tracks[i].type === 'audio') {
10396 audioTrack = data.tracks[i];
10397 audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
10398 }
10399 } // hook up the video segment stream to the first track with h264 data
10400
10401
10402 if (videoTrack && !pipeline.videoSegmentStream) {
10403 pipeline.coalesceStream.numberOfTracks++;
10404 pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);
10405 pipeline.videoSegmentStream.on('log', self.getLogTrigger_('videoSegmentStream'));
10406 pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
10407 // When video emits timelineStartInfo data after a flush, we forward that
10408 // info to the AudioSegmentStream, if it exists, because video timeline
10409 // data takes precedence. Do not do this if keepOriginalTimestamps is set,
10410 // because this is a particularly subtle form of timestamp alteration.
10411 if (audioTrack && !options.keepOriginalTimestamps) {
10412 audioTrack.timelineStartInfo = timelineStartInfo; // On the first segment we trim AAC frames that exist before the
10413 // very earliest DTS we have seen in video because Chrome will
10414 // interpret any video track with a baseMediaDecodeTime that is
10415 // non-zero as a gap.
10416
10417 pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
10418 }
10419 });
10420 pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
10421 pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
10422 pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
10423 if (audioTrack) {
10424 pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
10425 }
10426 });
10427 pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo')); // Set up the final part of the video pipeline
10428
10429 pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
10430 }
10431
10432 if (audioTrack && !pipeline.audioSegmentStream) {
10433 // hook up the audio segment stream to the first track with aac data
10434 pipeline.coalesceStream.numberOfTracks++;
10435 pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
10436 pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
10437 pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
10438 pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo')); // Set up the final part of the audio pipeline
10439
10440 pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
10441 } // emit pmt info
10442
10443
10444 self.trigger('trackinfo', {
10445 hasAudio: !!audioTrack,
10446 hasVideo: !!videoTrack
10447 });
10448 }
10449 }); // Re-emit any data coming from the coalesce stream to the outside world
10450
10451 pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
10452 pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
10453 id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
10454 self.trigger('id3Frame', id3Frame);
10455 });
10456 pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption')); // Let the consumer know we have finished flushing the entire pipeline
10457
10458 pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
10459 addPipelineLogRetriggers(this, pipeline);
10460 }; // hook up the segment streams once track metadata is delivered
10461
10462
10463 this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
10464 var pipeline = this.transmuxPipeline_;
10465
10466 if (!options.keepOriginalTimestamps) {
10467 this.baseMediaDecodeTime = baseMediaDecodeTime;
10468 }
10469
10470 if (audioTrack) {
10471 audioTrack.timelineStartInfo.dts = undefined;
10472 audioTrack.timelineStartInfo.pts = undefined;
10473 trackDecodeInfo.clearDtsInfo(audioTrack);
10474
10475 if (pipeline.audioTimestampRolloverStream) {
10476 pipeline.audioTimestampRolloverStream.discontinuity();
10477 }
10478 }
10479
10480 if (videoTrack) {
10481 if (pipeline.videoSegmentStream) {
10482 pipeline.videoSegmentStream.gopCache_ = [];
10483 }
10484
10485 videoTrack.timelineStartInfo.dts = undefined;
10486 videoTrack.timelineStartInfo.pts = undefined;
10487 trackDecodeInfo.clearDtsInfo(videoTrack);
10488 pipeline.captionStream.reset();
10489 }
10490
10491 if (pipeline.timestampRolloverStream) {
10492 pipeline.timestampRolloverStream.discontinuity();
10493 }
10494 };
10495
10496 this.setAudioAppendStart = function (timestamp) {
10497 if (audioTrack) {
10498 this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
10499 }
10500 };
10501
10502 this.setRemux = function (val) {
10503 var pipeline = this.transmuxPipeline_;
10504 options.remux = val;
10505
10506 if (pipeline && pipeline.coalesceStream) {
10507 pipeline.coalesceStream.setRemux(val);
10508 }
10509 };
10510
10511 this.alignGopsWith = function (gopsToAlignWith) {
10512 if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
10513 this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
10514 }
10515 };
10516
10517 this.getLogTrigger_ = function (key) {
10518 var self = this;
10519 return function (event) {
10520 event.stream = key;
10521 self.trigger('log', event);
10522 };
10523 }; // feed incoming data to the front of the parsing pipeline
10524
10525
10526 this.push = function (data) {
10527 if (hasFlushed) {
10528 var isAac = isLikelyAacData(data);
10529
10530 if (isAac && this.transmuxPipeline_.type !== 'aac') {
10531 this.setupAacPipeline();
10532 } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
10533 this.setupTsPipeline();
10534 }
10535
10536 hasFlushed = false;
10537 }
10538
10539 this.transmuxPipeline_.headOfPipeline.push(data);
10540 }; // flush any buffered data
10541
10542
10543 this.flush = function () {
10544 hasFlushed = true; // Start at the top of the pipeline and flush all pending work
10545
10546 this.transmuxPipeline_.headOfPipeline.flush();
10547 };
10548
10549 this.endTimeline = function () {
10550 this.transmuxPipeline_.headOfPipeline.endTimeline();
10551 };
10552
10553 this.reset = function () {
10554 if (this.transmuxPipeline_.headOfPipeline) {
10555 this.transmuxPipeline_.headOfPipeline.reset();
10556 }
10557 }; // Caption data has to be reset when seeking outside buffered range
10558
10559
10560 this.resetCaptions = function () {
10561 if (this.transmuxPipeline_.captionStream) {
10562 this.transmuxPipeline_.captionStream.reset();
10563 }
10564 };
10565 };
10566
10567 _Transmuxer.prototype = new stream();
10568 var transmuxer = {
10569 Transmuxer: _Transmuxer,
10570 VideoSegmentStream: _VideoSegmentStream,
10571 AudioSegmentStream: _AudioSegmentStream,
10572 AUDIO_PROPERTIES: audioProperties,
10573 VIDEO_PROPERTIES: videoProperties,
10574 // exported for testing
10575 generateSegmentTimingInfo: generateSegmentTimingInfo
10576 };
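// A minimal usage sketch of the Transmuxer above (assuming a caller-supplied
// Uint8Array of MPEG2-TS bytes): push data in, collect fmp4 segment events,
// then flush. The variable and callback names here are illustrative only.
var exampleTransmuxerUsage = function (tsBytes) {
  var t = new transmuxer.Transmuxer({ remux: true });
  var segments = [];
  t.on('data', function (segment) {
    // each event carries fmp4 init and media bytes
    segments.push(segment);
  });
  t.on('done', function () {
    // the entire pipeline has been flushed
  });
  t.push(tsBytes);
  t.flush();
  return segments;
};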
10577 /**
10578 * mux.js
10579 *
10580 * Copyright (c) Brightcove
10581 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10582 */
10583
10584 var toUnsigned$3 = function toUnsigned(value) {
10585 return value >>> 0;
10586 };
10587
10588 var toHexString$1 = function toHexString(value) {
10589 return ('00' + value.toString(16)).slice(-2);
10590 };
10591
10592 var bin = {
10593 toUnsigned: toUnsigned$3,
10594 toHexString: toHexString$1
10595 };
10596
10597 var parseType$1 = function parseType(buffer) {
10598 var result = '';
10599 result += String.fromCharCode(buffer[0]);
10600 result += String.fromCharCode(buffer[1]);
10601 result += String.fromCharCode(buffer[2]);
10602 result += String.fromCharCode(buffer[3]);
10603 return result;
10604 };
10605
10606 var parseType_1 = parseType$1;
10607 var toUnsigned$2 = bin.toUnsigned;
10608
10609 var findBox = function findBox(data, path) {
10610 var results = [],
10611 i,
10612 size,
10613 type,
10614 end,
10615 subresults;
10616
10617 if (!path.length) {
10618 // short-circuit the search for empty paths
10619 return null;
10620 }
10621
10622 for (i = 0; i < data.byteLength;) {
10623 size = toUnsigned$2(data[i] << 24 | data[i + 1] << 16 | data[i + 2] << 8 | data[i + 3]);
10624 type = parseType_1(data.subarray(i + 4, i + 8));
10625 end = size > 1 ? i + size : data.byteLength;
10626
10627 if (type === path[0]) {
10628 if (path.length === 1) {
10629 // this is the end of the path and we've found the box we were
10630 // looking for
10631 results.push(data.subarray(i + 8, end));
10632 } else {
10633 // recursively search for the next box along the path
10634 subresults = findBox(data.subarray(i + 8, end), path.slice(1));
10635
10636 if (subresults.length) {
10637 results = results.concat(subresults);
10638 }
10639 }
10640 }
10641
10642 i = end;
10643 } // we've finished searching all of data
10644
10645
10646 return results;
10647 };
10648
10649 var findBox_1 = findBox;
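// A minimal sketch of findBox against hand-built bytes: each box is a 32-bit
// big-endian size, a 4-byte type, then its payload. Here a 'moof' box wraps a
// 'traf' box whose payload is [1, 2, 3, 4]. The bytes are hypothetical.
var exampleFindBox = function () {
  var bytes = new Uint8Array([
    0x00, 0x00, 0x00, 0x14, 0x6d, 0x6f, 0x6f, 0x66, // size 20, type 'moof'
    0x00, 0x00, 0x00, 0x0c, 0x74, 0x72, 0x61, 0x66, // size 12, type 'traf'
    0x01, 0x02, 0x03, 0x04                          // traf payload
  ]);
  return findBox_1(bytes, ['moof', 'traf']); // [Uint8Array([1, 2, 3, 4])]
};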
10650 var toUnsigned$1 = bin.toUnsigned;
10651
10652 var tfdt = function tfdt(data) {
10653 var result = {
10654 version: data[0],
10655 flags: new Uint8Array(data.subarray(1, 4)),
10656 baseMediaDecodeTime: toUnsigned$1(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7])
10657 };
10658
10659 if (result.version === 1) {
10660 result.baseMediaDecodeTime *= Math.pow(2, 32);
10661 result.baseMediaDecodeTime += toUnsigned$1(data[8] << 24 | data[9] << 16 | data[10] << 8 | data[11]);
10662 }
10663
10664 return result;
10665 };
10666
10667 var parseTfdt = tfdt;
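// A minimal sketch of parseTfdt on a hand-built version 1 payload, where the
// baseMediaDecodeTime spans 64 bits (upper 32 bits, then lower 32 bits). The
// value below is the hypothetical 2^32 + 90000.
var exampleParseTfdt = function () {
  var payload = new Uint8Array([
    0x01, 0x00, 0x00, 0x00, // version 1, flags
    0x00, 0x00, 0x00, 0x01, // upper 32 bits
    0x00, 0x01, 0x5f, 0x90  // lower 32 bits (90000)
  ]);
  return parseTfdt(payload).baseMediaDecodeTime; // 4294967296 + 90000
};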
10668
10669 var parseSampleFlags = function parseSampleFlags(flags) {
10670 return {
10671 isLeading: (flags[0] & 0x0c) >>> 2,
10672 dependsOn: flags[0] & 0x03,
10673 isDependedOn: (flags[1] & 0xc0) >>> 6,
10674 hasRedundancy: (flags[1] & 0x30) >>> 4,
10675 paddingValue: (flags[1] & 0x0e) >>> 1,
10676 isNonSyncSample: flags[1] & 0x01,
10677 degradationPriority: flags[2] << 8 | flags[3]
10678 };
10679 };
10680
10681 var parseSampleFlags_1 = parseSampleFlags;
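// A minimal sketch of parseSampleFlags on hypothetical flag bytes: with
// [0x00, 0x01, 0x00, 0x00] only the is_non_sync_sample bit is set, i.e. the
// sample is not a keyframe.
var exampleSampleFlags = function () {
  var flags = parseSampleFlags_1(new Uint8Array([0x00, 0x01, 0x00, 0x00]));
  return flags.isNonSyncSample; // 1
};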
10682
10683 var trun = function trun(data) {
10684 var result = {
10685 version: data[0],
10686 flags: new Uint8Array(data.subarray(1, 4)),
10687 samples: []
10688 },
10689 view = new DataView(data.buffer, data.byteOffset, data.byteLength),
10690 // Flag interpretation
10691 dataOffsetPresent = result.flags[2] & 0x01,
10692 // compare with 2nd byte of 0x1
10693 firstSampleFlagsPresent = result.flags[2] & 0x04,
10694 // compare with 2nd byte of 0x4
10695 sampleDurationPresent = result.flags[1] & 0x01,
10696 // compare with 2nd byte of 0x100
10697 sampleSizePresent = result.flags[1] & 0x02,
10698 // compare with 2nd byte of 0x200
10699 sampleFlagsPresent = result.flags[1] & 0x04,
10700 // compare with 2nd byte of 0x400
10701 sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
10702 // compare with 2nd byte of 0x800
10703 sampleCount = view.getUint32(4),
10704 offset = 8,
10705 sample;
10706
10707 if (dataOffsetPresent) {
10708 // 32 bit signed integer
10709 result.dataOffset = view.getInt32(offset);
10710 offset += 4;
10711 } // Overrides the flags for the first sample only. The order of
10712 // optional values will be: duration, size, compositionTimeOffset
10713
10714
10715 if (firstSampleFlagsPresent && sampleCount) {
10716 sample = {
10717 flags: parseSampleFlags_1(data.subarray(offset, offset + 4))
10718 };
10719 offset += 4;
10720
10721 if (sampleDurationPresent) {
10722 sample.duration = view.getUint32(offset);
10723 offset += 4;
10724 }
10725
10726 if (sampleSizePresent) {
10727 sample.size = view.getUint32(offset);
10728 offset += 4;
10729 }
10730
10731 if (sampleCompositionTimeOffsetPresent) {
10732 if (result.version === 1) {
10733 sample.compositionTimeOffset = view.getInt32(offset);
10734 } else {
10735 sample.compositionTimeOffset = view.getUint32(offset);
10736 }
10737
10738 offset += 4;
10739 }
10740
10741 result.samples.push(sample);
10742 sampleCount--;
10743 }
10744
10745 while (sampleCount--) {
10746 sample = {};
10747
10748 if (sampleDurationPresent) {
10749 sample.duration = view.getUint32(offset);
10750 offset += 4;
10751 }
10752
10753 if (sampleSizePresent) {
10754 sample.size = view.getUint32(offset);
10755 offset += 4;
10756 }
10757
10758 if (sampleFlagsPresent) {
10759 sample.flags = parseSampleFlags_1(data.subarray(offset, offset + 4));
10760 offset += 4;
10761 }
10762
10763 if (sampleCompositionTimeOffsetPresent) {
10764 if (result.version === 1) {
10765 sample.compositionTimeOffset = view.getInt32(offset);
10766 } else {
10767 sample.compositionTimeOffset = view.getUint32(offset);
10768 }
10769
10770 offset += 4;
10771 }
10772
10773 result.samples.push(sample);
10774 }
10775
10776 return result;
10777 };
10778
10779 var parseTrun = trun;
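// A minimal sketch of parseTrun on a hand-built version 0 payload whose flags
// declare data-offset (0x000001), sample-duration (0x000100) and sample-size
// (0x000200) present, with a single sample. All values are hypothetical.
var exampleParseTrun = function () {
  var payload = new Uint8Array([
    0x00, 0x00, 0x03, 0x01, // version 0, flags 0x000301
    0x00, 0x00, 0x00, 0x01, // sample count: 1
    0x00, 0x00, 0x00, 0x10, // data offset: 16
    0x00, 0x00, 0x0b, 0xb8, // sample duration: 3000
    0x00, 0x00, 0x04, 0x00  // sample size: 1024
  ]);
  // { version: 0, dataOffset: 16, samples: [{ duration: 3000, size: 1024 }], ... }
  return parseTrun(payload);
};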
10780
10781 var tfhd = function tfhd(data) {
10782 var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
10783 result = {
10784 version: data[0],
10785 flags: new Uint8Array(data.subarray(1, 4)),
10786 trackId: view.getUint32(4)
10787 },
10788 baseDataOffsetPresent = result.flags[2] & 0x01,
10789 sampleDescriptionIndexPresent = result.flags[2] & 0x02,
10790 defaultSampleDurationPresent = result.flags[2] & 0x08,
10791 defaultSampleSizePresent = result.flags[2] & 0x10,
10792 defaultSampleFlagsPresent = result.flags[2] & 0x20,
10793 durationIsEmpty = result.flags[0] & 0x010000,
10794 defaultBaseIsMoof = result.flags[0] & 0x020000,
10795 i;
10796 i = 8;
10797
10798 if (baseDataOffsetPresent) {
10799 i += 4; // truncate top 4 bytes
10800 // FIXME: should we read the full 64 bits?
10801
10802 result.baseDataOffset = view.getUint32(12);
10803 i += 4;
10804 }
10805
10806 if (sampleDescriptionIndexPresent) {
10807 result.sampleDescriptionIndex = view.getUint32(i);
10808 i += 4;
10809 }
10810
10811 if (defaultSampleDurationPresent) {
10812 result.defaultSampleDuration = view.getUint32(i);
10813 i += 4;
10814 }
10815
10816 if (defaultSampleSizePresent) {
10817 result.defaultSampleSize = view.getUint32(i);
10818 i += 4;
10819 }
10820
10821 if (defaultSampleFlagsPresent) {
10822 result.defaultSampleFlags = view.getUint32(i);
10823 }
10824
10825 if (durationIsEmpty) {
10826 result.durationIsEmpty = true;
10827 }
10828
10829 if (!baseDataOffsetPresent && defaultBaseIsMoof) {
10830 result.baseDataOffsetIsMoof = true;
10831 }
10832
10833 return result;
10834 };
10835
10836 var parseTfhd = tfhd;
10837 var discardEmulationPreventionBytes = captionPacketParser.discardEmulationPreventionBytes;
10838 var CaptionStream = captionStream.CaptionStream;
10839 /**
10840 * Maps an offset in the mdat to a sample based on the size of the samples.
10841 * Assumes that `parseSamples` has been called first.
10842 *
10843 * @param {Number} offset - The offset into the mdat
10844 * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
10845 * @return {?Object} The matching sample, or null if no match was found.
10846 *
10847 * @see ISO-BMFF-12/2015, Section 8.8.8
10848 **/
10849
10850 var mapToSample = function mapToSample(offset, samples) {
10851 var approximateOffset = offset;
10852
10853 for (var i = 0; i < samples.length; i++) {
10854 var sample = samples[i];
10855
10856 if (approximateOffset < sample.size) {
10857 return sample;
10858 }
10859
10860 approximateOffset -= sample.size;
10861 }
10862
10863 return null;
10864 };
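// A minimal sketch of mapToSample: sample sizes partition the mdat, so a
// hypothetical offset of 150 into samples of size 100 and 200 falls in the
// second sample.
var exampleMapToSample = function () {
  var samples = [{ size: 100, pts: 0 }, { size: 200, pts: 3000 }];
  return mapToSample(150, samples); // { size: 200, pts: 3000 }
};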
10865 /**
10866 * Finds SEI nal units contained in a Media Data Box.
10867 * Assumes that `parseSamples` has been called first.
10868 *
10869 * @param {Uint8Array} avcStream - The bytes of the mdat
10870 * @param {Object[]} samples - The samples parsed out by `parseSamples`
10871 * @param {Number} trackId - The trackId of this video track
10872 * @return {Object[]} seiNals - the parsed SEI NALUs found.
10873 * The contents of the seiNal should match what is expected by
10874 * CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
10875 *
10876 * @see ISO-BMFF-12/2015, Section 8.1.1
10877 * @see Rec. ITU-T H.264, 7.3.2.3.1
10878 **/
10879
10880
10881 var findSeiNals = function findSeiNals(avcStream, samples, trackId) {
10882 var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
10883 result = {
10884 logs: [],
10885 seiNals: []
10886 },
10887 seiNal,
10888 i,
10889 length,
10890 lastMatchedSample;
10891
10892 for (i = 0; i + 4 < avcStream.length; i += length) {
10893 length = avcView.getUint32(i);
10894 i += 4; // Bail if this doesn't appear to be an H264 stream
10895
10896 if (length <= 0) {
10897 continue;
10898 }
10899
10900 switch (avcStream[i] & 0x1F) {
10901 case 0x06:
10902 var data = avcStream.subarray(i + 1, i + 1 + length);
10903 var matchingSample = mapToSample(i, samples);
10904 seiNal = {
10905 nalUnitType: 'sei_rbsp',
10906 size: length,
10907 data: data,
10908 escapedRBSP: discardEmulationPreventionBytes(data),
10909 trackId: trackId
10910 };
10911
10912 if (matchingSample) {
10913 seiNal.pts = matchingSample.pts;
10914 seiNal.dts = matchingSample.dts;
10915 lastMatchedSample = matchingSample;
10916 } else if (lastMatchedSample) {
10917 // If a matching sample cannot be found, use the last
10918 // sample's values as they should be as close as possible
10919 seiNal.pts = lastMatchedSample.pts;
10920 seiNal.dts = lastMatchedSample.dts;
10921 } else {
10922 result.logs.push({
10923 level: 'warn',
10924 message: 'We\'ve encountered a nal unit without data at ' + i + ' for trackId ' + trackId + '. See mux.js#223.'
10925 });
10926 break;
10927 }
10928
10929 result.seiNals.push(seiNal);
10930 break;
10931 }
10932 }
10933
10934 return result;
10935 };
10936 /**
10937 * Parses sample information out of Track Run Boxes and calculates
10938 * the absolute presentation and decode timestamps of each sample.
10939 *
10940 * @param {Array<Uint8Array>} truns - The Track Run ('trun') boxes to be parsed
10941 * @param {Number} baseMediaDecodeTime - base media decode time from tfdt
10942 * @see ISO-BMFF-12/2015, Section 8.8.12
10943 * @param {Object} tfhd - The parsed Track Fragment Header
10944 * @see inspect.parseTfhd
10945 * @return {Object[]} the parsed samples
10946 *
10947 * @see ISO-BMFF-12/2015, Section 8.8.8
10948 **/
10949
10950
10951 var parseSamples = function parseSamples(truns, baseMediaDecodeTime, tfhd) {
10952 var currentDts = baseMediaDecodeTime;
10953 var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
10954 var defaultSampleSize = tfhd.defaultSampleSize || 0;
10955 var trackId = tfhd.trackId;
10956 var allSamples = [];
10957 truns.forEach(function (trun) {
10958 // Note: We currently do not parse the sample table as well
10959 // as the trun. It's possible some sources will require this.
10960 // moov > trak > mdia > minf > stbl
10961 var trackRun = parseTrun(trun);
10962 var samples = trackRun.samples;
10963 samples.forEach(function (sample) {
10964 if (sample.duration === undefined) {
10965 sample.duration = defaultSampleDuration;
10966 }
10967
10968 if (sample.size === undefined) {
10969 sample.size = defaultSampleSize;
10970 }
10971
10972 sample.trackId = trackId;
10973 sample.dts = currentDts;
10974
10975 if (sample.compositionTimeOffset === undefined) {
10976 sample.compositionTimeOffset = 0;
10977 }
10978
10979 sample.pts = currentDts + sample.compositionTimeOffset;
10980 currentDts += sample.duration;
10981 });
10982 allSamples = allSamples.concat(samples);
10983 });
10984 return allSamples;
10985 };
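// A minimal sketch of parseSamples: the trun below declares two samples with
// no per-sample fields, so duration and size fall back to the (hypothetical)
// tfhd defaults while dts/pts accumulate from the baseMediaDecodeTime.
var exampleParseSamples = function () {
  var emptyTrun = new Uint8Array([
    0x00, 0x00, 0x00, 0x00, // version 0, no optional fields present
    0x00, 0x00, 0x00, 0x02  // sample count: 2
  ]);
  var headerDefaults = { trackId: 1, defaultSampleDuration: 3000, defaultSampleSize: 960 };
  // [{ dts: 90000, pts: 90000, ... }, { dts: 93000, pts: 93000, ... }]
  return parseSamples([emptyTrun], 90000, headerDefaults);
};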
10986 /**
10987 * Parses out caption nals from an FMP4 segment's video tracks.
10988 *
10989 * @param {Uint8Array} segment - The bytes of a single segment
10990 * @param {Number} videoTrackId - The trackId of a video track in the segment
10991 * @return {Object.<Number, Object[]>} A mapping of video trackId to
10992 * a list of seiNals found in that track
10993 **/
10994
10995
10996 var parseCaptionNals = function parseCaptionNals(segment, videoTrackId) {
10997 // To get the samples
10998 var trafs = findBox_1(segment, ['moof', 'traf']); // To get SEI NAL units
10999
11000 var mdats = findBox_1(segment, ['mdat']);
11001 var captionNals = {};
11002 var mdatTrafPairs = []; // Pair up each traf with a mdat as moofs and mdats are in pairs
11003
11004 mdats.forEach(function (mdat, index) {
11005 var matchingTraf = trafs[index];
11006 mdatTrafPairs.push({
11007 mdat: mdat,
11008 traf: matchingTraf
11009 });
11010 });
11011 mdatTrafPairs.forEach(function (pair) {
11012 var mdat = pair.mdat;
11013 var traf = pair.traf;
11014 var tfhd = findBox_1(traf, ['tfhd']); // Exactly 1 tfhd per traf
11015
11016 var headerInfo = parseTfhd(tfhd[0]);
11017 var trackId = headerInfo.trackId;
11018 var tfdt = findBox_1(traf, ['tfdt']); // Either 0 or 1 tfdt per traf
11019
11020 var baseMediaDecodeTime = tfdt.length > 0 ? parseTfdt(tfdt[0]).baseMediaDecodeTime : 0;
11021 var truns = findBox_1(traf, ['trun']);
11022 var samples;
11023 var result; // Only parse video data for the chosen video track
11024
11025 if (videoTrackId === trackId && truns.length > 0) {
11026 samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
11027 result = findSeiNals(mdat, samples, trackId);
11028
11029 if (!captionNals[trackId]) {
11030 captionNals[trackId] = {
11031 seiNals: [],
11032 logs: []
11033 };
11034 }
11035
11036 captionNals[trackId].seiNals = captionNals[trackId].seiNals.concat(result.seiNals);
11037 captionNals[trackId].logs = captionNals[trackId].logs.concat(result.logs);
11038 }
11039 });
11040 return captionNals;
11041 };
11042 /**
11043 * Parses out inband captions from an MP4 container and returns
11044 * caption objects that can be used by WebVTT and the TextTrack API.
11045 * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
11046 * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
11047 * Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
11048 *
11049 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
11050 * @param {Number} trackId - The id of the video track to parse
11051 * @param {Number} timescale - The timescale for the video track from the init segment
11052 *
11053 * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
11054 * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
11055 * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
11056 * @return {String} parsedCaptions[].text - The visible content of the caption
11057 **/
11058
11059
11060 var parseEmbeddedCaptions = function parseEmbeddedCaptions(segment, trackId, timescale) {
11061 var captionNals; // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
11062
11063 if (trackId === null) {
11064 return null;
11065 }
11066
11067 captionNals = parseCaptionNals(segment, trackId);
11068 var trackNals = captionNals[trackId] || {};
11069 return {
11070 seiNals: trackNals.seiNals,
11071 logs: trackNals.logs,
11072 timescale: timescale
11073 };
11074 };
11075 /**
11076 * Converts SEI NALUs into captions that can be used by video.js
11077 **/
11078
11079
11080 var CaptionParser = function CaptionParser() {
11081 var isInitialized = false;
11082 var captionStream; // Stores segments seen before trackId and timescale are set
11083
11084 var segmentCache; // Stores video track ID of the track being parsed
11085
11086 var trackId; // Stores the timescale of the track being parsed
11087
11088 var timescale; // Stores captions parsed so far
11089
11090 var parsedCaptions; // Stores whether we are receiving partial data or not
11091
11092 var parsingPartial;
11093 /**
11094 * A method to indicate whether a CaptionParser has been initialized
11095 * @returns {Boolean}
11096 **/
11097
11098 this.isInitialized = function () {
11099 return isInitialized;
11100 };
11101 /**
11102 * Initializes the underlying CaptionStream, SEI NAL parsing
11103 * and management, and caption collection
11104 **/
11105
11106
11107 this.init = function (options) {
11108 captionStream = new CaptionStream();
11109 isInitialized = true;
11110 parsingPartial = options ? options.isPartial : false; // Collect dispatched captions
11111
11112 captionStream.on('data', function (event) {
11113 // Convert to seconds in the source's timescale
11114 event.startTime = event.startPts / timescale;
11115 event.endTime = event.endPts / timescale;
11116 parsedCaptions.captions.push(event);
11117 parsedCaptions.captionStreams[event.stream] = true;
11118 });
11119 captionStream.on('log', function (log) {
11120 parsedCaptions.logs.push(log);
11121 });
11122 };
11123 /**
11124 * Determines if a new video track will be selected
11125 * or if the timescale changed
11126 * @return {Boolean}
11127 **/
11128
11129
11130 this.isNewInit = function (videoTrackIds, timescales) {
11131 if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
11132 return false;
11133 }
11134
11135 return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
11136 };
11137 /**
11138 * Parses out SEI captions and interacts with underlying
11139 * CaptionStream to return dispatched captions
11140 *
11141 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
11142 * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
11143 * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
11144 * @see parseEmbeddedCaptions
11145 * @see m2ts/caption-stream.js
11146 **/
11147
11148
11149 this.parse = function (segment, videoTrackIds, timescales) {
11150 var parsedData;
11151
11152 if (!this.isInitialized()) {
11153 return null; // This is not likely to be a video segment
11154 } else if (!videoTrackIds || !timescales) {
11155 return null;
11156 } else if (this.isNewInit(videoTrackIds, timescales)) {
11157 // Use the first video track only as there is no
11158 // mechanism to switch to other video tracks
11159 trackId = videoTrackIds[0];
11160 timescale = timescales[trackId]; // If an init segment has not been seen yet, hold onto segment
11161 // data until we have one.
11162 // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
11163 } else if (trackId === null || !timescale) {
11164 segmentCache.push(segment);
11165 return null;
11166 } // Now that a timescale and trackId is set, parse cached segments
11167
11168
11169 while (segmentCache.length > 0) {
11170 var cachedSegment = segmentCache.shift();
11171 this.parse(cachedSegment, videoTrackIds, timescales);
11172 }
11173
11174 parsedData = parseEmbeddedCaptions(segment, trackId, timescale);
11175
11176 if (parsedData && parsedData.logs) {
11177 parsedCaptions.logs = parsedCaptions.logs.concat(parsedData.logs);
11178 }
11179
11180 if (parsedData === null || !parsedData.seiNals) {
11181 if (parsedCaptions.logs.length) {
11182 return {
11183 logs: parsedCaptions.logs,
11184 captions: [],
11185 captionStreams: {} // object map keyed by stream name, matching parsedCaptions.captionStreams
11186 };
11187 }
11188
11189 return null;
11190 }
11191
11192 this.pushNals(parsedData.seiNals); // Force the parsed captions to be dispatched
11193
11194 this.flushStream();
11195 return parsedCaptions;
11196 };
11197 /**
11198 * Pushes SEI NALUs onto CaptionStream
11199 * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
11200 * Assumes that `parseCaptionNals` has been called first
11201 * @see m2ts/caption-stream.js
11202 **/
11203
11204
11205 this.pushNals = function (nals) {
11206 if (!this.isInitialized() || !nals || nals.length === 0) {
11207 return null;
11208 }
11209
11210 nals.forEach(function (nal) {
11211 captionStream.push(nal);
11212 });
11213 };
11214 /**
11215 * Flushes underlying CaptionStream to dispatch processed, displayable captions
11216 * @see m2ts/caption-stream.js
11217 **/
11218
11219
11220 this.flushStream = function () {
11221 if (!this.isInitialized()) {
11222 return null;
11223 }
11224
11225 if (!parsingPartial) {
11226 captionStream.flush();
11227 } else {
11228 captionStream.partialFlush();
11229 }
11230 };
11231 /**
11232 * Reset caption buckets for new data
11233 **/
11234
11235
11236 this.clearParsedCaptions = function () {
11237 parsedCaptions.captions = [];
11238 parsedCaptions.captionStreams = {};
11239 parsedCaptions.logs = [];
11240 };
11241 /**
11242 * Resets underlying CaptionStream
11243 * @see m2ts/caption-stream.js
11244 **/
11245
11246
11247 this.resetCaptionStream = function () {
11248 if (!this.isInitialized()) {
11249 return null;
11250 }
11251
11252 captionStream.reset();
11253 };
11254 /**
11255 * Convenience method to clear all captions flushed from the
11256 * CaptionStream and still being parsed
11257 * @see m2ts/caption-stream.js
11258 **/
11259
11260
11261 this.clearAllCaptions = function () {
11262 this.clearParsedCaptions();
11263 this.resetCaptionStream();
11264 };
11265 /**
11266 * Reset caption parser
11267 **/
11268
11269
11270 this.reset = function () {
11271 segmentCache = [];
11272 trackId = null;
11273 timescale = null;
11274
11275 if (!parsedCaptions) {
11276 parsedCaptions = {
11277 captions: [],
11278 // CC1, CC2, CC3, CC4
11279 captionStreams: {},
11280 logs: []
11281 };
11282 } else {
11283 this.clearParsedCaptions();
11284 }
11285
11286 this.resetCaptionStream();
11287 };
11288
11289 this.reset();
11290 };
11291
11292 var captionParser = CaptionParser;
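// A minimal sketch of driving the CaptionParser above. The track ids and
// timescales would normally come from probing the init segment; here they are
// hypothetical caller-supplied values, e.g. [1] and { 1: 90000 }.
var exampleCaptionParser = function (fmp4Segment, videoTrackIds, timescales) {
  var parser = new captionParser();
  parser.init();
  var parsed = parser.parse(fmp4Segment, videoTrackIds, timescales);
  return parsed ? parsed.captions : [];
};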
11293 var toUnsigned = bin.toUnsigned;
11294 var toHexString = bin.toHexString;
11295 var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks, getTimescaleFromMediaHeader;
11296 /**
11297 * Parses an MP4 initialization segment and extracts the timescale
11298 * values for any declared tracks. Timescale values indicate the
11299 * number of clock ticks per second to assume for time-based values
11300 * elsewhere in the MP4.
11301 *
11302 * To determine the start time of an MP4, you need two pieces of
11303 * information: the timescale unit and the earliest base media decode
11304 * time. Multiple timescales can be specified within an MP4 but the
11305 * base media decode time is always expressed in the timescale from
11306 * the media header box for the track:
11307 * ```
11308 * moov > trak > mdia > mdhd.timescale
11309 * ```
11310 * @param {Uint8Array} init - the bytes of the init segment
11311 * @return {object} a hash of track ids to timescale values, or null if
11312 * the init segment is malformed.
11313 */
11314
11315 timescale = function timescale(init) {
11316 var result = {},
11317 traks = findBox_1(init, ['moov', 'trak']); // mdhd timescale
11318
11319 return traks.reduce(function (result, trak) {
11320 var tkhd, version, index, id, mdhd;
11321 tkhd = findBox_1(trak, ['tkhd'])[0];
11322
11323 if (!tkhd) {
11324 return null;
11325 }
11326
11327 version = tkhd[0];
11328 index = version === 0 ? 12 : 20;
11329 id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);
11330 mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
11331
11332 if (!mdhd) {
11333 return null;
11334 }
11335
11336 version = mdhd[0];
11337 index = version === 0 ? 12 : 20;
11338 result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
11339 return result;
11340 }, result);
11341 };
11342 /**
11343 * Determine the base media decode start time, in seconds, for an MP4
11344 * fragment. If multiple fragments are specified, the earliest time is
11345 * returned.
11346 *
11347 * The base media decode time can be parsed from track fragment
11348 * metadata:
11349 * ```
11350 * moof > traf > tfdt.baseMediaDecodeTime
11351 * ```
11352 * It requires the timescale value from the mdhd to interpret.
11353 *
11354 * @param {object} timescale - a hash of track ids to timescale values.
11355 * @return {number} the earliest base media decode start time for the
11356 * fragment, in seconds
11357 */
11358
11359
11360 startTime = function startTime(timescale, fragment) {
11361 var trafs, baseTimes, result; // we need info from two children of each track fragment box
11362
11363 trafs = findBox_1(fragment, ['moof', 'traf']); // determine the start times for each track
11364
11365 baseTimes = [].concat.apply([], trafs.map(function (traf) {
11366 return findBox_1(traf, ['tfhd']).map(function (tfhd) {
11367 var id, scale, baseTime; // get the track id from the tfhd
11368
11369 id = toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]); // assume a 90kHz clock if no timescale was specified
11370
11371 scale = timescale[id] || 90e3; // get the base media decode time from the tfdt
11372
11373 baseTime = findBox_1(traf, ['tfdt']).map(function (tfdt) {
11374 var version, result;
11375 version = tfdt[0];
11376 result = toUnsigned(tfdt[4] << 24 | tfdt[5] << 16 | tfdt[6] << 8 | tfdt[7]);
11377
11378 if (version === 1) {
11379 result *= Math.pow(2, 32);
11380 result += toUnsigned(tfdt[8] << 24 | tfdt[9] << 16 | tfdt[10] << 8 | tfdt[11]);
11381 }
11382
11383 return result;
11384 })[0];
11385 baseTime = typeof baseTime === 'number' && !isNaN(baseTime) ? baseTime : Infinity; // convert base time to seconds
11386
11387 return baseTime / scale;
11388 });
11389 })); // return the minimum
11390
11391 result = Math.min.apply(null, baseTimes);
11392 return isFinite(result) ? result : 0;
11393 };
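// A worked sketch of the arithmetic startTime performs once the tfhd/tfdt
// values are in hand: each track's baseMediaDecodeTime is divided by its
// timescale and the minimum wins. All values here are hypothetical.
var exampleStartTimeMath = function () {
  var videoSeconds = 900000 / 90000; // 10s on a 90kHz clock
  var audioSeconds = 441300 / 44100; // ~10.007s on a 44.1kHz clock
  return Math.min(videoSeconds, audioSeconds); // 10
};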
11394 /**
11395 * Determine the composition start, in seconds, for an MP4
11396 * fragment.
11397 *
11398 * The composition start time of a fragment can be calculated using the base
11399 * media decode time, composition time offset, and timescale, as follows:
11400 *
11401 * compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale
11402 *
11403 * All of the aforementioned information is contained within a media fragment's
11404 * `traf` box, except for timescale info, which comes from the initialization
11405 * segment, so a track id (also contained within a `traf`) is also necessary to
11406 * associate it with a timescale
11407 *
11408 *
11409 * @param {object} timescales - a hash of track ids to timescale values.
11410 * @param {Uint8Array} fragment - the bytes of a media segment
11411 * @return {number} the composition start time for the fragment, in seconds
11412 **/
11413
11414
11415 compositionStartTime = function compositionStartTime(timescales, fragment) {
11416 var trafBoxes = findBox_1(fragment, ['moof', 'traf']);
11417 var baseMediaDecodeTime = 0;
11418 var compositionTimeOffset = 0;
11419 var trackId;
11420
11421 if (trafBoxes && trafBoxes.length) {
11422 // The spec states that track run samples contained within a `traf` box are contiguous, but
11423 // it does not explicitly state whether the `traf` boxes themselves are contiguous.
11424 // We will assume that they are, so we only need the first to calculate start time.
11425 var tfhd = findBox_1(trafBoxes[0], ['tfhd'])[0];
11426 var trun = findBox_1(trafBoxes[0], ['trun'])[0];
11427 var tfdt = findBox_1(trafBoxes[0], ['tfdt'])[0];
11428
11429 if (tfhd) {
11430 var parsedTfhd = parseTfhd(tfhd);
11431 trackId = parsedTfhd.trackId;
11432 }
11433
11434 if (tfdt) {
11435 var parsedTfdt = parseTfdt(tfdt);
11436 baseMediaDecodeTime = parsedTfdt.baseMediaDecodeTime;
11437 }
11438
11439 if (trun) {
11440 var parsedTrun = parseTrun(trun);
11441
11442 if (parsedTrun.samples && parsedTrun.samples.length) {
11443 compositionTimeOffset = parsedTrun.samples[0].compositionTimeOffset || 0;
11444 }
11445 }
11446 } // Get timescale for this specific track. Assume a 90kHz clock if no timescale was
11447 // specified.
11448
11449
11450 var timescale = timescales[trackId] || 90e3; // return the composition start time, in seconds
11451
11452 return (baseMediaDecodeTime + compositionTimeOffset) / timescale;
11453 };
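// A worked instance of the formula documented above, with hypothetical values:
// a baseMediaDecodeTime of 90000, a first-sample compositionTimeOffset of
// 3000, and a 90kHz timescale.
var exampleCompositionStartTime = function () {
  return (90000 + 3000) / 90000; // ~1.033 seconds
};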
11454 /**
11455 * Find the trackIds of the video tracks in this source.
11456 * Found by parsing the Handler Reference and Track Header Boxes:
11457 * moov > trak > mdia > hdlr
11458 * moov > trak > tkhd
11459 *
11460 * @param {Uint8Array} init - The bytes of the init segment for this source
11461 * @return {Number[]} A list of trackIds
11462 *
11463 * @see ISO-BMFF-12/2015, Section 8.4.3
11464 **/
11465
11466
11467 getVideoTrackIds = function getVideoTrackIds(init) {
11468 var traks = findBox_1(init, ['moov', 'trak']);
11469 var videoTrackIds = [];
11470 traks.forEach(function (trak) {
11471 var hdlrs = findBox_1(trak, ['mdia', 'hdlr']);
11472 var tkhds = findBox_1(trak, ['tkhd']);
11473 hdlrs.forEach(function (hdlr, index) {
11474 var handlerType = parseType_1(hdlr.subarray(8, 12));
11475 var tkhd = tkhds[index];
11476 var view;
11477 var version;
11478 var trackId;
11479
11480 if (handlerType === 'vide') {
11481 view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
11482 version = view.getUint8(0);
11483 trackId = version === 0 ? view.getUint32(12) : view.getUint32(20);
11484 videoTrackIds.push(trackId);
11485 }
11486 });
11487 });
11488 return videoTrackIds;
11489 };
11490
11491 getTimescaleFromMediaHeader = function getTimescaleFromMediaHeader(mdhd) {
11492 // mdhd is a FullBox, meaning it will have its own version as the first byte
11493 var version = mdhd[0];
11494 var index = version === 0 ? 12 : 20;
11495 return toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
11496 };
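// A minimal sketch of getTimescaleFromMediaHeader on a hand-built version 0
// mdhd payload: the timescale sits at byte offset 12, after the version/flags
// and the 32-bit creation and modification times.
var exampleMdhdTimescale = function () {
  var mdhd = new Uint8Array(20);
  mdhd.set([0x00, 0x01, 0x5f, 0x90], 12); // timescale: 90000
  return getTimescaleFromMediaHeader(mdhd); // 90000
};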
11497 /**
11498 * Get all the video, audio, and hint tracks from a non fragmented
11499 * mp4 segment
11500 */
11501
11502
11503 getTracks = function getTracks(init) {
11504 var traks = findBox_1(init, ['moov', 'trak']);
11505 var tracks = [];
11506 traks.forEach(function (trak) {
11507 var track = {};
11508 var tkhd = findBox_1(trak, ['tkhd'])[0];
11509 var view, tkhdVersion; // id
11510
11511 if (tkhd) {
11512 view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
11513 tkhdVersion = view.getUint8(0);
11514 track.id = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
11515 }
11516
11517 var hdlr = findBox_1(trak, ['mdia', 'hdlr'])[0]; // type
11518
11519 if (hdlr) {
11520 var type = parseType_1(hdlr.subarray(8, 12));
11521
11522 if (type === 'vide') {
11523 track.type = 'video';
11524 } else if (type === 'soun') {
11525 track.type = 'audio';
11526 } else {
11527 track.type = type;
11528 }
11529 } // codec
11530
11531
11532 var stsd = findBox_1(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];
11533
11534 if (stsd) {
11535 var sampleDescriptions = stsd.subarray(8); // gives the codec type string
11536
11537 track.codec = parseType_1(sampleDescriptions.subarray(4, 8));
11538 var codecBox = findBox_1(sampleDescriptions, [track.codec])[0];
11539 var codecConfig, codecConfigType;
11540
11541 if (codecBox) {
11542 // https://tools.ietf.org/html/rfc6381#section-3.3
11543 if (/^[asm]vc[1-9]$/i.test(track.codec)) {
11544 // we don't need anything but the "config" parameter of the
11545 // avc1 codecBox
11546 codecConfig = codecBox.subarray(78);
11547 codecConfigType = parseType_1(codecConfig.subarray(4, 8));
11548
11549 if (codecConfigType === 'avcC' && codecConfig.length > 11) {
11550 track.codec += '.'; // left padded with zeroes for single digit hex
11551 // profile idc
11552
11553 track.codec += toHexString(codecConfig[9]); // the byte containing the constraint_set flags
11554
11555 track.codec += toHexString(codecConfig[10]); // level idc
11556
11557 track.codec += toHexString(codecConfig[11]);
11558 } else {
11559 // TODO: show a warning that we couldn't parse the codec
11560 // and are using the default
11561 track.codec = 'avc1.4d400d';
11562 }
11563 } else if (/^mp4[av]$/i.test(track.codec)) {
11564 // we do not need anything but the streamDescriptor of the mp4a codecBox
11565 codecConfig = codecBox.subarray(28);
11566 codecConfigType = parseType_1(codecConfig.subarray(4, 8));
11567
11568 if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {
11569 track.codec += '.' + toHexString(codecConfig[19]); // this value is only a single digit
11570
11571 track.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');
11572 } else {
11573 // TODO: show a warning that we couldn't parse the codec
11574 // and are using the default
11575 track.codec = 'mp4a.40.2';
11576 }
11577 } else {
11578 // flac, opus, etc
11579 track.codec = track.codec.toLowerCase();
11580 }
11581 }
11582 }
11583
11584 var mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];
11585
11586 if (mdhd) {
11587 track.timescale = getTimescaleFromMediaHeader(mdhd);
11588 }
11589
11590 tracks.push(track);
11591 });
11592 return tracks;
11593 };
11594
11595 var probe$2 = {
11596 // export mp4 inspector's findBox and parseType for backwards compatibility
11597 findBox: findBox_1,
11598 parseType: parseType_1,
11599 timescale: timescale,
11600 startTime: startTime,
11601 compositionStartTime: compositionStartTime,
11602 videoTrackIds: getVideoTrackIds,
11603 tracks: getTracks,
11604 getTimescaleFromMediaHeader: getTimescaleFromMediaHeader
11605 };
11606
11607 var parsePid = function parsePid(packet) {
11608 var pid = packet[1] & 0x1f;
11609 pid <<= 8;
11610 pid |= packet[2];
11611 return pid;
11612 };
11613
11614 var parsePayloadUnitStartIndicator = function parsePayloadUnitStartIndicator(packet) {
11615 return !!(packet[1] & 0x40);
11616 };
11617
11618 var parseAdaptionField = function parseAdaptionField(packet) {
11619 var offset = 0; // if an adaptation field is present, its length is specified by the
11620 // fifth byte of the TS packet header. The adaptation field is
11621 // used to add stuffing to PES packets that don't fill a complete
11622 // TS packet, and to specify some forms of timing and control data
11623 // that we do not currently use.
11624
11625 if ((packet[3] & 0x30) >>> 4 > 0x01) {
11626 offset += packet[4] + 1;
11627 }
11628
11629 return offset;
11630 };
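// A minimal sketch of the TS-header helpers above on a hand-built 4-byte
// header: sync byte 0x47, payload unit start indicator set, PID 256, and a
// payload-only packet (no adaptation field).
var exampleTsHeader = function () {
  var header = new Uint8Array([0x47, 0x41, 0x00, 0x10]);
  return {
    pid: parsePid(header),                        // 256
    pusi: parsePayloadUnitStartIndicator(header), // true
    payloadOffset: 4 + parseAdaptionField(header) // 4
  };
};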
11631
11632 var parseType = function parseType(packet, pmtPid) {
11633 var pid = parsePid(packet);
11634
11635 if (pid === 0) {
11636 return 'pat';
11637 } else if (pid === pmtPid) {
11638 return 'pmt';
11639 } else if (pmtPid) {
11640 return 'pes';
11641 }
11642
11643 return null;
11644 };
11645
11646 var parsePat = function parsePat(packet) {
11647 var pusi = parsePayloadUnitStartIndicator(packet);
11648 var offset = 4 + parseAdaptionField(packet);
11649
11650 if (pusi) {
11651 offset += packet[offset] + 1;
11652 }
11653
11654 return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
11655 };
11656
11657 var parsePmt = function parsePmt(packet) {
11658 var programMapTable = {};
11659 var pusi = parsePayloadUnitStartIndicator(packet);
11660 var payloadOffset = 4 + parseAdaptionField(packet);
11661
11662 if (pusi) {
11663 payloadOffset += packet[payloadOffset] + 1;
11664 } // PMTs can be sent ahead of the time when they should actually
11665 // take effect. We don't believe this should ever be the case
11666 // for HLS but we'll ignore "forward" PMT declarations if we see
11667 // them. Future PMT declarations have the current_next_indicator
11668 // set to zero.
11669
11670
11671 if (!(packet[payloadOffset + 5] & 0x01)) {
11672 return;
11673 }
11674
11675 var sectionLength, tableEnd, programInfoLength; // the mapping table ends at the end of the current section
11676
11677 sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
11678 tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
11679 // long the program info descriptors are
11680
11681 programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11]; // advance the offset to the first entry in the mapping table
11682
11683 var offset = 12 + programInfoLength;
11684
11685 while (offset < tableEnd) {
11686 var i = payloadOffset + offset; // add an entry that maps the elementary_pid to the stream_type
11687
11688 programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i]; // move to the next table entry
11689 // skip past the elementary stream descriptors, if present
11690
11691 offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
11692 }
11693
11694 return programMapTable;
11695 };
11696
11697 var parsePesType = function parsePesType(packet, programMapTable) {
11698 var pid = parsePid(packet);
11699 var type = programMapTable[pid];
11700
11701 switch (type) {
11702 case streamTypes.H264_STREAM_TYPE:
11703 return 'video';
11704
11705 case streamTypes.ADTS_STREAM_TYPE:
11706 return 'audio';
11707
11708 case streamTypes.METADATA_STREAM_TYPE:
11709 return 'timed-metadata';
11710
11711 default:
11712 return null;
11713 }
11714 };
11715
11716 var parsePesTime = function parsePesTime(packet) {
11717 var pusi = parsePayloadUnitStartIndicator(packet);
11718
11719 if (!pusi) {
11720 return null;
11721 }
11722
11723 var offset = 4 + parseAdaptionField(packet);
11724
11725 if (offset >= packet.byteLength) {
11726 // From the H 222.0 MPEG-TS spec
11727 // "For transport stream packets carrying PES packets, stuffing is needed when there
11728 // is insufficient PES packet data to completely fill the transport stream packet
11729 // payload bytes. Stuffing is accomplished by defining an adaptation field longer than
11730 // the sum of the lengths of the data elements in it, so that the payload bytes
11731 // remaining after the adaptation field exactly accommodates the available PES packet
11732 // data."
11733 //
11734 // If the offset is >= the length of the packet, then the packet contains no data
11735 // and instead is just adaptation field stuffing bytes
11736 return null;
11737 }
11738
11739 var pes = null;
11740 var ptsDtsFlags; // PES packets may be annotated with a PTS value, or a PTS value
11741 // and a DTS value. Determine what combination of values is
11742 // available to work with.
11743
11744 ptsDtsFlags = packet[offset + 7]; // PTS and DTS are normally stored as a 33-bit number. JavaScript
11745 // performs all bitwise operations on 32-bit integers, but it
11746 // supports a much greater integer range (52 bits) using standard
11747 // mathematical operations.
11748 // We construct a 31-bit value using bitwise operators over the 31
11749 // most significant bits and then multiply by 4 (equal to a left-shift
11750 // of 2) before we add the final 2 least significant bits of the
11751 // timestamp (equal to an OR.)
11752
11753 if (ptsDtsFlags & 0xC0) {
11754 pes = {}; // the PTS and DTS are not written out directly. For information
11755 // on how they are encoded, see
11756 // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
11757
11758 pes.pts = (packet[offset + 9] & 0x0E) << 27 | (packet[offset + 10] & 0xFF) << 20 | (packet[offset + 11] & 0xFE) << 12 | (packet[offset + 12] & 0xFF) << 5 | (packet[offset + 13] & 0xFE) >>> 3;
11759 pes.pts *= 4; // Left shift by 2
11760
11761 pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
11762
11763 pes.dts = pes.pts;
11764
11765 if (ptsDtsFlags & 0x40) {
11766 pes.dts = (packet[offset + 14] & 0x0E) << 27 | (packet[offset + 15] & 0xFF) << 20 | (packet[offset + 16] & 0xFE) << 12 | (packet[offset + 17] & 0xFF) << 5 | (packet[offset + 18] & 0xFE) >>> 3;
11767 pes.dts *= 4; // Left shift by 2
11768
11769 pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
11770 }
11771 }
11772
11773 return pes;
11774 };
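// A worked sketch of why the PES timestamp math above multiplies by 4 rather
// than shifting: a 33-bit timestamp overflows JavaScript's 32-bit bitwise
// operators, so the top 31 bits are scaled arithmetically before the final
// two bits are added. The values are hypothetical.
var examplePtsMath = function () {
  var topThirtyOneBits = 0x40000000; // highest of the 31 assembled bits set
  var bottomTwoBits = 0x03;
  return topThirtyOneBits * 4 + bottomTwoBits; // 4294967299, beyond 2^32
};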
11775
11776 var parseNalUnitType = function parseNalUnitType(type) {
11777 switch (type) {
11778 case 0x05:
11779 return 'slice_layer_without_partitioning_rbsp_idr';
11780
11781 case 0x06:
11782 return 'sei_rbsp';
11783
11784 case 0x07:
11785 return 'seq_parameter_set_rbsp';
11786
11787 case 0x08:
11788 return 'pic_parameter_set_rbsp';
11789
11790 case 0x09:
11791 return 'access_unit_delimiter_rbsp';
11792
11793 default:
11794 return null;
11795 }
11796 };
11797
11798 var videoPacketContainsKeyFrame = function videoPacketContainsKeyFrame(packet) {
11799 var offset = 4 + parseAdaptionField(packet);
11800 var frameBuffer = packet.subarray(offset);
11801 var frameI = 0;
11802 var frameSyncPoint = 0;
11803 var foundKeyFrame = false;
11804 var nalType; // advance the sync point to a NAL start, if necessary
11805
11806 for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
11807 if (frameBuffer[frameSyncPoint + 2] === 1) {
11808 // the sync point is properly aligned
11809 frameI = frameSyncPoint + 5;
11810 break;
11811 }
11812 }
11813
11814 while (frameI < frameBuffer.byteLength) {
11815 // look at the current byte to determine if we've hit the end of
11816 // a NAL unit boundary
11817 switch (frameBuffer[frameI]) {
11818 case 0:
11819 // skip past non-sync sequences
11820 if (frameBuffer[frameI - 1] !== 0) {
11821 frameI += 2;
11822 break;
11823 } else if (frameBuffer[frameI - 2] !== 0) {
11824 frameI++;
11825 break;
11826 }
11827
11828 if (frameSyncPoint + 3 !== frameI - 2) {
11829 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
11830
11831 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
11832 foundKeyFrame = true;
11833 }
11834 } // drop trailing zeroes
11835
11836
11837 do {
11838 frameI++;
11839 } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
11840
11841 frameSyncPoint = frameI - 2;
11842 frameI += 3;
11843 break;
11844
11845 case 1:
11846 // skip past non-sync sequences
11847 if (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {
11848 frameI += 3;
11849 break;
11850 }
11851
11852 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
11853
11854 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
11855 foundKeyFrame = true;
11856 }
11857
11858 frameSyncPoint = frameI - 2;
11859 frameI += 3;
11860 break;
11861
11862 default:
11863 // the current byte isn't a one or zero, so it cannot be part
11864 // of a sync sequence
11865 frameI += 3;
11866 break;
11867 }
11868 }
11869
11870 frameBuffer = frameBuffer.subarray(frameSyncPoint);
11871 frameI -= frameSyncPoint;
11872 frameSyncPoint = 0; // parse the final nal
11873
11874 if (frameBuffer && frameBuffer.byteLength > 3) {
11875 nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
11876
11877 if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
11878 foundKeyFrame = true;
11879 }
11880 }
11881
11882 return foundKeyFrame;
11883 };
11884
11885 var probe$1 = {
11886 parseType: parseType,
11887 parsePat: parsePat,
11888 parsePmt: parsePmt,
11889 parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
11890 parsePesType: parsePesType,
11891 parsePesTime: parsePesTime,
11892 videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
11893 };
11894 var handleRollover = timestampRolloverStream.handleRollover;
11895 var probe = {};
11896 probe.ts = probe$1;
11897 probe.aac = utils;
11898 var ONE_SECOND_IN_TS = clock.ONE_SECOND_IN_TS;
11899 var MP2T_PACKET_LENGTH = 188,
11900 // bytes
11901 SYNC_BYTE = 0x47;
11902 /**
11903 * walks through segment data looking for pat and pmt packets to parse out
11904 * program map table information
11905 */
11906
11907 var parsePsi_ = function parsePsi_(bytes, pmt) {
11908 var startIndex = 0,
11909 endIndex = MP2T_PACKET_LENGTH,
11910 packet,
11911 type;
11912
11913 while (endIndex < bytes.byteLength) {
11914 // Look for a pair of start and end sync bytes in the data.
11915 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
11916 // We found a packet
11917 packet = bytes.subarray(startIndex, endIndex);
11918 type = probe.ts.parseType(packet, pmt.pid);
11919
11920 switch (type) {
11921 case 'pat':
11922 pmt.pid = probe.ts.parsePat(packet);
11923 break;
11924
11925 case 'pmt':
11926 var table = probe.ts.parsePmt(packet);
11927 pmt.table = pmt.table || {};
11928 Object.keys(table).forEach(function (key) {
11929 pmt.table[key] = table[key];
11930 });
11931 break;
11932 }
11933
11934 startIndex += MP2T_PACKET_LENGTH;
11935 endIndex += MP2T_PACKET_LENGTH;
11936 continue;
11937 } // If we get here, we have somehow become de-synchronized and we need to step
11938 // forward one byte at a time until we find a pair of sync bytes that denote
11939 // a packet
11940
11941
11942 startIndex++;
11943 endIndex++;
11944 }
11945 };
11946 /**
11947 * walks through the segment data from the start and end to get timing information
11948 * for the first and last audio pes packets
11949 */
11950
11951
11952 var parseAudioPes_ = function parseAudioPes_(bytes, pmt, result) {
11953 var startIndex = 0,
11954 endIndex = MP2T_PACKET_LENGTH,
11955 packet,
11956 type,
11957 pesType,
11958 pusi,
11959 parsed;
11960 var endLoop = false; // Start walking from start of segment to get first audio packet
11961
11962 while (endIndex <= bytes.byteLength) {
11963 // Look for a pair of start and end sync bytes in the data.
11964 if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
11965 // We found a packet
11966 packet = bytes.subarray(startIndex, endIndex);
11967 type = probe.ts.parseType(packet, pmt.pid);
11968
11969 switch (type) {
11970 case 'pes':
11971 pesType = probe.ts.parsePesType(packet, pmt.table);
11972 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
11973
11974 if (pesType === 'audio' && pusi) {
11975 parsed = probe.ts.parsePesTime(packet);
11976
11977 if (parsed) {
11978 parsed.type = 'audio';
11979 result.audio.push(parsed);
11980 endLoop = true;
11981 }
11982 }
11983
11984 break;
11985 }
11986
11987 if (endLoop) {
11988 break;
11989 }
11990
11991 startIndex += MP2T_PACKET_LENGTH;
11992 endIndex += MP2T_PACKET_LENGTH;
11993 continue;
11994 } // If we get here, we have somehow become de-synchronized and we need to step
11995 // forward one byte at a time until we find a pair of sync bytes that denote
11996 // a packet
11997
11998
11999 startIndex++;
12000 endIndex++;
12001 } // Start walking from end of segment to get last audio packet
12002
12003
12004 endIndex = bytes.byteLength;
12005 startIndex = endIndex - MP2T_PACKET_LENGTH;
12006 endLoop = false;
12007
12008 while (startIndex >= 0) {
12009 // Look for a pair of start and end sync bytes in the data.
12010 if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
12011 // We found a packet
12012 packet = bytes.subarray(startIndex, endIndex);
12013 type = probe.ts.parseType(packet, pmt.pid);
12014
12015 switch (type) {
12016 case 'pes':
12017 pesType = probe.ts.parsePesType(packet, pmt.table);
12018 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
12019
12020 if (pesType === 'audio' && pusi) {
12021 parsed = probe.ts.parsePesTime(packet);
12022
12023 if (parsed) {
12024 parsed.type = 'audio';
12025 result.audio.push(parsed);
12026 endLoop = true;
12027 }
12028 }
12029
12030 break;
12031 }
12032
12033 if (endLoop) {
12034 break;
12035 }
12036
12037 startIndex -= MP2T_PACKET_LENGTH;
12038 endIndex -= MP2T_PACKET_LENGTH;
12039 continue;
12040 } // If we get here, we have somehow become de-synchronized and we need to step
12041 // backward one byte at a time until we find a pair of sync bytes that denote
12042 // a packet
12043
12044
12045 startIndex--;
12046 endIndex--;
12047 }
12048 };
12049 /**
12050 * walks through the segment data from the start and end to get timing information
12051 * for the first and last video pes packets as well as timing information for the first
12052 * key frame.
12053 */
12054
12055
12056 var parseVideoPes_ = function parseVideoPes_(bytes, pmt, result) {
12057 var startIndex = 0,
12058 endIndex = MP2T_PACKET_LENGTH,
12059 packet,
12060 type,
12061 pesType,
12062 pusi,
12063 parsed,
12064 frame,
12065 i,
12066 pes;
12067 var endLoop = false;
12068 var currentFrame = {
12069 data: [],
12070 size: 0
12071 }; // Start walking from start of segment to get first video packet
12072
12073 while (endIndex < bytes.byteLength) {
12074 // Look for a pair of start and end sync bytes in the data.
12075 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
12076 // We found a packet
12077 packet = bytes.subarray(startIndex, endIndex);
12078 type = probe.ts.parseType(packet, pmt.pid);
12079
12080 switch (type) {
12081 case 'pes':
12082 pesType = probe.ts.parsePesType(packet, pmt.table);
12083 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
12084
12085 if (pesType === 'video') {
12086 if (pusi && !endLoop) {
12087 parsed = probe.ts.parsePesTime(packet);
12088
12089 if (parsed) {
12090 parsed.type = 'video';
12091 result.video.push(parsed);
12092 endLoop = true;
12093 }
12094 }
12095
12096 if (!result.firstKeyFrame) {
12097 if (pusi) {
12098 if (currentFrame.size !== 0) {
12099 frame = new Uint8Array(currentFrame.size);
12100 i = 0;
12101
12102 while (currentFrame.data.length) {
12103 pes = currentFrame.data.shift();
12104 frame.set(pes, i);
12105 i += pes.byteLength;
12106 }
12107
12108 if (probe.ts.videoPacketContainsKeyFrame(frame)) {
12109 var firstKeyFrame = probe.ts.parsePesTime(frame); // PTS/DTS may not be available. Simply *not* setting
12110 // the keyframe seems to work fine with HLS playback
12111 // and is definitely preferable to a crash with a TypeError...
12112
12113 if (firstKeyFrame) {
12114 result.firstKeyFrame = firstKeyFrame;
12115 result.firstKeyFrame.type = 'video';
12116 } else {
12117 // eslint-disable-next-line
12118 console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
12119 }
12120 }
12121
12122 currentFrame.size = 0;
12123 }
12124 }
12125
12126 currentFrame.data.push(packet);
12127 currentFrame.size += packet.byteLength;
12128 }
12129 }
12130
12131 break;
12132 }
12133
12134 if (endLoop && result.firstKeyFrame) {
12135 break;
12136 }
12137
12138 startIndex += MP2T_PACKET_LENGTH;
12139 endIndex += MP2T_PACKET_LENGTH;
12140 continue;
12141 } // If we get here, we have somehow become de-synchronized and we need to step
12142 // forward one byte at a time until we find a pair of sync bytes that denote
12143 // a packet
12144
12145
12146 startIndex++;
12147 endIndex++;
12148 } // Start walking from end of segment to get last video packet
12149
12150
12151 endIndex = bytes.byteLength;
12152 startIndex = endIndex - MP2T_PACKET_LENGTH;
12153 endLoop = false;
12154
12155 while (startIndex >= 0) {
12156 // Look for a pair of start and end sync bytes in the data.
12157 if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
12158 // We found a packet
12159 packet = bytes.subarray(startIndex, endIndex);
12160 type = probe.ts.parseType(packet, pmt.pid);
12161
12162 switch (type) {
12163 case 'pes':
12164 pesType = probe.ts.parsePesType(packet, pmt.table);
12165 pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
12166
12167 if (pesType === 'video' && pusi) {
12168 parsed = probe.ts.parsePesTime(packet);
12169
12170 if (parsed) {
12171 parsed.type = 'video';
12172 result.video.push(parsed);
12173 endLoop = true;
12174 }
12175 }
12176
12177 break;
12178 }
12179
12180 if (endLoop) {
12181 break;
12182 }
12183
12184 startIndex -= MP2T_PACKET_LENGTH;
12185 endIndex -= MP2T_PACKET_LENGTH;
12186 continue;
12187 } // If we get here, we have somehow become de-synchronized and we need to step
12188 // backward one byte at a time until we find a pair of sync bytes that denote
12189 // a packet
12190
12191
12192 startIndex--;
12193 endIndex--;
12194 }
12195 };
12196 /**
12197 * Adjusts the timestamp information for the segment to account for
12198 * rollover and converts to seconds based on the PES packet timescale (90kHz clock)
12199 */
12200
12201
12202 var adjustTimestamp_ = function adjustTimestamp_(segmentInfo, baseTimestamp) {
12203 if (segmentInfo.audio && segmentInfo.audio.length) {
12204 var audioBaseTimestamp = baseTimestamp;
12205
12206 if (typeof audioBaseTimestamp === 'undefined' || isNaN(audioBaseTimestamp)) {
12207 audioBaseTimestamp = segmentInfo.audio[0].dts;
12208 }
12209
12210 segmentInfo.audio.forEach(function (info) {
12211 info.dts = handleRollover(info.dts, audioBaseTimestamp);
12212 info.pts = handleRollover(info.pts, audioBaseTimestamp); // time in seconds
12213
12214 info.dtsTime = info.dts / ONE_SECOND_IN_TS;
12215 info.ptsTime = info.pts / ONE_SECOND_IN_TS;
12216 });
12217 }
12218
12219 if (segmentInfo.video && segmentInfo.video.length) {
12220 var videoBaseTimestamp = baseTimestamp;
12221
12222 if (typeof videoBaseTimestamp === 'undefined' || isNaN(videoBaseTimestamp)) {
12223 videoBaseTimestamp = segmentInfo.video[0].dts;
12224 }
12225
12226 segmentInfo.video.forEach(function (info) {
12227 info.dts = handleRollover(info.dts, videoBaseTimestamp);
12228 info.pts = handleRollover(info.pts, videoBaseTimestamp); // time in seconds
12229
12230 info.dtsTime = info.dts / ONE_SECOND_IN_TS;
12231 info.ptsTime = info.pts / ONE_SECOND_IN_TS;
12232 });
12233
12234 if (segmentInfo.firstKeyFrame) {
12235 var frame = segmentInfo.firstKeyFrame;
12236 frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
12237 frame.pts = handleRollover(frame.pts, videoBaseTimestamp); // time in seconds
12238
12239 frame.dtsTime = frame.dts / ONE_SECOND_IN_TS;
12240 frame.ptsTime = frame.pts / ONE_SECOND_IN_TS;
12241 }
12242 }
12243 };
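/**
 * For illustration only: PES timestamps are 33-bit values on a 90kHz clock
 * (ONE_SECOND_IN_TS === 90000), so they wrap roughly every 26.5 hours
 * (2^33 / 90000 ticks). handleRollover, used above, unwraps a value relative
 * to a reference timestamp; division by 90000 then yields seconds:
 *
 *   // a dts of 900000 ticks is 900000 / 90000 = 10 seconds
 *   var dtsTime = 900000 / 90000; // => 10
 */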
12244 /**
12245 * inspects the AAC data stream for start and end time information
12246 */
12247
12248
12249 var inspectAac_ = function inspectAac_(bytes) {
12250 var endLoop = false,
12251 audioCount = 0,
12252 sampleRate = null,
12253 timestamp = null,
12254 frameSize = 0,
12255 byteIndex = 0,
12256 packet;
12257
12258 while (bytes.length - byteIndex >= 3) {
12259 var type = probe.aac.parseType(bytes, byteIndex);
12260
12261 switch (type) {
12262 case 'timed-metadata':
12263 // Exit early because we don't have enough to parse
12264 // the ID3 tag header
12265 if (bytes.length - byteIndex < 10) {
12266 endLoop = true;
12267 break;
12268 }
12269
12270 frameSize = probe.aac.parseId3TagSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
12271 // to emit a full packet
12272
12273 if (byteIndex + frameSize > bytes.length) {
12274 endLoop = true;
12275 break;
12276 }
12277
12278 if (timestamp === null) {
12279 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
12280 timestamp = probe.aac.parseAacTimestamp(packet);
12281 }
12282
12283 byteIndex += frameSize;
12284 break;
12285
12286 case 'audio':
12287 // Exit early because we don't have enough to parse
12288 // the ADTS frame header
12289 if (bytes.length - byteIndex < 7) {
12290 endLoop = true;
12291 break;
12292 }
12293
12294 frameSize = probe.aac.parseAdtsSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
12295 // to emit a full packet
12296
12297 if (byteIndex + frameSize > bytes.length) {
12298 endLoop = true;
12299 break;
12300 }
12301
12302 if (sampleRate === null) {
12303 packet = bytes.subarray(byteIndex, byteIndex + frameSize);
12304 sampleRate = probe.aac.parseSampleRate(packet);
12305 }
12306
12307 audioCount++;
12308 byteIndex += frameSize;
12309 break;
12310
12311 default:
12312 byteIndex++;
12313 break;
12314 }
12315
12316 if (endLoop) {
12317 return null;
12318 }
12319 }
12320
12321 if (sampleRate === null || timestamp === null) {
12322 return null;
12323 }
12324
12325 var audioTimescale = ONE_SECOND_IN_TS / sampleRate;
12326 var result = {
12327 audio: [{
12328 type: 'audio',
12329 dts: timestamp,
12330 pts: timestamp
12331 }, {
12332 type: 'audio',
12333 dts: timestamp + audioCount * 1024 * audioTimescale,
12334 pts: timestamp + audioCount * 1024 * audioTimescale
12335 }]
12336 };
12337 return result;
12338 };
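/**
 * For illustration only: each ADTS frame carries 1024 PCM samples, so the end
 * timestamp above advances by audioCount * 1024 samples converted to 90kHz
 * ticks. For example, with sampleRate = 48000 and audioCount = 94 frames:
 *
 *   var audioTimescale = 90000 / 48000;          // 1.875 ticks per sample
 *   var duration = 94 * 1024 * audioTimescale;   // 180480 ticks
 *   var seconds = duration / 90000;              // ~2.005 seconds
 */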
12339 /**
12340 * inspects the transport stream segment data for start and end time information
12341 * of the audio and video tracks (when present) as well as the first key frame's
12342 * start time.
12343 */
12344
12345
12346 var inspectTs_ = function inspectTs_(bytes) {
12347 var pmt = {
12348 pid: null,
12349 table: null
12350 };
12351 var result = {};
12352 parsePsi_(bytes, pmt);
12353
12354 for (var pid in pmt.table) {
12355 if (pmt.table.hasOwnProperty(pid)) {
12356 var type = pmt.table[pid];
12357
12358 switch (type) {
12359 case streamTypes.H264_STREAM_TYPE:
12360 result.video = [];
12361 parseVideoPes_(bytes, pmt, result);
12362
12363 if (result.video.length === 0) {
12364 delete result.video;
12365 }
12366
12367 break;
12368
12369 case streamTypes.ADTS_STREAM_TYPE:
12370 result.audio = [];
12371 parseAudioPes_(bytes, pmt, result);
12372
12373 if (result.audio.length === 0) {
12374 delete result.audio;
12375 }
12376
12377 break;
12378 }
12379 }
12380 }
12381
12382 return result;
12383 };
12384 /**
12385 * Inspects segment byte data and returns an object with start and end timing information
12386 *
12387 * @param {Uint8Array} bytes The segment byte data
12388 * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
12389 * timestamps for rollover. This value must be on the 90kHz clock.
12390 * @return {Object} Object containing start and end frame timing info of segment.
12391 */
12392
12393
12394 var inspect = function inspect(bytes, baseTimestamp) {
12395 var isAacData = probe.aac.isLikelyAacData(bytes);
12396 var result;
12397
12398 if (isAacData) {
12399 result = inspectAac_(bytes);
12400 } else {
12401 result = inspectTs_(bytes);
12402 }
12403
12404 if (!result || !result.audio && !result.video) {
12405 return null;
12406 }
12407
12408 adjustTimestamp_(result, baseTimestamp);
12409 return result;
12410 };
12411
12412 var tsInspector = {
12413 inspect: inspect,
12414 parseAudioPes_: parseAudioPes_
12415 };
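/**
 * For illustration only, a sketch of how the inspector is consumed (the
 * worker's probeTs handler below does effectively this; `segmentBytes` and
 * `baseTimestamp90k` are hypothetical inputs):
 *
 *   var timeInfo = tsInspector.inspect(segmentBytes, baseTimestamp90k);
 *   // timeInfo, when parsable, looks like:
 *   // {
 *   //   video: [{ type: 'video', dts, pts, dtsTime, ptsTime }, ...],
 *   //   audio: [{ type: 'audio', dts, pts, dtsTime, ptsTime }, ...],
 *   //   firstKeyFrame: { type: 'video', dts, pts, dtsTime, ptsTime }
 *   // }
 *   // or null when no timing information could be recovered.
 */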
12416 /* global self */
12417
12418 /**
12419 * Re-emits transmuxer events by converting them into messages to the
12420 * world outside the worker.
12421 *
12422 * @param {Object} transmuxer the transmuxer to wire events on
12423 * @private
12424 */
12425
12426 var wireTransmuxerEvents = function wireTransmuxerEvents(self, transmuxer) {
12427 transmuxer.on('data', function (segment) {
12428 // transfer ownership of the underlying ArrayBuffer
12429 // instead of doing a copy to save memory
12430 // ArrayBuffers are transferable but generic TypedArrays are not
12431 // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
12432 var initArray = segment.initSegment;
12433 segment.initSegment = {
12434 data: initArray.buffer,
12435 byteOffset: initArray.byteOffset,
12436 byteLength: initArray.byteLength
12437 };
12438 var typedArray = segment.data;
12439 segment.data = typedArray.buffer;
12440 self.postMessage({
12441 action: 'data',
12442 segment: segment,
12443 byteOffset: typedArray.byteOffset,
12444 byteLength: typedArray.byteLength
12445 }, [segment.data]);
12446 });
12447 transmuxer.on('done', function (data) {
12448 self.postMessage({
12449 action: 'done'
12450 });
12451 });
12452 transmuxer.on('gopInfo', function (gopInfo) {
12453 self.postMessage({
12454 action: 'gopInfo',
12455 gopInfo: gopInfo
12456 });
12457 });
12458 transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {
12459 var videoSegmentTimingInfo = {
12460 start: {
12461 decode: clock.videoTsToSeconds(timingInfo.start.dts),
12462 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
12463 },
12464 end: {
12465 decode: clock.videoTsToSeconds(timingInfo.end.dts),
12466 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
12467 },
12468 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
12469 };
12470
12471 if (timingInfo.prependedContentDuration) {
12472 videoSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
12473 }
12474
12475 self.postMessage({
12476 action: 'videoSegmentTimingInfo',
12477 videoSegmentTimingInfo: videoSegmentTimingInfo
12478 });
12479 });
12480 transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {
12481 // Note that all times for [audio/video]SegmentTimingInfo events are in video clock
12482 var audioSegmentTimingInfo = {
12483 start: {
12484 decode: clock.videoTsToSeconds(timingInfo.start.dts),
12485 presentation: clock.videoTsToSeconds(timingInfo.start.pts)
12486 },
12487 end: {
12488 decode: clock.videoTsToSeconds(timingInfo.end.dts),
12489 presentation: clock.videoTsToSeconds(timingInfo.end.pts)
12490 },
12491 baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
12492 };
12493
12494 if (timingInfo.prependedContentDuration) {
12495 audioSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
12496 }
12497
12498 self.postMessage({
12499 action: 'audioSegmentTimingInfo',
12500 audioSegmentTimingInfo: audioSegmentTimingInfo
12501 });
12502 });
12503 transmuxer.on('id3Frame', function (id3Frame) {
12504 self.postMessage({
12505 action: 'id3Frame',
12506 id3Frame: id3Frame
12507 });
12508 });
12509 transmuxer.on('caption', function (caption) {
12510 self.postMessage({
12511 action: 'caption',
12512 caption: caption
12513 });
12514 });
12515 transmuxer.on('trackinfo', function (trackInfo) {
12516 self.postMessage({
12517 action: 'trackinfo',
12518 trackInfo: trackInfo
12519 });
12520 });
12521 transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
12522 // convert to video TS since we prioritize video time over audio
12523 self.postMessage({
12524 action: 'audioTimingInfo',
12525 audioTimingInfo: {
12526 start: clock.videoTsToSeconds(audioTimingInfo.start),
12527 end: clock.videoTsToSeconds(audioTimingInfo.end)
12528 }
12529 });
12530 });
12531 transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
12532 self.postMessage({
12533 action: 'videoTimingInfo',
12534 videoTimingInfo: {
12535 start: clock.videoTsToSeconds(videoTimingInfo.start),
12536 end: clock.videoTsToSeconds(videoTimingInfo.end)
12537 }
12538 });
12539 });
12540 transmuxer.on('log', function (log) {
12541 self.postMessage({
12542 action: 'log',
12543 log: log
12544 });
12545 });
12546 };
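/**
 * For illustration only: the handlers above avoid copying segment bytes by
 * transferring the underlying ArrayBuffer out of the worker. A minimal
 * version of that pattern (names are hypothetical):
 *
 *   var payload = new Uint8Array([1, 2, 3]);
 *   self.postMessage({
 *     action: 'data',
 *     data: payload.buffer,
 *     byteOffset: payload.byteOffset,
 *     byteLength: payload.byteLength
 *   }, [payload.buffer]); // second argument lists the transferables
 *   // after this call, payload.buffer is detached (byteLength 0) on this
 *   // side, and the receiver rebuilds the view:
 *   // new Uint8Array(e.data.data, e.data.byteOffset, e.data.byteLength)
 */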
12547 /**
12548 * All incoming messages route through this hash. If no function exists
12549 * to handle an incoming message, then we ignore the message.
12550 *
12551 * @class MessageHandlers
12552 * @param {Object} options the options to initialize with
12553 */
12554
12555
12556 var MessageHandlers = /*#__PURE__*/function () {
12557 function MessageHandlers(self, options) {
12558 this.options = options || {};
12559 this.self = self;
12560 this.init();
12561 }
12562 /**
12563 * initialize our web worker and wire all the events.
12564 */
12565
12566
12567 var _proto = MessageHandlers.prototype;
12568
12569 _proto.init = function init() {
12570 if (this.transmuxer) {
12571 this.transmuxer.dispose();
12572 }
12573
12574 this.transmuxer = new transmuxer.Transmuxer(this.options);
12575 wireTransmuxerEvents(this.self, this.transmuxer);
12576 };
12577
12578 _proto.pushMp4Captions = function pushMp4Captions(data) {
12579 if (!this.captionParser) {
12580 this.captionParser = new captionParser();
12581 this.captionParser.init();
12582 }
12583
12584 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
12585 var parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);
12586 this.self.postMessage({
12587 action: 'mp4Captions',
12588 captions: parsed && parsed.captions || [],
12589 logs: parsed && parsed.logs || [],
12590 data: segment.buffer
12591 }, [segment.buffer]);
12592 };
12593
12594 _proto.probeMp4StartTime = function probeMp4StartTime(_ref) {
12595 var timescales = _ref.timescales,
12596 data = _ref.data;
12597 var startTime = probe$2.startTime(timescales, data);
12598 this.self.postMessage({
12599 action: 'probeMp4StartTime',
12600 startTime: startTime,
12601 data: data
12602 }, [data.buffer]);
12603 };
12604
12605 _proto.probeMp4Tracks = function probeMp4Tracks(_ref2) {
12606 var data = _ref2.data;
12607 var tracks = probe$2.tracks(data);
12608 this.self.postMessage({
12609 action: 'probeMp4Tracks',
12610 tracks: tracks,
12611 data: data
12612 }, [data.buffer]);
12613 }
12614 /**
12615 * Probe an mpeg2-ts segment to determine the start time of the segment in its
12616 * internal "media time," as well as whether it contains video and/or audio.
12617 *
12618 * @private
12619 * @param {Uint8Array} bytes - segment bytes
12620 * @param {number} baseStartTime
12621 * Relative reference timestamp used when adjusting frame timestamps for rollover.
12622 * This value should be in seconds, as it's converted to a 90kHz clock within the
12623 * function body.
12624 * @return {Object} The start time of the current segment in "media time" as well as
12625 * whether it contains video and/or audio
12626 */
12627 ;
12628
12629 _proto.probeTs = function probeTs(_ref3) {
12630 var data = _ref3.data,
12631 baseStartTime = _ref3.baseStartTime;
12632 var tsStartTime = typeof baseStartTime === 'number' && !isNaN(baseStartTime) ? baseStartTime * clock.ONE_SECOND_IN_TS : void 0;
12633 var timeInfo = tsInspector.inspect(data, tsStartTime);
12634 var result = null;
12635
12636 if (timeInfo) {
12637 result = {
12638 // each type's time info comes back as an array of 2 times, start and end
12639 hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
12640 hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
12641 };
12642
12643 if (result.hasVideo) {
12644 result.videoStart = timeInfo.video[0].ptsTime;
12645 }
12646
12647 if (result.hasAudio) {
12648 result.audioStart = timeInfo.audio[0].ptsTime;
12649 }
12650 }
12651
12652 this.self.postMessage({
12653 action: 'probeTs',
12654 result: result,
12655 data: data
12656 }, [data.buffer]);
12657 };
12658
12659 _proto.clearAllMp4Captions = function clearAllMp4Captions() {
12660 if (this.captionParser) {
12661 this.captionParser.clearAllCaptions();
12662 }
12663 };
12664
12665 _proto.clearParsedMp4Captions = function clearParsedMp4Captions() {
12666 if (this.captionParser) {
12667 this.captionParser.clearParsedCaptions();
12668 }
12669 }
12670 /**
12671 * Adds data (a ts segment) to the start of the transmuxer pipeline for
12672 * processing.
12673 *
12674 * @param {ArrayBuffer} data data to push into the muxer
12675 */
12676 ;
12677
12678 _proto.push = function push(data) {
12679 // Cast array buffer to correct type for transmuxer
12680 var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
12681 this.transmuxer.push(segment);
12682 }
12683 /**
12684 * Recreate the transmuxer so that the next segment added via `push`
12685 * starts with a fresh transmuxer.
12686 */
12687 ;
12688
12689 _proto.reset = function reset() {
12690 this.transmuxer.reset();
12691 }
12692 /**
12693 * Set the value that will be used as the `baseMediaDecodeTime` time for the
12694 * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
12695 * set relative to the first based on the PTS values.
12696 *
12697 * @param {Object} data used to set the timestamp offset in the muxer
12698 */
12699 ;
12700
12701 _proto.setTimestampOffset = function setTimestampOffset(data) {
12702 var timestampOffset = data.timestampOffset || 0;
12703 this.transmuxer.setBaseMediaDecodeTime(Math.round(clock.secondsToVideoTs(timestampOffset)));
12704 };
12705
12706 _proto.setAudioAppendStart = function setAudioAppendStart(data) {
12707 this.transmuxer.setAudioAppendStart(Math.ceil(clock.secondsToVideoTs(data.appendStart)));
12708 };
12709
12710 _proto.setRemux = function setRemux(data) {
12711 this.transmuxer.setRemux(data.remux);
12712 }
12713 /**
12714 * Forces the pipeline to finish processing the last segment and emit its
12715 * results.
12716 *
12717 * @param {Object} data event data, not really used
12718 */
12719 ;
12720
12721 _proto.flush = function flush(data) {
12722 this.transmuxer.flush(); // transmuxed done action is fired after both audio/video pipelines are flushed
12723
12724 self.postMessage({
12725 action: 'done',
12726 type: 'transmuxed'
12727 });
12728 };
12729
12730 _proto.endTimeline = function endTimeline() {
12731 this.transmuxer.endTimeline(); // transmuxed endedtimeline action is fired after both audio/video pipelines end their
12732 // timelines
12733
12734 self.postMessage({
12735 action: 'endedtimeline',
12736 type: 'transmuxed'
12737 });
12738 };
12739
12740 _proto.alignGopsWith = function alignGopsWith(data) {
12741 this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
12742 };
12743
12744 return MessageHandlers;
12745 }();
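/**
 * For illustration only: from the main thread, the worker above is driven by
 * posting { action } messages that map onto MessageHandlers methods, e.g.:
 *
 *   worker.postMessage({ action: 'init', options: {} });
 *   worker.postMessage({ action: 'setTimestampOffset', timestampOffset: 10 });
 *   worker.postMessage(
 *     { action: 'push', data: buffer, byteOffset: 0, byteLength: buffer.byteLength },
 *     [buffer] // transfer the segment bytes instead of copying them
 *   );
 *   worker.postMessage({ action: 'flush' });
 */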
12746 /**
12747 * Our web worker interface so that things can talk to mux.js
12748 * that will be running in a web worker. The scope is passed to this by
12749 * webworkify.
12750 *
12751 * @param {Object} self the scope for the web worker
12752 */
12753
12754
12755 self.onmessage = function (event) {
12756 if (event.data.action === 'init' && event.data.options) {
12757 this.messageHandlers = new MessageHandlers(self, event.data.options);
12758 return;
12759 }
12760
12761 if (!this.messageHandlers) {
12762 this.messageHandlers = new MessageHandlers(self);
12763 }
12764
12765 if (event.data && event.data.action && event.data.action !== 'init') {
12766 if (this.messageHandlers[event.data.action]) {
12767 this.messageHandlers[event.data.action](event.data);
12768 }
12769 }
12770 };
12771}));
12772var TransmuxWorker = factory(workerCode$1);
12773/* rollup-plugin-worker-factory end for worker!/Users/bcasey/Projects/videojs-http-streaming/src/transmuxer-worker.js */
12774
12775var handleData_ = function handleData_(event, transmuxedData, callback) {
12776 var _event$data$segment = event.data.segment,
12777 type = _event$data$segment.type,
12778 initSegment = _event$data$segment.initSegment,
12779 captions = _event$data$segment.captions,
12780 captionStreams = _event$data$segment.captionStreams,
12781 metadata = _event$data$segment.metadata,
12782 videoFrameDtsTime = _event$data$segment.videoFrameDtsTime,
12783 videoFramePtsTime = _event$data$segment.videoFramePtsTime;
12784 transmuxedData.buffer.push({
12785 captions: captions,
12786 captionStreams: captionStreams,
12787 metadata: metadata
12788 });
12789 var boxes = event.data.segment.boxes || {
12790 data: event.data.segment.data
12791 };
12792 var result = {
12793 type: type,
12794 // cast ArrayBuffer to TypedArray
12795 data: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),
12796 initSegment: new Uint8Array(initSegment.data, initSegment.byteOffset, initSegment.byteLength)
12797 };
12798
12799 if (typeof videoFrameDtsTime !== 'undefined') {
12800 result.videoFrameDtsTime = videoFrameDtsTime;
12801 }
12802
12803 if (typeof videoFramePtsTime !== 'undefined') {
12804 result.videoFramePtsTime = videoFramePtsTime;
12805 }
12806
12807 callback(result);
12808};
12809var handleDone_ = function handleDone_(_ref) {
12810 var transmuxedData = _ref.transmuxedData,
12811 callback = _ref.callback;
12812 // Previously we only returned data on data events,
12813 // not on done events. Clear out the buffer to keep that consistent.
12814 transmuxedData.buffer = []; // all buffers should have been flushed from the muxer, so start processing anything we
12815 // have received
12816
12817 callback(transmuxedData);
12818};
12819var handleGopInfo_ = function handleGopInfo_(event, transmuxedData) {
12820 transmuxedData.gopInfo = event.data.gopInfo;
12821};
12822var processTransmux = function processTransmux(options) {
12823 var transmuxer = options.transmuxer,
12824 bytes = options.bytes,
12825 audioAppendStart = options.audioAppendStart,
12826 gopsToAlignWith = options.gopsToAlignWith,
12827 remux = options.remux,
12828 onData = options.onData,
12829 onTrackInfo = options.onTrackInfo,
12830 onAudioTimingInfo = options.onAudioTimingInfo,
12831 onVideoTimingInfo = options.onVideoTimingInfo,
12832 onVideoSegmentTimingInfo = options.onVideoSegmentTimingInfo,
12833 onAudioSegmentTimingInfo = options.onAudioSegmentTimingInfo,
12834 onId3 = options.onId3,
12835 onCaptions = options.onCaptions,
12836 onDone = options.onDone,
12837 onEndedTimeline = options.onEndedTimeline,
12838 onTransmuxerLog = options.onTransmuxerLog,
12839 isEndOfTimeline = options.isEndOfTimeline;
12840 var transmuxedData = {
12841 buffer: []
12842 };
12843 var waitForEndedTimelineEvent = isEndOfTimeline;
12844
12845 var handleMessage = function handleMessage(event) {
12846 if (transmuxer.currentTransmux !== options) {
12847 // disposed
12848 return;
12849 }
12850
12851 if (event.data.action === 'data') {
12852 handleData_(event, transmuxedData, onData);
12853 }
12854
12855 if (event.data.action === 'trackinfo') {
12856 onTrackInfo(event.data.trackInfo);
12857 }
12858
12859 if (event.data.action === 'gopInfo') {
12860 handleGopInfo_(event, transmuxedData);
12861 }
12862
12863 if (event.data.action === 'audioTimingInfo') {
12864 onAudioTimingInfo(event.data.audioTimingInfo);
12865 }
12866
12867 if (event.data.action === 'videoTimingInfo') {
12868 onVideoTimingInfo(event.data.videoTimingInfo);
12869 }
12870
12871 if (event.data.action === 'videoSegmentTimingInfo') {
12872 onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
12873 }
12874
12875 if (event.data.action === 'audioSegmentTimingInfo') {
12876 onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
12877 }
12878
12879 if (event.data.action === 'id3Frame') {
12880 onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
12881 }
12882
12883 if (event.data.action === 'caption') {
12884 onCaptions(event.data.caption);
12885 }
12886
12887 if (event.data.action === 'endedtimeline') {
12888 waitForEndedTimelineEvent = false;
12889 onEndedTimeline();
12890 }
12891
12892 if (event.data.action === 'log') {
12893 onTransmuxerLog(event.data.log);
12894 } // wait for the transmuxed event since we may have audio and video
12895
12896
12897 if (event.data.type !== 'transmuxed') {
12898 return;
12899 } // If the "endedtimeline" event has not yet fired, and this segment represents the end
12900 // of a timeline, that means there may still be data events before the segment
12901 // processing can be considered complete. In that case, the final event should be
12902 // an "endedtimeline" event with the type "transmuxed."
12903
12904
12905 if (waitForEndedTimelineEvent) {
12906 return;
12907 }
12908
12909 transmuxer.onmessage = null;
12910 handleDone_({
12911 transmuxedData: transmuxedData,
12912 callback: onDone
12913 });
12914 /* eslint-disable no-use-before-define */
12915
12916 dequeue(transmuxer);
12917 /* eslint-enable */
12918 };
12919
12920 transmuxer.onmessage = handleMessage;
12921
12922 if (audioAppendStart) {
12923 transmuxer.postMessage({
12924 action: 'setAudioAppendStart',
12925 appendStart: audioAppendStart
12926 });
12927 } // allow empty arrays to be passed to clear out GOPs
12928
12929
12930 if (Array.isArray(gopsToAlignWith)) {
12931 transmuxer.postMessage({
12932 action: 'alignGopsWith',
12933 gopsToAlignWith: gopsToAlignWith
12934 });
12935 }
12936
12937 if (typeof remux !== 'undefined') {
12938 transmuxer.postMessage({
12939 action: 'setRemux',
12940 remux: remux
12941 });
12942 }
12943
12944 if (bytes.byteLength) {
12945 var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
12946 var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
12947 transmuxer.postMessage({
12948 action: 'push',
12949 // Send the typed-array of data as an ArrayBuffer so that
12950 // it can be sent as a "Transferable" and avoid the costly
12951 // memory copy
12952 data: buffer,
12953 // To recreate the original typed-array, we need information
12954 // about what portion of the ArrayBuffer it was a view into
12955 byteOffset: byteOffset,
12956 byteLength: bytes.byteLength
12957 }, [buffer]);
12958 }
12959
12960 if (isEndOfTimeline) {
12961 transmuxer.postMessage({
12962 action: 'endTimeline'
12963 });
12964 } // even if we didn't push any bytes, we have to make sure we flush in case we reached
12965 // the end of the segment
12966
12967
12968 transmuxer.postMessage({
12969 action: 'flush'
12970 });
12971};
12972var dequeue = function dequeue(transmuxer) {
12973 transmuxer.currentTransmux = null;
12974
12975 if (transmuxer.transmuxQueue.length) {
12976 transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();
12977
12978 if (typeof transmuxer.currentTransmux === 'function') {
12979 transmuxer.currentTransmux();
12980 } else {
12981 processTransmux(transmuxer.currentTransmux);
12982 }
12983 }
12984};
12985var processAction = function processAction(transmuxer, action) {
12986 transmuxer.postMessage({
12987 action: action
12988 });
12989 dequeue(transmuxer);
12990};
12991var enqueueAction = function enqueueAction(action, transmuxer) {
12992 if (!transmuxer.currentTransmux) {
12993 transmuxer.currentTransmux = action;
12994 processAction(transmuxer, action);
12995 return;
12996 }
12997
12998 transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
12999};
13000var reset = function reset(transmuxer) {
13001 enqueueAction('reset', transmuxer);
13002};
13003var endTimeline = function endTimeline(transmuxer) {
13004 enqueueAction('endTimeline', transmuxer);
13005};
13006var transmux = function transmux(options) {
13007 if (!options.transmuxer.currentTransmux) {
13008 options.transmuxer.currentTransmux = options;
13009 processTransmux(options);
13010 return;
13011 }
13012
13013 options.transmuxer.transmuxQueue.push(options);
13014};
13015var createTransmuxer = function createTransmuxer(options) {
13016 var transmuxer = new TransmuxWorker();
13017 transmuxer.currentTransmux = null;
13018 transmuxer.transmuxQueue = [];
13019 var term = transmuxer.terminate;
13020
13021 transmuxer.terminate = function () {
13022 transmuxer.currentTransmux = null;
13023 transmuxer.transmuxQueue.length = 0;
13024 return term.call(transmuxer);
13025 };
13026
13027 transmuxer.postMessage({
13028 action: 'init',
13029 options: options
13030 });
13031 return transmuxer;
13032};
13033var segmentTransmuxer = {
13034 reset: reset,
13035 endTimeline: endTimeline,
13036 transmux: transmux,
13037 createTransmuxer: createTransmuxer
13038};
13039
13040var workerCallback = function workerCallback(options) {
13041 var transmuxer = options.transmuxer;
13042 var endAction = options.endAction || options.action;
13043 var callback = options.callback;
13044
13045 var message = _extends({}, options, {
13046 endAction: null,
13047 transmuxer: null,
13048 callback: null
13049 });
13050
13051 var listenForEndEvent = function listenForEndEvent(event) {
13052 if (event.data.action !== endAction) {
13053 return;
13054 }
13055
13056 transmuxer.removeEventListener('message', listenForEndEvent); // transfer ownership of bytes back to us.
13057
13058 if (event.data.data) {
13059 event.data.data = new Uint8Array(event.data.data, options.byteOffset || 0, options.byteLength || event.data.data.byteLength);
13060
13061 if (options.data) {
13062 options.data = event.data.data;
13063 }
13064 }
13065
13066 callback(event.data);
13067 };
13068
13069 transmuxer.addEventListener('message', listenForEndEvent);
13070
13071 if (options.data) {
13072 var isArrayBuffer = options.data instanceof ArrayBuffer;
13073 message.byteOffset = isArrayBuffer ? 0 : options.data.byteOffset;
13074 message.byteLength = options.data.byteLength;
13075 var transfers = [isArrayBuffer ? options.data : options.data.buffer];
13076 transmuxer.postMessage(message, transfers);
13077 } else {
13078 transmuxer.postMessage(message);
13079 }
13080};
13081
13082var REQUEST_ERRORS = {
13083 FAILURE: 2,
13084 TIMEOUT: -101,
13085 ABORTED: -102
13086};
13087/**
13088 * Abort all requests
13089 *
13090 * @param {Object} activeXhrs - an object that tracks all XHR requests
13091 */
13092
13093var abortAll = function abortAll(activeXhrs) {
13094 activeXhrs.forEach(function (xhr) {
13095 xhr.abort();
13096 });
13097};
13098/**
13099 * Gather important bandwidth stats once a request has completed
13100 *
13101 * @param {Object} request - the XHR request from which to gather stats
13102 */
13103
13104
13105var getRequestStats = function getRequestStats(request) {
13106 return {
13107 bandwidth: request.bandwidth,
13108 bytesReceived: request.bytesReceived || 0,
13109 roundTripTime: request.roundTripTime || 0
13110 };
13111};
13112/**
13113 * If possible gather bandwidth stats as a request is in
13114 * progress
13115 *
13116 * @param {Event} progressEvent - an event object from an XHR's progress event
13117 */
13118
13119
13120var getProgressStats = function getProgressStats(progressEvent) {
13121 var request = progressEvent.target;
13122 var roundTripTime = Date.now() - request.requestTime;
13123 var stats = {
13124 bandwidth: Infinity,
13125 bytesReceived: 0,
13126 roundTripTime: roundTripTime || 0
13127 };
13128 stats.bytesReceived = progressEvent.loaded; // This can result in Infinity if stats.roundTripTime is 0, but that is OK
13129 // because we should only use bandwidth stats on progress to determine when to
13130 // abort a request early due to insufficient bandwidth
13131
13132 stats.bandwidth = Math.floor(stats.bytesReceived / stats.roundTripTime * 8 * 1000);
13133 return stats;
13134};
13135/**
13136 * Handle all error conditions in one place and return an object
13137 * with all the information
13138 *
13139 * @param {Error|null} error - if non-null, signals an error occurred with the XHR
13140 * @param {Object} request - the XHR request that possibly generated the error
13141 */
13142
13143
13144var handleErrors = function handleErrors(error, request) {
13145 if (request.timedout) {
13146 return {
13147 status: request.status,
13148 message: 'HLS request timed-out at URL: ' + request.uri,
13149 code: REQUEST_ERRORS.TIMEOUT,
13150 xhr: request
13151 };
13152 }
13153
13154 if (request.aborted) {
13155 return {
13156 status: request.status,
13157 message: 'HLS request aborted at URL: ' + request.uri,
13158 code: REQUEST_ERRORS.ABORTED,
13159 xhr: request
13160 };
13161 }
13162
13163 if (error) {
13164 return {
13165 status: request.status,
13166 message: 'HLS request errored at URL: ' + request.uri,
13167 code: REQUEST_ERRORS.FAILURE,
13168 xhr: request
13169 };
13170 }
13171
13172 if (request.responseType === 'arraybuffer' && request.response.byteLength === 0) {
13173 return {
13174 status: request.status,
13175 message: 'Empty HLS response at URL: ' + request.uri,
13176 code: REQUEST_ERRORS.FAILURE,
13177 xhr: request
13178 };
13179 }
13180
13181 return null;
13182};
13183/**
13184 * Handle responses for key data and convert the key data to the correct format
13185 * for the decryption step later
13186 *
13187 * @param {Object} segment - a simplified copy of the segmentInfo object
13188 * from SegmentLoader
13189 * @param {Array} objects - objects to add the key bytes to.
13190 * @param {Function} finishProcessingFn - a callback to execute to continue processing
13191 * this request
13192 */
13193
13194
13195var handleKeyResponse = function handleKeyResponse(segment, objects, finishProcessingFn) {
13196 return function (error, request) {
13197 var response = request.response;
13198 var errorObj = handleErrors(error, request);
13199
13200 if (errorObj) {
13201 return finishProcessingFn(errorObj, segment);
13202 }
13203
13204 if (response.byteLength !== 16) {
13205 return finishProcessingFn({
13206 status: request.status,
13207 message: 'Invalid HLS key at URL: ' + request.uri,
13208 code: REQUEST_ERRORS.FAILURE,
13209 xhr: request
13210 }, segment);
13211 }
13212
13213 var view = new DataView(response);
13214 var bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
13215
13216 for (var i = 0; i < objects.length; i++) {
13217 objects[i].bytes = bytes;
13218 }
13219
13220 return finishProcessingFn(null, segment);
13221 };
13222};
13223
13224var parseInitSegment = function parseInitSegment(segment, _callback) {
13225 var type = detectContainerForBytes(segment.map.bytes); // TODO: We should also handle ts init segments here, but we
13226 // only know how to parse mp4 init segments at the moment
13227
13228 if (type !== 'mp4') {
13229 var uri = segment.map.resolvedUri || segment.map.uri;
13230 return _callback({
13231 internal: true,
13232 message: "Found unsupported " + (type || 'unknown') + " container for initialization segment at URL: " + uri,
13233 code: REQUEST_ERRORS.FAILURE
13234 });
13235 }
13236
13237 workerCallback({
13238 action: 'probeMp4Tracks',
13239 data: segment.map.bytes,
13240 transmuxer: segment.transmuxer,
13241 callback: function callback(_ref) {
13242 var tracks = _ref.tracks,
13243 data = _ref.data;
13244 // transfer bytes back to us
13245 segment.map.bytes = data;
13246 tracks.forEach(function (track) {
13247 segment.map.tracks = segment.map.tracks || {}; // only support one track of each type for now
13248
13249 if (segment.map.tracks[track.type]) {
13250 return;
13251 }
13252
13253 segment.map.tracks[track.type] = track;
13254
13255 if (typeof track.id === 'number' && track.timescale) {
13256 segment.map.timescales = segment.map.timescales || {};
13257 segment.map.timescales[track.id] = track.timescale;
13258 }
13259 });
13260 return _callback(null);
13261 }
13262 });
13263};
13264/**
13265 * Handle init-segment responses
13266 *
13267 * @param {Object} segment - a simplified copy of the segmentInfo object
13268 * from SegmentLoader
13269 * @param {Function} finishProcessingFn - a callback to execute to continue processing
13270 * this request
13271 */
13272
13273
13274var handleInitSegmentResponse = function handleInitSegmentResponse(_ref2) {
13275 var segment = _ref2.segment,
13276 finishProcessingFn = _ref2.finishProcessingFn;
13277 return function (error, request) {
13278 var errorObj = handleErrors(error, request);
13279
13280 if (errorObj) {
13281 return finishProcessingFn(errorObj, segment);
13282 }
13283
13284 var bytes = new Uint8Array(request.response); // init segment is encrypted, we will have to wait
13285 // until the key request is done to decrypt.
13286
13287 if (segment.map.key) {
13288 segment.map.encryptedBytes = bytes;
13289 return finishProcessingFn(null, segment);
13290 }
13291
13292 segment.map.bytes = bytes;
13293 parseInitSegment(segment, function (parseError) {
13294 if (parseError) {
13295 parseError.xhr = request;
13296 parseError.status = request.status;
13297 return finishProcessingFn(parseError, segment);
13298 }
13299
13300 finishProcessingFn(null, segment);
13301 });
13302 };
13303};
13304/**
13305 * Response handler for segment-requests, being sure to set the correct
13306 * property depending on whether the segment is encrypted or not.
13307 * Also records and keeps track of stats that are used for ABR purposes
13308 *
13309 * @param {Object} segment - a simplified copy of the segmentInfo object
13310 * from SegmentLoader
13311 * @param {Function} finishProcessingFn - a callback to execute to continue processing
13312 * this request
13313 */
13314
13315
13316var handleSegmentResponse = function handleSegmentResponse(_ref3) {
13317 var segment = _ref3.segment,
13318 finishProcessingFn = _ref3.finishProcessingFn,
13319 responseType = _ref3.responseType;
13320 return function (error, request) {
13321 var errorObj = handleErrors(error, request);
13322
13323 if (errorObj) {
13324 return finishProcessingFn(errorObj, segment);
13325 }
13326
13327 var newBytes = // although responseText "should" exist, this guard serves to prevent an error being
13328 // thrown for two primary cases:
13329 // 1. the mime type override stops working, or is not implemented for a specific
13330 // browser
13331 // 2. when using mock XHR libraries like sinon that do not allow the override behavior
13332 responseType === 'arraybuffer' || !request.responseText ? request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));
13333 segment.stats = getRequestStats(request);
13334
13335 if (segment.key) {
13336 segment.encryptedBytes = new Uint8Array(newBytes);
13337 } else {
13338 segment.bytes = new Uint8Array(newBytes);
13339 }
13340
13341 return finishProcessingFn(null, segment);
13342 };
13343};
13344
13345var transmuxAndNotify = function transmuxAndNotify(_ref4) {
13346 var segment = _ref4.segment,
13347 bytes = _ref4.bytes,
13348 trackInfoFn = _ref4.trackInfoFn,
13349 timingInfoFn = _ref4.timingInfoFn,
13350 videoSegmentTimingInfoFn = _ref4.videoSegmentTimingInfoFn,
13351 audioSegmentTimingInfoFn = _ref4.audioSegmentTimingInfoFn,
13352 id3Fn = _ref4.id3Fn,
13353 captionsFn = _ref4.captionsFn,
13354 isEndOfTimeline = _ref4.isEndOfTimeline,
13355 endedTimelineFn = _ref4.endedTimelineFn,
13356 dataFn = _ref4.dataFn,
13357 doneFn = _ref4.doneFn,
13358 onTransmuxerLog = _ref4.onTransmuxerLog;
13359 var fmp4Tracks = segment.map && segment.map.tracks || {};
13360 var isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.
13361 // One reason for this is that in the case of full segments, we want to trust start
13362 // times from the probe, rather than the transmuxer.
13363
13364 var audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
13365 var audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
13366 var videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
13367 var videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end');
13368
13369 var finish = function finish() {
13370 return transmux({
13371 bytes: bytes,
13372 transmuxer: segment.transmuxer,
13373 audioAppendStart: segment.audioAppendStart,
13374 gopsToAlignWith: segment.gopsToAlignWith,
13375 remux: isMuxed,
13376 onData: function onData(result) {
13377 result.type = result.type === 'combined' ? 'video' : result.type;
13378 dataFn(segment, result);
13379 },
13380 onTrackInfo: function onTrackInfo(trackInfo) {
13381 if (trackInfoFn) {
13382 if (isMuxed) {
13383 trackInfo.isMuxed = true;
13384 }
13385
13386 trackInfoFn(segment, trackInfo);
13387 }
13388 },
13389 onAudioTimingInfo: function onAudioTimingInfo(audioTimingInfo) {
13390 // we only want the first start value we encounter
13391 if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
13392 audioStartFn(audioTimingInfo.start);
13393 audioStartFn = null;
13394 } // we want to continually update the end time
13395
13396
13397 if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
13398 audioEndFn(audioTimingInfo.end);
13399 }
13400 },
13401 onVideoTimingInfo: function onVideoTimingInfo(videoTimingInfo) {
13402 // we only want the first start value we encounter
13403 if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
13404 videoStartFn(videoTimingInfo.start);
13405 videoStartFn = null;
13406 } // we want to continually update the end time
13407
13408
13409 if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
13410 videoEndFn(videoTimingInfo.end);
13411 }
13412 },
13413 onVideoSegmentTimingInfo: function onVideoSegmentTimingInfo(videoSegmentTimingInfo) {
13414 videoSegmentTimingInfoFn(videoSegmentTimingInfo);
13415 },
13416 onAudioSegmentTimingInfo: function onAudioSegmentTimingInfo(audioSegmentTimingInfo) {
13417 audioSegmentTimingInfoFn(audioSegmentTimingInfo);
13418 },
13419 onId3: function onId3(id3Frames, dispatchType) {
13420 id3Fn(segment, id3Frames, dispatchType);
13421 },
13422 onCaptions: function onCaptions(captions) {
13423 captionsFn(segment, [captions]);
13424 },
13425 isEndOfTimeline: isEndOfTimeline,
13426 onEndedTimeline: function onEndedTimeline() {
13427 endedTimelineFn();
13428 },
13429 onTransmuxerLog: onTransmuxerLog,
13430 onDone: function onDone(result) {
13431 if (!doneFn) {
13432 return;
13433 }
13434
13435 result.type = result.type === 'combined' ? 'video' : result.type;
13436 doneFn(null, segment, result);
13437 }
13438 });
13439 }; // In the transmuxer, we don't yet have the ability to extract a "proper" start time.
13440 // This means cached frame data may corrupt our notion of where this segment
13441 // really starts. To get around this, probe for the info needed.
13442
13443
13444 workerCallback({
13445 action: 'probeTs',
13446 transmuxer: segment.transmuxer,
13447 data: bytes,
13448 baseStartTime: segment.baseStartTime,
13449 callback: function callback(data) {
13450 segment.bytes = bytes = data.data;
13451 var probeResult = data.result;
13452
13453 if (probeResult) {
13454 trackInfoFn(segment, {
13455 hasAudio: probeResult.hasAudio,
13456 hasVideo: probeResult.hasVideo,
13457 isMuxed: isMuxed
13458 });
13459 trackInfoFn = null;
13460
13461 if (probeResult.hasAudio && !isMuxed) {
13462 audioStartFn(probeResult.audioStart);
13463 }
13464
13465 if (probeResult.hasVideo) {
13466 videoStartFn(probeResult.videoStart);
13467 }
13468
13469 audioStartFn = null;
13470 videoStartFn = null;
13471 }
13472
13473 finish();
13474 }
13475 });
13476};
13477
13478var handleSegmentBytes = function handleSegmentBytes(_ref5) {
13479 var segment = _ref5.segment,
13480 bytes = _ref5.bytes,
13481 trackInfoFn = _ref5.trackInfoFn,
13482 timingInfoFn = _ref5.timingInfoFn,
13483 videoSegmentTimingInfoFn = _ref5.videoSegmentTimingInfoFn,
13484 audioSegmentTimingInfoFn = _ref5.audioSegmentTimingInfoFn,
13485 id3Fn = _ref5.id3Fn,
13486 captionsFn = _ref5.captionsFn,
13487 isEndOfTimeline = _ref5.isEndOfTimeline,
13488 endedTimelineFn = _ref5.endedTimelineFn,
13489 dataFn = _ref5.dataFn,
13490 doneFn = _ref5.doneFn,
13491 onTransmuxerLog = _ref5.onTransmuxerLog;
13492 var bytesAsUint8Array = new Uint8Array(bytes); // TODO:
13493 // We should have a handler that fetches the number of bytes required
13494 // to check if something is fmp4. This will allow us to save bandwidth
13495 // because we can only blacklist a playlist and abort requests
13496 // by codec after trackinfo triggers.
13497
13498 if (isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
13499 segment.isFmp4 = true;
13500 var tracks = segment.map.tracks;
13501 var trackInfo = {
13502 isFmp4: true,
13503 hasVideo: !!tracks.video,
13504 hasAudio: !!tracks.audio
13505 }; // if we have an audio track, with a codec that is not set to
13506 // encrypted audio
13507
13508 if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
13509 trackInfo.audioCodec = tracks.audio.codec;
13510 } // if we have a video track, with a codec that is not set to
13511 // encrypted video
13512
13513
13514 if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
13515 trackInfo.videoCodec = tracks.video.codec;
13516 }
13517
13518 if (tracks.video && tracks.audio) {
13519 trackInfo.isMuxed = true;
13520 } // since we don't support appending fmp4 data on progress, we know we have the full
13521 // segment here
13522
13523
13524 trackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start
13525 // time. The end time can be roughly calculated by the receiver using the duration.
13526 //
13527 // Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
13528 // that is the true start of the segment (where the playback engine should begin
13529 // decoding).
13530
13531 var finishLoading = function finishLoading(captions) {
13532 // if the track still has audio at this point it is only possible
13533 // for it to be audio only. See `tracks.video && tracks.audio` if statement
13534 // above.
13535 // we make sure to use the current segment bytes here, as the underlying buffer may have been transferred to (and back from) the worker
13536 dataFn(segment, {
13537 data: bytesAsUint8Array,
13538 type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
13539 });
13540
13541 if (captions && captions.length) {
13542 captionsFn(segment, captions);
13543 }
13544
13545 doneFn(null, segment, {});
13546 };
13547
13548 workerCallback({
13549 action: 'probeMp4StartTime',
13550 timescales: segment.map.timescales,
13551 data: bytesAsUint8Array,
13552 transmuxer: segment.transmuxer,
13553 callback: function callback(_ref6) {
13554 var data = _ref6.data,
13555 startTime = _ref6.startTime;
13556 // transfer bytes back to us
13557 bytes = data.buffer;
13558 segment.bytes = bytesAsUint8Array = data;
13559
13560 if (trackInfo.hasAudio && !trackInfo.isMuxed) {
13561 timingInfoFn(segment, 'audio', 'start', startTime);
13562 }
13563
13564 if (trackInfo.hasVideo) {
13565 timingInfoFn(segment, 'video', 'start', startTime);
13566 } // Run through the CaptionParser in case there are captions.
13567 // Initialize CaptionParser if it hasn't been yet
13568
13569
13570 if (!tracks.video || !data.byteLength || !segment.transmuxer) {
13571 finishLoading();
13572 return;
13573 }
13574
13575 workerCallback({
13576 action: 'pushMp4Captions',
13577 endAction: 'mp4Captions',
13578 transmuxer: segment.transmuxer,
13579 data: bytesAsUint8Array,
13580 timescales: segment.map.timescales,
13581 trackIds: [tracks.video.id],
13582 callback: function callback(message) {
13583 // transfer bytes back to us
13584 bytes = message.data.buffer;
13585 segment.bytes = bytesAsUint8Array = message.data;
13586 message.logs.forEach(function (log) {
13587 onTransmuxerLog(videojs.mergeOptions(log, {
13588 stream: 'mp4CaptionParser'
13589 }));
13590 });
13591 finishLoading(message.captions);
13592 }
13593 });
13594 }
13595 });
13596 return;
13597 } // VTT or other segments that don't need processing
13598
13599
13600 if (!segment.transmuxer) {
13601 doneFn(null, segment, {});
13602 return;
13603 }
13604
13605 if (typeof segment.container === 'undefined') {
13606 segment.container = detectContainerForBytes(bytesAsUint8Array);
13607 }
13608
13609 if (segment.container !== 'ts' && segment.container !== 'aac') {
13610 trackInfoFn(segment, {
13611 hasAudio: false,
13612 hasVideo: false
13613 });
13614 doneFn(null, segment, {});
13615 return;
13616 } // ts or aac
13617
13618
13619 transmuxAndNotify({
13620 segment: segment,
13621 bytes: bytes,
13622 trackInfoFn: trackInfoFn,
13623 timingInfoFn: timingInfoFn,
13624 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
13625 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
13626 id3Fn: id3Fn,
13627 captionsFn: captionsFn,
13628 isEndOfTimeline: isEndOfTimeline,
13629 endedTimelineFn: endedTimelineFn,
13630 dataFn: dataFn,
13631 doneFn: doneFn,
13632 onTransmuxerLog: onTransmuxerLog
13633 });
13634};
13635
13636var decrypt = function decrypt(_ref7, callback) {
13637 var id = _ref7.id,
13638 key = _ref7.key,
13639 encryptedBytes = _ref7.encryptedBytes,
13640 decryptionWorker = _ref7.decryptionWorker;
13641
13642 var decryptionHandler = function decryptionHandler(event) {
13643 if (event.data.source === id) {
13644 decryptionWorker.removeEventListener('message', decryptionHandler);
13645 var decrypted = event.data.decrypted;
13646 callback(new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength));
13647 }
13648 };
13649
13650 decryptionWorker.addEventListener('message', decryptionHandler);
13651 var keyBytes;
13652
13653 if (key.bytes.slice) {
13654 keyBytes = key.bytes.slice();
13655 } else {
13656 keyBytes = new Uint32Array(Array.prototype.slice.call(key.bytes));
13657 } // incrementally decrypt the bytes
13658
13659
13660 decryptionWorker.postMessage(createTransferableMessage({
13661 source: id,
13662 encrypted: encryptedBytes,
13663 key: keyBytes,
13664 iv: key.iv
13665 }), [encryptedBytes.buffer, keyBytes.buffer]);
13666};
13667/**
13668 * Decrypt the segment via the decryption web worker
13669 *
13670 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
13671 * routines
13672 * @param {Object} segment - a simplified copy of the segmentInfo object
13673 * from SegmentLoader
13674 * @param {Function} trackInfoFn - a callback that receives track info
13675 * @param {Function} timingInfoFn - a callback that receives timing info
13676 * @param {Function} videoSegmentTimingInfoFn
13677 * a callback that receives video timing info based on media times and
13678 * any adjustments made by the transmuxer
13679 * @param {Function} audioSegmentTimingInfoFn
13680 * a callback that receives audio timing info based on media times and
13681 * any adjustments made by the transmuxer
13682 * @param {boolean} isEndOfTimeline
13683 * true if this segment represents the last segment in a timeline
13684 * @param {Function} endedTimelineFn
13685 * a callback made when a timeline is ended, will only be called if
13686 * isEndOfTimeline is true
13687 * @param {Function} dataFn - a callback that is executed when segment bytes are available
13688 * and ready to use
13689 * @param {Function} doneFn - a callback that is executed after decryption has completed
13690 */
13691
13692
13693var decryptSegment = function decryptSegment(_ref8) {
13694 var decryptionWorker = _ref8.decryptionWorker,
13695 segment = _ref8.segment,
13696 trackInfoFn = _ref8.trackInfoFn,
13697 timingInfoFn = _ref8.timingInfoFn,
13698 videoSegmentTimingInfoFn = _ref8.videoSegmentTimingInfoFn,
13699 audioSegmentTimingInfoFn = _ref8.audioSegmentTimingInfoFn,
13700 id3Fn = _ref8.id3Fn,
13701 captionsFn = _ref8.captionsFn,
13702 isEndOfTimeline = _ref8.isEndOfTimeline,
13703 endedTimelineFn = _ref8.endedTimelineFn,
13704 dataFn = _ref8.dataFn,
13705 doneFn = _ref8.doneFn,
13706 onTransmuxerLog = _ref8.onTransmuxerLog;
13707 decrypt({
13708 id: segment.requestId,
13709 key: segment.key,
13710 encryptedBytes: segment.encryptedBytes,
13711 decryptionWorker: decryptionWorker
13712 }, function (decryptedBytes) {
13713 segment.bytes = decryptedBytes;
13714 handleSegmentBytes({
13715 segment: segment,
13716 bytes: segment.bytes,
13717 trackInfoFn: trackInfoFn,
13718 timingInfoFn: timingInfoFn,
13719 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
13720 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
13721 id3Fn: id3Fn,
13722 captionsFn: captionsFn,
13723 isEndOfTimeline: isEndOfTimeline,
13724 endedTimelineFn: endedTimelineFn,
13725 dataFn: dataFn,
13726 doneFn: doneFn,
13727 onTransmuxerLog: onTransmuxerLog
13728 });
13729 });
13730};
13731/**
13732 * This function waits for all XHRs to finish (with either success or failure)
13733 * before continuing processing via its callback. The function gathers errors
13734 * from each request into a single errors array so that the error status for
13735 * each request can be examined later.
13736 *
13737 * @param {Object} activeXhrs - an object that tracks all XHR requests
13738 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
13739 * routines
13740 * @param {Function} trackInfoFn - a callback that receives track info
13741 * @param {Function} timingInfoFn - a callback that receives timing info
13742 * @param {Function} videoSegmentTimingInfoFn
13743 * a callback that receives video timing info based on media times and
13744 * any adjustments made by the transmuxer
13745 * @param {Function} audioSegmentTimingInfoFn
13746 * a callback that receives audio timing info based on media times and
13747 * any adjustments made by the transmuxer
13748 * @param {Function} id3Fn - a callback that receives ID3 metadata
13749 * @param {Function} captionsFn - a callback that receives captions
13750 * @param {boolean} isEndOfTimeline
13751 * true if this segment represents the last segment in a timeline
13752 * @param {Function} endedTimelineFn
13753 * a callback made when a timeline is ended, will only be called if
13754 * isEndOfTimeline is true
13755 * @param {Function} dataFn - a callback that is executed when segment bytes are available
13756 * and ready to use
13757 * @param {Function} doneFn - a callback that is executed after all resources have been
13758 * downloaded and any decryption completed
13759 */
13760
13761
13762var waitForCompletion = function waitForCompletion(_ref9) {
13763 var activeXhrs = _ref9.activeXhrs,
13764 decryptionWorker = _ref9.decryptionWorker,
13765 trackInfoFn = _ref9.trackInfoFn,
13766 timingInfoFn = _ref9.timingInfoFn,
13767 videoSegmentTimingInfoFn = _ref9.videoSegmentTimingInfoFn,
13768 audioSegmentTimingInfoFn = _ref9.audioSegmentTimingInfoFn,
13769 id3Fn = _ref9.id3Fn,
13770 captionsFn = _ref9.captionsFn,
13771 isEndOfTimeline = _ref9.isEndOfTimeline,
13772 endedTimelineFn = _ref9.endedTimelineFn,
13773 dataFn = _ref9.dataFn,
13774 doneFn = _ref9.doneFn,
13775 onTransmuxerLog = _ref9.onTransmuxerLog;
13776 var count = 0;
13777 var didError = false;
13778 return function (error, segment) {
13779 if (didError) {
13780 return;
13781 }
13782
13783 if (error) {
13784 didError = true; // If there are errors, we have to abort any outstanding requests
13785
13786 abortAll(activeXhrs); // Even though the requests above are aborted, and in theory we could wait until we
13787 // handle the aborted events from those requests, there are some cases where we may
13788 // never get an aborted event. For instance, if the network connection is lost and
13789 // there were two requests, the first may have triggered an error immediately, while
13790 // the second request remains unsent. In that case, the aborted algorithm will not
13791 // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
13792 //
13793 // We also can't rely on the ready state of the XHR, since the request that
13794 // triggered the connection error may also show as a ready state of 0 (unsent).
13795 // Therefore, we have to finish this group of requests immediately after the first
13796 // seen error.
13797
13798 return doneFn(error, segment);
13799 }
13800
13801 count += 1;
13802
13803 if (count === activeXhrs.length) {
13804 var segmentFinish = function segmentFinish() {
13805 if (segment.encryptedBytes) {
13806 return decryptSegment({
13807 decryptionWorker: decryptionWorker,
13808 segment: segment,
13809 trackInfoFn: trackInfoFn,
13810 timingInfoFn: timingInfoFn,
13811 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
13812 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
13813 id3Fn: id3Fn,
13814 captionsFn: captionsFn,
13815 isEndOfTimeline: isEndOfTimeline,
13816 endedTimelineFn: endedTimelineFn,
13817 dataFn: dataFn,
13818 doneFn: doneFn,
13819 onTransmuxerLog: onTransmuxerLog
13820 });
13821 } // Otherwise, everything is ready, so just continue
13822
13823
13824 handleSegmentBytes({
13825 segment: segment,
13826 bytes: segment.bytes,
13827 trackInfoFn: trackInfoFn,
13828 timingInfoFn: timingInfoFn,
13829 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
13830 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
13831 id3Fn: id3Fn,
13832 captionsFn: captionsFn,
13833 isEndOfTimeline: isEndOfTimeline,
13834 endedTimelineFn: endedTimelineFn,
13835 dataFn: dataFn,
13836 doneFn: doneFn,
13837 onTransmuxerLog: onTransmuxerLog
13838 });
13839 }; // Keep track of when *all* of the requests have completed
13840
13841
13842 segment.endOfAllRequests = Date.now();
13843
13844 if (segment.map && segment.map.encryptedBytes && !segment.map.bytes) {
13845 return decrypt({
13846 decryptionWorker: decryptionWorker,
13847 // add -init to the "id" to differentiate between segment
13848 // and init segment decryption, just in case they happen
13849 // at the same time at some point in the future.
13850 id: segment.requestId + '-init',
13851 encryptedBytes: segment.map.encryptedBytes,
13852 key: segment.map.key
13853 }, function (decryptedBytes) {
13854 segment.map.bytes = decryptedBytes;
13855 parseInitSegment(segment, function (parseError) {
13856 if (parseError) {
13857 abortAll(activeXhrs);
13858 return doneFn(parseError, segment);
13859 }
13860
13861 segmentFinish();
13862 });
13863 });
13864 }
13865
13866 segmentFinish();
13867 }
13868 };
13869};
13870/**
13871 * Calls the abort callback if any request within the batch was aborted. Will only call
13872 * the callback once per batch of requests, even if multiple were aborted.
13873 *
13874 * @param {Object} loadendState - state to check to see if the abort function was called
13875 * @param {Function} abortFn - callback to call for abort
13876 */
13877
13878
13879var handleLoadEnd = function handleLoadEnd(_ref10) {
13880 var loadendState = _ref10.loadendState,
13881 abortFn = _ref10.abortFn;
13882 return function (event) {
13883 var request = event.target;
13884
13885 if (request.aborted && abortFn && !loadendState.calledAbortFn) {
13886 abortFn();
13887 loadendState.calledAbortFn = true;
13888 }
13889 };
13890};
13891/**
13892 * Simple progress event callback handler that gathers some stats before
13893 * executing a provided callback with the `segment` object
13894 *
13895 * @param {Object} segment - a simplified copy of the segmentInfo object
13896 * from SegmentLoader
13897 * @param {Function} progressFn - a callback that is executed each time a progress event
13898 * is received
13899 * @param {Function} trackInfoFn - a callback that receives track info
13900 * @param {Function} timingInfoFn - a callback that receives timing info
13901 * @param {Function} videoSegmentTimingInfoFn
13902 * a callback that receives video timing info based on media times and
13903 * any adjustments made by the transmuxer
13904 * @param {Function} audioSegmentTimingInfoFn
13905 * a callback that receives audio timing info based on media times and
13906 * any adjustments made by the transmuxer
13907 * @param {boolean} isEndOfTimeline
13908 * true if this segment represents the last segment in a timeline
13909 * @param {Function} endedTimelineFn
13910 * a callback made when a timeline is ended, will only be called if
13911 * isEndOfTimeline is true
13912 * @param {Function} dataFn - a callback that is executed when segment bytes are available
13913 * and ready to use
13914 * @param {Event} event - the progress event object from XMLHttpRequest
13915 */
13916
13917
13918var handleProgress = function handleProgress(_ref11) {
13919 var segment = _ref11.segment,
13920 progressFn = _ref11.progressFn;
13921 _ref11.trackInfoFn;
13922 _ref11.timingInfoFn;
13923 _ref11.videoSegmentTimingInfoFn;
13924 _ref11.audioSegmentTimingInfoFn;
13925 _ref11.id3Fn;
13926 _ref11.captionsFn;
13927 _ref11.isEndOfTimeline;
13928 _ref11.endedTimelineFn;
13929 _ref11.dataFn;
13930 return function (event) {
13931 var request = event.target;
13932
13933 if (request.aborted) {
13934 return;
13935 }
13936
13937 segment.stats = videojs.mergeOptions(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of data
13938
13939 if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
13940 segment.stats.firstBytesReceivedAt = Date.now();
13941 }
13942
13943 return progressFn(event, segment);
13944 };
13945};
13946/**
13947 * Loads all resources and does any processing necessary for a media-segment
13948 *
13949 * Features:
13950 * decrypts the media-segment if it has a key uri and an iv
13951 * aborts *all* requests if *any* one request fails
13952 *
13953 * The segment object, at minimum, has the following format:
13954 * {
13955 * resolvedUri: String,
13956 * [transmuxer]: Object,
13957 * [byterange]: {
13958 * offset: Number,
13959 * length: Number
13960 * },
13961 * [key]: {
13962 * resolvedUri: String
13963 * [byterange]: {
13964 * offset: Number,
13965 * length: Number
13966 * },
13967 * iv: {
13968 * bytes: Uint32Array
13969 * }
13970 * },
13971 * [map]: {
13972 * resolvedUri: String,
13973 * [byterange]: {
13974 * offset: Number,
13975 * length: Number
13976 * },
13977 * [bytes]: Uint8Array
13978 * }
13979 * }
13980 * ...where [name] denotes optional properties
13981 *
13982 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
13983 * @param {Object} xhrOptions - the base options to provide to all xhr requests
13984 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
13985 * decryption routines
13986 * @param {Object} segment - a simplified copy of the segmentInfo object
13987 * from SegmentLoader
13988 * @param {Function} abortFn - a callback called (only once) if any piece of a request was
13989 * aborted
13990 * @param {Function} progressFn - a callback that receives progress events from the main
13991 * segment's xhr request
13992 * @param {Function} trackInfoFn - a callback that receives track info
13993 * @param {Function} timingInfoFn - a callback that receives timing info
13994 * @param {Function} videoSegmentTimingInfoFn
13995 * a callback that receives video timing info based on media times and
13996 * any adjustments made by the transmuxer
13997 * @param {Function} audioSegmentTimingInfoFn
13998 * a callback that receives audio timing info based on media times and
13999 * any adjustments made by the transmuxer
14000 * @param {Function} id3Fn - a callback that receives ID3 metadata
14001 * @param {Function} captionsFn - a callback that receives captions
14002 * @param {boolean} isEndOfTimeline
14003 * true if this segment represents the last segment in a timeline
14004 * @param {Function} endedTimelineFn
14005 * a callback made when a timeline is ended, will only be called if
14006 * isEndOfTimeline is true
14007 * @param {Function} dataFn - a callback that receives data from the main segment's xhr
14008 * request, transmuxed if needed
14009 * @param {Function} doneFn - a callback that is executed only once all requests have
14010 * succeeded or failed
14011 * @return {Function} a function that, when invoked, immediately aborts all
14012 * outstanding requests
14013 */
14014
14015
14016var mediaSegmentRequest = function mediaSegmentRequest(_ref12) {
14017 var xhr = _ref12.xhr,
14018 xhrOptions = _ref12.xhrOptions,
14019 decryptionWorker = _ref12.decryptionWorker,
14020 segment = _ref12.segment,
14021 abortFn = _ref12.abortFn,
14022 progressFn = _ref12.progressFn,
14023 trackInfoFn = _ref12.trackInfoFn,
14024 timingInfoFn = _ref12.timingInfoFn,
14025 videoSegmentTimingInfoFn = _ref12.videoSegmentTimingInfoFn,
14026 audioSegmentTimingInfoFn = _ref12.audioSegmentTimingInfoFn,
14027 id3Fn = _ref12.id3Fn,
14028 captionsFn = _ref12.captionsFn,
14029 isEndOfTimeline = _ref12.isEndOfTimeline,
14030 endedTimelineFn = _ref12.endedTimelineFn,
14031 dataFn = _ref12.dataFn,
14032 doneFn = _ref12.doneFn,
14033 onTransmuxerLog = _ref12.onTransmuxerLog;
14034 var activeXhrs = [];
14035 var finishProcessingFn = waitForCompletion({
14036 activeXhrs: activeXhrs,
14037 decryptionWorker: decryptionWorker,
14038 trackInfoFn: trackInfoFn,
14039 timingInfoFn: timingInfoFn,
14040 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
14041 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
14042 id3Fn: id3Fn,
14043 captionsFn: captionsFn,
14044 isEndOfTimeline: isEndOfTimeline,
14045 endedTimelineFn: endedTimelineFn,
14046 dataFn: dataFn,
14047 doneFn: doneFn,
14048 onTransmuxerLog: onTransmuxerLog
14049 }); // optionally, request the decryption key
14050
14051 if (segment.key && !segment.key.bytes) {
14052 var objects = [segment.key];
14053
14054 if (segment.map && !segment.map.bytes && segment.map.key && segment.map.key.resolvedUri === segment.key.resolvedUri) {
14055 objects.push(segment.map.key);
14056 }
14057
14058 var keyRequestOptions = videojs.mergeOptions(xhrOptions, {
14059 uri: segment.key.resolvedUri,
14060 responseType: 'arraybuffer'
14061 });
14062 var keyRequestCallback = handleKeyResponse(segment, objects, finishProcessingFn);
14063 var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
14064 activeXhrs.push(keyXhr);
14065 } // optionally, request the associated media init segment
14066
14067
14068 if (segment.map && !segment.map.bytes) {
14069 var differentMapKey = segment.map.key && (!segment.key || segment.key.resolvedUri !== segment.map.key.resolvedUri);
14070
14071 if (differentMapKey) {
14072 var mapKeyRequestOptions = videojs.mergeOptions(xhrOptions, {
14073 uri: segment.map.key.resolvedUri,
14074 responseType: 'arraybuffer'
14075 });
14076 var mapKeyRequestCallback = handleKeyResponse(segment, [segment.map.key], finishProcessingFn);
14077 var mapKeyXhr = xhr(mapKeyRequestOptions, mapKeyRequestCallback);
14078 activeXhrs.push(mapKeyXhr);
14079 }
14080
14081 var initSegmentOptions = videojs.mergeOptions(xhrOptions, {
14082 uri: segment.map.resolvedUri,
14083 responseType: 'arraybuffer',
14084 headers: segmentXhrHeaders(segment.map)
14085 });
14086 var initSegmentRequestCallback = handleInitSegmentResponse({
14087 segment: segment,
14088 finishProcessingFn: finishProcessingFn
14089 });
14090 var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
14091 activeXhrs.push(initSegmentXhr);
14092 }
14093
14094 var segmentRequestOptions = videojs.mergeOptions(xhrOptions, {
14095 uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,
14096 responseType: 'arraybuffer',
14097 headers: segmentXhrHeaders(segment)
14098 });
14099 var segmentRequestCallback = handleSegmentResponse({
14100 segment: segment,
14101 finishProcessingFn: finishProcessingFn,
14102 responseType: segmentRequestOptions.responseType
14103 });
14104 var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
14105 segmentXhr.addEventListener('progress', handleProgress({
14106 segment: segment,
14107 progressFn: progressFn,
14108 trackInfoFn: trackInfoFn,
14109 timingInfoFn: timingInfoFn,
14110 videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
14111 audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
14112 id3Fn: id3Fn,
14113 captionsFn: captionsFn,
14114 isEndOfTimeline: isEndOfTimeline,
14115 endedTimelineFn: endedTimelineFn,
14116 dataFn: dataFn
14117 }));
14118 activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but we should not make callbacks
14119 // multiple times, provide a shared state object
14120
14121 var loadendState = {};
14122 activeXhrs.forEach(function (activeXhr) {
14123 activeXhr.addEventListener('loadend', handleLoadEnd({
14124 loadendState: loadendState,
14125 abortFn: abortFn
14126 }));
14127 });
14128 return function () {
14129 return abortAll(activeXhrs);
14130 };
14131};
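// Illustrative usage sketch (not part of the library): a caller wires up the
// xhr wrapper and callbacks, then keeps the returned function around so the
// whole request batch (key, init segment, media segment) can be aborted. The
// names and values below are hypothetical placeholders.
//
//   var abortSegmentRequest = mediaSegmentRequest({
//     xhr: videojs.Vhs.xhr,
//     xhrOptions: { timeout: 45000 },
//     decryptionWorker: decrypter,
//     segment: simpleSegment,
//     progressFn: function (event, segment) { /* update bandwidth stats */ },
//     dataFn: function (segment, result) { /* append bytes */ },
//     doneFn: function (error, segment) { /* finish or handle the error */ }
//   });
//   // later, e.g. when switching renditions mid-download:
//   abortSegmentRequest();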
14132
14133/**
14134 * @file - codecs.js - Handles tasks regarding codec strings, such as translating
14135 * legacy codecs to standard codec strings, or parsing codec strings into objects that can be examined.
14136 */
14137var logFn$1 = logger('CodecUtils');
14138/**
14139 * Returns a set of codec objects parsed from the playlist's CODECS
14140 * attribute, or undefined if no codecs were specified in the playlist
14141 *
14142 * @param {Playlist} media the current media playlist
14143 * @return {Array|undefined} an array of parsed codec objects, or undefined
14144 */
14145
14146var getCodecs = function getCodecs(media) {
14147 // if the codecs were explicitly specified, use them instead of the
14148 // defaults
14149 var mediaAttributes = media.attributes || {};
14150
14151 if (mediaAttributes.CODECS) {
14152 return parseCodecs(mediaAttributes.CODECS);
14153 }
14154};
14155
14156var isMaat = function isMaat(master, media) {
14157 var mediaAttributes = media.attributes || {};
14158 return master && master.mediaGroups && master.mediaGroups.AUDIO && mediaAttributes.AUDIO && master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
14159};
14160var isMuxed = function isMuxed(master, media) {
14161 if (!isMaat(master, media)) {
14162 return true;
14163 }
14164
14165 var mediaAttributes = media.attributes || {};
14166 var audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
14167
14168 for (var groupId in audioGroup) {
14169 // If an audio group has a URI (the case for HLS, as HLS will use external playlists),
14170 // or there are listed playlists (the case for DASH, as the manifest will have already
14171 // provided all of the details necessary to generate the audio playlist, as opposed to
14172 // HLS' externally requested playlists), then the content is demuxed.
14173 if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
14174 return true;
14175 }
14176 }
14177
14178 return false;
14179};
14180var unwrapCodecList = function unwrapCodecList(codecList) {
14181 var codecs = {};
14182 codecList.forEach(function (_ref) {
14183 var mediaType = _ref.mediaType,
14184 type = _ref.type,
14185 details = _ref.details;
14186 codecs[mediaType] = codecs[mediaType] || [];
14187 codecs[mediaType].push(translateLegacyCodec("" + type + details));
14188 });
14189 Object.keys(codecs).forEach(function (mediaType) {
14190 if (codecs[mediaType].length > 1) {
14191 logFn$1("multiple " + mediaType + " codecs found as attributes: " + codecs[mediaType].join(', ') + ". Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.");
14192 codecs[mediaType] = null;
14193 return;
14194 }
14195
14196 codecs[mediaType] = codecs[mediaType][0];
14197 });
14198 return codecs;
14199};
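// Illustrative example (not part of the library): unwrapCodecList collapses a
// parsed codec list into one translated string per media type. The parsed
// input below is a hypothetical result for CODECS="avc1.4d401f,mp4a.40.2":
//
//   unwrapCodecList([
//     { mediaType: 'video', type: 'avc1', details: '.4d401f' },
//     { mediaType: 'audio', type: 'mp4a', details: '.40.2' }
//   ]);
//   // => { video: 'avc1.4d401f', audio: 'mp4a.40.2' }
//
// If more than one codec of the same media type is listed, that type is set
// to null (see the log message above) so mux.js can probe the real codec.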
14200var codecCount = function codecCount(codecObj) {
14201 var count = 0;
14202
14203 if (codecObj.audio) {
14204 count++;
14205 }
14206
14207 if (codecObj.video) {
14208 count++;
14209 }
14210
14211 return count;
14212};
14213/**
14214 * Calculates the codec strings for a working configuration of
14215 * SourceBuffers to play variant streams in a master playlist. If
14216 * there is no possible working configuration, an empty object will be
14217 * returned.
14218 *
14219 * @param {Object} master the m3u8 object for the master playlist
14220 * @param {Object} media the m3u8 object for the variant playlist
14221 * @return {Object} the codec strings.
14222 *
14223 * @private
14224 */
14225
14226var codecsForPlaylist = function codecsForPlaylist(master, media) {
14227 var mediaAttributes = media.attributes || {};
14228 var codecInfo = unwrapCodecList(getCodecs(media) || []); // HLS with multiple-audio tracks must always get an audio codec.
14229 // Put another way, there is no way to have a video-only multiple-audio HLS!
14230
14231 if (isMaat(master, media) && !codecInfo.audio) {
14232 if (!isMuxed(master, media)) {
14233 // It is possible for codecs to be specified on the audio media group playlist but
14234 // not on the rendition playlist. This is mostly the case for DASH, where audio and
14235 // video are always separate (and separately specified).
14236 var defaultCodecs = unwrapCodecList(codecsFromDefault(master, mediaAttributes.AUDIO) || []);
14237
14238 if (defaultCodecs.audio) {
14239 codecInfo.audio = defaultCodecs.audio;
14240 }
14241 }
14242 }
14243
14244 return codecInfo;
14245};
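// Illustrative example (not part of the library): for a demuxed HLS rendition
// whose CODECS attribute only lists video, the audio codec is filled in from
// the default playlist of its audio media group, when declared there. The
// group id below is hypothetical.
//
//   codecsForPlaylist(master, media);
//   // => e.g. { video: 'avc1.4d401f', audio: 'mp4a.40.2' }, where the audio
//   //    entry came from codecsFromDefault(master, 'audio-group-id')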
14246
14247var logFn = logger('PlaylistSelector');
14248
14249var representationToString = function representationToString(representation) {
14250 if (!representation || !representation.playlist) {
14251 return;
14252 }
14253
14254 var playlist = representation.playlist;
14255 return JSON.stringify({
14256 id: playlist.id,
14257 bandwidth: representation.bandwidth,
14258 width: representation.width,
14259 height: representation.height,
14260 codecs: playlist.attributes && playlist.attributes.CODECS || ''
14261 });
14262}; // Utilities
14263
14264/**
14265 * Returns the CSS value for the specified property on an element
14266 * using `getComputedStyle`. Firefox has a long-standing issue where
14267 * getComputedStyle() may return null when running in an iframe with
14268 * `display: none`.
14269 *
14270 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
14271 * @param {HTMLElement} el the HTMLElement to work on
14272 * @param {string} property the property to get the style for
14273 */
14274
14275
14276var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
14277 if (!el) {
14278 return '';
14279 }
14280
14281 var result = window.getComputedStyle(el);
14282
14283 if (!result) {
14284 return '';
14285 }
14286
14287 return result[property];
14288};
14289/**
14290 * Reusable stable sort function
14291 *
14292 * @param {Playlists} array
14293 * @param {Function} sortFn Different comparators
14294 * @function stableSort
14295 */
14296
14297
14298var stableSort = function stableSort(array, sortFn) {
14299 var newArray = array.slice();
14300 array.sort(function (left, right) {
14301 var cmp = sortFn(left, right);
14302
14303 if (cmp === 0) {
14304 return newArray.indexOf(left) - newArray.indexOf(right);
14305 }
14306
14307 return cmp;
14308 });
14309};
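// Illustrative example (not part of the library): ties fall back to the
// original index, so two playlists with equal BANDWIDTH keep their manifest
// order. Note that the sort happens in place.
//
//   var reps = [{ id: 'a', bandwidth: 5e6 }, { id: 'b', bandwidth: 5e6 }];
//   stableSort(reps, function (left, right) {
//     return left.bandwidth - right.bandwidth;
//   });
//   // reps is still [a, b]; a bare Array#sort is not guaranteed stable in
//   // older engines, which is why original indices break the tie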
14310/**
14311 * A comparator function to sort two playlist object by bandwidth.
14312 *
14313 * @param {Object} left a media playlist object
14314 * @param {Object} right a media playlist object
14315 * @return {number} Greater than zero if the bandwidth attribute of
14316 * left is greater than the corresponding attribute of right. Less
14317 * than zero if the bandwidth of right is greater than left and
14318 * exactly zero if the two are equal.
14319 */
14320
14321
14322var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
14323 var leftBandwidth;
14324 var rightBandwidth;
14325
14326 if (left.attributes.BANDWIDTH) {
14327 leftBandwidth = left.attributes.BANDWIDTH;
14328 }
14329
14330 leftBandwidth = leftBandwidth || window.Number.MAX_VALUE;
14331
14332 if (right.attributes.BANDWIDTH) {
14333 rightBandwidth = right.attributes.BANDWIDTH;
14334 }
14335
14336 rightBandwidth = rightBandwidth || window.Number.MAX_VALUE;
14337 return leftBandwidth - rightBandwidth;
14338};
14339/**
14340 * A comparator function to sort two playlist object by resolution (width).
14341 *
14342 * @param {Object} left a media playlist object
14343 * @param {Object} right a media playlist object
14344 * @return {number} Greater than zero if the resolution.width attribute of
14345 * left is greater than the corresponding attribute of right. Less
14346 * than zero if the resolution.width of right is greater than left and
14347 * exactly zero if the two are equal.
14348 */
14349
14350var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
14351 var leftWidth;
14352 var rightWidth;
14353
14354 if (left.attributes.RESOLUTION && left.attributes.RESOLUTION.width) {
14355 leftWidth = left.attributes.RESOLUTION.width;
14356 }
14357
14358 leftWidth = leftWidth || window.Number.MAX_VALUE;
14359
14360 if (right.attributes.RESOLUTION && right.attributes.RESOLUTION.width) {
14361 rightWidth = right.attributes.RESOLUTION.width;
14362 }
14363
14364 rightWidth = rightWidth || window.Number.MAX_VALUE; // NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
14365 // have the same media dimensions/resolution
14366
14367 if (leftWidth === rightWidth && left.attributes.BANDWIDTH && right.attributes.BANDWIDTH) {
14368 return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
14369 }
14370
14371 return leftWidth - rightWidth;
14372};
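// Illustrative example (not part of the library): playlists missing the
// relevant attribute sort last (the value defaults to Number.MAX_VALUE), and
// equal widths fall back to a bandwidth comparison:
//
//   comparePlaylistResolution(
//     { attributes: { RESOLUTION: { width: 1280 }, BANDWIDTH: 3e6 } },
//     { attributes: { RESOLUTION: { width: 1280 }, BANDWIDTH: 2e6 } }
//   );
//   // => 1000000 (positive, so the 3 Mbps variant sorts after the 2 Mbps one)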
14373/**
14374 * Chooses the appropriate media playlist based on bandwidth and player size
14375 *
14376 * @param {Object} master
14377 * Object representation of the master manifest
14378 * @param {number} playerBandwidth
14379 * Current calculated bandwidth of the player
14380 * @param {number} playerWidth
14381 * Current width of the player element (should account for the device pixel ratio)
14382 * @param {number} playerHeight
14383 * Current height of the player element (should account for the device pixel ratio)
14384 * @param {boolean} limitRenditionByPlayerDimensions
14385 * True if the player width and height should be used during the selection, false otherwise
14386 * @param {Object} masterPlaylistController
14387 * the current masterPlaylistController object
14388 * @return {Playlist} the highest bitrate playlist less than the
14389 * currently detected bandwidth, accounting for some amount of
14390 * bandwidth variance
14391 */
14392
14393var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions, masterPlaylistController) {
14394 // If we end up getting called before `master` is available, exit early
14395 if (!master) {
14396 return;
14397 }
14398
14399 var options = {
14400 bandwidth: playerBandwidth,
14401 width: playerWidth,
14402 height: playerHeight,
14403 limitRenditionByPlayerDimensions: limitRenditionByPlayerDimensions
14404 };
14405 var playlists = master.playlists; // if playlist is audio only, select between currently active audio group playlists.
14406
14407 if (Playlist.isAudioOnly(master)) {
14408 playlists = masterPlaylistController.getAudioTrackPlaylists_(); // add audioOnly to options so that we log audioOnly: true
14409 // at the bottom of this function for debugging.
14410
14411 options.audioOnly = true;
14412 } // convert the playlists to an intermediary representation to make comparisons easier
14413
14414
14415 var sortedPlaylistReps = playlists.map(function (playlist) {
14416 var bandwidth;
14417 var width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
14418 var height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
14419 bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
14420 bandwidth = bandwidth || window.Number.MAX_VALUE;
14421 return {
14422 bandwidth: bandwidth,
14423 width: width,
14424 height: height,
14425 playlist: playlist
14426 };
14427 });
14428 stableSort(sortedPlaylistReps, function (left, right) {
14429 return left.bandwidth - right.bandwidth;
14430 }); // filter out any playlists that have been excluded due to
14431 // incompatible configurations
14432
14433 sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
14434 return !Playlist.isIncompatible(rep.playlist);
14435 }); // filter out any playlists that have been disabled manually through the representations
14436 // api or blacklisted temporarily due to playback errors.
14437
14438 var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
14439 return Playlist.isEnabled(rep.playlist);
14440 });
14441
14442 if (!enabledPlaylistReps.length) {
14443 // if there are no enabled playlists, then they have all been blacklisted or disabled
14444 // by the user through the representations api. In this case, ignore blacklisting and
14445 // fall back to what the user wants by using playlists the user has not disabled.
14446 enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
14447 return !Playlist.isDisabled(rep.playlist);
14448 });
14449 } // filter out any variant that has greater effective bitrate
14450 // than the current estimated bandwidth
14451
14452
14453 var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
14454 return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
14455 });
14456 var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth
14457 // and then take the very first element
14458
14459 var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
14460 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
14461 })[0]; // if we're not going to limit renditions by player size, make an early decision.
14462
14463 if (limitRenditionByPlayerDimensions === false) {
14464 var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
14465
14466 if (_chosenRep && _chosenRep.playlist) {
14467 var type = 'sortedPlaylistReps';
14468
14469 if (bandwidthBestRep) {
14470 type = 'bandwidthBestRep';
14471 }
14472
14473 if (enabledPlaylistReps[0]) {
14474 type = 'enabledPlaylistReps';
14475 }
14476
14477 logFn("choosing " + representationToString(_chosenRep) + " using " + type + " with options", options);
14478 return _chosenRep.playlist;
14479 }
14480
14481 logFn('could not choose a playlist with options', options);
14482 return null;
14483 } // filter out playlists without resolution information
14484
14485
14486 var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
14487 return rep.width && rep.height;
14488 }); // sort variants by resolution
14489
14490 stableSort(haveResolution, function (left, right) {
14491 return left.width - right.width;
14492 }); // if we have the exact resolution as the player use it
14493
14494 var resolutionBestRepList = haveResolution.filter(function (rep) {
14495 return rep.width === playerWidth && rep.height === playerHeight;
14496 });
14497 highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that has the exact resolution
14498
14499 var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
14500 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
14501 })[0];
14502 var resolutionPlusOneList;
14503 var resolutionPlusOneSmallest;
14504 var resolutionPlusOneRep; // find the smallest variant that is larger than the player
14505 // if there is no match of exact resolution
14506
14507 if (!resolutionBestRep) {
14508 resolutionPlusOneList = haveResolution.filter(function (rep) {
14509 return rep.width > playerWidth || rep.height > playerHeight;
14510 }); // find all the variants that have the same smallest resolution
14511
14512 resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
14513 return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
14514 }); // ensure that we also pick the highest bandwidth variant that
14515 // is just-larger-than the video player
14516
14517 highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
14518 resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
14519 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
14520 })[0];
14521 }
14522
14523 var leastPixelDiffRep; // If this selector proves to be better than others,
14524 // resolutionPlusOneRep and resolutionBestRep and all
14525 // the code involving them should be removed.
14526
14527 if (masterPlaylistController.experimentalLeastPixelDiffSelector) {
14528 // find the variant that is closest to the player's pixel size
14529 var leastPixelDiffList = haveResolution.map(function (rep) {
14530 rep.pixelDiff = Math.abs(rep.width - playerWidth) + Math.abs(rep.height - playerHeight);
14531 return rep;
14532 }); // get the highest bandwidth, closest resolution playlist
14533
14534 stableSort(leastPixelDiffList, function (left, right) {
14535 // sort by highest bandwidth if pixelDiff is the same
14536 if (left.pixelDiff === right.pixelDiff) {
14537 return right.bandwidth - left.bandwidth;
14538 }
14539
14540 return left.pixelDiff - right.pixelDiff;
14541 });
14542 leastPixelDiffRep = leastPixelDiffList[0];
14543 } // fallback chain of variants
14544
14545
14546 var chosenRep = leastPixelDiffRep || resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
14547
14548 if (chosenRep && chosenRep.playlist) {
14549 var _type = 'sortedPlaylistReps';
14550
14551 if (leastPixelDiffRep) {
14552 _type = 'leastPixelDiffRep';
14553 } else if (resolutionPlusOneRep) {
14554 _type = 'resolutionPlusOneRep';
14555 } else if (resolutionBestRep) {
14556 _type = 'resolutionBestRep';
14557 } else if (bandwidthBestRep) {
14558 _type = 'bandwidthBestRep';
14559 } else if (enabledPlaylistReps[0]) {
14560 _type = 'enabledPlaylistReps';
14561 }
14562
14563 logFn("choosing " + representationToString(chosenRep) + " using " + _type + " with options", options);
14564 return chosenRep.playlist;
14565 }
14566
14567 logFn('could not choose a playlist with options', options);
14568 return null;
14569};
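// Illustrative call sketch (not part of the library): the selectors below
// invoke simpleSelector roughly like this; the numbers are hypothetical.
//
//   simpleSelector(
//     master,   // parsed master manifest
//     4e6,      // ~4 Mbps measured bandwidth
//     640, 360, // player dimensions, already scaled by devicePixelRatio
//     true,     // limitRenditionByPlayerDimensions
//     mpc       // masterPlaylistController instance
//   );
//   // => the enabled playlist with the highest bandwidth whose effective
//   //    bitrate (times Config.BANDWIDTH_VARIANCE) fits the estimate and
//   //    whose resolution best matches the player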
14570
14571/**
14572 * Chooses the appropriate media playlist based on the most recent
14573 * bandwidth estimate and the player size.
14574 *
14575 * Expects to be called within the context of an instance of VhsHandler
14576 *
14577 * @return {Playlist} the highest bitrate playlist less than the
14578 * currently detected bandwidth, accounting for some amount of
14579 * bandwidth variance
14580 */
14581
14582var lastBandwidthSelector = function lastBandwidthSelector() {
14583 var pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
14584 return simpleSelector(this.playlists.master, this.systemBandwidth, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
14585};
14586/**
14587 * Chooses the appropriate media playlist based on an
14588 * exponential-weighted moving average of the bandwidth after
14589 * filtering for player size.
14590 *
14591 * Expects to be called within the context of an instance of VhsHandler
14592 *
14593 * @param {number} decay - a number between 0 and 1. Higher values of
14594 * this parameter will cause previous bandwidth estimates to lose
14595 * significance more quickly.
14596 * @return {Function} a playlist selector function that applies the moving
14597 * average to each new bandwidth estimate.
14598 * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
14599 */
14600
14601var movingAverageBandwidthSelector = function movingAverageBandwidthSelector(decay) {
14602 var average = -1;
14603 var lastSystemBandwidth = -1;
14604
14605 if (decay < 0 || decay > 1) {
14606 throw new Error('Moving average bandwidth decay must be between 0 and 1.');
14607 }
14608
14609 return function () {
14610 var pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
14611
14612 if (average < 0) {
14613 average = this.systemBandwidth;
14614 lastSystemBandwidth = this.systemBandwidth;
14615 } // stop the average value from decaying for every 250ms
14616 // when the systemBandwidth is constant
14617 // and
14618 // stop average from setting to a very low value when the
14619 // systemBandwidth becomes 0 in case of chunk cancellation
14620
14621
14622 if (this.systemBandwidth > 0 && this.systemBandwidth !== lastSystemBandwidth) {
14623 average = decay * this.systemBandwidth + (1 - decay) * average;
14624 lastSystemBandwidth = this.systemBandwidth;
14625 }
14626
14627 return simpleSelector(this.playlists.master, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
14628 };
14629};
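// Worked example (illustrative numbers): with decay = 0.5 and successive
// systemBandwidth samples of 4e6 then 2e6 bits/s, the average seeds to the
// first sample and then moves halfway toward each new distinct sample:
//
//   average = 4e6                          // first call seeds the average
//   average = 0.5 * 2e6 + (1 - 0.5) * 4e6  // = 3e6 on the next new sample
//
// Repeated samples and samples of 0 (e.g. after a chunk cancellation) are
// skipped, so the average does not decay while bandwidth is unchanged.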
14630/**
14631 * Chooses the appropriate media playlist based on the potential to rebuffer
14632 *
14633 * @param {Object} settings
14634 * Object of information required to use this selector
14635 * @param {Object} settings.master
14636 * Object representation of the master manifest
14637 * @param {number} settings.currentTime
14638 * The current time of the player
14639 * @param {number} settings.bandwidth
14640 * Current measured bandwidth
14641 * @param {number} settings.duration
14642 * Duration of the media
14643 * @param {number} settings.segmentDuration
14644 * Segment duration to be used in round trip time calculations
14645 * @param {number} settings.timeUntilRebuffer
14646 * Time left in seconds until the player has to rebuffer
14647 * @param {number} settings.currentTimeline
14648 * The current timeline segments are being loaded from
14649 * @param {SyncController} settings.syncController
14650 * SyncController for determining if we have a sync point for a given playlist
14651 * @return {Object|null}
14652 * {Object} return.playlist
14653 * The highest bandwidth playlist with the least amount of rebuffering
14654 * {Number} return.rebufferingImpact
14655 * The amount of time in seconds switching to this playlist will rebuffer. A
14656 * negative value means that switching will cause zero rebuffering.
14657 */
14658
14659var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
14660 var master = settings.master,
14661 currentTime = settings.currentTime,
14662 bandwidth = settings.bandwidth,
14663 duration = settings.duration,
14664 segmentDuration = settings.segmentDuration,
14665 timeUntilRebuffer = settings.timeUntilRebuffer,
14666 currentTimeline = settings.currentTimeline,
14667 syncController = settings.syncController; // filter out any playlists that have been excluded due to
14668 // incompatible configurations
14669
14670 var compatiblePlaylists = master.playlists.filter(function (playlist) {
14671 return !Playlist.isIncompatible(playlist);
14672 }); // filter out any playlists that have been disabled manually through the representations
14673 // api or blacklisted temporarily due to playback errors.
14674
14675 var enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);
14676
14677 if (!enabledPlaylists.length) {
14678 // if there are no enabled playlists, then they have all been blacklisted or disabled
14679 // by the user through the representations api. In this case, ignore blacklisting and
14680 // fall back to what the user wants by using playlists the user has not disabled.
14681 enabledPlaylists = compatiblePlaylists.filter(function (playlist) {
14682 return !Playlist.isDisabled(playlist);
14683 });
14684 }
14685
14686 var bandwidthPlaylists = enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));
14687 var rebufferingEstimates = bandwidthPlaylists.map(function (playlist) {
14688 var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime); // If there is no sync point for this playlist, switching to it will require a
14689 // sync request first. This will double the request time
14690
14691 var numRequests = syncPoint ? 1 : 2;
14692 var requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
14693 var rebufferingImpact = requestTimeEstimate * numRequests - timeUntilRebuffer;
14694 return {
14695 playlist: playlist,
14696 rebufferingImpact: rebufferingImpact
14697 };
14698 });
14699 var noRebufferingPlaylists = rebufferingEstimates.filter(function (estimate) {
14700 return estimate.rebufferingImpact <= 0;
14701 }); // Sort by bandwidth DESC
14702
14703 stableSort(noRebufferingPlaylists, function (a, b) {
14704 return comparePlaylistBandwidth(b.playlist, a.playlist);
14705 });
14706
14707 if (noRebufferingPlaylists.length) {
14708 return noRebufferingPlaylists[0];
14709 }
14710
14711 stableSort(rebufferingEstimates, function (a, b) {
14712 return a.rebufferingImpact - b.rebufferingImpact;
14713 });
14714 return rebufferingEstimates[0] || null;
14715};
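// Worked example (illustrative numbers): a playlist without a sync point
// needs an extra request, doubling the estimated request time before it is
// compared against the time left until rebuffering:
//
//   requestTimeEstimate = 2 (seconds), numRequests = 2, timeUntilRebuffer = 5
//   rebufferingImpact = 2 * 2 - 5 // = -1, i.e. switching causes no rebuffer
//
// All playlists with impact <= 0 compete on bandwidth (highest wins);
// otherwise the playlist with the smallest impact is returned.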
14716/**
14717 * Chooses the appropriate media playlist, which in this case is the lowest bitrate
14718 * one with video. If no renditions with video exist, return the lowest audio rendition.
14719 *
14720 * Expects to be called within the context of an instance of VhsHandler
14721 *
14722 * @return {Object|null}
14723 * {Object} return.playlist
14724 * The lowest bitrate playlist that contains a video codec. If no such rendition
14725 * exists pick the lowest audio rendition.
14726 */
14727
14728var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
14729 var _this = this;
14730
14731 // filter out any playlists that have been excluded due to
14732 // incompatible configurations or playback errors
14733 var playlists = this.playlists.master.playlists.filter(Playlist.isEnabled); // Sort ascending by bitrate
14734
14735 stableSort(playlists, function (a, b) {
14736 return comparePlaylistBandwidth(a, b);
14737 }); // Parse and assume that playlists with no video codec have no video
14738 // (this is not necessarily true, although it is generally true).
14739 //
14740 // If an entire manifest has no valid videos everything will get filtered
14741 // out.
14742
14743 var playlistsWithVideo = playlists.filter(function (playlist) {
14744 return !!codecsForPlaylist(_this.playlists.master, playlist).video;
14745 });
14746 return playlistsWithVideo[0] || null;
14747};
14748
14749/**
14750 * Combine all segments into a single Uint8Array
14751 *
14752 * @param {Object} segmentObj
14753 * @return {Uint8Array} concatenated bytes
14754 * @private
14755 */
14756var concatSegments = function concatSegments(segmentObj) {
14757 var offset = 0;
14758 var tempBuffer;
14759
14760 if (segmentObj.bytes) {
14761 tempBuffer = new Uint8Array(segmentObj.bytes); // combine the individual segments into one large typed-array
14762
14763 segmentObj.segments.forEach(function (segment) {
14764 tempBuffer.set(segment, offset);
14765 offset += segment.byteLength;
14766 });
14767 }
14768
14769 return tempBuffer;
14770};
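// Illustrative example (not part of the library): segmentObj.bytes holds the
// total byte length and segmentObj.segments the individual chunks, in order:
//
//   concatSegments({
//     bytes: 5,
//     segments: [new Uint8Array([1, 2]), new Uint8Array([3, 4, 5])]
//   });
//   // => Uint8Array [1, 2, 3, 4, 5]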
14771
14772/**
14773 * @file text-tracks.js
14774 */
14775/**
14776 * Create captions text tracks on video.js if they do not exist
14777 *
14778 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
14779 * @param {Object} tech the video.js tech
14780 * @param {Object} captionStream the caption stream to create
14781 * @private
14782 */
14783
14784var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStream) {
14785 if (!inbandTextTracks[captionStream]) {
14786 tech.trigger({
14787 type: 'usage',
14788 name: 'vhs-608'
14789 });
14790 tech.trigger({
14791 type: 'usage',
14792 name: 'hls-608'
14793 });
14794 var instreamId = captionStream; // we need to translate SERVICEn for 708 to how mux.js currently labels them
14795
14796 if (/^cc708_/.test(captionStream)) {
14797 instreamId = 'SERVICE' + captionStream.split('_')[1];
14798 }
14799
14800 var track = tech.textTracks().getTrackById(instreamId);
14801
14802 if (track) {
14803 // Reuse an existing track with a CC# id because this was
14804 // very likely created by videojs-contrib-hls from information
14805 // in the m3u8 for us to use
14806 inbandTextTracks[captionStream] = track;
14807 } else {
14808 // This section gets called when we have caption services that aren't specified in the manifest.
14809 // Manifest level caption services are handled in media-groups.js under CLOSED-CAPTIONS.
14810 var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
14811 var label = captionStream;
14812 var language = captionStream;
14813 var def = false;
14814 var captionService = captionServices[instreamId];
14815
14816 if (captionService) {
14817 label = captionService.label;
14818 language = captionService.language;
14819 def = captionService.default;
14820 } // Otherwise, create a track with the default `CC#` label and
14821 // without a language
14822
14823
14824 inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
14825 kind: 'captions',
14826 id: instreamId,
14827 // TODO: investigate why this doesn't seem to turn the caption on by default
14828 default: def,
14829 label: label,
14830 language: language
14831 }, false).track;
14832 }
14833 }
14834};
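// Illustrative example (not part of the library): mux.js labels 708 caption
// services SERVICEn, so a caption stream named 'cc708_1' is created and looked
// up with the track id 'SERVICE1', while a 608 stream like 'CC1' keeps its own
// name as the id. A matching entry in options.vhs.captionServices (keyed by
// that id) can override the track's label, language and default flag.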
14835/**
14836 * Add caption text track data to a source handler given an array of captions
14837 *
14838 * @param {Object} options an object with the properties below
14839 * @param {Object} inbandTextTracks the inband text tracks
14840 * @param {number} timestampOffset the timestamp offset of the source buffer
14841 * @param {Array} captionArray an array of caption data
14842 * @private
14843 */
14844
14845var addCaptionData = function addCaptionData(_ref) {
14846 var inbandTextTracks = _ref.inbandTextTracks,
14847 captionArray = _ref.captionArray,
14848 timestampOffset = _ref.timestampOffset;
14849
14850 if (!captionArray) {
14851 return;
14852 }
14853
14854 var Cue = window.WebKitDataCue || window.VTTCue;
14855 captionArray.forEach(function (caption) {
14856 var track = caption.stream;
14857 inbandTextTracks[track].addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
14858 });
14859};
14860/**
14861 * Define properties on a cue for backwards compatibility,
14862 * but warn the user that the way that they are using it
14863 * is deprecated and will be removed at a later date.
14864 *
14865 * @param {Cue} cue the cue to add the properties on
14866 * @private
14867 */
14868
14869var deprecateOldCue = function deprecateOldCue(cue) {
14870 Object.defineProperties(cue.frame, {
14871 id: {
14872 get: function get() {
14873 videojs.log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
14874 return cue.value.key;
14875 }
14876 },
14877 value: {
14878 get: function get() {
14879 videojs.log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
14880 return cue.value.data;
14881 }
14882 },
14883 privateData: {
14884 get: function get() {
14885 videojs.log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
14886 return cue.value.data;
14887 }
14888 }
14889 });
14890};
14891/**
14892 * Add metadata text track data to a source handler given an array of metadata
14893 *
14894 * @param {Object} options an object with the properties below
14895 * @param {Object} inbandTextTracks the inband text tracks
14896 * @param {Array} metadataArray an array of meta data
14897 * @param {number} timestampOffset the timestamp offset of the source buffer
14898 * @param {number} videoDuration the duration of the video
14899 * @private
14900 */
14901
14902
14903var addMetadata = function addMetadata(_ref2) {
14904 var inbandTextTracks = _ref2.inbandTextTracks,
14905 metadataArray = _ref2.metadataArray,
14906 timestampOffset = _ref2.timestampOffset,
14907 videoDuration = _ref2.videoDuration;
14908
14909 if (!metadataArray) {
14910 return;
14911 }
14912
14913 var Cue = window.WebKitDataCue || window.VTTCue;
14914 var metadataTrack = inbandTextTracks.metadataTrack_;
14915
14916 if (!metadataTrack) {
14917 return;
14918 }
14919
14920 metadataArray.forEach(function (metadata) {
14921 var time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,
14922 // ignore this bit of metadata.
14923 // This likely occurs when you have a non-timed ID3 tag like TIT2,
14924 // which is the "Title/Songname/Content description" frame
14925
14926 if (typeof time !== 'number' || window.isNaN(time) || time < 0 || !(time < Infinity)) {
14927 return;
14928 }
14929
14930 metadata.frames.forEach(function (frame) {
14931 var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
14932 cue.frame = frame;
14933 cue.value = frame;
14934 deprecateOldCue(cue);
14935 metadataTrack.addCue(cue);
14936 });
14937 });
14938
14939 if (!metadataTrack.cues || !metadataTrack.cues.length) {
14940 return;
14941 } // Updating the metadata cues so that
14942 // the endTime of each cue is the startTime of the next cue
14943 // the endTime of last cue is the duration of the video
14944
14945
14946 var cues = metadataTrack.cues;
14947 var cuesArray = []; // Create a copy of the TextTrackCueList...
14948 // ...disregarding cues with a falsy value
14949
14950 for (var i = 0; i < cues.length; i++) {
14951 if (cues[i]) {
14952 cuesArray.push(cues[i]);
14953 }
14954 } // Group cues by their startTime value
14955
14956
14957 var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
14958 var timeSlot = obj[cue.startTime] || [];
14959 timeSlot.push(cue);
14960 obj[cue.startTime] = timeSlot;
14961 return obj;
14962 }, {}); // Sort startTimes by ascending order
14963
14964 var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
14965 return Number(a) - Number(b);
14966 }); // Map each cue group's endTime to the next group's startTime
14967
14968 sortedStartTimes.forEach(function (startTime, idx) {
14969 var cueGroup = cuesGroupedByStartTime[startTime];
14970 var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration; // Map each cue's endTime to the next group's startTime
14971
14972 cueGroup.forEach(function (cue) {
14973 cue.endTime = nextTime;
14974 });
14975 });
14976};
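// Worked example (illustrative): three ID3 cue groups at startTimes 0, 10 and
// 20 on a 30 second video end up with
//
//   cues at 0  => endTime 10 (the next group's startTime)
//   cues at 10 => endTime 20
//   cues at 20 => endTime 30 (videoDuration, since there is no next group)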
14977/**
14978 * Create metadata text track on video.js if it does not exist
14979 *
14980 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
14981 * @param {string} dispatchType the inband metadata track dispatch type
14982 * @param {Object} tech the video.js tech
14983 * @private
14984 */
14985
14986var createMetadataTrackIfNotExists = function createMetadataTrackIfNotExists(inbandTextTracks, dispatchType, tech) {
14987 if (inbandTextTracks.metadataTrack_) {
14988 return;
14989 }
14990
14991 inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
14992 kind: 'metadata',
14993 label: 'Timed Metadata'
14994 }, false).track;
14995 inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
14996};
14997/**
14998 * Remove cues from a track on video.js.
14999 *
15000 * @param {Double} start start of where we should remove the cue
15001 * @param {Double} end end of where we should remove the cue
15002 * @param {Object} track the text track to remove the cues from
15003 * @private
15004 */
15005
15006var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
15007 var i;
15008 var cue;
15009
15010 if (!track) {
15011 return;
15012 }
15013
15014 if (!track.cues) {
15015 return;
15016 }
15017
15018 i = track.cues.length;
15019
15020 while (i--) {
15021 cue = track.cues[i]; // Remove any cue within the provided start and end time
15022
15023 if (cue.startTime >= start && cue.endTime <= end) {
15024 track.removeCue(cue);
15025 }
15026 }
15027};
15028/**
15029 * Remove duplicate cues from a track on video.js (a cue is considered a
15030 * duplicate if it has the same time interval and text as another)
15031 *
15032 * @param {Object} track the text track to remove the duplicate cues from
15033 * @private
15034 */
15035
15036var removeDuplicateCuesFromTrack = function removeDuplicateCuesFromTrack(track) {
15037 var cues = track.cues;
15038
15039 if (!cues) {
15040 return;
15041 }
15042
15043 for (var i = 0; i < cues.length; i++) {
15044 var duplicates = [];
15045 var occurrences = 0;
15046
15047 for (var j = 0; j < cues.length; j++) {
15048 if (cues[i].startTime === cues[j].startTime && cues[i].endTime === cues[j].endTime && cues[i].text === cues[j].text) {
15049 occurrences++;
15050
15051 if (occurrences > 1) {
15052 duplicates.push(cues[j]);
15053 }
15054 }
15055 }
15056
15057 if (duplicates.length) {
15058 duplicates.forEach(function (dupe) {
15059 return track.removeCue(dupe);
15060 });
15061 }
15062 }
15063};
15064
15065/**
15066 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
15067 * front of current time.
15068 *
15069 * @param {Array} buffer
15070 * The current buffer of gop information
15071 * @param {number} currentTime
15072 * The current time
15073 * @param {Double} mapping
15074 * Offset to map display time to stream presentation time
15075 * @return {Array}
15076 * List of gops considered safe to append over
15077 */
15078
15079var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
15080 if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
15081 return [];
15082 } // pts value for current time + 3 seconds to give a bit more wiggle room
15083
15084
15085 var currentTimePts = Math.ceil((currentTime - mapping + 3) * ONE_SECOND_IN_TS);
15086 var i;
15087
15088 for (i = 0; i < buffer.length; i++) {
15089 if (buffer[i].pts > currentTimePts) {
15090 break;
15091 }
15092 }
15093
15094 return buffer.slice(i);
15095};
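// Worked example (illustrative numbers): with currentTime = 10, mapping = -5
// and ONE_SECOND_IN_TS = 90000 (the MPEG-TS clock rate), the cutoff is
//
//   currentTimePts = Math.ceil((10 - (-5) + 3) * 90000) // = 1620000
//
// and only gop entries with pts greater than that survive the slice.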
15096/**
15097 * Appends gop information (timing and byteLength) received by the transmuxer for the
15098 * gops appended in the last call to appendBuffer
15099 *
15100 * @param {Array} buffer
15101 * The current buffer of gop information
15102 * @param {Array} gops
15103 * List of new gop information
15104 * @param {boolean} replace
15105 * If true, replace the buffer with the new gop information. If false, append the
15106 * new gop information to the buffer in the right location of time.
15107 * @return {Array}
15108 * Updated list of gop information
15109 */
15110
15111var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
15112 if (!gops.length) {
15113 return buffer;
15114 }
15115
15116 if (replace) {
15117 // If we are in safe append mode, then completely overwrite the gop buffer
15118 // with the most recently appended data. This will make sure that when appending
15119 // future segments, we only try to align with gops that are both ahead of current
15120 // time and in the last segment appended.
15121 return gops.slice();
15122 }
15123
15124 var start = gops[0].pts;
15125 var i = 0;
15126
15127 for (i; i < buffer.length; i++) {
15128 if (buffer[i].pts >= start) {
15129 break;
15130 }
15131 }
15132
15133 return buffer.slice(0, i).concat(gops);
15134};
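// Illustrative example (not part of the library): when not replacing, any
// buffered gop info at or after the first new gop's pts is dropped before the
// new gops are appended:
//
//   updateGopBuffer(
//     [{ pts: 1000 }, { pts: 2000 }, { pts: 3000 }],
//     [{ pts: 3000 }, { pts: 4000 }],
//     false
//   );
//   // => [{ pts: 1000 }, { pts: 2000 }, { pts: 3000 }, { pts: 4000 }]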
15135/**
15136 * Removes gop information in buffer that overlaps with provided start and end
15137 *
15138 * @param {Array} buffer
15139 * The current buffer of gop information
15140 * @param {Double} start
15141 * position to start the remove at
15142 * @param {Double} end
15143 * position to end the remove at
15144 * @param {Double} mapping
15145 * Offset to map display time to stream presentation time
15146 */
15147
15148var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
15149 var startPts = Math.ceil((start - mapping) * ONE_SECOND_IN_TS);
15150 var endPts = Math.ceil((end - mapping) * ONE_SECOND_IN_TS);
15151 var updatedBuffer = buffer.slice();
15152 var i = buffer.length;
15153
15154 while (i--) {
15155 if (buffer[i].pts <= endPts) {
15156 break;
15157 }
15158 }
15159
15160 if (i === -1) {
15161 // no removal because end of remove range is before start of buffer
15162 return updatedBuffer;
15163 }
15164
15165 var j = i + 1;
15166
15167 while (j--) {
15168 if (buffer[j].pts <= startPts) {
15169 break;
15170 }
15171 } // clamp remove range start to 0 index
15172
15173
15174 j = Math.max(j, 0);
15175 updatedBuffer.splice(j, i - j + 1);
15176 return updatedBuffer;
15177};
15178
15179var shallowEqual = function shallowEqual(a, b) {
15180 // if both are undefined
15181 // or one or the other is undefined
15182 // they are not equal
15183 if (!a && !b || !a && b || a && !b) {
15184 return false;
15185 } // they are the same object and thus, equal
15186
15187
15188 if (a === b) {
15189 return true;
15190 } // sort keys so we can make sure they have
15191 // all the same keys later.
15192
15193
15194 var akeys = Object.keys(a).sort();
15195 var bkeys = Object.keys(b).sort(); // different number of keys, not equal
15196
15197 if (akeys.length !== bkeys.length) {
15198 return false;
15199 }
15200
15201 for (var i = 0; i < akeys.length; i++) {
15202 var key = akeys[i]; // different sorted keys, not equal
15203
15204 if (key !== bkeys[i]) {
15205 return false;
15206 } // different values, not equal
15207
15208
15209 if (a[key] !== b[key]) {
15210 return false;
15211 }
15212 }
15213
15214 return true;
15215};
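// Illustrative example (not part of the library): shallowEqual compares flat
// objects such as track info, e.g.
//
//   shallowEqual({ hasAudio: true, hasVideo: true },
//                { hasAudio: true, hasVideo: true }); // => true
//   shallowEqual({ hasAudio: true }, { hasAudio: false }); // => false
//   shallowEqual(undefined, undefined); // => false (both missing)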
15216
15217// https://www.w3.org/TR/WebIDL-1/#quotaexceedederror
15218var QUOTA_EXCEEDED_ERR = 22;
15219
15220/**
15221 * The segment loader has no recourse except to fetch a segment in the
15222 * current playlist and use the internal timestamps in that segment to
15223 * generate a syncPoint. This function returns a good candidate index
15224 * for that process.
15225 *
15226 * @param {number} currentTimeline - the timeline the loader is currently on
 * @param {Array} segments - the segments array from a playlist.
 * @param {number} targetTime - the cumulative duration of timeline content to walk past before picking a candidate
15227 * @return {number} An index of a segment from the playlist to load
15228 */
15229
15230var getSyncSegmentCandidate = function getSyncSegmentCandidate(currentTimeline, segments, targetTime) {
15231 segments = segments || [];
15232 var timelineSegments = [];
15233 var time = 0;
15234
15235 for (var i = 0; i < segments.length; i++) {
15236 var segment = segments[i];
15237
15238 if (currentTimeline === segment.timeline) {
15239 timelineSegments.push(i);
15240 time += segment.duration;
15241
15242 if (time > targetTime) {
15243 return i;
15244 }
15245 }
15246 }
15247
15248 if (timelineSegments.length === 0) {
15249 return 0;
15250 } // default to the last timeline segment
15251
15252
15253 return timelineSegments[timelineSegments.length - 1];
15254}; // In the event of a quota exceeded error, keep at least one second of back buffer. This
15255// number was arbitrarily chosen and may be updated in the future, but seemed reasonable
15256// as a start to prevent any potential issues with removing content too close to the
15257// playhead.
15258
15259var MIN_BACK_BUFFER = 1; // in seconds
15260
15261var CHECK_BUFFER_DELAY = 500; // in ms
15262
15263var finite = function finite(num) {
15264 return typeof num === 'number' && isFinite(num);
15265}; // With most content hovering around 30fps, if a segment has a duration less than a half
15266// frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
15267// not accurately reflect the rest of the content.
15268
15269
15270var MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
15271var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, trackInfo) {
15272 // Although these checks should most likely cover non-'main' types, for now this narrows
15273 // the scope of our checks.
15274 if (loaderType !== 'main' || !startingMedia || !trackInfo) {
15275 return null;
15276 }
15277
15278 if (!trackInfo.hasAudio && !trackInfo.hasVideo) {
15279 return 'Neither audio nor video found in segment.';
15280 }
15281
15282 if (startingMedia.hasVideo && !trackInfo.hasVideo) {
15283 return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
15284 }
15285
15286 if (!startingMedia.hasVideo && trackInfo.hasVideo) {
15287 return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
15288 }
15289
15290 return null;
15291};
15292/**
15293 * Calculates a time value that is safe to remove from the back buffer without interrupting
15294 * playback.
15295 *
15296 * @param {TimeRange} seekable
15297 * The current seekable range
15298 * @param {number} currentTime
15299 * The current time of the player
15300 * @param {number} targetDuration
15301 * The target duration of the current playlist
15302 * @return {number}
15303 * Time that is safe to remove from the back buffer without interrupting playback
15304 */
15305
15306var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable, currentTime, targetDuration) {
15307 // 30 seconds before the playhead provides a safe default for trimming.
15308 //
15309 // Choosing a reasonable default is particularly important for high bitrate content and
15310 // VOD videos/live streams with large windows, as the buffer may end up overfilled and
15311 // throw an APPEND_BUFFER_ERR.
15312 var trimTime = currentTime - Config.BACK_BUFFER_LENGTH;
15313
15314 if (seekable.length) {
15315 // Some live playlists may have a shorter window of content than the full allowed back
15316 // buffer. For these playlists, don't save content that's no longer within the window.
15317 trimTime = Math.max(trimTime, seekable.start(0));
15318 } // Don't remove within target duration of the current time to avoid the possibility of
15319 // removing the GOP currently being played, as removing it can cause playback stalls.
15320
15321
15322 var maxTrimTime = currentTime - targetDuration;
15323 return Math.min(maxTrimTime, trimTime);
15324};
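// Worked example (illustrative, assuming Config.BACK_BUFFER_LENGTH = 30):
// with currentTime = 40, a live seekable start of 15 and targetDuration = 10,
//
//   trimTime    = 40 - 30          // = 10
//   trimTime    = Math.max(10, 15) // = 15, clamped to the live window
//   maxTrimTime = 40 - 10          // = 30
//   result      = Math.min(30, 15) // = 15 seconds is the safe trim point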
15325
15326var segmentInfoString = function segmentInfoString(segmentInfo) {
15327 var startOfSegment = segmentInfo.startOfSegment,
15328 duration = segmentInfo.duration,
15329 segment = segmentInfo.segment,
15330 part = segmentInfo.part,
15331 _segmentInfo$playlist = segmentInfo.playlist,
15332 seq = _segmentInfo$playlist.mediaSequence,
15333 id = _segmentInfo$playlist.id,
15334 _segmentInfo$playlist2 = _segmentInfo$playlist.segments,
15335 segments = _segmentInfo$playlist2 === void 0 ? [] : _segmentInfo$playlist2,
15336 index = segmentInfo.mediaIndex,
15337 partIndex = segmentInfo.partIndex,
15338 timeline = segmentInfo.timeline;
15339 var segmentLen = segments.length - 1;
15340 var selection = 'mediaIndex/partIndex increment';
15341
15342 if (segmentInfo.getMediaInfoForTime) {
15343 selection = "getMediaInfoForTime (" + segmentInfo.getMediaInfoForTime + ")";
15344 } else if (segmentInfo.isSyncRequest) {
15345 selection = 'getSyncSegmentCandidate (isSyncRequest)';
15346 }
15347
15348 var hasPartIndex = typeof partIndex === 'number';
15349 var name = segmentInfo.segment.uri ? 'segment' : 'pre-segment';
15350 var zeroBasedPartCount = hasPartIndex ? getKnownPartCount({
15351 preloadSegment: segment
15352 }) - 1 : 0;
15353 return name + " [" + (seq + index) + "/" + (seq + segmentLen) + "]" + (hasPartIndex ? " part [" + partIndex + "/" + zeroBasedPartCount + "]" : '') + (" segment start/end [" + segment.start + " => " + segment.end + "]") + (hasPartIndex ? " part start/end [" + part.start + " => " + part.end + "]" : '') + (" startOfSegment [" + startOfSegment + "]") + (" duration [" + duration + "]") + (" timeline [" + timeline + "]") + (" selected by [" + selection + "]") + (" playlist [" + id + "]");
15354};
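// For reference, a log line produced by segmentInfoString looks roughly like the
// following (hypothetical values, wrapped here for readability):
//
//   segment [3/10] segment start/end [30 => 40] startOfSegment [30] duration [10]
//     timeline [0] selected by [mediaIndex/partIndex increment] playlist [playlist-1]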
15355
15356var timingInfoPropertyForMedia = function timingInfoPropertyForMedia(mediaType) {
15357 return mediaType + "TimingInfo";
15358};
15359/**
15360 * Returns the timestamp offset to use for the segment.
15361 *
15362 * @param {number} segmentTimeline
15363 * The timeline of the segment
15364 * @param {number} currentTimeline
15365 * The timeline currently being followed by the loader
15366 * @param {number} startOfSegment
15367 * The estimated segment start
15368 * @param {TimeRange[]} buffered
15369 * The loader's buffer
15370 * @param {boolean} overrideCheck
15371 * If true, no checks are made to see if the timestamp offset value should be set,
15372 * but sets it directly to a value.
15373 *
15374 * @return {number|null}
15375 * Either a number representing a new timestamp offset, or null if the segment is
15376 * part of the same timeline
15377 */
15378
15379
15380var timestampOffsetForSegment = function timestampOffsetForSegment(_ref) {
15381 var segmentTimeline = _ref.segmentTimeline,
15382 currentTimeline = _ref.currentTimeline,
15383 startOfSegment = _ref.startOfSegment,
15384 buffered = _ref.buffered,
15385 overrideCheck = _ref.overrideCheck;
15386
15387 // Check to see if we are crossing a discontinuity to see if we need to set the
15388 // timestamp offset on the transmuxer and source buffer.
15389 //
15390 // Previously, we changed the timestampOffset if the start of this segment was less than
15391 // the currently set timestampOffset, but this isn't desirable as it can produce bad
15392 // behavior, especially around long running live streams.
15393 if (!overrideCheck && segmentTimeline === currentTimeline) {
15394 return null;
15395 } // When changing renditions, it's possible to request a segment on an older timeline. For
15396 // instance, given two renditions with the following:
15397 //
15398 // #EXTINF:10
15399 // segment1
15400 // #EXT-X-DISCONTINUITY
15401 // #EXTINF:10
15402 // segment2
15403 // #EXTINF:10
15404 // segment3
15405 //
15406 // And the current player state:
15407 //
15408 // current time: 8
15409 // buffer: 0 => 20
15410 //
15411 // The next segment on the current rendition would be segment3, filling the buffer from
15412 // 20s onwards. However, if a rendition switch happens after segment2 was requested,
15413 // then the next segment to be requested will be segment1 from the new rendition in
15414 // order to fill time 8 and onwards. Using the buffered end would result in repeated
15415 // content (since it would position segment1 of the new rendition starting at 20s). This
15416 // case can be identified when the new segment's timeline is a prior value. Instead of
15417 // using the buffered end, the startOfSegment can be used, which, hopefully, will be
15418 // more accurate to the actual start time of the segment.
15419
15420
15421 if (segmentTimeline < currentTimeline) {
15422 return startOfSegment;
15423 } // segmentInfo.startOfSegment used to be used as the timestamp offset, however, that
15424 // value uses the end of the last segment if it is available. While this value
15425 // should often be correct, it's better to rely on the buffered end, as the new
15426 // content post discontinuity should line up with the buffered end as if it were
15427 // time 0 for the new content.
15428
15429
15430 return buffered.length ? buffered.end(buffered.length - 1) : startOfSegment;
15431};
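// A sketch of the three outcomes above with hypothetical arguments (bufferedUpTo20 is
// a made-up TimeRanges whose last range ends at 20):
//
//   // same timeline and no override: keep the current timestamp offset
//   timestampOffsetForSegment({ segmentTimeline: 0, currentTimeline: 0, overrideCheck: false }); // => null
//
//   // switching back to an earlier timeline: trust the estimated segment start
//   timestampOffsetForSegment({ segmentTimeline: 0, currentTimeline: 1, startOfSegment: 8 }); // => 8
//
//   // crossing forward into a new timeline: line the new content up with the buffered end
//   timestampOffsetForSegment({ segmentTimeline: 1, currentTimeline: 0, buffered: bufferedUpTo20 }); // => 20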
15432/**
15433 * Returns whether or not the loader should wait for a timeline change from the timeline
15434 * change controller before processing the segment.
15435 *
15436 * Primary timing in VHS goes by video. This is different from most media players, as
15437 * audio is more often used as the primary timing source. For the foreseeable future, VHS
15438 * will continue to use video as the primary timing source, due to the current logic and
15439 * expectations built around it.
15440 *

15441 * Since the timing follows video, in order to maintain sync, the video loader is
15442 * responsible for setting both audio and video source buffer timestamp offsets.
15443 *
15444 * Setting different values for audio and video source buffers could lead to
15445 * desyncing. The following examples demonstrate some of the situations where this
15446 * distinction is important. Note that all of these cases involve demuxed content. When
15447 * content is muxed, the audio and video are packaged together, therefore syncing
15448 * separate media playlists is not an issue.
15449 *
15450 * CASE 1: Audio prepares to load a new timeline before video:
15451 *
15452 * Timeline: 0 1
15453 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15454 * Audio Loader: ^
15455 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15456 * Video Loader ^
15457 *
15458 * In the above example, the audio loader is preparing to load the 6th segment, the first
15459 * after a discontinuity, while the video loader is still loading the 5th segment, before
15460 * the discontinuity.
15461 *
15462 * If the audio loader goes ahead and loads and appends the 6th segment before the video
15463 * loader crosses the discontinuity, then when appended, the 6th audio segment will use
15464 * the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
15465 * the audio loader must provide the audioAppendStart value to trim the content in the
15466 * transmuxer, and that value relies on the audio timestamp offset. Since the audio
15467 * timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
15468 * segment until that value is provided.
15469 *
15470 * CASE 2: Video prepares to load a new timeline before audio:
15471 *
15472 * Timeline: 0 1
15473 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15474 * Audio Loader: ^
15475 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15476 * Video Loader ^
15477 *
15478 * In the above example, the video loader is preparing to load the 6th segment, the first
15479 * after a discontinuity, while the audio loader is still loading the 5th segment, before
15480 * the discontinuity.
15481 *
15482 * If the video loader goes ahead and loads and appends the 6th segment, then once the
15483 * segment is loaded and processed, both the video and audio timestamp offsets will be
15484 * set, since video is used as the primary timing source. This is to ensure content lines
15485 * up appropriately, as any modifications to the video timing are reflected by audio when
15486 * the video loader sets the audio and video timestamp offsets to the same value. However,
15487 * setting the timestamp offset for audio before audio has had a chance to change
15488 * timelines will likely lead to desyncing, as the audio loader will append segment 5 with
15489 * a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
15490 *
15491 * CASE 3: When seeking, audio prepares to load a new timeline before video
15492 *
15493 * Timeline: 0 1
15494 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15495 * Audio Loader: ^
15496 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15497 * Video Loader ^
15498 *
15499 * In the above example, both audio and video loaders are loading segments from timeline
15500 * 0, but imagine that the seek originated from timeline 1.
15501 *
15502 * When seeking to a new timeline, the timestamp offset will be set based on the expected
15503 * segment start of the loaded video segment. In order to maintain sync, the audio loader
15504 * must wait for the video loader to load its segment and update both the audio and video
15505 * timestamp offsets before it may load and append its own segment. This is the case
15506 * whether the seek results in a mismatched segment request (e.g., the audio loader
15507 * chooses to load segment 3 and the video loader chooses to load segment 4) or the
15508 * loaders choose to load the same segment index from each playlist, as the segments may
15509 * not be aligned perfectly, even for matching segment indexes.
15510 *
15511 * @param {Object} timelinechangeController
15512 * @param {number} currentTimeline
15513 * The timeline currently being followed by the loader
15514 * @param {number} segmentTimeline
15515 * The timeline of the segment being loaded
15516 * @param {('main'|'audio')} loaderType
15517 * The loader type
15518 * @param {boolean} audioDisabled
15519 * Whether the audio is disabled for the loader. This should only be true when the
15520 * loader may have muxed audio in its segment, but should not append it, e.g., for
15521 * the main loader when an alternate audio playlist is active.
15522 *
15523 * @return {boolean}
15524 * Whether the loader should wait for a timeline change from the timeline change
15525 * controller before processing the segment
15526 */
15527
15528var shouldWaitForTimelineChange = function shouldWaitForTimelineChange(_ref2) {
15529 var timelineChangeController = _ref2.timelineChangeController,
15530 currentTimeline = _ref2.currentTimeline,
15531 segmentTimeline = _ref2.segmentTimeline,
15532 loaderType = _ref2.loaderType,
15533 audioDisabled = _ref2.audioDisabled;
15534
15535 if (currentTimeline === segmentTimeline) {
15536 return false;
15537 }
15538
15539 if (loaderType === 'audio') {
15540 var lastMainTimelineChange = timelineChangeController.lastTimelineChange({
15541 type: 'main'
15542 }); // Audio loader should wait if:
15543 //
15544 // * main hasn't had a timeline change yet (thus has not loaded its first segment)
15545 // * main hasn't yet changed to the timeline audio is looking to load
15546
15547 return !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;
15548 } // The main loader only needs to wait for timeline changes if there's demuxed audio.
15549 // Otherwise, there's nothing to wait for, since audio would be muxed into the main
15550 // loader's segments (or the content is audio/video only and handled by the main
15551 // loader).
15552
15553
15554 if (loaderType === 'main' && audioDisabled) {
15555 var pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
15556 type: 'audio'
15557 }); // Main loader should wait for the audio loader if audio is not pending a timeline
15558 // change to the current timeline.
15559 //
15560 // Since the main loader is responsible for setting the timestamp offset for both
15561 // audio and video, the main loader must wait for audio to be about to change to its
15562 // timeline before setting the offset, otherwise, if audio is behind in loading,
15563 // segments from the previous timeline would be adjusted by the new timestamp offset.
15564 //
15565 // This requirement means that video will not cross a timeline until the audio is
15566 // about to cross to it, so that way audio and video will always cross the timeline
15567 // together.
15568 //
15569 // In addition to normal timeline changes, these rules also apply to the start of a
15570 // stream (going from a non-existent timeline, -1, to timeline 0). It's important
15571 // that these rules apply to the first timeline change because if they did not, it's
15572 // possible that the main loader will cross two timelines before the audio loader has
15573 // crossed one. Logic may be implemented to handle the startup as a special case, but
15574 // it's easier to simply treat all timeline changes the same.
15575
15576 if (pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline) {
15577 return false;
15578 }
15579
15580 return true;
15581 }
15582
15583 return false;
15584};
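// An illustrative audio-loader call with hypothetical values: audio wants to cross
// into timeline 1, but main's last change was to timeline 0, so audio must wait (tcc
// is a made-up timeline change controller):
//
//   shouldWaitForTimelineChange({
//     timelineChangeController: tcc, // tcc.lastTimelineChange({ type: 'main' }) => { from: -1, to: 0 }
//     currentTimeline: 0,
//     segmentTimeline: 1,
//     loaderType: 'audio'
//   }); // => true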
15585var mediaDuration = function mediaDuration(audioTimingInfo, videoTimingInfo) {
15586 var audioDuration = audioTimingInfo && typeof audioTimingInfo.start === 'number' && typeof audioTimingInfo.end === 'number' ? audioTimingInfo.end - audioTimingInfo.start : 0;
15587 var videoDuration = videoTimingInfo && typeof videoTimingInfo.start === 'number' && typeof videoTimingInfo.end === 'number' ? videoTimingInfo.end - videoTimingInfo.start : 0;
15588 return Math.max(audioDuration, videoDuration);
15589};
15590var segmentTooLong = function segmentTooLong(_ref3) {
15591 var segmentDuration = _ref3.segmentDuration,
15592 maxDuration = _ref3.maxDuration;
15593
15594 // 0 duration segments are most likely due to metadata only segments or a lack of
15595 // information.
15596 if (!segmentDuration) {
15597 return false;
15598 } // For HLS:
15599 //
15600 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1
15601 // The EXTINF duration of each Media Segment in the Playlist
15602 // file, when rounded to the nearest integer, MUST be less than or equal
15603 // to the target duration; longer segments can trigger playback stalls
15604 // or other errors.
15605 //
15606 // For DASH, the mpd-parser uses the largest reported segment duration as the target
15607 // duration. Although that reported duration is occasionally approximate (i.e., not
15608 // exact), a strict check may report that a segment is too long more often in DASH.
15609
15610
15611 return Math.round(segmentDuration) > maxDuration + TIME_FUDGE_FACTOR;
15612};
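// Worked examples of the rounding rule above (hypothetical durations, with the
// TIME_FUDGE_FACTOR of 1/30 giving a cutoff of roughly maxDuration + 0.033):
//
//   segmentTooLong({ segmentDuration: 10.4, maxDuration: 10 }); // round(10.4) = 10, not > 10.033 => false
//   segmentTooLong({ segmentDuration: 10.6, maxDuration: 10 }); // round(10.6) = 11, > 10.033 => true
//   segmentTooLong({ segmentDuration: 0, maxDuration: 10 });    // => false (no information)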
15613var getTroublesomeSegmentDurationMessage = function getTroublesomeSegmentDurationMessage(segmentInfo, sourceType) {
15614 // Right now we aren't following DASH's timing model exactly, so only perform
15615 // this check for HLS content.
15616 if (sourceType !== 'hls') {
15617 return null;
15618 }
15619
15620 var segmentDuration = mediaDuration(segmentInfo.audioTimingInfo, segmentInfo.videoTimingInfo); // Don't report if we lack information.
15621 //
15622 // If the segment has a duration of 0 it is either a lack of information or a
15623 // metadata only segment and shouldn't be reported here.
15624
15625 if (!segmentDuration) {
15626 return null;
15627 }
15628
15629 var targetDuration = segmentInfo.playlist.targetDuration;
15630 var isSegmentWayTooLong = segmentTooLong({
15631 segmentDuration: segmentDuration,
15632 maxDuration: targetDuration * 2
15633 });
15634 var isSegmentSlightlyTooLong = segmentTooLong({
15635 segmentDuration: segmentDuration,
15636 maxDuration: targetDuration
15637 });
15638 var segmentTooLongMessage = "Segment with index " + segmentInfo.mediaIndex + " " + ("from playlist " + segmentInfo.playlist.id + " ") + ("has a duration of " + segmentDuration + " ") + ("when the reported duration is " + segmentInfo.duration + " ") + ("and the target duration is " + targetDuration + ". ") + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1';
15639
15640 if (isSegmentWayTooLong || isSegmentSlightlyTooLong) {
15641 return {
15642 severity: isSegmentWayTooLong ? 'warn' : 'info',
15643 message: segmentTooLongMessage
15644 };
15645 }
15646
15647 return null;
15648};
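// With hypothetical numbers and a target duration of 10: a measured duration of 10.5
// is only slightly too long and is reported with severity 'info', while 21 exceeds
// twice the target duration and is reported with severity 'warn'.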
15649/**
15650 * An object that manages segment loading and appending.
15651 *
15652 * @class SegmentLoader
15653 * @param {Object} options required and optional options
15654 * @extends videojs.EventTarget
15655 */
15656
15657var SegmentLoader = /*#__PURE__*/function (_videojs$EventTarget) {
15658 _inheritsLoose(SegmentLoader, _videojs$EventTarget);
15659
15660 function SegmentLoader(settings, options) {
15661 var _this;
15662
15663 _this = _videojs$EventTarget.call(this) || this; // check pre-conditions
15664
15665 if (!settings) {
15666 throw new TypeError('Initialization settings are required');
15667 }
15668
15669 if (typeof settings.currentTime !== 'function') {
15670 throw new TypeError('No currentTime getter specified');
15671 }
15672
15673 if (!settings.mediaSource) {
15674 throw new TypeError('No MediaSource specified');
15675 } // public properties
15676
15677
15678 _this.bandwidth = settings.bandwidth;
15679 _this.throughput = {
15680 rate: 0,
15681 count: 0
15682 };
15683 _this.roundTrip = NaN;
15684
15685 _this.resetStats_();
15686
15687 _this.mediaIndex = null;
15688 _this.partIndex = null; // private settings
15689
15690 _this.hasPlayed_ = settings.hasPlayed;
15691 _this.currentTime_ = settings.currentTime;
15692 _this.seekable_ = settings.seekable;
15693 _this.seeking_ = settings.seeking;
15694 _this.duration_ = settings.duration;
15695 _this.mediaSource_ = settings.mediaSource;
15696 _this.vhs_ = settings.vhs;
15697 _this.loaderType_ = settings.loaderType;
15698 _this.currentMediaInfo_ = void 0;
15699 _this.startingMediaInfo_ = void 0;
15700 _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
15701 _this.goalBufferLength_ = settings.goalBufferLength;
15702 _this.sourceType_ = settings.sourceType;
15703 _this.sourceUpdater_ = settings.sourceUpdater;
15704 _this.inbandTextTracks_ = settings.inbandTextTracks;
15705 _this.state_ = 'INIT';
15706 _this.timelineChangeController_ = settings.timelineChangeController;
15707 _this.shouldSaveSegmentTimingInfo_ = true;
15708 _this.parse708captions_ = settings.parse708captions;
15709 _this.captionServices_ = settings.captionServices;
15710 _this.experimentalExactManifestTimings = settings.experimentalExactManifestTimings; // private instance variables
15711
15712 _this.checkBufferTimeout_ = null;
15713 _this.error_ = void 0;
15714 _this.currentTimeline_ = -1;
15715 _this.pendingSegment_ = null;
15716 _this.xhrOptions_ = null;
15717 _this.pendingSegments_ = [];
15718 _this.audioDisabled_ = false;
15719 _this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controller
15720
15721 _this.gopBuffer_ = [];
15722 _this.timeMapping_ = 0;
15723 _this.safeAppend_ = videojs.browser.IE_VERSION >= 11;
15724 _this.appendInitSegment_ = {
15725 audio: true,
15726 video: true
15727 };
15728 _this.playlistOfLastInitSegment_ = {
15729 audio: null,
15730 video: null
15731 };
15732 _this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough
15733 // information yet to start the loading process (e.g., if the audio loader wants to
15734 // load a segment from the next timeline but the main loader hasn't yet crossed that
15735 // timeline), then the load call will be added to the queue until it is ready to be
15736 // processed.
15737
15738 _this.loadQueue_ = [];
15739 _this.metadataQueue_ = {
15740 id3: [],
15741 caption: []
15742 };
15743 _this.waitingOnRemove_ = false;
15744 _this.quotaExceededErrorRetryTimeout_ = null; // Fragmented mp4 playback
15745
15746 _this.activeInitSegmentId_ = null;
15747 _this.initSegments_ = {}; // HLSe playback
15748
15749 _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
15750 _this.keyCache_ = {};
15751 _this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings
15752 // between a time in the display time and a segment index within
15753 // a playlist
15754
15755 _this.syncController_ = settings.syncController;
15756 _this.syncPoint_ = {
15757 segmentIndex: 0,
15758 time: 0
15759 };
15760 _this.transmuxer_ = _this.createTransmuxer_();
15761
15762 _this.triggerSyncInfoUpdate_ = function () {
15763 return _this.trigger('syncinfoupdate');
15764 };
15765
15766 _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);
15767
15768 _this.mediaSource_.addEventListener('sourceopen', function () {
15769 if (!_this.isEndOfStream_()) {
15770 _this.ended_ = false;
15771 }
15772 }); // ...for determining the fetch location
15773
15774
15775 _this.fetchAtBuffer_ = false;
15776 _this.logger_ = logger("SegmentLoader[" + _this.loaderType_ + "]");
15777 Object.defineProperty(_assertThisInitialized(_this), 'state', {
15778 get: function get() {
15779 return this.state_;
15780 },
15781 set: function set(newState) {
15782 if (newState !== this.state_) {
15783 this.logger_(this.state_ + " -> " + newState);
15784 this.state_ = newState;
15785 this.trigger('statechange');
15786 }
15787 }
15788 });
15789
15790 _this.sourceUpdater_.on('ready', function () {
15791 if (_this.hasEnoughInfoToAppend_()) {
15792 _this.processCallQueue_();
15793 }
15794 }); // Only the main loader needs to listen for pending timeline changes, as the main
15795 // loader should wait for audio to be ready to change its timeline so that both main
15796 // and audio timelines change together. For more details, see the
15797 // shouldWaitForTimelineChange function.
15798
15799
15800 if (_this.loaderType_ === 'main') {
15801 _this.timelineChangeController_.on('pendingtimelinechange', function () {
15802 if (_this.hasEnoughInfoToAppend_()) {
15803 _this.processCallQueue_();
15804 }
15805 });
15806 } // The main loader only listens on pending timeline changes, but the audio loader,
15807 // since its loads follow main, needs to listen on timeline changes. For more details,
15808 // see the shouldWaitForTimelineChange function.
15809
15810
15811 if (_this.loaderType_ === 'audio') {
15812 _this.timelineChangeController_.on('timelinechange', function () {
15813 if (_this.hasEnoughInfoToLoad_()) {
15814 _this.processLoadQueue_();
15815 }
15816
15817 if (_this.hasEnoughInfoToAppend_()) {
15818 _this.processCallQueue_();
15819 }
15820 });
15821 }
15822
15823 return _this;
15824 }
15825
15826 var _proto = SegmentLoader.prototype;
15827
15828 _proto.createTransmuxer_ = function createTransmuxer_() {
15829 return segmentTransmuxer.createTransmuxer({
15830 remux: false,
15831 alignGopsAtEnd: this.safeAppend_,
15832 keepOriginalTimestamps: true,
15833 parse708captions: this.parse708captions_,
15834 captionServices: this.captionServices_
15835 });
15836 }
15837 /**
15838 * reset all of our media stats
15839 *
15840 * @private
15841 */
15842 ;
15843
15844 _proto.resetStats_ = function resetStats_() {
15845 this.mediaBytesTransferred = 0;
15846 this.mediaRequests = 0;
15847 this.mediaRequestsAborted = 0;
15848 this.mediaRequestsTimedout = 0;
15849 this.mediaRequestsErrored = 0;
15850 this.mediaTransferDuration = 0;
15851 this.mediaSecondsLoaded = 0;
15852 this.mediaAppends = 0;
15853 }
15854 /**
15855 * dispose of the SegmentLoader and reset to the default state
15856 */
15857 ;
15858
15859 _proto.dispose = function dispose() {
15860 this.trigger('dispose');
15861 this.state = 'DISPOSED';
15862 this.pause();
15863 this.abort_();
15864
15865 if (this.transmuxer_) {
15866 this.transmuxer_.terminate();
15867 }
15868
15869 this.resetStats_();
15870
15871 if (this.checkBufferTimeout_) {
15872 window.clearTimeout(this.checkBufferTimeout_);
15873 }
15874
15875 if (this.syncController_ && this.triggerSyncInfoUpdate_) {
15876 this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
15877 }
15878
15879 this.off();
15880 };
15881
15882 _proto.setAudio = function setAudio(enable) {
15883 this.audioDisabled_ = !enable;
15884
15885 if (enable) {
15886 this.appendInitSegment_.audio = true;
15887 } else {
15888 // remove current track audio if it gets disabled
15889 this.sourceUpdater_.removeAudio(0, this.duration_());
15890 }
15891 }
15892 /**
15893   * abort anything that is currently going on with the SegmentLoader
15894 * and reset to a default state
15895 */
15896 ;
15897
15898 _proto.abort = function abort() {
15899 if (this.state !== 'WAITING') {
15900 if (this.pendingSegment_) {
15901 this.pendingSegment_ = null;
15902 }
15903
15904 return;
15905 }
15906
15907 this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY
15908 // since we are no longer "waiting" on any requests. XHR callback is not always run
15909 // when the request is aborted. This will prevent the loader from being stuck in the
15910 // WAITING state indefinitely.
15911
15912 this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the
15913 // next segment
15914
15915 if (!this.paused()) {
15916 this.monitorBuffer_();
15917 }
15918 }
15919 /**
15920   * abort all pending xhr requests and null any pending segments
15921 *
15922 * @private
15923 */
15924 ;
15925
15926 _proto.abort_ = function abort_() {
15927 if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
15928 this.pendingSegment_.abortRequests();
15929 } // clear out the segment being processed
15930
15931
15932 this.pendingSegment_ = null;
15933 this.callQueue_ = [];
15934 this.loadQueue_ = [];
15935 this.metadataQueue_.id3 = [];
15936 this.metadataQueue_.caption = [];
15937 this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
15938 this.waitingOnRemove_ = false;
15939 window.clearTimeout(this.quotaExceededErrorRetryTimeout_);
15940 this.quotaExceededErrorRetryTimeout_ = null;
15941 };
15942
15943 _proto.checkForAbort_ = function checkForAbort_(requestId) {
15944 // If the state is APPENDING, then aborts will not modify the state, meaning the first
15945 // callback that happens should reset the state to READY so that loading can continue.
15946 if (this.state === 'APPENDING' && !this.pendingSegment_) {
15947 this.state = 'READY';
15948 return true;
15949 }
15950
15951 if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
15952 return true;
15953 }
15954
15955 return false;
15956 }
15957 /**
15958   * set an error on the segment loader and null out any pending segments
15959 *
15960 * @param {Error} error the error to set on the SegmentLoader
15961 * @return {Error} the error that was set or that is currently set
15962 */
15963 ;
15964
15965 _proto.error = function error(_error) {
15966 if (typeof _error !== 'undefined') {
15967 this.logger_('error occurred:', _error);
15968 this.error_ = _error;
15969 }
15970
15971 this.pendingSegment_ = null;
15972 return this.error_;
15973 };
15974
15975 _proto.endOfStream = function endOfStream() {
15976 this.ended_ = true;
15977
15978 if (this.transmuxer_) {
15979 // need to clear out any cached data to prepare for the new segment
15980 segmentTransmuxer.reset(this.transmuxer_);
15981 }
15982
15983 this.gopBuffer_.length = 0;
15984 this.pause();
15985 this.trigger('ended');
15986 }
15987 /**
15988 * Indicates which time ranges are buffered
15989 *
15990 * @return {TimeRange}
15991 * TimeRange object representing the current buffered ranges
15992 */
15993 ;
15994
15995 _proto.buffered_ = function buffered_() {
15996 var trackInfo = this.getMediaInfo_();
15997
15998 if (!this.sourceUpdater_ || !trackInfo) {
15999 return videojs.createTimeRanges();
16000 }
16001
16002 if (this.loaderType_ === 'main') {
16003 var hasAudio = trackInfo.hasAudio,
16004 hasVideo = trackInfo.hasVideo,
16005 isMuxed = trackInfo.isMuxed;
16006
16007 if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
16008 return this.sourceUpdater_.buffered();
16009 }
16010
16011 if (hasVideo) {
16012 return this.sourceUpdater_.videoBuffered();
16013 }
16014 } // One case that can be ignored for now is audio only with alt audio,
16015 // as we don't yet have proper support for that.
16016
16017
16018 return this.sourceUpdater_.audioBuffered();
16019 }
16020 /**
16021 * Gets and sets init segment for the provided map
16022 *
16023 * @param {Object} map
16024 * The map object representing the init segment to get or set
16025 * @param {boolean=} set
16026 * If true, the init segment for the provided map should be saved
16027 * @return {Object}
16028 * map object for desired init segment
16029 */
16030 ;
16031
16032 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
16033 if (set === void 0) {
16034 set = false;
16035 }
16036
16037 if (!map) {
16038 return null;
16039 }
16040
16041 var id = initSegmentId(map);
16042 var storedMap = this.initSegments_[id];
16043
16044 if (set && !storedMap && map.bytes) {
16045 this.initSegments_[id] = storedMap = {
16046 resolvedUri: map.resolvedUri,
16047 byterange: map.byterange,
16048 bytes: map.bytes,
16049 tracks: map.tracks,
16050 timescales: map.timescales
16051 };
16052 }
16053
16054 return storedMap || map;
16055 }
16056 /**
16057 * Gets and sets key for the provided key
16058 *
16059 * @param {Object} key
16060 * The key object representing the key to get or set
16061 * @param {boolean=} set
16062 * If true, the key for the provided key should be saved
16063 * @return {Object}
16064 * Key object for desired key
16065 */
16066 ;
16067
16068 _proto.segmentKey = function segmentKey(key, set) {
16069 if (set === void 0) {
16070 set = false;
16071 }
16072
16073 if (!key) {
16074 return null;
16075 }
16076
16077 var id = segmentKeyId(key);
16078 var storedKey = this.keyCache_[id]; // TODO: We should use the HTTP Expires header to invalidate our cache per
16079 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
16080
16081 if (this.cacheEncryptionKeys_ && set && !storedKey && key.bytes) {
16082 this.keyCache_[id] = storedKey = {
16083 resolvedUri: key.resolvedUri,
16084 bytes: key.bytes
16085 };
16086 }
16087
16088 var result = {
16089 resolvedUri: (storedKey || key).resolvedUri
16090 };
16091
16092 if (storedKey) {
16093 result.bytes = storedKey.bytes;
16094 }
16095
16096 return result;
16097 }
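  // An illustrative round trip with a hypothetical key object, assuming
  // cacheEncryptionKeys_ is enabled:
  //
  //   loader.segmentKey({ resolvedUri: 'key.php', bytes: new Uint32Array(4) }, true);
  //   // caches the bytes; => { resolvedUri: 'key.php', bytes: Uint32Array(4) }
  //
  //   loader.segmentKey({ resolvedUri: 'key.php' });
  //   // cache hit; => { resolvedUri: 'key.php', bytes: Uint32Array(4) }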
16098 /**
16099 * Returns true if all configuration required for loading is present, otherwise false.
16100 *
16101   * @return {boolean} True if all the configuration is ready for loading
16102 * @private
16103 */
16104 ;
16105
16106 _proto.couldBeginLoading_ = function couldBeginLoading_() {
16107 return this.playlist_ && !this.paused();
16108 }
16109 /**
16110 * load a playlist and start to fill the buffer
16111 */
16112 ;
16113
16114 _proto.load = function load() {
16115 // un-pause
16116 this.monitorBuffer_(); // if we don't have a playlist yet, keep waiting for one to be
16117 // specified
16118
16119 if (!this.playlist_) {
16120 return;
16121 } // if all the configuration is ready, initialize and begin loading
16122
16123
16124 if (this.state === 'INIT' && this.couldBeginLoading_()) {
16125 return this.init_();
16126 } // if we're in the middle of processing a segment already, don't
16127 // kick off an additional segment request
16128
16129
16130 if (!this.couldBeginLoading_() || this.state !== 'READY' && this.state !== 'INIT') {
16131 return;
16132 }
16133
16134 this.state = 'READY';
16135 }
16136 /**
16137 * Once all the starting parameters have been specified, begin
16138 * operation. This method should only be invoked from the INIT
16139 * state.
16140 *
16141 * @private
16142 */
16143 ;
16144
16145 _proto.init_ = function init_() {
16146 this.state = 'READY'; // if this is the audio segment loader, and it hasn't been inited before, then any old
16147 // audio data from the muxed content should be removed
16148
16149 this.resetEverything();
16150 return this.monitorBuffer_();
16151 }
16152 /**
16153 * set a playlist on the segment loader
16154 *
16155 * @param {PlaylistLoader} media the playlist to set on the segment loader
16156 */
16157 ;
16158
16159 _proto.playlist = function playlist(newPlaylist, options) {
16160 if (options === void 0) {
16161 options = {};
16162 }
16163
16164 if (!newPlaylist) {
16165 return;
16166 }
16167
16168 var oldPlaylist = this.playlist_;
16169 var segmentInfo = this.pendingSegment_;
16170 this.playlist_ = newPlaylist;
16171 this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist
16172 // is always our zero-time so force a sync update each time the playlist
16173 // is refreshed from the server
16174 //
16175 // Use the INIT state to determine if playback has started, as the playlist sync info
16176 // should be fixed once requests begin (as sync points are generated based on sync
16177 // info), but not before then.
16178
16179 if (this.state === 'INIT') {
16180 newPlaylist.syncInfo = {
16181 mediaSequence: newPlaylist.mediaSequence,
16182 time: 0
16183 }; // Setting the date time mapping means mapping the program date time (if available)
16184 // to time 0 on the player's timeline. The playlist's syncInfo serves a similar
16185 // purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
16186 // be updated as the playlist is refreshed before the loader starts loading, the
16187 // program date time mapping needs to be updated as well.
16188 //
16189 // This mapping is only done for the main loader because a program date time should
16190 // map equivalently between playlists.
16191
16192 if (this.loaderType_ === 'main') {
16193 this.syncController_.setDateTimeMappingForStart(newPlaylist);
16194 }
16195 }
16196
16197 var oldId = null;
16198
16199 if (oldPlaylist) {
16200 if (oldPlaylist.id) {
16201 oldId = oldPlaylist.id;
16202 } else if (oldPlaylist.uri) {
16203 oldId = oldPlaylist.uri;
16204 }
16205 }
16206
16207 this.logger_("playlist update [" + oldId + " => " + (newPlaylist.id || newPlaylist.uri) + "]"); // in VOD, this is always a rendition switch (or we updated our syncInfo above)
16208 // in LIVE, we always want to update with new playlists (including refreshes)
16209
16210 this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start
16211 // buffering now
16212
16213 if (this.state === 'INIT' && this.couldBeginLoading_()) {
16214 return this.init_();
16215 }
16216
16217 if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
16218 if (this.mediaIndex !== null) {
16219 // we must "resync" the segment loader when we switch renditions and
16220 // the segment loader is already synced to the previous rendition
16221 this.resyncLoader();
16222 }
16223
16224 this.currentMediaInfo_ = void 0;
16225 this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being defined
16226
16227 return;
16228 } // we reloaded the same playlist so we are in a live scenario
16229 // and we will likely need to adjust the mediaIndex
16230
16231
16232 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
16233 this.logger_("live window shift [" + mediaSequenceDiff + "]"); // update the mediaIndex on the SegmentLoader
16234 // this is important because we can abort a request and this value must be
16235 // equal to the last appended mediaIndex
16236
16237 if (this.mediaIndex !== null) {
16238 this.mediaIndex -= mediaSequenceDiff; // this can happen if we are going to load the first segment, but get a playlist
16239 // update during that. mediaIndex would go from 0 to -1 if mediaSequence in the
16240 // new playlist was incremented by 1.
16241
16242 if (this.mediaIndex < 0) {
16243 this.mediaIndex = null;
16244 this.partIndex = null;
16245 } else {
16246 var segment = this.playlist_.segments[this.mediaIndex]; // partIndex should remain the same for the same segment
16247 // unless parts fell off of the playlist for this segment.
16248 // In that case we need to reset partIndex and resync
16249
16250 if (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {
16251 var mediaIndex = this.mediaIndex;
16252 this.logger_("currently processing part (index " + this.partIndex + ") no longer exists.");
16253 this.resetLoader(); // We want to throw away the partIndex and the data associated with it,
16254 // as the part was dropped from our current playlists segment.
16255 // The mediaIndex will still be valid so keep that around.
16256
16257 this.mediaIndex = mediaIndex;
16258 }
16259 }
16260 } // update the mediaIndex on the SegmentInfo object
16261 // this is important because we will update this.mediaIndex with this value
16262 // in `handleAppendsDone_` after the segment has been successfully appended
16263
16264
16265 if (segmentInfo) {
16266 segmentInfo.mediaIndex -= mediaSequenceDiff;
16267
16268 if (segmentInfo.mediaIndex < 0) {
16269 segmentInfo.mediaIndex = null;
16270 segmentInfo.partIndex = null;
16271 } else {
16272 // we need to update the referenced segment so that timing information is
16273 // saved for the new playlist's segment, however, if the segment fell off the
16274 // playlist, we can leave the old reference and just lose the timing info
16275 if (segmentInfo.mediaIndex >= 0) {
16276 segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
16277 }
16278
16279 if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {
16280 segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];
16281 }
16282 }
16283 }
16284
16285 this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
16286 }
16287 /**
16288 * Prevent the loader from fetching additional segments. If there
16289 * is a segment request outstanding, it will finish processing
16290 * before the loader halts. A segment loader can be unpaused by
16291 * calling load().
16292 */
16293 ;
16294
16295 _proto.pause = function pause() {
16296 if (this.checkBufferTimeout_) {
16297 window.clearTimeout(this.checkBufferTimeout_);
16298 this.checkBufferTimeout_ = null;
16299 }
16300 }
16301 /**
16302 * Returns whether the segment loader is fetching additional
16303 * segments when given the opportunity. This property can be
16304 * modified through calls to pause() and load().
16305 */
16306 ;
16307
16308 _proto.paused = function paused() {
16309 return this.checkBufferTimeout_ === null;
16310 }
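  // The pause()/paused() contract above hinges on checkBufferTimeout_: the loader is
  // paused exactly when no buffer check is scheduled. An illustrative sequence:
  //
  //   loader.pause();  // clears the scheduled buffer check
  //   loader.paused(); // => true
  //   loader.load();   // monitorBuffer_() schedules a new check
  //   loader.paused(); // => false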
16311 /**
16312 * Delete all the buffered data and reset the SegmentLoader
16313 *
16314 * @param {Function} [done] an optional callback to be executed when the remove
16315 * operation is complete
16316 */
16317 ;
16318
16319 _proto.resetEverything = function resetEverything(done) {
16320 this.ended_ = false;
16321 this.appendInitSegment_ = {
16322 audio: true,
16323 video: true
16324 };
16325 this.resetLoader(); // remove from 0, the earliest point, to Infinity, to signify removal of everything.
16326     // VTT Segment Loader doesn't need to do anything, but in the regular SegmentLoader,
16327 // we then clamp the value to duration if necessary.
16328
16329 this.remove(0, Infinity, done); // clears fmp4 captions
16330
16331 if (this.transmuxer_) {
16332 this.transmuxer_.postMessage({
16333 action: 'clearAllMp4Captions'
16334 }); // reset the cache in the transmuxer
16335
16336 this.transmuxer_.postMessage({
16337 action: 'reset'
16338 });
16339 }
16340 }
16341 /**
16342 * Force the SegmentLoader to resync and start loading around the currentTime instead
16343 * of starting at the end of the buffer
16344 *
16345 * Useful for fast quality changes
16346 */
16347 ;
16348
16349 _proto.resetLoader = function resetLoader() {
16350 this.fetchAtBuffer_ = false;
16351 this.resyncLoader();
16352 }
16353 /**
16354 * Force the SegmentLoader to restart synchronization and make a conservative guess
16355 * before returning to the simple walk-forward method
16356 */
16357 ;
16358
16359 _proto.resyncLoader = function resyncLoader() {
16360 if (this.transmuxer_) {
16361 // need to clear out any cached data to prepare for the new segment
16362 segmentTransmuxer.reset(this.transmuxer_);
16363 }
16364
16365 this.mediaIndex = null;
16366 this.partIndex = null;
16367 this.syncPoint_ = null;
16368 this.isPendingTimestampOffset_ = false;
16369 this.callQueue_ = [];
16370 this.loadQueue_ = [];
16371 this.metadataQueue_.id3 = [];
16372 this.metadataQueue_.caption = [];
16373 this.abort();
16374
16375 if (this.transmuxer_) {
16376 this.transmuxer_.postMessage({
16377 action: 'clearParsedMp4Captions'
16378 });
16379 }
16380 }
16381 /**
16382 * Remove any data in the source buffer between start and end times
16383 *
16384 * @param {number} start - the start time of the region to remove from the buffer
16385 * @param {number} end - the end time of the region to remove from the buffer
16386   * @param {Function} [done] - an optional callback to be executed when the remove
16387   * operation is complete
16388   * @param {boolean} force - force all remove operations to happen
16389 */
16390 ;
16391
16392 _proto.remove = function remove(start, end, done, force) {
16393 if (done === void 0) {
16394 done = function done() {};
16395 }
16396
16397 if (force === void 0) {
16398 force = false;
16399 }
16400
16401 // clamp end to duration if we need to remove everything.
16402 // This is due to a browser bug that causes issues if we remove to Infinity.
16403 // videojs/videojs-contrib-hls#1225
16404 if (end === Infinity) {
16405 end = this.duration_();
16406 } // skip removes that would throw an error
16407 // commonly happens during a rendition switch at the start of a video
16408 // from start 0 to end 0
16409
16410
16411 if (end <= start) {
16412      this.logger_("skipping remove because end " + end + " is <= start " + start);
16413 return;
16414 }
16415
16416 if (!this.sourceUpdater_ || !this.getMediaInfo_()) {
16417 this.logger_('skipping remove because no source updater or starting media info'); // nothing to remove if we haven't processed any media
16418
16419 return;
16420 } // set it to one to complete this function's removes
16421
16422
16423 var removesRemaining = 1;
16424
16425 var removeFinished = function removeFinished() {
16426 removesRemaining--;
16427
16428 if (removesRemaining === 0) {
16429 done();
16430 }
16431 };
16432
16433 if (force || !this.audioDisabled_) {
16434 removesRemaining++;
16435 this.sourceUpdater_.removeAudio(start, end, removeFinished);
16436 } // While it would be better to only remove video if the main loader has video, this
16437 // should be safe with audio only as removeVideo will call back even if there's no
16438 // video buffer.
16439 //
16440 // In theory we can check to see if there's video before calling the remove, but in
16441 // the event that we're switching between renditions and from video to audio only
16442 // (when we add support for that), we may need to clear the video contents despite
16443 // what the new media will contain.
16444
16445
16446 if (force || this.loaderType_ === 'main') {
16447 this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
16448 removesRemaining++;
16449 this.sourceUpdater_.removeVideo(start, end, removeFinished);
16450 } // remove any captions and ID3 tags
16451
16452
16453 for (var track in this.inbandTextTracks_) {
16454 removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
16455 }
16456
16457 removeCuesFromTrack(start, end, this.segmentMetadataTrack_); // finished this function's removes
16458
16459 removeFinished();
16460 }
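  // Illustrative calls with hypothetical times:
  //
  //   loader.remove(0, 10);             // trims 0 to 10 from audio (when enabled) and, for
  //                                     // the main loader, video, GOP info and text track cues
  //   loader.remove(0, Infinity, done); // end is clamped to duration_() before removing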
16461 /**
16462 * (re-)schedule monitorBufferTick_ to run as soon as possible
16463 *
16464 * @private
16465 */
16466 ;
16467
16468 _proto.monitorBuffer_ = function monitorBuffer_() {
16469 if (this.checkBufferTimeout_) {
16470 window.clearTimeout(this.checkBufferTimeout_);
16471 }
16472
16473 this.checkBufferTimeout_ = window.setTimeout(this.monitorBufferTick_.bind(this), 1);
16474 }
16475 /**
16476 * As long as the SegmentLoader is in the READY state, periodically
16477 * invoke fillBuffer_().
16478 *
16479 * @private
16480 */
16481 ;
16482
16483 _proto.monitorBufferTick_ = function monitorBufferTick_() {
16484 if (this.state === 'READY') {
16485 this.fillBuffer_();
16486 }
16487
16488 if (this.checkBufferTimeout_) {
16489 window.clearTimeout(this.checkBufferTimeout_);
16490 }
16491
16492 this.checkBufferTimeout_ = window.setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
16493 }
16494 /**
16495   * fill the buffer with segments unless the sourceBuffers are
16496 * currently updating
16497 *
16498 * Note: this function should only ever be called by monitorBuffer_
16499 * and never directly
16500 *
16501 * @private
16502 */
16503 ;
16504
16505 _proto.fillBuffer_ = function fillBuffer_() {
16506 // TODO since the source buffer maintains a queue, and we shouldn't call this function
16507 // except when we're ready for the next segment, this check can most likely be removed
16508 if (this.sourceUpdater_.updating()) {
16509 return;
16510 } // see if we need to begin loading immediately
16511
16512
16513 var segmentInfo = this.chooseNextRequest_();
16514
16515 if (!segmentInfo) {
16516 return;
16517 }
16518
16519 if (typeof segmentInfo.timestampOffset === 'number') {
16520 this.isPendingTimestampOffset_ = false;
16521 this.timelineChangeController_.pendingTimelineChange({
16522 type: this.loaderType_,
16523 from: this.currentTimeline_,
16524 to: segmentInfo.timeline
16525 });
16526 }
16527
16528 this.loadSegment_(segmentInfo);
16529 }
16530 /**
16531 * Determines if we should call endOfStream on the media source based
16532   * on the state of the buffer or if the appended segment was the final
16533 * segment in the playlist.
16534 *
16535 * @param {number} [mediaIndex] the media index of segment we last appended
16536 * @param {Object} [playlist] a media playlist object
16537 * @return {boolean} do we need to call endOfStream on the MediaSource
16538 */
16539 ;
16540
16541 _proto.isEndOfStream_ = function isEndOfStream_(mediaIndex, playlist, partIndex) {
16542 if (mediaIndex === void 0) {
16543 mediaIndex = this.mediaIndex;
16544 }
16545
16546 if (playlist === void 0) {
16547 playlist = this.playlist_;
16548 }
16549
16550 if (partIndex === void 0) {
16551 partIndex = this.partIndex;
16552 }
16553
16554 if (!playlist || !this.mediaSource_) {
16555 return false;
16556 }
16557
16558 var segment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex]; // mediaIndex is zero based but length is 1 based
16559
16560 var appendedLastSegment = mediaIndex + 1 === playlist.segments.length; // true if there are no parts, or this is the last part.
16561
16562 var appendedLastPart = !segment || !segment.parts || partIndex + 1 === segment.parts.length; // if we've buffered to the end of the video, we need to call endOfStream
16563    // so that the MediaSource can trigger the `ended` event when it runs out of
16564    // buffered data instead of waiting for more
16565
16566 return playlist.endList && this.mediaSource_.readyState === 'open' && appendedLastSegment && appendedLastPart;
16567 }
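  // A worked example with a hypothetical 5 segment VOD playlist (endList true) and an
  // open MediaSource:
  //
  //   loader.isEndOfStream_(4, playlist); // => true (4 + 1 === segments.length, last part appended)
  //   loader.isEndOfStream_(3, playlist); // => false (a segment remains)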
16568 /**
16569 * Determines what request should be made given current segment loader state.
16570 *
16571 * @return {Object} a request object that describes the segment/part to load
16572 */
16573 ;
16574
16575 _proto.chooseNextRequest_ = function chooseNextRequest_() {
16576 var bufferedEnd = lastBufferedEnd(this.buffered_()) || 0;
16577 var bufferedTime = Math.max(0, bufferedEnd - this.currentTime_());
16578 var preloaded = !this.hasPlayed_() && bufferedTime >= 1;
16579 var haveEnoughBuffer = bufferedTime >= this.goalBufferLength_();
16580 var segments = this.playlist_.segments; // return no segment if:
16581 // 1. we don't have segments
16582 // 2. The video has not yet played and we already downloaded a segment
16583 // 3. we already have enough buffered time
16584
16585 if (!segments.length || preloaded || haveEnoughBuffer) {
16586 return null;
16587 }
16588
16589 this.syncPoint_ = this.syncPoint_ || this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
16590 var next = {
16591 partIndex: null,
16592 mediaIndex: null,
16593 startOfSegment: null,
16594 playlist: this.playlist_,
16595 isSyncRequest: Boolean(!this.syncPoint_)
16596 };
16597
16598 if (next.isSyncRequest) {
16599 next.mediaIndex = getSyncSegmentCandidate(this.currentTimeline_, segments, bufferedEnd);
16600 } else if (this.mediaIndex !== null) {
16601 var segment = segments[this.mediaIndex];
16602 var partIndex = typeof this.partIndex === 'number' ? this.partIndex : -1;
16603 next.startOfSegment = segment.end ? segment.end : bufferedEnd;
16604
16605 if (segment.parts && segment.parts[partIndex + 1]) {
16606 next.mediaIndex = this.mediaIndex;
16607 next.partIndex = partIndex + 1;
16608 } else {
16609 next.mediaIndex = this.mediaIndex + 1;
16610 }
16611 } else {
16612 // Find the segment containing the end of the buffer or current time.
16613 var _Playlist$getMediaInf = Playlist.getMediaInfoForTime({
16614 experimentalExactManifestTimings: this.experimentalExactManifestTimings,
16615 playlist: this.playlist_,
16616 currentTime: this.fetchAtBuffer_ ? bufferedEnd : this.currentTime_(),
16617 startingPartIndex: this.syncPoint_.partIndex,
16618 startingSegmentIndex: this.syncPoint_.segmentIndex,
16619 startTime: this.syncPoint_.time
16620 }),
16621 segmentIndex = _Playlist$getMediaInf.segmentIndex,
16622 startTime = _Playlist$getMediaInf.startTime,
16623 _partIndex = _Playlist$getMediaInf.partIndex;
16624
16625 next.getMediaInfoForTime = this.fetchAtBuffer_ ? 'bufferedEnd' : 'currentTime';
16626 next.mediaIndex = segmentIndex;
16627 next.startOfSegment = startTime;
16628 next.partIndex = _partIndex;
16629 }
16630
16631 var nextSegment = segments[next.mediaIndex];
16632 var nextPart = nextSegment && typeof next.partIndex === 'number' && nextSegment.parts && nextSegment.parts[next.partIndex]; // if the next segment index is invalid or
16633 // the next partIndex is invalid do not choose a next segment.
16634
16635 if (!nextSegment || typeof next.partIndex === 'number' && !nextPart) {
16636 return null;
16637 } // if the next segment has parts, and we don't have a partIndex.
16638 // Set partIndex to 0
16639
16640
16641 if (typeof next.partIndex !== 'number' && nextSegment.parts) {
16642 next.partIndex = 0;
16643 }
16644
16645 var ended = this.mediaSource_ && this.mediaSource_.readyState === 'ended'; // do not choose a next segment if all of the following:
16646 // 1. this is the last segment in the playlist
16647 // 2. end of stream has been called on the media source already
16648 // 3. the player is not seeking
16649
16650 if (next.mediaIndex >= segments.length - 1 && ended && !this.seeking_()) {
16651 return null;
16652 }
16653
16654 return this.generateSegmentInfo_(next);
16655 };
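  // An illustrative increment case (hypothetical state): with mediaIndex 2, partIndex 1
  // and three parts in segments[2], the next request is { mediaIndex: 2, partIndex: 2 };
  // after the last part, it advances to { mediaIndex: 3 }, with partIndex reset to 0
  // when segment 3 also has parts.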
16656
16657 _proto.generateSegmentInfo_ = function generateSegmentInfo_(options) {
16658 var playlist = options.playlist,
16659 mediaIndex = options.mediaIndex,
16660 startOfSegment = options.startOfSegment,
16661 isSyncRequest = options.isSyncRequest,
16662 partIndex = options.partIndex,
16663 forceTimestampOffset = options.forceTimestampOffset,
16664 getMediaInfoForTime = options.getMediaInfoForTime;
16665 var segment = playlist.segments[mediaIndex];
16666 var part = typeof partIndex === 'number' && segment.parts[partIndex];
16667 var segmentInfo = {
16668 requestId: 'segment-loader-' + Math.random(),
16669 // resolve the segment URL relative to the playlist
16670 uri: part && part.resolvedUri || segment.resolvedUri,
16671 // the segment's mediaIndex at the time it was requested
16672 mediaIndex: mediaIndex,
16673 partIndex: part ? partIndex : null,
16674 // whether or not to update the SegmentLoader's state with this
16675 // segment's mediaIndex
16676 isSyncRequest: isSyncRequest,
16677 startOfSegment: startOfSegment,
16678 // the segment's playlist
16679 playlist: playlist,
16680 // unencrypted bytes of the segment
16681 bytes: null,
16682 // when a key is defined for this segment, the encrypted bytes
16683 encryptedBytes: null,
16684 // The target timestampOffset for this segment when we append it
16685 // to the source buffer
16686 timestampOffset: null,
16687 // The timeline that the segment is in
16688 timeline: segment.timeline,
16689 // The expected duration of the segment in seconds
16690 duration: part && part.duration || segment.duration,
16691 // retain the segment in case the playlist updates while doing an async process
16692 segment: segment,
16693 part: part,
16694 byteLength: 0,
16695 transmuxer: this.transmuxer_,
16696 // type of getMediaInfoForTime that was used to get this segment
16697 getMediaInfoForTime: getMediaInfoForTime
16698 };
16699 var overrideCheck = typeof forceTimestampOffset !== 'undefined' ? forceTimestampOffset : this.isPendingTimestampOffset_;
16700 segmentInfo.timestampOffset = this.timestampOffsetForSegment_({
16701 segmentTimeline: segment.timeline,
16702 currentTimeline: this.currentTimeline_,
16703 startOfSegment: startOfSegment,
16704 buffered: this.buffered_(),
16705 overrideCheck: overrideCheck
16706 });
16707 var audioBufferedEnd = lastBufferedEnd(this.sourceUpdater_.audioBuffered());
16708
16709 if (typeof audioBufferedEnd === 'number') {
16710 // since the transmuxer is using the actual timing values, but the buffer is
16711 // adjusted by the timestamp offset, we must adjust the value here
16712 segmentInfo.audioAppendStart = audioBufferedEnd - this.sourceUpdater_.audioTimestampOffset();
16713 }
16714
16715 if (this.sourceUpdater_.videoBuffered().length) {
16716 segmentInfo.gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_, // since the transmuxer is using the actual timing values, but the time is
16717 // adjusted by the timestmap offset, we must adjust the value here
16718 this.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);
16719 }
16720
16721 return segmentInfo;
16722 } // get the timestampoffset for a segment,
16723 // added so that vtt segment loader can override and prevent
16724 // adding timestamp offsets.
16725 ;
16726
16727 _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_(options) {
16728 return timestampOffsetForSegment(options);
16729 }
16730 /**
16731 * Determines if the network has enough bandwidth to complete the current segment
16732 * request in a timely manner. If not, the request will be aborted early and bandwidth
16733 * updated to trigger a playlist switch.
16734 *
16735 * @param {Object} stats
16736 * Object containing stats about the request timing and size
16737 * @private
16738 */
16739 ;
16740
16741 _proto.earlyAbortWhenNeeded_ = function earlyAbortWhenNeeded_(stats) {
16742 if (this.vhs_.tech_.paused() || // Don't abort if the current playlist is on the lowestEnabledRendition
16743 // TODO: Replace using timeout with a boolean indicating whether this playlist is
16744 // the lowestEnabledRendition.
16745 !this.xhrOptions_.timeout || // Don't abort if we have no bandwidth information to estimate segment sizes
16746 !this.playlist_.attributes.BANDWIDTH) {
16747 return;
16748 } // Wait at least 1 second since the first byte of data has been received before
16749 // using the calculated bandwidth from the progress event to allow the bitrate
16750 // to stabilize
16751
16752
16753 if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
16754 return;
16755 }
16756
16757 var currentTime = this.currentTime_();
16758 var measuredBandwidth = stats.bandwidth;
16759 var segmentDuration = this.pendingSegment_.duration;
16760 var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
16761 // if we are only left with less than 1 second when the request completes.
16762 // A negative timeUntilRebuffering indicates we are already rebuffering
16763
16764 var timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download
16765 // is larger than the estimated time until the player runs out of forward buffer
16766
16767 if (requestTimeRemaining <= timeUntilRebuffer$1) {
16768 return;
16769 }
16770
16771 var switchCandidate = minRebufferMaxBandwidthSelector({
16772 master: this.vhs_.playlists.master,
16773 currentTime: currentTime,
16774 bandwidth: measuredBandwidth,
16775 duration: this.duration_(),
16776 segmentDuration: segmentDuration,
16777 timeUntilRebuffer: timeUntilRebuffer$1,
16778 currentTimeline: this.currentTimeline_,
16779 syncController: this.syncController_
16780 });
16781
16782 if (!switchCandidate) {
16783 return;
16784 }
16785
16786 var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;
16787 var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
16788 var minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the
16789 // potential round trip time of the new request so that we are not too aggressive
16790 // with switching to a playlist that might save us a fraction of a second.
16791
16792 if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {
16793 minimumTimeSaving = 1;
16794 }
16795
16796 if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
16797 return;
16798 } // set the bandwidth to that of the desired playlist, being sure to scale by
16799 // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it.
16800 // don't trigger a bandwidthupdate as the bandwidth is artificial
16801
16802
16803 this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
16804 this.trigger('earlyabort');
16805 };
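// A rough illustration of the early-abort math above (numbers are
// hypothetical): with a 6 second segment, a measured bandwidth of 2,000,000
// bits/s, and roughly 2,000,000 bytes (16,000,000 bits) still to download,
// requestTimeRemaining is about 8 seconds. If only 4 seconds of forward
// buffer remain, timeUntilRebuffer is 4 - 1 = 3 seconds, so the request is
// a candidate for an early abort and a switch to a less rebuffer-prone
// playlist.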
16806
16807 _proto.handleAbort_ = function handleAbort_(segmentInfo) {
16808 this.logger_("Aborting " + segmentInfoString(segmentInfo));
16809 this.mediaRequestsAborted += 1;
16810 }
16811 /**
16812 * XHR `progress` event handler
16813 *
16814 * @param {Event} event
16815 * The XHR `progress` event
16816 * @param {Object} simpleSegment
16817 * A simplified segment object copy
16818 * @private
16819 */
16820 ;
16821
16822 _proto.handleProgress_ = function handleProgress_(event, simpleSegment) {
16823 this.earlyAbortWhenNeeded_(simpleSegment.stats);
16824
16825 if (this.checkForAbort_(simpleSegment.requestId)) {
16826 return;
16827 }
16828
16829 this.trigger('progress');
16830 };
16831
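/**
 * Handle track info parsed from a segment. Checks for illegal media
 * switches, resets init segment appends when the detected audio/video
 * configuration changes, and processes any calls queued while waiting on
 * track info.
 *
 * @param {Object} simpleSegment
 * A simplified segment object copy
 * @param {Object} trackInfo
 * Parsed track information (for example, hasAudio and hasVideo)
 * @private
 */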
16832 _proto.handleTrackInfo_ = function handleTrackInfo_(simpleSegment, trackInfo) {
16833 this.earlyAbortWhenNeeded_(simpleSegment.stats);
16834
16835 if (this.checkForAbort_(simpleSegment.requestId)) {
16836 return;
16837 }
16838
16839 if (this.checkForIllegalMediaSwitch(trackInfo)) {
16840 return;
16841 }
16842
16843 trackInfo = trackInfo || {}; // When we have track info, determine what media types this loader is dealing with.
16844 // Guard against cases where we're not getting track info at all, until we are
16845 // certain that all streams will provide it.
16846
16847 if (!shallowEqual(this.currentMediaInfo_, trackInfo)) {
16848 this.appendInitSegment_ = {
16849 audio: true,
16850 video: true
16851 };
16852 this.startingMediaInfo_ = trackInfo;
16853 this.currentMediaInfo_ = trackInfo;
16854 this.logger_('trackinfo update', trackInfo);
16855 this.trigger('trackinfo');
16856 } // the trackinfo event may cause an abort if it
16857 // causes a codec change to an unsupported codec.
16858
16859
16860 if (this.checkForAbort_(simpleSegment.requestId)) {
16861 return;
16862 } // set trackinfo on the pending segment so that
16863 // it can append.
16864
16865
16866 this.pendingSegment_.trackInfo = trackInfo; // check if any calls were waiting on the track info
16867
16868 if (this.hasEnoughInfoToAppend_()) {
16869 this.processCallQueue_();
16870 }
16871 };
16872
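/**
 * Record start/end timing info for audio or video on the pending segment,
 * then process any calls queued while waiting on timing info.
 *
 * @param {Object} simpleSegment
 * A simplified segment object copy
 * @param {string} mediaType
 * The media type ('audio' or 'video') the timing info applies to
 * @param {string} timeType
 * Whether this is a 'start' or 'end' time
 * @param {number} time
 * The reported time value
 * @private
 */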
16873 _proto.handleTimingInfo_ = function handleTimingInfo_(simpleSegment, mediaType, timeType, time) {
16874 this.earlyAbortWhenNeeded_(simpleSegment.stats);
16875
16876 if (this.checkForAbort_(simpleSegment.requestId)) {
16877 return;
16878 }
16879
16880 var segmentInfo = this.pendingSegment_;
16881 var timingInfoProperty = timingInfoPropertyForMedia(mediaType);
16882 segmentInfo[timingInfoProperty] = segmentInfo[timingInfoProperty] || {};
16883 segmentInfo[timingInfoProperty][timeType] = time;
16884 this.logger_("timinginfo: " + mediaType + " - " + timeType + " - " + time); // check if any calls were waiting on the timing info
16885
16886 if (this.hasEnoughInfoToAppend_()) {
16887 this.processCallQueue_();
16888 }
16889 };
16890
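/**
 * Handle caption data parsed from a segment. Queues the captions until video
 * data has been appended (so the timestamp offset is known), then adds cues
 * to the appropriate in-band text tracks.
 *
 * @param {Object} simpleSegment
 * A simplified segment object copy
 * @param {Object[]} captionData
 * An array of parsed caption objects
 * @private
 */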
16891 _proto.handleCaptions_ = function handleCaptions_(simpleSegment, captionData) {
16892 var _this2 = this;
16893
16894 this.earlyAbortWhenNeeded_(simpleSegment.stats);
16895
16896 if (this.checkForAbort_(simpleSegment.requestId)) {
16897 return;
16898 } // This could only happen with fmp4 segments, and even then
16899 // it should not happen in general
16900
16901
16902 if (captionData.length === 0) {
16903 this.logger_('SegmentLoader received no captions from a caption event');
16904 return;
16905 }
16906
16907 var segmentInfo = this.pendingSegment_; // Wait until we have some video data so that caption timing
16908 // can be adjusted by the timestamp offset
16909
16910 if (!segmentInfo.hasAppendedData_) {
16911 this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));
16912 return;
16913 }
16914
16915 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();
16916 var captionTracks = {}; // get total start/end and captions for each track/stream
16917
16918 captionData.forEach(function (caption) {
16919 // caption.stream is actually a track name...
16920 // set to the existing values in tracks or default values
16921 captionTracks[caption.stream] = captionTracks[caption.stream] || {
16922 // Infinity, as any other value will be less than this
16923 startTime: Infinity,
16924 captions: [],
16925 // 0, as any other value will be greater than this
16926 endTime: 0
16927 };
16928 var captionTrack = captionTracks[caption.stream];
16929 captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);
16930 captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);
16931 captionTrack.captions.push(caption);
16932 });
16933 Object.keys(captionTracks).forEach(function (trackName) {
16934 var _captionTracks$trackN = captionTracks[trackName],
16935 startTime = _captionTracks$trackN.startTime,
16936 endTime = _captionTracks$trackN.endTime,
16937 captions = _captionTracks$trackN.captions;
16938 var inbandTextTracks = _this2.inbandTextTracks_;
16939
16940 _this2.logger_("adding cues from " + startTime + " -> " + endTime + " for " + trackName);
16941
16942 createCaptionsTrackIfNotExists(inbandTextTracks, _this2.vhs_.tech_, trackName); // clear out any cues that start and end at the same time period for the same track.
16943 // We do this because a rendition change that also changes the timescale for captions
16944 // will result in captions being re-parsed for certain segments. If we add them again
16945 // without clearing we will have two of the same captions visible.
16946
16947 removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);
16948 addCaptionData({
16949 captionArray: captions,
16950 inbandTextTracks: inbandTextTracks,
16951 timestampOffset: timestampOffset
16952 });
16953 }); // Reset stored captions since we added parsed
16954 // captions to a text track at this point
16955
16956 if (this.transmuxer_) {
16957 this.transmuxer_.postMessage({
16958 action: 'clearParsedMp4Captions'
16959 });
16960 }
16961 };
16962
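/**
 * Handle ID3 metadata parsed from a segment. Like captions, ID3 frames are
 * queued until data has been appended and the timestamp offset is known,
 * then added to the in-band metadata text track.
 *
 * @param {Object} simpleSegment
 * A simplified segment object copy
 * @param {Object[]} id3Frames
 * An array of parsed ID3 frames
 * @param {string} dispatchType
 * The dispatch type used when creating the metadata track
 * @private
 */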
16963 _proto.handleId3_ = function handleId3_(simpleSegment, id3Frames, dispatchType) {
16964 this.earlyAbortWhenNeeded_(simpleSegment.stats);
16965
16966 if (this.checkForAbort_(simpleSegment.requestId)) {
16967 return;
16968 }
16969
16970 var segmentInfo = this.pendingSegment_; // we need to have appended data in order for the timestamp offset to be set
16971
16972 if (!segmentInfo.hasAppendedData_) {
16973 this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));
16974 return;
16975 }
16976
16977 var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset(); // There's potentially an issue where we could double add metadata if there's a muxed
16978 // audio/video source with a metadata track, and an alt audio with a metadata track.
16979 // However, this probably won't happen, and if it does it can be handled then.
16980
16981 createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.vhs_.tech_);
16982 addMetadata({
16983 inbandTextTracks: this.inbandTextTracks_,
16984 metadataArray: id3Frames,
16985 timestampOffset: timestampOffset,
16986 videoDuration: this.duration_()
16987 });
16988 };
16989
16990 _proto.processMetadataQueue_ = function processMetadataQueue_() {
16991 this.metadataQueue_.id3.forEach(function (fn) {
16992 return fn();
16993 });
16994 this.metadataQueue_.caption.forEach(function (fn) {
16995 return fn();
16996 });
16997 this.metadataQueue_.id3 = [];
16998 this.metadataQueue_.caption = [];
16999 };
17000
17001 _proto.processCallQueue_ = function processCallQueue_() {
17002 var callQueue = this.callQueue_; // Clear out the queue before the queued functions are run, since some of the
17003 // functions may check the length of the call queue and default to pushing themselves
17004 // back onto the queue.
17005
17006 this.callQueue_ = [];
17007 callQueue.forEach(function (fun) {
17008 return fun();
17009 });
17010 };
17011
17012 _proto.processLoadQueue_ = function processLoadQueue_() {
17013 var loadQueue = this.loadQueue_; // Clear out the queue before the queued functions are run, since some of the
17014 // functions may check the length of the load queue and default to pushing themselves
17015 // back onto the queue.
17016
17017 this.loadQueue_ = [];
17018 loadQueue.forEach(function (fun) {
17019 return fun();
17020 });
17021 }
17022 /**
17023 * Determines whether the loader has enough info to load the next segment.
17024 *
17025 * @return {boolean}
17026 * Whether or not the loader has enough info to load the next segment
17027 */
17028 ;
17029
17030 _proto.hasEnoughInfoToLoad_ = function hasEnoughInfoToLoad_() {
17031 // Since primary timing goes by video, only the audio loader potentially needs to wait
17032 // to load.
17033 if (this.loaderType_ !== 'audio') {
17034 return true;
17035 }
17036
17037 var segmentInfo = this.pendingSegment_; // A fill buffer must have already run to establish a pending segment before there's
17038 // enough info to load.
17039
17040 if (!segmentInfo) {
17041 return false;
17042 } // The first segment can and should be loaded immediately so that source buffers are
17043 // created together (before appending). Source buffer creation uses the presence of
17044 // audio and video data to determine whether to create audio/video source buffers, and
17045 // uses processed (transmuxed or parsed) media to determine the types required.
17046
17047
17048 if (!this.getCurrentMediaInfo_()) {
17049 return true;
17050 }
17051
17052 if ( // Technically, instead of waiting to load a segment on timeline changes, a segment
17053 // can be requested and downloaded and only wait before it is transmuxed or parsed.
17054 // But in practice, there are a few reasons why it is better to wait until a loader
17055 // is ready to append that segment before requesting and downloading:
17056 //
17057 // 1. Because audio and main loaders cross discontinuities together, if this loader
17058 // is waiting for the other to catch up, then instead of requesting another
17059 // segment and using up more bandwidth, by not yet loading, more bandwidth is
17060 // allotted to the loader currently behind.
17061 // 2. media-segment-request doesn't have to have logic to consider whether a segment
17062 // is ready to be processed or not, isolating the queueing behavior to the loader.
17063 // 3. The audio loader bases some of its segment properties on timing information
17064 // provided by the main loader, meaning that, if the logic for waiting on
17065 // processing was in media-segment-request, then it would also need to know how
17066 // to re-generate the segment information after the main loader caught up.
17067 shouldWaitForTimelineChange({
17068 timelineChangeController: this.timelineChangeController_,
17069 currentTimeline: this.currentTimeline_,
17070 segmentTimeline: segmentInfo.timeline,
17071 loaderType: this.loaderType_,
17072 audioDisabled: this.audioDisabled_
17073 })) {
17074 return false;
17075 }
17076
17077 return true;
17078 };
17079
17080 _proto.getCurrentMediaInfo_ = function getCurrentMediaInfo_(segmentInfo) {
17081 if (segmentInfo === void 0) {
17082 segmentInfo = this.pendingSegment_;
17083 }
17084
17085 return segmentInfo && segmentInfo.trackInfo || this.currentMediaInfo_;
17086 };
17087
17088 _proto.getMediaInfo_ = function getMediaInfo_(segmentInfo) {
17089 if (segmentInfo === void 0) {
17090 segmentInfo = this.pendingSegment_;
17091 }
17092
17093 return this.getCurrentMediaInfo_(segmentInfo) || this.startingMediaInfo_;
17094 };
17095
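/**
 * Determines whether the loader has enough info to append the pending
 * segment: the source updater must be ready, no removes or quota-exceeded
 * retries may be outstanding, track and timing info must be available, and
 * the loader must not be waiting on a timeline change.
 *
 * @return {boolean}
 * Whether or not the loader has enough info to append the next segment
 * @private
 */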
17096 _proto.hasEnoughInfoToAppend_ = function hasEnoughInfoToAppend_() {
17097 if (!this.sourceUpdater_.ready()) {
17098 return false;
17099 } // If content needs to be removed or the loader is waiting on an append reattempt,
17100 // then no additional content should be appended until the prior append is resolved.
17101
17102
17103 if (this.waitingOnRemove_ || this.quotaExceededErrorRetryTimeout_) {
17104 return false;
17105 }
17106
17107 var segmentInfo = this.pendingSegment_;
17108 var trackInfo = this.getCurrentMediaInfo_(); // no segment to append any data for or
17109 // we do not have information on this specific
17110 // segment yet
17111
17112 if (!segmentInfo || !trackInfo) {
17113 return false;
17114 }
17115
17116 var hasAudio = trackInfo.hasAudio,
17117 hasVideo = trackInfo.hasVideo,
17118 isMuxed = trackInfo.isMuxed;
17119
17120 if (hasVideo && !segmentInfo.videoTimingInfo) {
17121 return false;
17122 } // muxed content only relies on video timing information for now.
17123
17124
17125 if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {
17126 return false;
17127 }
17128
17129 if (shouldWaitForTimelineChange({
17130 timelineChangeController: this.timelineChangeController_,
17131 currentTimeline: this.currentTimeline_,
17132 segmentTimeline: segmentInfo.timeline,
17133 loaderType: this.loaderType_,
17134 audioDisabled: this.audioDisabled_
17135 })) {
17136 return false;
17137 }
17138
17139 return true;
17140 };
17141
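/**
 * Handle transmuxed or parsed segment data. Saves init segments and keys,
 * reconciles the segment's start time, updates the timestamp offset if
 * needed, decides whether a sync request should be appended, and finally
 * hands the bytes to appendData_.
 *
 * @param {Object} simpleSegment
 * A simplified segment object copy
 * @param {Object} result
 * The result of transmuxing or parsing, including the media type and data
 * @private
 */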
17142 _proto.handleData_ = function handleData_(simpleSegment, result) {
17143 this.earlyAbortWhenNeeded_(simpleSegment.stats);
17144
17145 if (this.checkForAbort_(simpleSegment.requestId)) {
17146 return;
17147 } // If there's anything in the call queue, then this data came later and should be
17148 // executed after the calls currently queued.
17149
17150
17151 if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {
17152 this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));
17153 return;
17154 }
17155
17156 var segmentInfo = this.pendingSegment_; // update the time mapping so we can translate from display time to media time
17157
17158 this.setTimeMapping_(segmentInfo.timeline); // for tracking overall stats
17159
17160 this.updateMediaSecondsLoaded_(segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort
17161 // logic may change behavior depending on the state, and changing state too early may
17162 // inflate our estimates of bandwidth. In the future this should be re-examined to
17163 // note more granular states.
17164 // don't process and append data if the mediaSource is closed
17165
17166 if (this.mediaSource_.readyState === 'closed') {
17167 return;
17168 } // if this request included an initialization segment, save that data
17169 // to the initSegment cache
17170
17171
17172 if (simpleSegment.map) {
17173 simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // move over init segment properties to media request
17174
17175 segmentInfo.segment.map = simpleSegment.map;
17176 } // if this request included a segment key, save that data in the cache
17177
17178
17179 if (simpleSegment.key) {
17180 this.segmentKey(simpleSegment.key, true);
17181 }
17182
17183 segmentInfo.isFmp4 = simpleSegment.isFmp4;
17184 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
17185
17186 if (segmentInfo.isFmp4) {
17187 this.trigger('fmp4');
17188 segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;
17189 } else {
17190 var trackInfo = this.getCurrentMediaInfo_();
17191 var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
17192 var firstVideoFrameTimeForData;
17193
17194 if (useVideoTimingInfo) {
17195 firstVideoFrameTimeForData = segmentInfo.videoTimingInfo.start;
17196 } // Segment loader knows more about segment timing than the transmuxer (in certain
17197 // aspects), so make any changes required for a more accurate start time.
17198 // Don't set the end time yet, as the segment may not be finished processing.
17199
17200
17201 segmentInfo.timingInfo.start = this.trueSegmentStart_({
17202 currentStart: segmentInfo.timingInfo.start,
17203 playlist: segmentInfo.playlist,
17204 mediaIndex: segmentInfo.mediaIndex,
17205 currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),
17206 useVideoTimingInfo: useVideoTimingInfo,
17207 firstVideoFrameTimeForData: firstVideoFrameTimeForData,
17208 videoTimingInfo: segmentInfo.videoTimingInfo,
17209 audioTimingInfo: segmentInfo.audioTimingInfo
17210 });
17211 } // Init segments for audio and video only need to be appended in certain cases. Now
17212 // that data is about to be appended, we can check the final cases to determine
17213 // whether we should append an init segment.
17214
17215
17216 this.updateAppendInitSegmentStatus(segmentInfo, result.type); // Timestamp offset should be updated once we get new data and have its timing info,
17217 // as we use the start of the segment to offset the best guess (playlist provided)
17218 // timestamp offset.
17219
17220 this.updateSourceBufferTimestampOffset_(segmentInfo); // if this is a sync request we need to determine whether it should
17221 // be appended or not.
17222
17223 if (segmentInfo.isSyncRequest) {
17224 // first save/update our timing info for this segment.
17225 // this is what allows us to choose an accurate segment
17226 // and the main reason we make a sync request.
17227 this.updateTimingInfoEnd_(segmentInfo);
17228 this.syncController_.saveSegmentTimingInfo({
17229 segmentInfo: segmentInfo,
17230 shouldSaveTimelineMapping: this.loaderType_ === 'main'
17231 });
17232 var next = this.chooseNextRequest_(); // If the sync request isn't the segment that would be requested next
17233 // after taking into account its timing info, do not append it.
17234
17235 if (next.mediaIndex !== segmentInfo.mediaIndex || next.partIndex !== segmentInfo.partIndex) {
17236 this.logger_('sync segment was incorrect, not appending');
17237 return;
17238 } // otherwise append it like any other segment as our guess was correct.
17239
17240
17241 this.logger_('sync segment was correct, appending');
17242 } // Save some state so that in the future anything waiting on first append (and/or
17243 // timestamp offset(s)) can process immediately. While the extra state isn't optimal,
17244 // we need some notion of whether the timestamp offset or other relevant information
17245 // has had a chance to be set.
17246
17247
17248 segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.
17249
17250 this.processMetadataQueue_();
17251 this.appendData_(segmentInfo, result);
17252 };
17253
17254 _proto.updateAppendInitSegmentStatus = function updateAppendInitSegmentStatus(segmentInfo, type) {
17255 // alt audio doesn't manage timestamp offset
17256 if (this.loaderType_ === 'main' && typeof segmentInfo.timestampOffset === 'number' && // in the case that we're handling partial data, we don't want to append an init
17257 // segment for each chunk
17258 !segmentInfo.changedTimestampOffset) {
17259 // if the timestamp offset changed, the timeline may have changed, so we have to re-
17260 // append init segments
17261 this.appendInitSegment_ = {
17262 audio: true,
17263 video: true
17264 };
17265 }
17266
17267 if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {
17268 // make sure we append init segment on playlist changes, in case the media config
17269 // changed
17270 this.appendInitSegment_[type] = true;
17271 }
17272 };
17273
17274 _proto.getInitSegmentAndUpdateState_ = function getInitSegmentAndUpdateState_(_ref4) {
17275 var type = _ref4.type,
17276 initSegment = _ref4.initSegment,
17277 map = _ref4.map,
17278 playlist = _ref4.playlist;
17279
17280 // "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section
17281 // (Section 3) required to parse the applicable Media Segments. It applies to every
17282 // Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag
17283 // or until the end of the playlist."
17284 // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5
17285 if (map) {
17286 var id = initSegmentId(map);
17287
17288 if (this.activeInitSegmentId_ === id) {
17289 // don't need to re-append the init segment if the ID matches
17290 return null;
17291 } // a map-specified init segment takes priority over any transmuxed (or otherwise
17292 // obtained) init segment
17293 //
17294 // this also caches the init segment for later use
17295
17296
17297 initSegment = this.initSegmentForMap(map, true).bytes;
17298 this.activeInitSegmentId_ = id;
17299 } // We used to always prepend init segments for video, however, that shouldn't be
17300 // necessary. Instead, we should only append on changes, similar to what we've always
17301 // done for audio. This is more important (though may not be that important) for
17302 // frame-by-frame appending for LHLS, simply because of the increased quantity of
17303 // appends.
17304
17305
17306 if (initSegment && this.appendInitSegment_[type]) {
17307 // Make sure we track the playlist that we last used for the init segment, so that
17308 // we can re-append the init segment in the event that we get data from a new
17309 // playlist. Discontinuities and track changes are handled in other sections.
17310 this.playlistOfLastInitSegment_[type] = playlist; // Disable future init segment appends for this type, until a change is necessary.
17311
17312 this.appendInitSegment_[type] = false; // we need to clear out the fmp4 active init segment id, since
17313 // we are appending the muxer init segment
17314
17315 this.activeInitSegmentId_ = null;
17316 return initSegment;
17317 }
17318
17319 return null;
17320 };
17321
17322 _proto.handleQuotaExceededError_ = function handleQuotaExceededError_(_ref5, error) {
17323 var _this3 = this;
17324
17325 var segmentInfo = _ref5.segmentInfo,
17326 type = _ref5.type,
17327 bytes = _ref5.bytes;
17328 var audioBuffered = this.sourceUpdater_.audioBuffered();
17329 var videoBuffered = this.sourceUpdater_.videoBuffered(); // For now we're ignoring any notion of gaps in the buffer, but they, in theory,
17330 // should be cleared out during the buffer removals. However, log in case it helps
17331 // debug.
17332
17333 if (audioBuffered.length > 1) {
17334 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the audio buffer: ' + timeRangesToArray(audioBuffered).join(', '));
17335 }
17336
17337 if (videoBuffered.length > 1) {
17338 this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the video buffer: ' + timeRangesToArray(videoBuffered).join(', '));
17339 }
17340
17341 var audioBufferStart = audioBuffered.length ? audioBuffered.start(0) : 0;
17342 var audioBufferEnd = audioBuffered.length ? audioBuffered.end(audioBuffered.length - 1) : 0;
17343 var videoBufferStart = videoBuffered.length ? videoBuffered.start(0) : 0;
17344 var videoBufferEnd = videoBuffered.length ? videoBuffered.end(videoBuffered.length - 1) : 0;
17345
17346 if (audioBufferEnd - audioBufferStart <= MIN_BACK_BUFFER && videoBufferEnd - videoBufferStart <= MIN_BACK_BUFFER) {
17347 // Can't remove enough buffer to make room for new segment (or the browser doesn't
17348 // allow for appends of segments this size). In the future, it may be possible to
17349 // split up the segment and append in pieces, but for now, error out this playlist
17350 // in an attempt to switch to a more manageable rendition.
17351 this.logger_('On QUOTA_EXCEEDED_ERR, single segment too large to append to ' + 'buffer, triggering an error. ' + ("Appended byte length: " + bytes.byteLength + ", ") + ("audio buffer: " + timeRangesToArray(audioBuffered).join(', ') + ", ") + ("video buffer: " + timeRangesToArray(videoBuffered).join(', ') + ", "));
17352 this.error({
17353 message: 'Quota exceeded error with append of a single segment of content',
17354 excludeUntil: Infinity
17355 });
17356 this.trigger('error');
17357 return;
17358 } // To try to resolve the quota exceeded error, clear back buffer and retry. This means
17359 // that the segment-loader should block on future events until this one is handled, so
17360 // that it doesn't keep moving onto further segments. Adding the call to the call
17361 // queue will prevent further appends until waitingOnRemove_ and
17362 // quotaExceededErrorRetryTimeout_ are cleared.
17363 //
17364 // Note that this will only block the current loader. In the case of demuxed content,
17365 // the other load may keep filling as fast as possible. In practice, this should be
17366 // OK, as it is a rare case when either audio has a high enough bitrate to fill up a
17367 // source buffer, or video fills without enough room for audio to append (and without
17368 // the availability of clearing out seconds of back buffer to make room for audio).
17369 // But it might still be good to handle this case in the future as a TODO.
17370
17371
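// In short, the recovery sequence below is: block further appends, queue
// this append for retry, remove buffered content from 0 up to
// (currentTime - MIN_BACK_BUFFER), then wait MIN_BACK_BUFFER seconds before
// re-running the call queue.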
17372 this.waitingOnRemove_ = true;
17373 this.callQueue_.push(this.appendToSourceBuffer_.bind(this, {
17374 segmentInfo: segmentInfo,
17375 type: type,
17376 bytes: bytes
17377 }));
17378 var currentTime = this.currentTime_(); // Try to remove as much audio and video as possible to make room for new content
17379 // before retrying.
17380
17381 var timeToRemoveUntil = currentTime - MIN_BACK_BUFFER;
17382 this.logger_("On QUOTA_EXCEEDED_ERR, removing audio/video from 0 to " + timeToRemoveUntil);
17383 this.remove(0, timeToRemoveUntil, function () {
17384 _this3.logger_("On QUOTA_EXCEEDED_ERR, retrying append in " + MIN_BACK_BUFFER + "s");
17385
17386 _this3.waitingOnRemove_ = false; // wait the length of time allotted in the back buffer to prevent wasted
17387 // attempts (since we can't clear less than the minimum)
17388
17389 _this3.quotaExceededErrorRetryTimeout_ = window.setTimeout(function () {
17390 _this3.logger_('On QUOTA_EXCEEDED_ERR, re-processing call queue');
17391
17392 _this3.quotaExceededErrorRetryTimeout_ = null;
17393
17394 _this3.processCallQueue_();
17395 }, MIN_BACK_BUFFER * 1000);
17396 }, true);
17397 };
17398
17399 _proto.handleAppendError_ = function handleAppendError_(_ref6, error) {
17400 var segmentInfo = _ref6.segmentInfo,
17401 type = _ref6.type,
17402 bytes = _ref6.bytes;
17403
17404 // if there's no error, nothing to do
17405 if (!error) {
17406 return;
17407 }
17408
17409 if (error.code === QUOTA_EXCEEDED_ERR) {
17410 this.handleQuotaExceededError_({
17411 segmentInfo: segmentInfo,
17412 type: type,
17413 bytes: bytes
17414 }); // A quota exceeded error should be recoverable with a future re-append, so no need
17415 // to trigger an append error.
17416
17417 return;
17418 }
17419
17420 this.logger_('Received non QUOTA_EXCEEDED_ERR on append', error);
17421 this.error(type + " append of " + bytes.length + "b failed for segment " + ("#" + segmentInfo.mediaIndex + " in playlist " + segmentInfo.playlist.id)); // If an append errors, we often can't recover.
17422 // (see https://w3c.github.io/media-source/#sourcebuffer-append-error).
17423 //
17424 // Trigger a special error so that it can be handled separately from normal,
17425 // recoverable errors.
17426
17427 this.trigger('appenderror');
17428 };
17429
17430 _proto.appendToSourceBuffer_ = function appendToSourceBuffer_(_ref7) {
17431 var segmentInfo = _ref7.segmentInfo,
17432 type = _ref7.type,
17433 initSegment = _ref7.initSegment,
17434 data = _ref7.data,
17435 bytes = _ref7.bytes;
17436
17437 // If this is a re-append, bytes were already created and don't need to be recreated
17438 if (!bytes) {
17439 var segments = [data];
17440 var byteLength = data.byteLength;
17441
17442 if (initSegment) {
17443 // if the media initialization segment is changing, append it before the content
17444 // segment
17445 segments.unshift(initSegment);
17446 byteLength += initSegment.byteLength;
17447 } // Technically we should be OK appending the init segment separately, however, we
17448 // haven't yet tested that, and prepending is how we have always done things.
17449
17450
17451 bytes = concatSegments({
17452 bytes: byteLength,
17453 segments: segments
17454 });
17455 }
17456
17457 this.sourceUpdater_.appendBuffer({
17458 segmentInfo: segmentInfo,
17459 type: type,
17460 bytes: bytes
17461 }, this.handleAppendError_.bind(this, {
17462 segmentInfo: segmentInfo,
17463 type: type,
17464 bytes: bytes
17465 }));
17466 };
17467
17468 _proto.handleSegmentTimingInfo_ = function handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {
17469 if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
17470 return;
17471 }
17472
17473 var segment = this.pendingSegment_.segment;
17474 var timingInfoProperty = type + "TimingInfo";
17475
17476 if (!segment[timingInfoProperty]) {
17477 segment[timingInfoProperty] = {};
17478 }
17479
17480 segment[timingInfoProperty].transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;
17481 segment[timingInfoProperty].transmuxedPresentationStart = segmentTimingInfo.start.presentation;
17482 segment[timingInfoProperty].transmuxedDecodeStart = segmentTimingInfo.start.decode;
17483 segment[timingInfoProperty].transmuxedPresentationEnd = segmentTimingInfo.end.presentation;
17484 segment[timingInfoProperty].transmuxedDecodeEnd = segmentTimingInfo.end.decode; // mainly used as a reference for debugging
17485
17486 segment[timingInfoProperty].baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;
17487 };
17488
17489 _proto.appendData_ = function appendData_(segmentInfo, result) {
17490 var type = result.type,
17491 data = result.data;
17492
17493 if (!data || !data.byteLength) {
17494 return;
17495 }
17496
17497 if (type === 'audio' && this.audioDisabled_) {
17498 return;
17499 }
17500
17501 var initSegment = this.getInitSegmentAndUpdateState_({
17502 type: type,
17503 initSegment: result.initSegment,
17504 playlist: segmentInfo.playlist,
17505 map: segmentInfo.isFmp4 ? segmentInfo.segment.map : null
17506 });
17507 this.appendToSourceBuffer_({
17508 segmentInfo: segmentInfo,
17509 type: type,
17510 initSegment: initSegment,
17511 data: data
17512 });
17513 }
17514 /**
17515 * load a specific segment from a request into the buffer
17516 *
17517 * @private
17518 */
17519 ;
17520
17521 _proto.loadSegment_ = function loadSegment_(segmentInfo) {
17522 var _this4 = this;
17523
17524 this.state = 'WAITING';
17525 this.pendingSegment_ = segmentInfo;
17526 this.trimBackBuffer_(segmentInfo);
17527
17528 if (typeof segmentInfo.timestampOffset === 'number') {
17529 if (this.transmuxer_) {
17530 this.transmuxer_.postMessage({
17531 action: 'clearAllMp4Captions'
17532 });
17533 }
17534 }
17535
17536 if (!this.hasEnoughInfoToLoad_()) {
17537 this.loadQueue_.push(function () {
17538 // regenerate the audioAppendStart, timestampOffset, etc. as they
17539 // may have changed since this function was added to the queue.
17540 var options = _extends({}, segmentInfo, {
17541 forceTimestampOffset: true
17542 });
17543
17544 _extends(segmentInfo, _this4.generateSegmentInfo_(options));
17545
17546 _this4.isPendingTimestampOffset_ = false;
17547
17548 _this4.updateTransmuxerAndRequestSegment_(segmentInfo);
17549 });
17550 return;
17551 }
17552
17553 this.updateTransmuxerAndRequestSegment_(segmentInfo);
17554 };
17555
17556 _proto.updateTransmuxerAndRequestSegment_ = function updateTransmuxerAndRequestSegment_(segmentInfo) {
17557 var _this5 = this;
17558
17559 // We'll update the source buffer's timestamp offset once we have transmuxed data, but
17560 // the transmuxer still needs to be updated before then.
17561 //
17562 // Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp
17563 // offset must be passed to the transmuxer for stream correcting adjustments.
17564 if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {
17565 this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was cleared
17566
17567 segmentInfo.gopsToAlignWith = [];
17568 this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start fresh
17569
17570 this.transmuxer_.postMessage({
17571 action: 'reset'
17572 });
17573 this.transmuxer_.postMessage({
17574 action: 'setTimestampOffset',
17575 timestampOffset: segmentInfo.timestampOffset
17576 });
17577 }
17578
17579 var simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);
17580 var isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex);
17581 var isWalkingForward = this.mediaIndex !== null;
17582 var isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && // currentTimeline starts at -1, so we shouldn't end the timeline switching to 0,
17583 // the first timeline
17584 segmentInfo.timeline > 0;
17585 var isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;
17586 this.logger_("Requesting " + segmentInfoString(segmentInfo)); // If there's an init segment associated with this segment, but it is not cached (identified by a lack of bytes),
17587 // then this init segment has never been seen before and should be appended.
17588 //
17589 // At this point the content type (audio/video or both) is not yet known, but it should be safe to set
17590 // both to true and leave the decision of whether to append the init segment to append time.
17591
17592 if (simpleSegment.map && !simpleSegment.map.bytes) {
17593 this.logger_('going to request init segment.');
17594 this.appendInitSegment_ = {
17595 video: true,
17596 audio: true
17597 };
17598 }
17599
17600 segmentInfo.abortRequests = mediaSegmentRequest({
17601 xhr: this.vhs_.xhr,
17602 xhrOptions: this.xhrOptions_,
17603 decryptionWorker: this.decrypter_,
17604 segment: simpleSegment,
17605 abortFn: this.handleAbort_.bind(this, segmentInfo),
17606 progressFn: this.handleProgress_.bind(this),
17607 trackInfoFn: this.handleTrackInfo_.bind(this),
17608 timingInfoFn: this.handleTimingInfo_.bind(this),
17609 videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),
17610 audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),
17611 captionsFn: this.handleCaptions_.bind(this),
17612 isEndOfTimeline: isEndOfTimeline,
17613 endedTimelineFn: function endedTimelineFn() {
17614 _this5.logger_('received endedtimeline callback');
17615 },
17616 id3Fn: this.handleId3_.bind(this),
17617 dataFn: this.handleData_.bind(this),
17618 doneFn: this.segmentRequestFinished_.bind(this),
17619 onTransmuxerLog: function onTransmuxerLog(_ref8) {
17620 var message = _ref8.message,
17621 level = _ref8.level,
17622 stream = _ref8.stream;
17623
17624 _this5.logger_(segmentInfoString(segmentInfo) + " logged from transmuxer stream " + stream + " as a " + level + ": " + message);
17625 }
17626 });
17627 }
17628 /**
17629 * trim the back buffer so that we don't have too much data
17630 * in the source buffer
17631 *
17632 * @private
17633 *
17634 * @param {Object} segmentInfo - the current segment
17635 */
17636 ;
17637
17638 _proto.trimBackBuffer_ = function trimBackBuffer_(segmentInfo) {
17639 var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10); // Chrome has a hard limit of 150MB of
17640 // buffer and a very conservative "garbage collector"
17641 // We manually clear out the old buffer to ensure
17642 // we don't trigger the QuotaExceeded error
17643 // on the source buffer during subsequent appends
17644
17645 if (removeToTime > 0) {
17646 this.remove(0, removeToTime);
17647 }
17648 }
17649 /**
17650 * creates a simplified copy of the segment object with just the
17651 * information necessary to perform the XHR and decryption
17652 *
17653 * @private
17654 *
17655 * @param {Object} segmentInfo - the current segment
17656 * @return {Object} a simplified segment object copy
17657 */
17658 ;
17659
17660 _proto.createSimplifiedSegmentObj_ = function createSimplifiedSegmentObj_(segmentInfo) {
17661 var segment = segmentInfo.segment;
17662 var part = segmentInfo.part;
17663 var simpleSegment = {
17664 resolvedUri: part ? part.resolvedUri : segment.resolvedUri,
17665 byterange: part ? part.byterange : segment.byterange,
17666 requestId: segmentInfo.requestId,
17667 transmuxer: segmentInfo.transmuxer,
17668 audioAppendStart: segmentInfo.audioAppendStart,
17669 gopsToAlignWith: segmentInfo.gopsToAlignWith,
17670 part: segmentInfo.part
17671 };
17672 var previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];
17673
17674 if (previousSegment && previousSegment.timeline === segment.timeline) {
17675 // The baseStartTime of a segment is used to handle rollover when probing the TS
17676 // segment to retrieve timing information. Since the probe only looks at the media's
17677 // times (e.g., PTS and DTS values of the segment), and doesn't consider the
17678 // player's time (e.g., player.currentTime()), baseStartTime should reflect the
17679 // media time as well. transmuxedDecodeEnd represents the end time of a segment, in
17680 // seconds of media time, so should be used here. The previous segment is used since
17681 // the end of the previous segment should represent the beginning of the current
17682 // segment, so long as they are on the same timeline.
17683 if (previousSegment.videoTimingInfo) {
17684 simpleSegment.baseStartTime = previousSegment.videoTimingInfo.transmuxedDecodeEnd;
17685 } else if (previousSegment.audioTimingInfo) {
17686 simpleSegment.baseStartTime = previousSegment.audioTimingInfo.transmuxedDecodeEnd;
17687 }
17688 }
17689
17690 if (segment.key) {
17691 // if the media sequence is greater than 2^32, the IV will be incorrect
17692 // assuming 10s segments, that would be about 1300 years
17693 var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
17694 simpleSegment.key = this.segmentKey(segment.key);
17695 simpleSegment.key.iv = iv;
17696 }
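// As a sanity check on the comment above: 2^32 segments at 10 seconds each
// is about 4.29e10 seconds, or roughly 1,360 years, so IV rollover is not a
// practical concern.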
17697
17698 if (segment.map) {
17699 simpleSegment.map = this.initSegmentForMap(segment.map);
17700 }
17701
17702 return simpleSegment;
17703 };
17704
17705 _proto.saveTransferStats_ = function saveTransferStats_(stats) {
17706 // every request counts as a media request even if it has been aborted
17707 // or canceled due to a timeout
17708 this.mediaRequests += 1;
17709
17710 if (stats) {
17711 this.mediaBytesTransferred += stats.bytesReceived;
17712 this.mediaTransferDuration += stats.roundTripTime;
17713 }
17714 };
17715
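/**
 * Save the bandwidth and round trip time measured by a completed segment
 * request, ignoring segments too short to yield a reliable measurement.
 *
 * @param {number} duration
 * Duration of the downloaded segment, in seconds
 * @param {Object} stats
 * Object containing stats about the request timing and size
 * @private
 */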
17716 _proto.saveBandwidthRelatedStats_ = function saveBandwidthRelatedStats_(duration, stats) {
17717 // byteLength will be used for throughput, and should be based on bytes received,
17718 // which we only know at the end of the request and should reflect total bytes
17719 // downloaded rather than just bytes processed from components of the segment
17720 this.pendingSegment_.byteLength = stats.bytesReceived;
17721
17722 if (duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
17723 this.logger_("Ignoring segment's bandwidth because its duration of " + duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
17724 return;
17725 }
17726
17727 this.bandwidth = stats.bandwidth;
17728 this.roundTrip = stats.roundTripTime;
17729 };
17730
17731 _proto.handleTimeout_ = function handleTimeout_() {
17732 // although the VTT segment loader bandwidth isn't really used, it's good to
17733 // maintain functionality between segment loaders
17734 this.mediaRequestsTimedout += 1;
17735 this.bandwidth = 1;
17736 this.roundTrip = NaN;
17737 this.trigger('bandwidthupdate');
17738 }
17739 /**
17740 * Handle the callback from the segmentRequest function and set the
17741 * associated SegmentLoader state and errors if necessary
17742 *
17743 * @private
17744 */
17745 ;
17746
17747 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
17748 // TODO handle special cases, e.g., muxed audio/video but only audio in the segment
17749 // check the call queue directly since this function doesn't need to deal with any
17750 // data, and can continue even if the source buffers are not set up and we didn't get
17751 // any data from the segment
17752 if (this.callQueue_.length) {
17753 this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
17754 return;
17755 }
17756
17757 this.saveTransferStats_(simpleSegment.stats); // The request was aborted and the SegmentLoader has already been reset
17758
17759 if (!this.pendingSegment_) {
17760 return;
17761 } // the request was aborted and the SegmentLoader has already started
17762 // another request. this can happen when the timeout for an aborted
17763 // request triggers due to a limitation in the XHR library
17764 // do not count this as any sort of request or we risk double-counting
17765
17766
17767 if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
17768 return;
17769 } // an error occurred from the active pendingSegment_ so reset everything
17770
17771
17772 if (error) {
17773 this.pendingSegment_ = null;
17774 this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be done
17775
17776 if (error.code === REQUEST_ERRORS.ABORTED) {
17777 return;
17778 }
17779
17780 this.pause(); // the error is really just that at least one of the requests timed-out
17781 // set the bandwidth to a very low value and trigger an ABR switch to
17782 // take emergency action
17783
17784 if (error.code === REQUEST_ERRORS.TIMEOUT) {
17785 this.handleTimeout_();
17786 return;
17787 } // if control-flow has arrived here, then the error is real
17788 // emit an error event to blacklist the current playlist
17789
17790
17791 this.mediaRequestsErrored += 1;
17792 this.error(error);
17793 this.trigger('error');
17794 return;
17795 }
17796
17797 var segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request
17798 // generated for ABR purposes
17799
17800 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
17801 segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;
17802
17803 if (result.gopInfo) {
17804 this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);
17805 } // Although we may have already started appending on progress, we shouldn't switch the
17806 // state away from loading until we are officially done loading the segment data.
17807
17808
17809 this.state = 'APPENDING'; // used for testing
17810
17811 this.trigger('appending');
17812 this.waitForAppendsToComplete_(segmentInfo);
17813 };
17814
17815 _proto.setTimeMapping_ = function setTimeMapping_(timeline) {
17816 var timelineMapping = this.syncController_.mappingForTimeline(timeline);
17817
17818 if (timelineMapping !== null) {
17819 this.timeMapping_ = timelineMapping;
17820 }
17821 };
17822
17823 _proto.updateMediaSecondsLoaded_ = function updateMediaSecondsLoaded_(segment) {
17824 if (typeof segment.start === 'number' && typeof segment.end === 'number') {
17825 this.mediaSecondsLoaded += segment.end - segment.start;
17826 } else {
17827 this.mediaSecondsLoaded += segment.duration;
17828 }
17829 };
17830
17831 _proto.shouldUpdateTransmuxerTimestampOffset_ = function shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {
17832 if (timestampOffset === null) {
17833 return false;
17834 } // note that we're potentially using the same timestamp offset for both video and
17835 // audio
17836
17837
17838 if (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
17839 return true;
17840 }
17841
17842 if (!this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
17843 return true;
17844 }
17845
17846 return false;
17847 };
17848
17849 _proto.trueSegmentStart_ = function trueSegmentStart_(_ref9) {
17850 var currentStart = _ref9.currentStart,
17851 playlist = _ref9.playlist,
17852 mediaIndex = _ref9.mediaIndex,
17853 firstVideoFrameTimeForData = _ref9.firstVideoFrameTimeForData,
17854 currentVideoTimestampOffset = _ref9.currentVideoTimestampOffset,
17855 useVideoTimingInfo = _ref9.useVideoTimingInfo,
17856 videoTimingInfo = _ref9.videoTimingInfo,
17857 audioTimingInfo = _ref9.audioTimingInfo;
17858
17859 if (typeof currentStart !== 'undefined') {
17860 // if start was set once, keep using it
17861 return currentStart;
17862 }
17863
17864 if (!useVideoTimingInfo) {
17865 return audioTimingInfo.start;
17866 }
17867
17868 var previousSegment = playlist.segments[mediaIndex - 1]; // The start of a segment should be the start of the first full frame contained
17869 // within that segment. Since the transmuxer maintains a cache of incomplete data
17870 // and/or the last frame seen, the start time may reflect a frame that starts
17871 // in the previous segment. Check for that case and ensure the start time is
17872 // accurate for the segment.
17873
17874 if (mediaIndex === 0 || !previousSegment || typeof previousSegment.start === 'undefined' || previousSegment.end !== firstVideoFrameTimeForData + currentVideoTimestampOffset) {
17875 return firstVideoFrameTimeForData;
17876 }
17877
17878 return videoTimingInfo.start;
17879 };
17880
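/**
 * Register callbacks on the audio and/or video source buffer queues so that
 * checkAppendsDone_ runs once every outstanding append for this segment has
 * completed. Segments with no data are marked done immediately.
 *
 * @param {Object} segmentInfo
 * The current segment
 * @private
 */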
17881 _proto.waitForAppendsToComplete_ = function waitForAppendsToComplete_(segmentInfo) {
17882 var trackInfo = this.getCurrentMediaInfo_(segmentInfo);
17883
17884 if (!trackInfo) {
17885 this.error({
17886 message: 'No starting media returned, likely due to an unsupported media format.',
17887 blacklistDuration: Infinity
17888 });
17889 this.trigger('error');
17890 return;
17891 } // Although transmuxing is done, appends may not yet be finished. Throw a marker
17892 // on each queue this loader is responsible for to ensure that the appends are
17893 // complete.
17894
17895
17896 var hasAudio = trackInfo.hasAudio,
17897 hasVideo = trackInfo.hasVideo,
17898 isMuxed = trackInfo.isMuxed;
17899 var waitForVideo = this.loaderType_ === 'main' && hasVideo;
17900 var waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;
17901 segmentInfo.waitingOnAppends = 0; // segments with no data
17902
17903 if (!segmentInfo.hasAppendedData_) {
17904 if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {
17905 // When there's no audio or video data in the segment, there's no audio or video
17906 // timing information.
17907 //
17908 // If there's no audio or video timing information, then the timestamp offset
17909 // can't be adjusted to the appropriate value for the transmuxer and source
17910 // buffers.
17911 //
17912 // Therefore, the next segment should be used to set the timestamp offset.
17913 this.isPendingTimestampOffset_ = true;
17914 } // override settings for metadata only segments
17915
17916
17917 segmentInfo.timingInfo = {
17918 start: 0
17919 };
17920 segmentInfo.waitingOnAppends++;
17921
17922 if (!this.isPendingTimestampOffset_) {
17923 // update the timestamp offset
17924 this.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have
17925 // no video/audio data.
17926
17927 this.processMetadataQueue_();
17928 } // append is "done" instantly with no data.
17929
17930
17931 this.checkAppendsDone_(segmentInfo);
17932 return;
17933 } // Since source updater could call back synchronously, do the increments first.
17934
17935
17936 if (waitForVideo) {
17937 segmentInfo.waitingOnAppends++;
17938 }
17939
17940 if (waitForAudio) {
17941 segmentInfo.waitingOnAppends++;
17942 }
17943
17944 if (waitForVideo) {
17945 this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
17946 }
17947
17948 if (waitForAudio) {
17949 this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
17950 }
17951 };
17952
17953 _proto.checkAppendsDone_ = function checkAppendsDone_(segmentInfo) {
17954 if (this.checkForAbort_(segmentInfo.requestId)) {
17955 return;
17956 }
17957
17958 segmentInfo.waitingOnAppends--;
17959
17960 if (segmentInfo.waitingOnAppends === 0) {
17961 this.handleAppendsDone_();
17962 }
17963 };
17964
17965 _proto.checkForIllegalMediaSwitch = function checkForIllegalMediaSwitch(trackInfo) {
17966 var illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.getCurrentMediaInfo_(), trackInfo);
17967
17968 if (illegalMediaSwitchError) {
17969 this.error({
17970 message: illegalMediaSwitchError,
17971 blacklistDuration: Infinity
17972 });
17973 this.trigger('error');
17974 return true;
17975 }
17976
17977 return false;
17978 };
17979
17980 _proto.updateSourceBufferTimestampOffset_ = function updateSourceBufferTimestampOffset_(segmentInfo) {
17981 if (segmentInfo.timestampOffset === null || // we don't yet have the start for whatever media type (video or audio) has
17982 // priority, timing-wise, so we must wait
17983 typeof segmentInfo.timingInfo.start !== 'number' || // already updated the timestamp offset for this segment
17984 segmentInfo.changedTimestampOffset || // the alt audio loader should not be responsible for setting the timestamp offset
17985 this.loaderType_ !== 'main') {
17986 return;
17987 }
17988
17989 var didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that
17990 // the timing info here comes from video. In the event that the audio is longer than
17991 // the video, this will trim the start of the audio.
17992 // This also trims any offset from 0 at the beginning of the media
17993
17994 segmentInfo.timestampOffset -= segmentInfo.timingInfo.start; // In the event that there are part segment downloads, each will try to update the
17995 // timestamp offset. Retaining this bit of state prevents us from updating in the
17996 // future (within the same segment), however, there may be a better way to handle it.
17997
17998 segmentInfo.changedTimestampOffset = true;
17999
18000 if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
18001 this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);
18002 didChange = true;
18003 }
18004
18005 if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
18006 this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);
18007 didChange = true;
18008 }
18009
18010 if (didChange) {
18011 this.trigger('timestampoffset');
18012 }
18013 };
18014
18015 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_(segmentInfo) {
18016 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
18017 var trackInfo = this.getMediaInfo_();
18018 var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
18019 var prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;
18020
18021 if (!prioritizedTimingInfo) {
18022 return;
18023 }
18024
18025 segmentInfo.timingInfo.end = typeof prioritizedTimingInfo.end === 'number' ? // End time may not exist in a case where we aren't parsing the full segment (one
18026 // current example is the case of fmp4), so use the rough duration to calculate an
18027 // end time.
18028 prioritizedTimingInfo.end : prioritizedTimingInfo.start + segmentInfo.duration;
18029 }
18030 /**
18031 * Callback to run when appendBuffer is finished. Detects if we are
18032 * in a good state to do things with the data we got, or if we need
18033 * to wait for more.
18034 *
18035 * @private
18036 */
18037 ;
18038
18039 _proto.handleAppendsDone_ = function handleAppendsDone_() {
18040 // appendsdone can cause an abort
18041 if (this.pendingSegment_) {
18042 this.trigger('appendsdone');
18043 }
18044
18045 if (!this.pendingSegment_) {
18046 this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in
18047 // all appending cases?
18048
18049 if (!this.paused()) {
18050 this.monitorBuffer_();
18051 }
18052
18053 return;
18054 }
18055
18056 var segmentInfo = this.pendingSegment_; // Now that the end of the segment has been reached, we can set the end time. It's
18057 // best to wait until all appends are done so we're sure that the primary media is
18058 // finished (and we have its end time).
18059
18060 this.updateTimingInfoEnd_(segmentInfo);
18061
18062 if (this.shouldSaveSegmentTimingInfo_) {
18063 // Timeline mappings should only be saved for the main loader. This is for multiple
18064 // reasons:
18065 //
18066 // 1) Only one mapping is saved per timeline, meaning that if both the audio loader
18067 // and the main loader try to save the timeline mapping, whichever comes later
18068 // will overwrite the first. In theory this is OK, as the mappings should be the
18069 // same, however, it breaks for (2)
18070 // 2) In the event of a live stream, the initial live point will make for a somewhat
18071 // arbitrary mapping. If audio and video streams are not perfectly in-sync, then
18072 // the mapping will be off for one of the streams, dependent on which one was
18073 // first saved (see (1)).
18074 // 3) Primary timing goes by video in VHS, so the mapping should be video.
18075 //
18076 // Since the audio loader will wait for the main loader to load the first segment,
18077 // the main loader will save the first timeline mapping, and ensure that there won't
18078 // be a case where audio loads two segments without saving a mapping (thus leading
18079 // to missing segment timing info).
18080 this.syncController_.saveSegmentTimingInfo({
18081 segmentInfo: segmentInfo,
18082 shouldSaveTimelineMapping: this.loaderType_ === 'main'
18083 });
18084 }
18085
18086 var segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);
18087
18088 if (segmentDurationMessage) {
18089 if (segmentDurationMessage.severity === 'warn') {
18090 videojs.log.warn(segmentDurationMessage.message);
18091 } else {
18092 this.logger_(segmentDurationMessage.message);
18093 }
18094 }
18095
18096 this.recordThroughput_(segmentInfo);
18097 this.pendingSegment_ = null;
18098 this.state = 'READY';
18099
18100 if (segmentInfo.isSyncRequest) {
18101 this.trigger('syncinfoupdate'); // if the sync request was not appended
18102 // then it was not the correct segment.
18103 // throw it away and use the data it gave us
18104 // to get the correct one.
18105
18106 if (!segmentInfo.hasAppendedData_) {
18107 this.logger_("Throwing away un-appended sync request " + segmentInfoString(segmentInfo));
18108 return;
18109 }
18110 }
18111
18112 this.logger_("Appended " + segmentInfoString(segmentInfo));
18113 this.addSegmentMetadataCue_(segmentInfo);
18114 this.fetchAtBuffer_ = true;
18115
18116 if (this.currentTimeline_ !== segmentInfo.timeline) {
18117 this.timelineChangeController_.lastTimelineChange({
18118 type: this.loaderType_,
18119 from: this.currentTimeline_,
18120 to: segmentInfo.timeline
18121 }); // If audio is not disabled, the main segment loader is responsible for updating
18122 // the audio timeline as well. If the content is video only, this won't have any
18123 // impact.
18124
18125 if (this.loaderType_ === 'main' && !this.audioDisabled_) {
18126 this.timelineChangeController_.lastTimelineChange({
18127 type: 'audio',
18128 from: this.currentTimeline_,
18129 to: segmentInfo.timeline
18130 });
18131 }
18132 }
18133
18134 this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before
18135 // the following conditional, otherwise it may consider this a bad "guess"
18136 // and attempt to resync when the post-update seekable window and live
18137 // point would mean that this was the perfect segment to fetch
18138
18139 this.trigger('syncinfoupdate');
18140 var segment = segmentInfo.segment; // If we previously appended a segment that ends more than 3 targetDurations before
18141 // the currentTime_ that means that our conservative guess was too conservative.
18142 // In that case, reset the loader state so that we try to use any information gained
18143 // from the previous request to create a new, more accurate, sync-point.
18144
18145 if (segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3) {
18146 this.resetEverything();
18147 return;
18148 }
18149
18150 var isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we have enough time to get a sync segment
18151 // and conservatively guess
18152
18153 if (isWalkingForward) {
18154 this.trigger('bandwidthupdate');
18155 }
18156
18157 this.trigger('progress');
18158 this.mediaIndex = segmentInfo.mediaIndex;
18159 this.partIndex = segmentInfo.partIndex; // any time an update finishes and the last segment is in the
18160 // buffer, end the stream. this ensures the "ended" event will
18161 // fire if playback reaches that point.
18162
18163 if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex)) {
18164 this.endOfStream();
18165 } // used for testing
18166
18167
18168 this.trigger('appended');
18169
18170 if (segmentInfo.hasAppendedData_) {
18171 this.mediaAppends++;
18172 }
18173
18174 if (!this.paused()) {
18175 this.monitorBuffer_();
18176 }
18177 }
18178 /**
18179 * Records the current throughput of the decrypt, transmux, and append
18180 * portion of the segment pipeline. `throughput.rate` is the cumulative
18181 * moving average of the throughput. `throughput.count` is the number of
18182 * data points in the average.
18183 *
18184 * @private
18185 * @param {Object} segmentInfo the object returned by loadSegment
18186 */
18187 ;
18188
18189 _proto.recordThroughput_ = function recordThroughput_(segmentInfo) {
18190 if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
18191 this.logger_("Ignoring segment's throughput because its duration of " + segmentInfo.duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
18192 return;
18193 }
18194
18195 var rate = this.throughput.rate; // Add one to the time to ensure that we don't accidentally attempt to divide
18196 // by zero in the case where the throughput is ridiculously high
18197
18198 var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1; // Multiply by 8000 to convert from bytes/millisecond to bits/second
18199
18200 var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000); // This is just a cumulative moving average calculation:
18201 // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
18202
18203 this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
18204 }
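// Worked example (illustrative numbers, not from the library): a 1,000,000 byte
// segment whose decrypt/transmux/append took 500ms yields
// Math.floor(1000000 / 500 * 8 * 1000) === 16000000 bits/second. Folding that
// into a prior rate of 12000000 over 3 samples gives
// 12000000 + (16000000 - 12000000) / 4 === 13000000.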
18205 /**
18206 * Adds a cue to the segment-metadata track with some metadata information about the
18207 * segment
18208 *
18209 * @private
18210 * @param {Object} segmentInfo
18211 * the object returned by loadSegment
18212 * @method addSegmentMetadataCue_
18213 */
18214 ;
18215
18216 _proto.addSegmentMetadataCue_ = function addSegmentMetadataCue_(segmentInfo) {
18217 if (!this.segmentMetadataTrack_) {
18218 return;
18219 }
18220
18221 var segment = segmentInfo.segment;
18222 var start = segment.start;
18223 var end = segment.end; // Do not try adding the cue if the start and end times are invalid.
18224
18225 if (!finite(start) || !finite(end)) {
18226 return;
18227 }
18228
18229 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
18230 var Cue = window.WebKitDataCue || window.VTTCue;
18231 var value = {
18232 custom: segment.custom,
18233 dateTimeObject: segment.dateTimeObject,
18234 dateTimeString: segment.dateTimeString,
18235 bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
18236 resolution: segmentInfo.playlist.attributes.RESOLUTION,
18237 codecs: segmentInfo.playlist.attributes.CODECS,
18238 byteLength: segmentInfo.byteLength,
18239 uri: segmentInfo.uri,
18240 timeline: segmentInfo.timeline,
18241 playlist: segmentInfo.playlist.id,
18242 start: start,
18243 end: end
18244 };
18245 var data = JSON.stringify(value);
18246 var cue = new Cue(start, end, data); // Attach the metadata to the value property of the cue to keep the interface
18247 // consistent between WebKitDataCue in Safari and VTTCue in other browsers
18248
18249 cue.value = value;
18250 this.segmentMetadataTrack_.addCue(cue);
18251 };
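// A resulting segment-metadata cue (illustrative values) looks like:
//
//   cue.startTime === 10; cue.endTime === 16;
//   cue.value = {
//     bandwidth: 4000000,
//     resolution: { width: 1280, height: 720 },
//     codecs: 'avc1.4d401f,mp4a.40.2',
//     playlist: 'playlist-720p', // hypothetical playlist id
//     timeline: 0,
//     start: 10,
//     end: 16
//   };
//
// The JSON-stringified value also serves as the cue text, since WebKitDataCue
// and VTTCue are constructed the same way here.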
18252
18253 return SegmentLoader;
18254}(videojs.EventTarget);
18255
18256function noop() {}
18257
18258var toTitleCase = function toTitleCase(string) {
18259 if (typeof string !== 'string') {
18260 return string;
18261 }
18262
18263 return string.replace(/./, function (w) {
18264 return w.toUpperCase();
18265 });
18266};
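// Illustrative: toTitleCase('audio') === 'Audio'. Used below to build per-type
// handler names such as "onAudioUpdateEnd_" and "onVideoError_".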
18267
18268var bufferTypes = ['video', 'audio'];
18269
18270var _updating = function updating(type, sourceUpdater) {
18271 var sourceBuffer = sourceUpdater[type + "Buffer"];
18272 return sourceBuffer && sourceBuffer.updating || sourceUpdater.queuePending[type];
18273};
18274
18275var nextQueueIndexOfType = function nextQueueIndexOfType(type, queue) {
18276 for (var i = 0; i < queue.length; i++) {
18277 var queueEntry = queue[i];
18278
18279 if (queueEntry.type === 'mediaSource') {
18280 // If the next entry is a media source entry (uses multiple source buffers), block
18281 // processing to allow it to go through first.
18282 return null;
18283 }
18284
18285 if (queueEntry.type === type) {
18286 return i;
18287 }
18288 }
18289
18290 return null;
18291};
18292
18293var shiftQueue = function shiftQueue(type, sourceUpdater) {
18294 if (sourceUpdater.queue.length === 0) {
18295 return;
18296 }
18297
18298 var queueIndex = 0;
18299 var queueEntry = sourceUpdater.queue[queueIndex];
18300
18301 if (queueEntry.type === 'mediaSource') {
18302 if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
18303 sourceUpdater.queue.shift();
18304 queueEntry.action(sourceUpdater);
18305
18306 if (queueEntry.doneFn) {
18307 queueEntry.doneFn();
18308 } // Only specific source buffer actions must wait for async updateend events. Media
18309 // Source actions process synchronously. Therefore, both audio and video source
18310 // buffers are now clear to process the next queue entries.
18311
18312
18313 shiftQueue('audio', sourceUpdater);
18314 shiftQueue('video', sourceUpdater);
18315 } // Media Source actions require both source buffers, so if the media source action
18316 // couldn't process yet (because one or both source buffers are busy), block other
18317 // queue actions until both are available and the media source action can process.
18318
18319
18320 return;
18321 }
18322
18323 if (type === 'mediaSource') {
18324 // If the queue was shifted by a media source action (this happens when pushing a
18325 // media source action onto the queue), then it wasn't from an updateend event from an
18326 // audio or video source buffer, so there's no change from previous state, and no
18327 // processing should be done.
18328 return;
18329 } // Media source queue entries don't need to consider whether the source updater is
18330 // started (i.e., source buffers are created) as they don't need the source buffers, but
18331 // source buffer queue entries do.
18332
18333
18334 if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || _updating(type, sourceUpdater)) {
18335 return;
18336 }
18337
18338 if (queueEntry.type !== type) {
18339 queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);
18340
18341 if (queueIndex === null) {
18342 // Either there's no queue entry that uses this source buffer type in the queue, or
18343 // there's a media source queue entry before the next entry of this type, in which
18344 // case wait for that action to process first.
18345 return;
18346 }
18347
18348 queueEntry = sourceUpdater.queue[queueIndex];
18349 }
18350
18351 sourceUpdater.queue.splice(queueIndex, 1); // Keep a record that this source buffer type is in use.
18352 //
18353 // The queue pending operation must be set before the action is performed in the event
18354 // that the action results in a synchronous event that is acted upon. For instance, if
18355 // an exception is thrown that can be handled, it's possible that new actions will be
18356 // appended to an empty queue and immediately executed, but would not have the correct
18357 // pending information if this property was set after the action was performed.
18358
18359 sourceUpdater.queuePending[type] = queueEntry;
18360 queueEntry.action(type, sourceUpdater);
18361
18362 if (!queueEntry.doneFn) {
18363 // synchronous operation, process next entry
18364 sourceUpdater.queuePending[type] = null;
18365 shiftQueue(type, sourceUpdater);
18366 return;
18367 }
18368};
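// Queue-ordering sketch (illustrative): given a queue of
//   [videoAppend, audioAppend, mediaSourceDuration, videoRemove]
// the audio append may run while the video append is still updating (pending
// state is tracked per type), but mediaSourceDuration blocks everything behind
// it until both buffers are idle, and videoRemove cannot jump ahead of it
// because nextQueueIndexOfType returns null at a mediaSource entry.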
18369
18370var cleanupBuffer = function cleanupBuffer(type, sourceUpdater) {
18371 var buffer = sourceUpdater[type + "Buffer"];
18372 var titleType = toTitleCase(type);
18373
18374 if (!buffer) {
18375 return;
18376 }
18377
18378 buffer.removeEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
18379 buffer.removeEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
18380 sourceUpdater.codecs[type] = null;
18381 sourceUpdater[type + "Buffer"] = null;
18382};
18383
18384var inSourceBuffers = function inSourceBuffers(mediaSource, sourceBuffer) {
18385 return mediaSource && sourceBuffer && Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
18386};
18387
18388var actions = {
18389 appendBuffer: function appendBuffer(bytes, segmentInfo, onError) {
18390 return function (type, sourceUpdater) {
18391 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
18392 // or the media source does not contain this source buffer.
18393
18394 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18395 return;
18396 }
18397
18398 sourceUpdater.logger_("Appending segment " + segmentInfo.mediaIndex + "'s " + bytes.length + " bytes to " + type + "Buffer");
18399
18400 try {
18401 sourceBuffer.appendBuffer(bytes);
18402 } catch (e) {
18403 sourceUpdater.logger_("Error with code " + e.code + " " + (e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') + ("when appending segment " + segmentInfo.mediaIndex + " to " + type + "Buffer"));
18404 sourceUpdater.queuePending[type] = null;
18405 onError(e);
18406 }
18407 };
18408 },
18409 remove: function remove(start, end) {
18410 return function (type, sourceUpdater) {
18411 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
18412 // or the media source does not contain this source buffer.
18413
18414 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18415 return;
18416 }
18417
18418 sourceUpdater.logger_("Removing " + start + " to " + end + " from " + type + "Buffer");
18419
18420 try {
18421 sourceBuffer.remove(start, end);
18422 } catch (e) {
18423 sourceUpdater.logger_("Remove " + start + " to " + end + " from " + type + "Buffer failed");
18424 }
18425 };
18426 },
18427 timestampOffset: function timestampOffset(offset) {
18428 return function (type, sourceUpdater) {
18429 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
18430 // or the media source does not contain this source buffer.
18431
18432 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18433 return;
18434 }
18435
18436 sourceUpdater.logger_("Setting " + type + "timestampOffset to " + offset);
18437 sourceBuffer.timestampOffset = offset;
18438 };
18439 },
18440 callback: function callback(_callback) {
18441 return function (type, sourceUpdater) {
18442 _callback();
18443 };
18444 },
18445 endOfStream: function endOfStream(error) {
18446 return function (sourceUpdater) {
18447 if (sourceUpdater.mediaSource.readyState !== 'open') {
18448 return;
18449 }
18450
18451 sourceUpdater.logger_("Calling mediaSource endOfStream(" + (error || '') + ")");
18452
18453 try {
18454 sourceUpdater.mediaSource.endOfStream(error);
18455 } catch (e) {
18456 videojs.log.warn('Failed to call media source endOfStream', e);
18457 }
18458 };
18459 },
18460 duration: function duration(_duration) {
18461 return function (sourceUpdater) {
18462 sourceUpdater.logger_("Setting mediaSource duration to " + _duration);
18463
18464 try {
18465 sourceUpdater.mediaSource.duration = _duration;
18466 } catch (e) {
18467 videojs.log.warn('Failed to set media source duration', e);
18468 }
18469 };
18470 },
18471 abort: function abort() {
18472 return function (type, sourceUpdater) {
18473 if (sourceUpdater.mediaSource.readyState !== 'open') {
18474 return;
18475 }
18476
18477 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
18478 // or the media source does not contain this source buffer.
18479
18480 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18481 return;
18482 }
18483
18484 sourceUpdater.logger_("calling abort on " + type + "Buffer");
18485
18486 try {
18487 sourceBuffer.abort();
18488 } catch (e) {
18489 videojs.log.warn("Failed to abort on " + type + "Buffer", e);
18490 }
18491 };
18492 },
18493 addSourceBuffer: function addSourceBuffer(type, codec) {
18494 return function (sourceUpdater) {
18495 var titleType = toTitleCase(type);
18496 var mime = getMimeForCodec(codec);
18497 sourceUpdater.logger_("Adding " + type + "Buffer with codec " + codec + " to mediaSource");
18498 var sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
18499 sourceBuffer.addEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
18500 sourceBuffer.addEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
18501 sourceUpdater.codecs[type] = codec;
18502 sourceUpdater[type + "Buffer"] = sourceBuffer;
18503 };
18504 },
18505 removeSourceBuffer: function removeSourceBuffer(type) {
18506 return function (sourceUpdater) {
18507 var sourceBuffer = sourceUpdater[type + "Buffer"];
18508 cleanupBuffer(type, sourceUpdater); // can't do anything if the media source / source buffer is null
18509 // or the media source does not contain this source buffer.
18510
18511 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18512 return;
18513 }
18514
18515 sourceUpdater.logger_("Removing " + type + "Buffer with codec " + sourceUpdater.codecs[type] + " from mediaSource");
18516
18517 try {
18518 sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
18519 } catch (e) {
18520 videojs.log.warn("Failed to removeSourceBuffer " + type + "Buffer", e);
18521 }
18522 };
18523 },
18524 changeType: function changeType(codec) {
18525 return function (type, sourceUpdater) {
18526 var sourceBuffer = sourceUpdater[type + "Buffer"];
18527 var mime = getMimeForCodec(codec); // can't do anything if the media source / source buffer is null
18528 // or the media source does not contain this source buffer.
18529
18530 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18531 return;
18532 } // do not update codec if we don't need to.
18533
18534
18535 if (sourceUpdater.codecs[type] === codec) {
18536 return;
18537 }
18538
18539 sourceUpdater.logger_("changing " + type + "Buffer codec from " + sourceUpdater.codecs[type] + " to " + codec);
18540 sourceBuffer.changeType(mime);
18541 sourceUpdater.codecs[type] = codec;
18542 };
18543 }
18544};
18545
18546var pushQueue = function pushQueue(_ref) {
18547 var type = _ref.type,
18548 sourceUpdater = _ref.sourceUpdater,
18549 action = _ref.action,
18550 doneFn = _ref.doneFn,
18551 name = _ref.name;
18552 sourceUpdater.queue.push({
18553 type: type,
18554 action: action,
18555 doneFn: doneFn,
18556 name: name
18557 });
18558 shiftQueue(type, sourceUpdater);
18559};
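// Each entry in `actions` above is a factory: calling it with its arguments
// returns the function that the queue later invokes with (type, sourceUpdater),
// or just (sourceUpdater) for media source actions. An illustrative removal:
//
//   pushQueue({
//     type: 'video',
//     sourceUpdater: sourceUpdater,
//     action: actions.remove(0, 10),
//     doneFn: function () { /* runs on the source buffer's updateend */ },
//     name: 'remove'
//   });
//
// shiftQueue then executes the action as soon as the video buffer is idle.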
18560
18561var onUpdateend = function onUpdateend(type, sourceUpdater) {
18562 return function (e) {
18563 // Although there should, in theory, be a pending action for any updateend received,
18564 // there are some actions that may trigger updateend events that are not explicitly
18565 // defined in the w3c spec. For instance, setting the duration on the media source may
18566 // trigger updateend events on source buffers. As such,
18567 // if we encounter an updateend without a corresponding pending action from our queue
18568 // for that source buffer type, process the next action.
18569 if (sourceUpdater.queuePending[type]) {
18570 var doneFn = sourceUpdater.queuePending[type].doneFn;
18571 sourceUpdater.queuePending[type] = null;
18572
18573 if (doneFn) {
18574 // if there's an error, report it
18575 doneFn(sourceUpdater[type + "Error_"]);
18576 }
18577 }
18578
18579 shiftQueue(type, sourceUpdater);
18580 };
18581};
18582/**
18583 * A queue of callbacks to be serialized and applied when a
18584 * MediaSource and its associated SourceBuffers are not in the
18585 * updating state. It is used by the segment loader to update the
18586 * underlying SourceBuffers when new data is loaded, for instance.
18587 *
18588 * @class SourceUpdater
18589 * @param {MediaSource} mediaSource
18590 * the MediaSource to create SourceBuffers from
18591 */
18592
18593
18594var SourceUpdater = /*#__PURE__*/function (_videojs$EventTarget) {
18595 _inheritsLoose(SourceUpdater, _videojs$EventTarget);
18596
18597 function SourceUpdater(mediaSource) {
18598 var _this;
18599
18600 _this = _videojs$EventTarget.call(this) || this;
18601 _this.mediaSource = mediaSource;
18602
18603 _this.sourceopenListener_ = function () {
18604 return shiftQueue('mediaSource', _assertThisInitialized(_this));
18605 };
18606
18607 _this.mediaSource.addEventListener('sourceopen', _this.sourceopenListener_);
18608
18609 _this.logger_ = logger('SourceUpdater'); // initial timestamp offset is 0
18610
18611 _this.audioTimestampOffset_ = 0;
18612 _this.videoTimestampOffset_ = 0;
18613 _this.queue = [];
18614 _this.queuePending = {
18615 audio: null,
18616 video: null
18617 };
18618 _this.delayedAudioAppendQueue_ = [];
18619 _this.videoAppendQueued_ = false;
18620 _this.codecs = {};
18621 _this.onVideoUpdateEnd_ = onUpdateend('video', _assertThisInitialized(_this));
18622 _this.onAudioUpdateEnd_ = onUpdateend('audio', _assertThisInitialized(_this));
18623
18624 _this.onVideoError_ = function (e) {
18625 // used for debugging
18626 _this.videoError_ = e;
18627 };
18628
18629 _this.onAudioError_ = function (e) {
18630 // used for debugging
18631 _this.audioError_ = e;
18632 };
18633
18634 _this.createdSourceBuffers_ = false;
18635 _this.initializedEme_ = false;
18636 _this.triggeredReady_ = false;
18637 return _this;
18638 }
18639
18640 var _proto = SourceUpdater.prototype;
18641
18642 _proto.initializedEme = function initializedEme() {
18643 this.initializedEme_ = true;
18644 this.triggerReady();
18645 };
18646
18647 _proto.hasCreatedSourceBuffers = function hasCreatedSourceBuffers() {
18648 // if false, likely waiting on one of the segment loaders to get enough data to create
18649 // source buffers
18650 return this.createdSourceBuffers_;
18651 };
18652
18653 _proto.hasInitializedAnyEme = function hasInitializedAnyEme() {
18654 return this.initializedEme_;
18655 };
18656
18657 _proto.ready = function ready() {
18658 return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
18659 };
18660
18661 _proto.createSourceBuffers = function createSourceBuffers(codecs) {
18662 if (this.hasCreatedSourceBuffers()) {
18663 // already created them before
18664 return;
18665 } // the initial addOrChangeSourceBuffers call will always
18666 // add two source buffers.
18667
18668
18669 this.addOrChangeSourceBuffers(codecs);
18670 this.createdSourceBuffers_ = true;
18671 this.trigger('createdsourcebuffers');
18672 this.triggerReady();
18673 };
18674
18675 _proto.triggerReady = function triggerReady() {
18676 // only allow ready to be triggered once, this prevents the case
18677 // where:
18678 // 1. we trigger createdsourcebuffers
18679 // 2. IE 11 synchronously initializes eme
18680 // 3. the synchronous initialization causes us to trigger ready
18681 // 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
18682 if (this.ready() && !this.triggeredReady_) {
18683 this.triggeredReady_ = true;
18684 this.trigger('ready');
18685 }
18686 }
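// Typical startup sequence (illustrative; codec strings are example values):
//
//   var updater = new SourceUpdater(new window.MediaSource());
//   updater.on('ready', function () { /* safe to queue appends */ });
//   updater.createSourceBuffers({ video: 'avc1.4d401f', audio: 'mp4a.40.2' });
//   updater.initializedEme();
//   // 'ready' fires exactly once, after both conditions above are met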
18687 /**
18688 * Add a type of source buffer to the media source.
18689 *
18690 * @param {string} type
18691 * The type of source buffer to add.
18692 *
18693 * @param {string} codec
18694 * The codec to add the source buffer with.
18695 */
18696 ;
18697
18698 _proto.addSourceBuffer = function addSourceBuffer(type, codec) {
18699 pushQueue({
18700 type: 'mediaSource',
18701 sourceUpdater: this,
18702 action: actions.addSourceBuffer(type, codec),
18703 name: 'addSourceBuffer'
18704 });
18705 }
18706 /**
18707 * call abort on a source buffer.
18708 *
18709 * @param {string} type
18710 * The type of source buffer to call abort on.
18711 */
18712 ;
18713
18714 _proto.abort = function abort(type) {
18715 pushQueue({
18716 type: type,
18717 sourceUpdater: this,
18718 action: actions.abort(type),
18719 name: 'abort'
18720 });
18721 }
18722 /**
18723 * Call removeSourceBuffer and remove a specific type
18724 * of source buffer on the mediaSource.
18725 *
18726 * @param {string} type
18727 * The type of source buffer to remove.
18728 */
18729 ;
18730
18731 _proto.removeSourceBuffer = function removeSourceBuffer(type) {
18732 if (!this.canRemoveSourceBuffer()) {
18733 videojs.log.error('removeSourceBuffer is not supported!');
18734 return;
18735 }
18736
18737 pushQueue({
18738 type: 'mediaSource',
18739 sourceUpdater: this,
18740 action: actions.removeSourceBuffer(type),
18741 name: 'removeSourceBuffer'
18742 });
18743 }
18744 /**
18745 * Whether or not the removeSourceBuffer function is supported
18746 * on the mediaSource.
18747 *
18748 * @return {boolean}
18749 * if removeSourceBuffer can be called.
18750 */
18751 ;
18752
18753 _proto.canRemoveSourceBuffer = function canRemoveSourceBuffer() {
18754 // IE reports that it supports removeSourceBuffer, but often throws
18755 // errors when attempting to use the function. So we report that it
18756 // does not support removeSourceBuffer. As of Firefox 83 removeSourceBuffer
18757 // throws errors, so we report that it does not support this as well.
18758 return !videojs.browser.IE_VERSION && !videojs.browser.IS_FIREFOX && window.MediaSource && window.MediaSource.prototype && typeof window.MediaSource.prototype.removeSourceBuffer === 'function';
18759 }
18760 /**
18761 * Whether or not the changeType function is supported
18762 * on our SourceBuffers.
18763 *
18764 * @return {boolean}
18765 * if changeType can be called.
18766 */
18767 ;
18768
18769 SourceUpdater.canChangeType = function canChangeType() {
18770 return window.SourceBuffer && window.SourceBuffer.prototype && typeof window.SourceBuffer.prototype.changeType === 'function';
18771 }
18772 /**
18773 * Whether or not the changeType function is supported
18774 * on our SourceBuffers.
18775 *
18776 * @return {boolean}
18777 * if changeType can be called.
18778 */
18779 ;
18780
18781 _proto.canChangeType = function canChangeType() {
18782 return this.constructor.canChangeType();
18783 }
18784 /**
18785 * Call the changeType function on a source buffer, given the code and type.
18786 *
18787 * @param {string} type
18788 * The type of source buffer to call changeType on.
18789 *
18790 * @param {string} codec
18791 * The codec string to change type with on the source buffer.
18792 */
18793 ;
18794
18795 _proto.changeType = function changeType(type, codec) {
18796 if (!this.canChangeType()) {
18797 videojs.log.error('changeType is not supported!');
18798 return;
18799 }
18800
18801 pushQueue({
18802 type: type,
18803 sourceUpdater: this,
18804 action: actions.changeType(codec),
18805 name: 'changeType'
18806 });
18807 }
18808 /**
18809 * Add source buffers with a codec or, if they are already created,
18810 * call changeType on the existing source buffers.
18811 *
18812 * @param {Object} codecs
18813 * Codecs to switch to
18814 */
18815 ;
18816
18817 _proto.addOrChangeSourceBuffers = function addOrChangeSourceBuffers(codecs) {
18818 var _this2 = this;
18819
18820 if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
18821 throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
18822 }
18823
18824 Object.keys(codecs).forEach(function (type) {
18825 var codec = codecs[type];
18826
18827 if (!_this2.hasCreatedSourceBuffers()) {
18828 return _this2.addSourceBuffer(type, codec);
18829 }
18830
18831 if (_this2.canChangeType()) {
18832 _this2.changeType(type, codec);
18833 }
18834 });
18835 }
18836 /**
18837 * Queue an update to append an ArrayBuffer.
18838 *
18839 * @param {MediaObject} object containing audioBytes and/or videoBytes
18840 * @param {Function} done the function to call when done
18841 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
18842 */
18843 ;
18844
18845 _proto.appendBuffer = function appendBuffer(options, doneFn) {
18846 var _this3 = this;
18847
18848 var segmentInfo = options.segmentInfo,
18849 type = options.type,
18850 bytes = options.bytes;
18851 this.processedAppend_ = true;
18852
18853 if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
18854 this.delayedAudioAppendQueue_.push([options, doneFn]);
18855 this.logger_("delayed audio append of " + bytes.length + " until video append");
18856 return;
18857 } // In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
18858 // not be fired. This means that the queue will be blocked until the next action
18859 // taken by the segment-loader. Provide a mechanism for segment-loader to handle
18860 // these errors by calling the doneFn with the specific error.
18861
18862
18863 var onError = doneFn;
18864 pushQueue({
18865 type: type,
18866 sourceUpdater: this,
18867 action: actions.appendBuffer(bytes, segmentInfo || {
18868 mediaIndex: -1
18869 }, onError),
18870 doneFn: doneFn,
18871 name: 'appendBuffer'
18872 });
18873
18874 if (type === 'video') {
18875 this.videoAppendQueued_ = true;
18876
18877 if (!this.delayedAudioAppendQueue_.length) {
18878 return;
18879 }
18880
18881 var queue = this.delayedAudioAppendQueue_.slice();
18882 this.logger_("queuing delayed audio " + queue.length + " appendBuffers");
18883 this.delayedAudioAppendQueue_.length = 0;
18884 queue.forEach(function (que) {
18885 _this3.appendBuffer.apply(_this3, que);
18886 });
18887 }
18888 }
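// Illustrative muxed-content flow: with both buffers present, an audio append
// arriving before any video append is parked in delayedAudioAppendQueue_, and
// the first video append re-queues it (audioBytes, videoBytes and the two
// callbacks below are placeholders):
//
//   updater.appendBuffer({ type: 'audio', bytes: audioBytes }, onAudioDone); // delayed
//   updater.appendBuffer({ type: 'video', bytes: videoBytes }, onVideoDone); // queues both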
18889 /**
18890 * Get the audio buffer's buffered timerange.
18891 *
18892 * @return {TimeRange}
18893 * The audio buffer's buffered time range
18894 */
18895 ;
18896
18897 _proto.audioBuffered = function audioBuffered() {
18898 // no media source/source buffer or it isn't in the media sources
18899 // source buffer list
18900 if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
18901 return videojs.createTimeRange();
18902 }
18903
18904 return this.audioBuffer.buffered ? this.audioBuffer.buffered : videojs.createTimeRange();
18905 }
18906 /**
18907 * Get the video buffer's buffered timerange.
18908 *
18909 * @return {TimeRange}
18910 * The video buffer's buffered time range
18911 */
18912 ;
18913
18914 _proto.videoBuffered = function videoBuffered() {
18915 // no media source/source buffer or it isn't in the media sources
18916 // source buffer list
18917 if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
18918 return videojs.createTimeRange();
18919 }
18920
18921 return this.videoBuffer.buffered ? this.videoBuffer.buffered : videojs.createTimeRange();
18922 }
18923 /**
18924 * Get a combined video/audio buffer's buffered timerange.
18925 *
18926 * @return {TimeRange}
18927 * the combined time range
18928 */
18929 ;
18930
18931 _proto.buffered = function buffered() {
18932 var video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
18933 var audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;
18934
18935 if (audio && !video) {
18936 return this.audioBuffered();
18937 }
18938
18939 if (video && !audio) {
18940 return this.videoBuffered();
18941 }
18942
18943 return bufferIntersection(this.audioBuffered(), this.videoBuffered());
18944 }
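// Illustrative: with video buffered [[0, 10]] and audio buffered [[2, 12]],
// buffered() returns the intersection [[2, 10]], since playback can only
// proceed where both tracks have data.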
18945 /**
18946 * Add a callback to the queue that will set duration on the mediaSource.
18947 *
18948 * @param {number} duration
18949 * The duration to set
18950 *
18951 * @param {Function} [doneFn]
18952 * function to run after duration has been set.
18953 */
18954 ;
18955
18956 _proto.setDuration = function setDuration(duration, doneFn) {
18957 if (doneFn === void 0) {
18958 doneFn = noop;
18959 }
18960
18961 // In order to set the duration on the media source, it's necessary to wait for all
18962 // source buffers to no longer be updating. "If the updating attribute equals true on
18963 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
18964 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
18965 pushQueue({
18966 type: 'mediaSource',
18967 sourceUpdater: this,
18968 action: actions.duration(duration),
18969 name: 'duration',
18970 doneFn: doneFn
18971 });
18972 }
18973 /**
18974 * Add a mediaSource endOfStream call to the queue
18975 *
18976 * @param {Error} [error]
18977 * Call endOfStream with an error
18978 *
18979 * @param {Function} [doneFn]
18980 * A function that should be called when the
18981 * endOfStream call has finished.
18982 */
18983 ;
18984
18985 _proto.endOfStream = function endOfStream(error, doneFn) {
18986 if (error === void 0) {
18987 error = null;
18988 }
18989
18990 if (doneFn === void 0) {
18991 doneFn = noop;
18992 }
18993
18994 if (typeof error !== 'string') {
18995 error = undefined;
18996 } // In order to call endOfStream on the media source, it's necessary to wait for all
18997 // source buffers to no longer be updating. "If the updating attribute equals true on
18998 // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
18999 // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
19000
19001
19002 pushQueue({
19003 type: 'mediaSource',
19004 sourceUpdater: this,
19005 action: actions.endOfStream(error),
19006 name: 'endOfStream',
19007 doneFn: doneFn
19008 });
19009 }
19010 /**
19011 * Queue an update to remove a time range from the buffer.
19012 *
19013 * @param {number} start where to start the removal
19014 * @param {number} end where to end the removal
19015 * @param {Function} [done=noop] optional callback to be executed when the remove
19016 * operation is complete
19017 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
19018 */
19019 ;
19020
19021 _proto.removeAudio = function removeAudio(start, end, done) {
19022 if (done === void 0) {
19023 done = noop;
19024 }
19025
19026 if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
19027 done();
19028 return;
19029 }
19030
19031 pushQueue({
19032 type: 'audio',
19033 sourceUpdater: this,
19034 action: actions.remove(start, end),
19035 doneFn: done,
19036 name: 'remove'
19037 });
19038 }
19039 /**
19040 * Queue an update to remove a time range from the buffer.
19041 *
19042 * @param {number} start where to start the removal
19043 * @param {number} end where to end the removal
19044 * @param {Function} [done=noop] optional callback to be executed when the remove
19045 * operation is complete
19046 * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
19047 */
19048 ;
19049
19050 _proto.removeVideo = function removeVideo(start, end, done) {
19051 if (done === void 0) {
19052 done = noop;
19053 }
19054
19055 if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
19056 done();
19057 return;
19058 }
19059
19060 pushQueue({
19061 type: 'video',
19062 sourceUpdater: this,
19063 action: actions.remove(start, end),
19064 doneFn: done,
19065 name: 'remove'
19066 });
19067 }
19068 /**
19069 * Whether the underlying sourceBuffer is updating or not
19070 *
19071 * @return {boolean} the updating status of the SourceBuffer
19072 */
19073 ;
19074
19075 _proto.updating = function updating() {
19076 // the audio/video source buffer is updating
19077 if (_updating('audio', this) || _updating('video', this)) {
19078 return true;
19079 }
19080
19081 return false;
19082 }
19083 /**
19084 * Set/get the timestampoffset on the audio SourceBuffer
19085 *
19086 * @return {number} the timestamp offset
19087 */
19088 ;
19089
19090 _proto.audioTimestampOffset = function audioTimestampOffset(offset) {
19091 if (typeof offset !== 'undefined' && this.audioBuffer && // no point in updating if it's the same
19092 this.audioTimestampOffset_ !== offset) {
19093 pushQueue({
19094 type: 'audio',
19095 sourceUpdater: this,
19096 action: actions.timestampOffset(offset),
19097 name: 'timestampOffset'
19098 });
19099 this.audioTimestampOffset_ = offset;
19100 }
19101
19102 return this.audioTimestampOffset_;
19103 }
19104 /**
19105 * Set/get the timestampoffset on the video SourceBuffer
19106 *
19107 * @return {number} the timestamp offset
19108 */
19109 ;
19110
19111 _proto.videoTimestampOffset = function videoTimestampOffset(offset) {
19112 if (typeof offset !== 'undefined' && this.videoBuffer && // no point in updating if it's the same
19113 this.videoTimestampOffset_ !== offset) {
19114 pushQueue({
19115 type: 'video',
19116 sourceUpdater: this,
19117 action: actions.timestampOffset(offset),
19118 name: 'timestampOffset'
19119 });
19120 this.videoTimestampOffset_ = offset;
19121 }
19122
19123 return this.videoTimestampOffset_;
19124 }
19125 /**
19126 * Add a function to the queue that will be called
19127 * when it is its turn to run in the audio queue.
19128 *
19129 * @param {Function} callback
19130 * The callback to queue.
19131 */
19132 ;
19133
19134 _proto.audioQueueCallback = function audioQueueCallback(callback) {
19135 if (!this.audioBuffer) {
19136 return;
19137 }
19138
19139 pushQueue({
19140 type: 'audio',
19141 sourceUpdater: this,
19142 action: actions.callback(callback),
19143 name: 'callback'
19144 });
19145 }
19146 /**
19147 * Add a function to the queue that will be called
19148 * when it is its turn to run in the video queue.
19149 *
19150 * @param {Function} callback
19151 * The callback to queue.
19152 */
19153 ;
19154
19155 _proto.videoQueueCallback = function videoQueueCallback(callback) {
19156 if (!this.videoBuffer) {
19157 return;
19158 }
19159
19160 pushQueue({
19161 type: 'video',
19162 sourceUpdater: this,
19163 action: actions.callback(callback),
19164 name: 'callback'
19165 });
19166 }
19167 /**
19168 * dispose of the source updater and the underlying sourceBuffer
19169 */
19170 ;
19171
19172 _proto.dispose = function dispose() {
19173 var _this4 = this;
19174
19175 this.trigger('dispose');
19176 bufferTypes.forEach(function (type) {
19177 _this4.abort(type);
19178
19179 if (_this4.canRemoveSourceBuffer()) {
19180 _this4.removeSourceBuffer(type);
19181 } else {
19182 _this4[type + "QueueCallback"](function () {
19183 return cleanupBuffer(type, _this4);
19184 });
19185 }
19186 });
19187 this.videoAppendQueued_ = false;
19188 this.delayedAudioAppendQueue_.length = 0;
19189
19190 if (this.sourceopenListener_) {
19191 this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
19192 }
19193
19194 this.off();
19195 };
19196
19197 return SourceUpdater;
19198}(videojs.EventTarget);
19199
19200var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
19201 return decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));
19202};
19203
19204var VTT_LINE_TERMINATORS = new Uint8Array('\n\n'.split('').map(function (char) {
19205 return char.charCodeAt(0);
19206}));
19207/**
19208 * An object that manages segment loading and appending.
19209 *
19210 * @class VTTSegmentLoader
19211 * @param {Object} options required and optional options
19212 * @extends videojs.EventTarget
19213 */
19214
19215var VTTSegmentLoader = /*#__PURE__*/function (_SegmentLoader) {
19216 _inheritsLoose(VTTSegmentLoader, _SegmentLoader);
19217
19218 function VTTSegmentLoader(settings, options) {
19219 var _this;
19220
19221 if (options === void 0) {
19222 options = {};
19223 }
19224
19225 _this = _SegmentLoader.call(this, settings, options) || this; // SegmentLoader requires a MediaSource be specified or it will throw an error;
19226 // however, VTTSegmentLoader has no need of a media source, so delete the reference
19227
19228 _this.mediaSource_ = null;
19229 _this.subtitlesTrack_ = null;
19230 _this.loaderType_ = 'subtitle';
19231 _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks; // The VTT segment will have its own time mappings. Saving VTT segment timing info in
19232 // the sync controller leads to improper behavior.
19233
19234 _this.shouldSaveSegmentTimingInfo_ = false;
19235 return _this;
19236 }
19237
19238 var _proto = VTTSegmentLoader.prototype;
19239
19240 _proto.createTransmuxer_ = function createTransmuxer_() {
19241 // don't need to transmux any subtitles
19242 return null;
19243 }
19244 /**
19245 * Indicates which time ranges are buffered
19246 *
19247 * @return {TimeRange}
19248 * TimeRange object representing the current buffered ranges
19249 */
19250 ;
19251
19252 _proto.buffered_ = function buffered_() {
19253 if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
19254 return videojs.createTimeRanges();
19255 }
19256
19257 var cues = this.subtitlesTrack_.cues;
19258 var start = cues[0].startTime;
19259 var end = cues[cues.length - 1].startTime;
19260 return videojs.createTimeRanges([[start, end]]);
19261 }
19262 /**
19263 * Gets and sets init segment for the provided map
19264 *
19265 * @param {Object} map
19266 * The map object representing the init segment to get or set
19267 * @param {boolean=} set
19268 * If true, the init segment for the provided map should be saved
19269 * @return {Object}
19270 * map object for desired init segment
19271 */
19272 ;
19273
19274 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
19275 if (set === void 0) {
19276 set = false;
19277 }
19278
19279 if (!map) {
19280 return null;
19281 }
19282
19283 var id = initSegmentId(map);
19284 var storedMap = this.initSegments_[id];
19285
19286 if (set && !storedMap && map.bytes) {
19287 // append WebVTT line terminators to the media initialization segment if it exists
19288 // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
19289 // requires two or more WebVTT line terminators between the WebVTT header and the
19290 // rest of the file
19291 var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
19292 var combinedSegment = new Uint8Array(combinedByteLength);
19293 combinedSegment.set(map.bytes);
19294 combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
19295 this.initSegments_[id] = storedMap = {
19296 resolvedUri: map.resolvedUri,
19297 byterange: map.byterange,
19298 bytes: combinedSegment
19299 };
19300 }
19301
19302 return storedMap || map;
19303 }
19304 /**
19305 * Returns true if all configuration required for loading is present, otherwise false.
19306 *
19307 * @return {boolean} True if the all configuration is ready for loading
19308 * @private
19309 */
19310 ;
19311
19312 _proto.couldBeginLoading_ = function couldBeginLoading_() {
19313 return this.playlist_ && this.subtitlesTrack_ && !this.paused();
19314 }
19315 /**
19316 * Once all the starting parameters have been specified, begin
19317 * operation. This method should only be invoked from the INIT
19318 * state.
19319 *
19320 * @private
19321 */
19322 ;
19323
19324 _proto.init_ = function init_() {
19325 this.state = 'READY';
19326 this.resetEverything();
19327 return this.monitorBuffer_();
19328 }
19329 /**
19330 * Set a subtitle track on the segment loader to add subtitles to
19331 *
19332 * @param {TextTrack=} track
19333 * The text track to add loaded subtitles to
19334 * @return {TextTrack}
19335 * Returns the subtitles track
19336 */
19337 ;
19338
19339 _proto.track = function track(_track) {
19340 if (typeof _track === 'undefined') {
19341 return this.subtitlesTrack_;
19342 }
19343
19344 this.subtitlesTrack_ = _track; // if we were unpaused but waiting for a sourceUpdater, start
19345 // buffering now
19346
19347 if (this.state === 'INIT' && this.couldBeginLoading_()) {
19348 this.init_();
19349 }
19350
19351 return this.subtitlesTrack_;
19352 }
19353 /**
19354 * Remove any data in the source buffer between start and end times
19355 *
19356 * @param {number} start - the start time of the region to remove from the buffer
19357 * @param {number} end - the end time of the region to remove from the buffer
19358 */
19359 ;
19360
19361 _proto.remove = function remove(start, end) {
19362 removeCuesFromTrack(start, end, this.subtitlesTrack_);
19363 }
19364 /**
19365 * fill the buffer with segments unless the sourceBuffers are
19366 * currently updating
19367 *
19368 * Note: this function should only ever be called by monitorBuffer_
19369 * and never directly
19370 *
19371 * @private
19372 */
19373 ;
19374
19375 _proto.fillBuffer_ = function fillBuffer_() {
19376 var _this2 = this;
19377
19378 // see if we need to begin loading immediately
19379 var segmentInfo = this.chooseNextRequest_();
19380
19381 if (!segmentInfo) {
19382 return;
19383 }
19384
19385 if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
19386 // We don't have the timestamp offset that we need to sync subtitles.
19387 // Rerun on a timestamp offset or user interaction.
19388 var checkTimestampOffset = function checkTimestampOffset() {
19389 _this2.state = 'READY';
19390
19391 if (!_this2.paused()) {
19392 // if not paused, queue a buffer check as soon as possible
19393 _this2.monitorBuffer_();
19394 }
19395 };
19396
19397 this.syncController_.one('timestampoffset', checkTimestampOffset);
19398 this.state = 'WAITING_ON_TIMELINE';
19399 return;
19400 }
19401
19402 this.loadSegment_(segmentInfo);
19403 } // never set a timestamp offset for vtt segments.
19404 ;
19405
19406 _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_() {
19407 return null;
19408 };
19409
19410 _proto.chooseNextRequest_ = function chooseNextRequest_() {
19411 return this.skipEmptySegments_(_SegmentLoader.prototype.chooseNextRequest_.call(this));
19412 }
19413 /**
19414 * Prevents the segment loader from requesting segments we know contain no subtitles
19415 * by walking forward until we find the next segment that we don't yet know
19416 * to be empty.
19417 *
19418 * @param {Object} segmentInfo
19419 * a segment info object that describes the current segment
19420 * @return {Object}
19421 * a segment info object that describes the current segment
19422 */
19423 ;
19424
19425 _proto.skipEmptySegments_ = function skipEmptySegments_(segmentInfo) {
19426 while (segmentInfo && segmentInfo.segment.empty) {
19427 // stop at the last possible segmentInfo
19428 if (segmentInfo.mediaIndex + 1 >= segmentInfo.playlist.segments.length) {
19429 segmentInfo = null;
19430 break;
19431 }
19432
19433 segmentInfo = this.generateSegmentInfo_({
19434 playlist: segmentInfo.playlist,
19435 mediaIndex: segmentInfo.mediaIndex + 1,
19436 startOfSegment: segmentInfo.startOfSegment + segmentInfo.duration,
19437 isSyncRequest: segmentInfo.isSyncRequest
19438 });
19439 }
19440
19441 return segmentInfo;
19442 };
19443
19444 _proto.stopForError = function stopForError(error) {
19445 this.error(error);
19446 this.state = 'READY';
19447 this.pause();
19448 this.trigger('error');
19449 }
19450 /**
19451 * append a decrypted segment to the SourceBuffer through a SourceUpdater
19452 *
19453 * @private
19454 */
19455 ;
19456
19457 _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
19458 var _this3 = this;
19459
19460 if (!this.subtitlesTrack_) {
19461 this.state = 'READY';
19462 return;
19463 }
19464
19465 this.saveTransferStats_(simpleSegment.stats); // the request was aborted
19466
19467 if (!this.pendingSegment_) {
19468 this.state = 'READY';
19469 this.mediaRequestsAborted += 1;
19470 return;
19471 }
19472
19473 if (error) {
19474 if (error.code === REQUEST_ERRORS.TIMEOUT) {
19475 this.handleTimeout_();
19476 }
19477
19478 if (error.code === REQUEST_ERRORS.ABORTED) {
19479 this.mediaRequestsAborted += 1;
19480 } else {
19481 this.mediaRequestsErrored += 1;
19482 }
19483
19484 this.stopForError(error);
19485 return;
19486 }
19487
19488 var segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to
19489 // maintain functionality between segment loaders
19490
19491 this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
19492 this.state = 'APPENDING'; // used for tests
19493
19494 this.trigger('appending');
19495 var segment = segmentInfo.segment;
19496
19497 if (segment.map) {
19498 segment.map.bytes = simpleSegment.map.bytes;
19499 }
19500
19501 segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded, otherwise, wait till it finished loading
19502
19503 if (typeof window.WebVTT !== 'function' && this.subtitlesTrack_ && this.subtitlesTrack_.tech_) {
19504 var loadHandler;
19505
19506 var errorHandler = function errorHandler() {
19507 _this3.subtitlesTrack_.tech_.off('vttjsloaded', loadHandler);
19508
19509 _this3.stopForError({
19510 message: 'Error loading vtt.js'
19511 });
19512
19513 return;
19514 };
19515
19516 loadHandler = function loadHandler() {
19517 _this3.subtitlesTrack_.tech_.off('vttjserror', errorHandler);
19518
19519 _this3.segmentRequestFinished_(error, simpleSegment, result);
19520 };
19521
19522 this.state = 'WAITING_ON_VTTJS';
19523 this.subtitlesTrack_.tech_.one('vttjsloaded', loadHandler);
19524 this.subtitlesTrack_.tech_.one('vttjserror', errorHandler);
19525 return;
19526 }
19527
19528 segment.requested = true;
19529
19530 try {
19531 this.parseVTTCues_(segmentInfo);
19532 } catch (e) {
19533 this.stopForError({
19534 message: e.message
19535 });
19536 return;
19537 }
19538
19539 this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);
19540
19541 if (segmentInfo.cues.length) {
19542 segmentInfo.timingInfo = {
19543 start: segmentInfo.cues[0].startTime,
19544 end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
19545 };
19546 } else {
19547 segmentInfo.timingInfo = {
19548 start: segmentInfo.startOfSegment,
19549 end: segmentInfo.startOfSegment + segmentInfo.duration
19550 };
19551 }
19552
19553 if (segmentInfo.isSyncRequest) {
19554 this.trigger('syncinfoupdate');
19555 this.pendingSegment_ = null;
19556 this.state = 'READY';
19557 return;
19558 }
19559
19560 segmentInfo.byteLength = segmentInfo.bytes.byteLength;
19561 this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to
19562 // the subtitle track
19563
19564 segmentInfo.cues.forEach(function (cue) {
19565 _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window.VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
19566 }); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows
19567 // cues to have identical time-intervals, but if the text is also identical
19568 // we can safely assume it is a duplicate that can be removed (ex. when a cue
19569 // "overlaps" VTT segments)
19570
19571 removeDuplicateCuesFromTrack(this.subtitlesTrack_);
19572 this.handleAppendsDone_();
19573 };
19574
19575 _proto.handleData_ = function handleData_() {// noop, as we shouldn't be getting video/audio data or captions
19576 // here; neither is supported by the subtitle loader.
19577 };
19578
19579 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_() {// noop
19580 }
19581 /**
19582 * Uses the WebVTT parser to parse the segment response
19583 *
19584 * @param {Object} segmentInfo
19585 * a segment info object that describes the current segment
19586 * @private
19587 */
19588 ;
19589
19590 _proto.parseVTTCues_ = function parseVTTCues_(segmentInfo) {
19591 var decoder;
19592 var decodeBytesToString = false;
19593
19594 if (typeof window.TextDecoder === 'function') {
19595 decoder = new window.TextDecoder('utf8');
19596 } else {
19597 decoder = window.WebVTT.StringDecoder();
19598 decodeBytesToString = true;
19599 }
19600
19601 var parser = new window.WebVTT.Parser(window, window.vttjs, decoder);
19602 segmentInfo.cues = [];
19603 segmentInfo.timestampmap = {
19604 MPEGTS: 0,
19605 LOCAL: 0
19606 };
19607 parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
19608
19609 parser.ontimestampmap = function (map) {
19610 segmentInfo.timestampmap = map;
19611 };
19612
19613 parser.onparsingerror = function (error) {
19614 videojs.log.warn('Error encountered when parsing cues: ' + error.message);
19615 };
19616
19617 if (segmentInfo.segment.map) {
19618 var mapData = segmentInfo.segment.map.bytes;
19619
19620 if (decodeBytesToString) {
19621 mapData = uint8ToUtf8(mapData);
19622 }
19623
19624 parser.parse(mapData);
19625 }
19626
19627 var segmentData = segmentInfo.bytes;
19628
19629 if (decodeBytesToString) {
19630 segmentData = uint8ToUtf8(segmentData);
19631 }
19632
19633 parser.parse(segmentData);
19634 parser.flush();
19635 }
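// Illustrative segment payload handled by parseVTTCues_; the X-TIMESTAMP-MAP
// header maps cue-local time onto the MPEG-TS timeline:
//
//   WEBVTT
//   X-TIMESTAMP-MAP=MPEGTS:900000,LOCAL:00:00:00.000
//
//   00:00:01.000 --> 00:00:03.000
//   Hello, world
//
// For this input, ontimestampmap receives { MPEGTS: 900000, LOCAL: 0 }.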
19636 /**
19637 * Updates the start and end times of any cues parsed by the WebVTT parser using
19638 * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
19639 * from the SyncController
19640 *
19641 * @param {Object} segmentInfo
19642 * a segment info object that describes the current segment
19643 * @param {Object} mappingObj
19644 * object containing a mapping from TS to media time
19645 * @param {Object} playlist
19646 * the playlist object containing the segment
19647 * @private
19648 */
19649 ;
19650
19651 _proto.updateTimeMapping_ = function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
19652 var segment = segmentInfo.segment;
19653
19654 if (!mappingObj) {
19655 // If the sync controller does not have a mapping of TS to Media Time for the
19656 // timeline, then we don't have enough information to update the cue
19657 // start/end times
19658 return;
19659 }
19660
19661 if (!segmentInfo.cues.length) {
19662 // If there are no cues, we also do not have enough information to figure out
19663 // segment timing. Mark that the segment contains no cues so we don't re-request
19664 // an empty segment.
19665 segment.empty = true;
19666 return;
19667 }
19668
19669 var timestampmap = segmentInfo.timestampmap;
19670 var diff = timestampmap.MPEGTS / ONE_SECOND_IN_TS - timestampmap.LOCAL + mappingObj.mapping;
19671 segmentInfo.cues.forEach(function (cue) {
19672 // First convert cue time to TS time using the timestamp-map provided within the vtt
19673 cue.startTime += diff;
19674 cue.endTime += diff;
19675 });
19676
19677 if (!playlist.syncInfo) {
19678 var firstStart = segmentInfo.cues[0].startTime;
19679 var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
19680 playlist.syncInfo = {
19681 mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
19682 time: Math.min(firstStart, lastStart - segment.duration)
19683 };
19684 }
19685 };
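// Worked example (illustrative numbers): with ONE_SECOND_IN_TS === 90000, a
// timestampmap of { MPEGTS: 900000, LOCAL: 0 } and a timeline mapping of -8
// give diff = 900000 / 90000 - 0 + (-8) === 2, so a cue authored at
// 00:00:01.000 ends up displayed at player time 3.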
19686
19687 return VTTSegmentLoader;
19688}(SegmentLoader);
19689
19690/**
19691 * @file ad-cue-tags.js
19692 */
19693/**
19694 * Searches for an ad cue that overlaps with the given mediaTime
19695 *
19696 * @param {Object} track
19697 * the track to find the cue for
19698 *
19699 * @param {number} mediaTime
19700 * the time to find the cue at
19701 *
19702 * @return {Object|null}
19703 * the found cue or null
19704 */
19705
19706var findAdCue = function findAdCue(track, mediaTime) {
19707 var cues = track.cues;
19708
19709 for (var i = 0; i < cues.length; i++) {
19710 var cue = cues[i];
19711
19712 if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
19713 return cue;
19714 }
19715 }
19716
19717 return null;
19718};
19719var updateAdCues = function updateAdCues(media, track, offset) {
19720 if (offset === void 0) {
19721 offset = 0;
19722 }
19723
19724 if (!media.segments) {
19725 return;
19726 }
19727
19728 var mediaTime = offset;
19729 var cue;
19730
19731 for (var i = 0; i < media.segments.length; i++) {
19732 var segment = media.segments[i];
19733
19734 if (!cue) {
19735 // Since the cues will span for at least the segment duration, adding a fudge
19736 // factor of half segment duration will prevent duplicate cues from being
19737 // created when timing info is not exact (e.g. cue start time initialized
19738 // at 10.006677, but next call mediaTime is 10.003332 )
19739 cue = findAdCue(track, mediaTime + segment.duration / 2);
19740 }
19741
19742 if (cue) {
19743 if ('cueIn' in segment) {
19744 // Found a CUE-IN so end the cue
19745 cue.endTime = mediaTime;
19746 cue.adEndTime = mediaTime;
19747 mediaTime += segment.duration;
19748 cue = null;
19749 continue;
19750 }
19751
19752 if (mediaTime < cue.endTime) {
19753 // Already processed this mediaTime for this cue
19754 mediaTime += segment.duration;
19755 continue;
19756 } // otherwise extend cue until a CUE-IN is found
19757
19758
19759 cue.endTime += segment.duration;
19760 } else {
19761 if ('cueOut' in segment) {
19762 cue = new window.VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
19763 cue.adStartTime = mediaTime; // Assumes tag format to be
19764 // #EXT-X-CUE-OUT:30
19765
19766 cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
19767 track.addCue(cue);
19768 }
19769
19770 if ('cueOutCont' in segment) {
19771 // Entered into the middle of an ad cue
19772 // Assumes tag format to be
19773 // #EXT-X-CUE-OUT-CONT:10/30
19774 var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat),
19775 adOffset = _segment$cueOutCont$s[0],
19776 adTotal = _segment$cueOutCont$s[1];
19777
19778 cue = new window.VTTCue(mediaTime, mediaTime + segment.duration, '');
19779 cue.adStartTime = mediaTime - adOffset;
19780 cue.adEndTime = cue.adStartTime + adTotal;
19781 track.addCue(cue);
19782 }
19783 }
19784
19785 mediaTime += segment.duration;
19786 }
19787};
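// Illustrative ad-cue lifecycle for 10 second segments:
//   segment 0: #EXT-X-CUE-OUT:30  -> new cue [0, 10], adStartTime 0, adEndTime 30
//   segment 1: (no tag)           -> cue.endTime extended to 20
//   segment 2: (no tag)           -> cue.endTime extended to 30
//   segment 3: #EXT-X-CUE-IN      -> cue.endTime and adEndTime finalized at 30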
19788
19789// synchronize expired playlist segments.
19790// the max media sequence diff is 48 hours of live stream
19791// content with two second segments. Anything larger than that
19792// will likely be invalid.
19793
19794var MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC = 86400;
19795 var syncPointStrategies = [// Strategy "VOD": Handle the VOD case where the sync-point is *always*
19796// the equivalence display-time 0 === segment-index 0
19797{
19798 name: 'VOD',
19799 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
19800 if (duration !== Infinity) {
19801 var syncPoint = {
19802 time: 0,
19803 segmentIndex: 0,
19804 partIndex: null
19805 };
19806 return syncPoint;
19807 }
19808
19809 return null;
19810 }
19811}, // Stategy "ProgramDateTime": We have a program-date-time tag in this playlist
19812{
19813 name: 'ProgramDateTime',
19814 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
19815 if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
19816 return null;
19817 }
19818
19819 var syncPoint = null;
19820 var lastDistance = null;
19821 var partsAndSegments = getPartsAndSegments(playlist);
19822 currentTime = currentTime || 0;
19823
19824 for (var i = 0; i < partsAndSegments.length; i++) {
19825 // start from the end and loop backwards for live
19826 // or start from the front and loop forwards for non-live
19827 var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
19828 var partAndSegment = partsAndSegments[index];
19829 var segment = partAndSegment.segment;
19830 var datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];
19831
19832 if (!datetimeMapping) {
19833 continue;
19834 }
19835
19836 if (segment.dateTimeObject) {
19837 var segmentTime = segment.dateTimeObject.getTime() / 1000;
19838 var start = segmentTime + datetimeMapping; // take part duration into account.
19839
19840 if (segment.parts && typeof partAndSegment.partIndex === 'number') {
19841 for (var z = 0; z < partAndSegment.partIndex; z++) {
19842 start += segment.parts[z].duration;
19843 }
19844 }
19845
19846 var distance = Math.abs(currentTime - start); // Once the distance begins to increase, or if distance is 0, we have passed
19847 // currentTime and can stop looking for better candidates
19848
19849 if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
19850 break;
19851 }
19852
19853 lastDistance = distance;
19854 syncPoint = {
19855 time: start,
19856 segmentIndex: partAndSegment.segmentIndex,
19857 partIndex: partAndSegment.partIndex
19858 };
19859 }
19860 }
19861
19862 return syncPoint;
19863 }
19864}, // Stategy "Segment": We have a known time mapping for a timeline and a
19865// segment in the current timeline with timing data
19866{
19867 name: 'Segment',
19868 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
19869 var syncPoint = null;
19870 var lastDistance = null;
19871 currentTime = currentTime || 0;
19872 var partsAndSegments = getPartsAndSegments(playlist);
19873
19874 for (var i = 0; i < partsAndSegments.length; i++) {
19875 // start from the end and loop backwards for live
19876 // or start from the front and loop forwards for non-live
19877 var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
19878 var partAndSegment = partsAndSegments[index];
19879 var segment = partAndSegment.segment;
19880 var start = partAndSegment.part && partAndSegment.part.start || segment && segment.start;
19881
19882 if (segment.timeline === currentTimeline && typeof start !== 'undefined') {
19883 var distance = Math.abs(currentTime - start); // Once the distance begins to increase, we have passed
19884 // currentTime and can stop looking for better candidates
19885
19886 if (lastDistance !== null && lastDistance < distance) {
19887 break;
19888 }
19889
19890 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
19891 lastDistance = distance;
19892 syncPoint = {
19893 time: start,
19894 segmentIndex: partAndSegment.segmentIndex,
19895 partIndex: partAndSegment.partIndex
19896 };
19897 }
19898 }
19899 }
19900
19901 return syncPoint;
19902 }
19903}, // Strategy "Discontinuity": We have a discontinuity with a known
19904// display-time
19905{
19906 name: 'Discontinuity',
19907 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
19908 var syncPoint = null;
19909 currentTime = currentTime || 0;
19910
19911 if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
19912 var lastDistance = null;
19913
19914 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
19915 var segmentIndex = playlist.discontinuityStarts[i];
19916 var discontinuity = playlist.discontinuitySequence + i + 1;
19917 var discontinuitySync = syncController.discontinuities[discontinuity];
19918
19919 if (discontinuitySync) {
19920 var distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed
19921 // currentTime and can stop looking for better candidates
19922
19923 if (lastDistance !== null && lastDistance < distance) {
19924 break;
19925 }
19926
19927 if (!syncPoint || lastDistance === null || lastDistance >= distance) {
19928 lastDistance = distance;
19929 syncPoint = {
19930 time: discontinuitySync.time,
19931 segmentIndex: segmentIndex,
19932 partIndex: null
19933 };
19934 }
19935 }
19936 }
19937 }
19938
19939 return syncPoint;
19940 }
19941}, // Strategy "Playlist": We have a playlist with a known mapping of
19942// segment index to display time
19943{
19944 name: 'Playlist',
19945 run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
19946 if (playlist.syncInfo) {
19947 var syncPoint = {
19948 time: playlist.syncInfo.time,
19949 segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence,
19950 partIndex: null
19951 };
19952 return syncPoint;
19953 }
19954
19955 return null;
19956 }
19957}];
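// Illustrative sketch (not executed by the bundle): every strategy above
// exposes a `run` function with the same signature, returning either null or
// a sync-point shaped { time, segmentIndex, partIndex }. Running the "VOD"
// strategy directly against an assumed 60-second VOD source:
//
//   var vod = syncPointStrategies[0];
//   vod.run(syncController, playlist, 60, 0, 0);
//   // => { time: 0, segmentIndex: 0, partIndex: null }
//
// `syncController` and `playlist` are assumed to be an existing
// SyncController instance and a parsed media playlist.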
19958
19959var SyncController = /*#__PURE__*/function (_videojs$EventTarget) {
19960 _inheritsLoose(SyncController, _videojs$EventTarget);
19961
19962 function SyncController(options) {
19963 var _this;
19964
19965 _this = _videojs$EventTarget.call(this) || this; // ...for syncing across variants
19966
19967 _this.timelines = [];
19968 _this.discontinuities = [];
19969 _this.timelineToDatetimeMappings = {};
19970 _this.logger_ = logger('SyncController');
19971 return _this;
19972 }
19973 /**
19974 * Find a sync-point for the playlist specified
19975 *
19976 * A sync-point is defined as a known mapping from display-time to
19977 * a segment-index in the current playlist.
19978 *
19979 * @param {Playlist} playlist
19980 * The playlist that needs a sync-point
19981 * @param {number} duration
19982 * Duration of the MediaSource (Infinity if playing a live source)
19983 * @param {number} currentTimeline
19984 * The last timeline from which a segment was loaded
19985 * @return {Object}
19986 * A sync-point object
19987 */
19988
19989
19990 var _proto = SyncController.prototype;
19991
19992 _proto.getSyncPoint = function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
19993 var syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime);
19994
19995 if (!syncPoints.length) {
19996 // Signal that we need to attempt to get a sync-point manually
19997 // by fetching a segment in the playlist and constructing
19998 // a sync-point from that information
19999 return null;
20000 } // Now find the sync-point that is closest to the currentTime because
20001 // that should result in the most accurate guess about which segment
20002 // to fetch
20003
20004
20005 return this.selectSyncPoint_(syncPoints, {
20006 key: 'time',
20007 value: currentTime
20008 });
20009 }
20010 /**
20011 * Calculate the amount of time that has expired off the playlist during playback
20012 *
20013 * @param {Playlist} playlist
20014 * Playlist object to calculate expired from
20015 * @param {number} duration
20016 * Duration of the MediaSource (Infinity if playing a live source)
20017 * @return {number|null}
20018 * The amount of time that has expired off the playlist during playback. Null
20019 * if no sync-points for the playlist can be found.
20020 */
20021 ;
20022
20023 _proto.getExpiredTime = function getExpiredTime(playlist, duration) {
20024 if (!playlist || !playlist.segments) {
20025 return null;
20026 }
20027
20028 var syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0); // Without sync-points, there is not enough information to determine the expired time
20029
20030 if (!syncPoints.length) {
20031 return null;
20032 }
20033
20034 var syncPoint = this.selectSyncPoint_(syncPoints, {
20035 key: 'segmentIndex',
20036 value: 0
20037 }); // If the sync-point is beyond the start of the playlist, we want to subtract the
20038 // duration from index 0 to syncPoint.segmentIndex instead of adding.
20039
20040 if (syncPoint.segmentIndex > 0) {
20041 syncPoint.time *= -1;
20042 }
20043
20044 return Math.abs(syncPoint.time + sumDurations({
20045 defaultDuration: playlist.targetDuration,
20046 durationList: playlist.segments,
20047 startIndex: syncPoint.segmentIndex,
20048 endIndex: 0
20049 }));
20050 }
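 // Illustrative worked example (assumed values): if the selected sync-point
 // places segment index 3 at 10 seconds of display time, its time is negated
 // to -10 (segmentIndex > 0), and if segments 0 through 2 of the current
 // playlist sum to 6 seconds of duration, the expired time resolves to
 // Math.abs(-10 + 6) = 4 seconds of content gone from the front of the
 // playlist.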
20051 /**
20052 * Runs each sync-point strategy and returns a list of sync-points returned by the
20053 * strategies
20054 *
20055 * @private
20056 * @param {Playlist} playlist
20057 * The playlist that needs a sync-point
20058 * @param {number} duration
20059 * Duration of the MediaSource (Infinity if playing a live source)
20060 * @param {number} currentTimeline
20061 * The last timeline from which a segment was loaded
20062 * @return {Array}
20063 * A list of sync-point objects
20064 */
20065 ;
20066
20067 _proto.runStrategies_ = function runStrategies_(playlist, duration, currentTimeline, currentTime) {
20068 var syncPoints = []; // Try to find a sync-point by utilizing the various strategies...
20069
20070 for (var i = 0; i < syncPointStrategies.length; i++) {
20071 var strategy = syncPointStrategies[i];
20072 var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);
20073
20074 if (syncPoint) {
20075 syncPoint.strategy = strategy.name;
20076 syncPoints.push({
20077 strategy: strategy.name,
20078 syncPoint: syncPoint
20079 });
20080 }
20081 }
20082
20083 return syncPoints;
20084 }
20085 /**
20086 * Selects the sync-point nearest the specified target
20087 *
20088 * @private
20089 * @param {Array} syncPoints
20090 * List of sync-points to select from
20091 * @param {Object} target
20092 * Object specifying the property and value we are targeting
20093 * @param {string} target.key
20094 * Specifies the property to target. Must be either 'time' or 'segmentIndex'
20095 * @param {number} target.value
20096 * The value to target for the specified key.
20097 * @return {Object}
20098 * The sync-point nearest the target
20099 */
20100 ;
20101
20102 _proto.selectSyncPoint_ = function selectSyncPoint_(syncPoints, target) {
20103 var bestSyncPoint = syncPoints[0].syncPoint;
20104 var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
20105 var bestStrategy = syncPoints[0].strategy;
20106
20107 for (var i = 1; i < syncPoints.length; i++) {
20108 var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
20109
20110 if (newDistance < bestDistance) {
20111 bestDistance = newDistance;
20112 bestSyncPoint = syncPoints[i].syncPoint;
20113 bestStrategy = syncPoints[i].strategy;
20114 }
20115 }
20116
20117 this.logger_("syncPoint for [" + target.key + ": " + target.value + "] chosen with strategy" + (" [" + bestStrategy + "]: [time:" + bestSyncPoint.time + ",") + (" segmentIndex:" + bestSyncPoint.segmentIndex) + (typeof bestSyncPoint.partIndex === 'number' ? ",partIndex:" + bestSyncPoint.partIndex : '') + ']');
20118 return bestSyncPoint;
20119 }
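 // Illustrative sketch (assumed candidates): given sync-points from two
 // strategies and a target of { key: 'time', value: 30 }, the candidate with
 // the smallest absolute distance wins:
 //
 //   syncController.selectSyncPoint_([
 //     { strategy: 'Playlist', syncPoint: { time: 20, segmentIndex: 2, partIndex: null } },
 //     { strategy: 'Segment', syncPoint: { time: 31, segmentIndex: 3, partIndex: null } }
 //   ], { key: 'time', value: 30 });
 //   // => { time: 31, segmentIndex: 3, partIndex: null } (distance 1 beats 10)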
20120 /**
20121 * Save any meta-data present on the segments when segments leave
20122 * the live window to the playlist to allow for synchronization at the
20123 * playlist level later.
20124 *
20125 * @param {Playlist} oldPlaylist - The previous active playlist
20126 * @param {Playlist} newPlaylist - The updated and most current playlist
20127 */
20128 ;
20129
20130 _proto.saveExpiredSegmentInfo = function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
20131 var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence; // Ignore large media sequence gaps
20132
20133 if (mediaSequenceDiff > MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC) {
20134 videojs.log.warn("Not saving expired segment info. Media sequence gap " + mediaSequenceDiff + " is too large.");
20135 return;
20136 } // When a segment expires from the playlist and it has a start time
20137 // save that information as a possible sync-point reference for the future
20138
20139
20140 for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
20141 var lastRemovedSegment = oldPlaylist.segments[i];
20142
20143 if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
20144 newPlaylist.syncInfo = {
20145 mediaSequence: oldPlaylist.mediaSequence + i,
20146 time: lastRemovedSegment.start
20147 };
20148 this.logger_("playlist refresh sync: [time:" + newPlaylist.syncInfo.time + "," + (" mediaSequence: " + newPlaylist.syncInfo.mediaSequence + "]"));
20149 this.trigger('syncinfoupdate');
20150 break;
20151 }
20152 }
20153 }
20154 /**
20155 * Save the mapping from the playlist's ProgramDateTime to display time. This should only happen
20156 * before segments start to load.
20157 *
20158 * @param {Playlist} playlist - The currently active playlist
20159 */
20160 ;
20161
20162 _proto.setDateTimeMappingForStart = function setDateTimeMappingForStart(playlist) {
20163 // It's possible for the playlist to be updated before playback starts, meaning time
20164 // zero is not yet set. If, during these playlist refreshes, a discontinuity is
20165 // crossed, then the old time zero mapping (for the prior timeline) would be retained
20166 // unless the mappings are cleared.
20167 this.timelineToDatetimeMappings = {};
20168
20169 if (playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
20170 var firstSegment = playlist.segments[0];
20171 var playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
20172 this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
20173 }
20174 }
20175 /**
20176 * Calculates and saves timeline mappings, playlist sync info, and segment timing values
20177 * based on the latest timing information.
20178 *
20179 * @param {Object} options
20180 * Options object
20181 * @param {SegmentInfo} options.segmentInfo
20182 * The current active request information
20183 * @param {boolean} options.shouldSaveTimelineMapping
20184 * If there's a timeline change, determines whether the timeline mapping
20185 * should be saved for the timeline and program date time mappings.
20186 */
20187 ;
20188
20189 _proto.saveSegmentTimingInfo = function saveSegmentTimingInfo(_ref) {
20190 var segmentInfo = _ref.segmentInfo,
20191 shouldSaveTimelineMapping = _ref.shouldSaveTimelineMapping;
20192 var didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);
20193 var segment = segmentInfo.segment;
20194
20195 if (didCalculateSegmentTimeMapping) {
20196 this.saveDiscontinuitySyncInfo_(segmentInfo); // If the playlist does not have sync information yet, record that information
20197 // now with segment timing information
20198
20199 if (!segmentInfo.playlist.syncInfo) {
20200 segmentInfo.playlist.syncInfo = {
20201 mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
20202 time: segment.start
20203 };
20204 }
20205 }
20206
20207 var dateTime = segment.dateTimeObject;
20208
20209 if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
20210 this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
20211 }
20212 };
20213
20214 _proto.timestampOffsetForTimeline = function timestampOffsetForTimeline(timeline) {
20215 if (typeof this.timelines[timeline] === 'undefined') {
20216 return null;
20217 }
20218
20219 return this.timelines[timeline].time;
20220 };
20221
20222 _proto.mappingForTimeline = function mappingForTimeline(timeline) {
20223 if (typeof this.timelines[timeline] === 'undefined') {
20224 return null;
20225 }
20226
20227 return this.timelines[timeline].mapping;
20228 }
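 // Illustrative sketch (assumed mapping): both lookups above read from
 // `this.timelines`, which is keyed by timeline number:
 //
 //   syncController.timelines[1] = { time: 10, mapping: -5 };
 //   syncController.timestampOffsetForTimeline(1); // => 10
 //   syncController.mappingForTimeline(1);         // => -5
 //   syncController.mappingForTimeline(7);         // => null (unknown timeline)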
20229 /**
20230 * Use the "media time" for a segment to generate a mapping to "display time" and
20231 * save that display time to the segment.
20232 *
20233 * @private
20234 * @param {SegmentInfo} segmentInfo
20235 * The current active request information
20236 * @param {Object} timingInfo
20237 * The start and end time of the current segment in "media time"
20238 * @param {boolean} shouldSaveTimelineMapping
20239 * If there's a timeline change, determines if the timeline mapping should be
20240 * saved in timelines.
20241 * @return {boolean}
20242 * Returns false if segment time mapping could not be calculated
20243 */
20244 ;
20245
20246 _proto.calculateSegmentTimeMapping_ = function calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
20247 // TODO: remove side effects
20248 var segment = segmentInfo.segment;
20249 var part = segmentInfo.part;
20250 var mappingObj = this.timelines[segmentInfo.timeline];
20251 var start;
20252 var end;
20253
20254 if (typeof segmentInfo.timestampOffset === 'number') {
20255 mappingObj = {
20256 time: segmentInfo.startOfSegment,
20257 mapping: segmentInfo.startOfSegment - timingInfo.start
20258 };
20259
20260 if (shouldSaveTimelineMapping) {
20261 this.timelines[segmentInfo.timeline] = mappingObj;
20262 this.trigger('timestampoffset');
20263 this.logger_("time mapping for timeline " + segmentInfo.timeline + ": " + ("[time: " + mappingObj.time + "] [mapping: " + mappingObj.mapping + "]"));
20264 }
20265
20266 start = segmentInfo.startOfSegment;
20267 end = timingInfo.end + mappingObj.mapping;
20268 } else if (mappingObj) {
20269 start = timingInfo.start + mappingObj.mapping;
20270 end = timingInfo.end + mappingObj.mapping;
20271 } else {
20272 return false;
20273 }
20274
20275 if (part) {
20276 part.start = start;
20277 part.end = end;
20278 } // If we don't have a segment start yet or the start value we got
20279 // is less than our current segment.start value, save a new start value.
20280 // We have to do this because parts will have segment timing info saved
20281 // multiple times and we want segment start to be the earliest part start
20282 // value for that segment.
20283
20284
20285 if (!segment.start || start < segment.start) {
20286 segment.start = start;
20287 }
20288
20289 segment.end = end;
20290 return true;
20291 }
20292 /**
20293 * Each time we have discontinuity in the playlist, attempt to calculate the location
20294 * in display of the start of the discontinuity and save that. We also save an accuracy
20295 * value so that we save values with the most accuracy (closest to 0).
20296 *
20297 * @private
20298 * @param {SegmentInfo} segmentInfo - The current active request information
20299 */
20300 ;
20301
20302 _proto.saveDiscontinuitySyncInfo_ = function saveDiscontinuitySyncInfo_(segmentInfo) {
20303 var playlist = segmentInfo.playlist;
20304 var segment = segmentInfo.segment; // If the current segment is a discontinuity then we know exactly where
20305 // the start of the range is, and its accuracy is 0 (greater accuracy
20306 // values mean more approximation)
20307
20308 if (segment.discontinuity) {
20309 this.discontinuities[segment.timeline] = {
20310 time: segment.start,
20311 accuracy: 0
20312 };
20313 } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
20314 // Search for future discontinuities that we can provide better timing
20315 // information for and save that information for sync purposes
20316 for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
20317 var segmentIndex = playlist.discontinuityStarts[i];
20318 var discontinuity = playlist.discontinuitySequence + i + 1;
20319 var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
20320 var accuracy = Math.abs(mediaIndexDiff);
20321
20322 if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
20323 var time = void 0;
20324
20325 if (mediaIndexDiff < 0) {
20326 time = segment.start - sumDurations({
20327 defaultDuration: playlist.targetDuration,
20328 durationList: playlist.segments,
20329 startIndex: segmentInfo.mediaIndex,
20330 endIndex: segmentIndex
20331 });
20332 } else {
20333 time = segment.end + sumDurations({
20334 defaultDuration: playlist.targetDuration,
20335 durationList: playlist.segments,
20336 startIndex: segmentInfo.mediaIndex + 1,
20337 endIndex: segmentIndex
20338 });
20339 }
20340
20341 this.discontinuities[discontinuity] = {
20342 time: time,
20343 accuracy: accuracy
20344 };
20345 }
20346 }
20347 }
20348 };
20349
20350 _proto.dispose = function dispose() {
20351 this.trigger('dispose');
20352 this.off();
20353 };
20354
20355 return SyncController;
20356}(videojs.EventTarget);
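// Illustrative usage sketch (assumed inputs, not executed by the bundle):
//
//   var syncController = new SyncController();
//   // For a 60-second VOD playlist the "VOD" strategy always produces a
//   // candidate, so (if no other strategy yields a nearer one) we get:
//   syncController.getSyncPoint(playlist, 60, 0, 12);
//   // => { time: 0, segmentIndex: 0, partIndex: null, strategy: 'VOD' }
//
// `playlist` is assumed to be a parsed media playlist object.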
20357
20358/**
20359 * The TimelineChangeController acts as a source for segment loaders to listen for and
20360 * keep track of latest and pending timeline changes. This is useful to ensure proper
20361 * sync, as each loader may need to make a consideration for what timeline the other
20362 * loader is on before making changes which could impact the other loader's media.
20363 *
20364 * @class TimelineChangeController
20365 * @extends videojs.EventTarget
20366 */
20367
20368var TimelineChangeController = /*#__PURE__*/function (_videojs$EventTarget) {
20369 _inheritsLoose(TimelineChangeController, _videojs$EventTarget);
20370
20371 function TimelineChangeController() {
20372 var _this;
20373
20374 _this = _videojs$EventTarget.call(this) || this;
20375 _this.pendingTimelineChanges_ = {};
20376 _this.lastTimelineChanges_ = {};
20377 return _this;
20378 }
20379
20380 var _proto = TimelineChangeController.prototype;
20381
20382 _proto.clearPendingTimelineChange = function clearPendingTimelineChange(type) {
20383 this.pendingTimelineChanges_[type] = null;
20384 this.trigger('pendingtimelinechange');
20385 };
20386
20387 _proto.pendingTimelineChange = function pendingTimelineChange(_ref) {
20388 var type = _ref.type,
20389 from = _ref.from,
20390 to = _ref.to;
20391
20392 if (typeof from === 'number' && typeof to === 'number') {
20393 this.pendingTimelineChanges_[type] = {
20394 type: type,
20395 from: from,
20396 to: to
20397 };
20398 this.trigger('pendingtimelinechange');
20399 }
20400
20401 return this.pendingTimelineChanges_[type];
20402 };
20403
20404 _proto.lastTimelineChange = function lastTimelineChange(_ref2) {
20405 var type = _ref2.type,
20406 from = _ref2.from,
20407 to = _ref2.to;
20408
20409 if (typeof from === 'number' && typeof to === 'number') {
20410 this.lastTimelineChanges_[type] = {
20411 type: type,
20412 from: from,
20413 to: to
20414 };
20415 delete this.pendingTimelineChanges_[type];
20416 this.trigger('timelinechange');
20417 }
20418
20419 return this.lastTimelineChanges_[type];
20420 };
20421
20422 _proto.dispose = function dispose() {
20423 this.trigger('dispose');
20424 this.pendingTimelineChanges_ = {};
20425 this.lastTimelineChanges_ = {};
20426 this.off();
20427 };
20428
20429 return TimelineChangeController;
20430}(videojs.EventTarget);
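// Illustrative usage sketch (assumed values): a loader records a pending
// timeline change before it appends across timelines, then promotes it to
// the last change once the append completes:
//
//   var timelineChangeController = new TimelineChangeController();
//   timelineChangeController.pendingTimelineChange({ type: 'main', from: 0, to: 1 });
//   timelineChangeController.lastTimelineChange({ type: 'main', from: 0, to: 1 });
//   timelineChangeController.lastTimelineChange({ type: 'main' });
//   // => { type: 'main', from: 0, to: 1 } (and the pending entry is cleared)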
20431
20432/* rollup-plugin-worker-factory start for worker!/Users/bcasey/Projects/videojs-http-streaming/src/decrypter-worker.js */
20433var workerCode = transform(getWorkerString(function () {
20434
20435 function createCommonjsModule(fn, basedir, module) {
20436 return module = {
20437 path: basedir,
20438 exports: {},
20439 require: function require(path, base) {
20440 return commonjsRequire(path, base === undefined || base === null ? module.path : base);
20441 }
20442 }, fn(module, module.exports), module.exports;
20443 }
20444
20445 function commonjsRequire() {
20446 throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
20447 }
20448
20449 var createClass = createCommonjsModule(function (module) {
20450 function _defineProperties(target, props) {
20451 for (var i = 0; i < props.length; i++) {
20452 var descriptor = props[i];
20453 descriptor.enumerable = descriptor.enumerable || false;
20454 descriptor.configurable = true;
20455 if ("value" in descriptor) descriptor.writable = true;
20456 Object.defineProperty(target, descriptor.key, descriptor);
20457 }
20458 }
20459
20460 function _createClass(Constructor, protoProps, staticProps) {
20461 if (protoProps) _defineProperties(Constructor.prototype, protoProps);
20462 if (staticProps) _defineProperties(Constructor, staticProps);
20463 return Constructor;
20464 }
20465
20466 module.exports = _createClass;
20467 module.exports["default"] = module.exports, module.exports.__esModule = true;
20468 });
20469 var setPrototypeOf = createCommonjsModule(function (module) {
20470 function _setPrototypeOf(o, p) {
20471 module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
20472 o.__proto__ = p;
20473 return o;
20474 };
20475
20476 module.exports["default"] = module.exports, module.exports.__esModule = true;
20477 return _setPrototypeOf(o, p);
20478 }
20479
20480 module.exports = _setPrototypeOf;
20481 module.exports["default"] = module.exports, module.exports.__esModule = true;
20482 });
20483 var inheritsLoose = createCommonjsModule(function (module) {
20484 function _inheritsLoose(subClass, superClass) {
20485 subClass.prototype = Object.create(superClass.prototype);
20486 subClass.prototype.constructor = subClass;
20487 setPrototypeOf(subClass, superClass);
20488 }
20489
20490 module.exports = _inheritsLoose;
20491 module.exports["default"] = module.exports, module.exports.__esModule = true;
20492 });
20493 /**
20494 * @file stream.js
20495 */
20496
20497 /**
20498 * A lightweight readable stream implementation that handles event dispatching.
20499 *
20500 * @class Stream
20501 */
20502
20503 var Stream = /*#__PURE__*/function () {
20504 function Stream() {
20505 this.listeners = {};
20506 }
20507 /**
20508 * Add a listener for a specified event type.
20509 *
20510 * @param {string} type the event name
20511 * @param {Function} listener the callback to be invoked when an event of
20512 * the specified type occurs
20513 */
20514
20515
20516 var _proto = Stream.prototype;
20517
20518 _proto.on = function on(type, listener) {
20519 if (!this.listeners[type]) {
20520 this.listeners[type] = [];
20521 }
20522
20523 this.listeners[type].push(listener);
20524 }
20525 /**
20526 * Remove a listener for a specified event type.
20527 *
20528 * @param {string} type the event name
20529 * @param {Function} listener a function previously registered for this
20530 * type of event through `on`
20531 * @return {boolean} if we could turn it off or not
20532 */
20533 ;
20534
20535 _proto.off = function off(type, listener) {
20536 if (!this.listeners[type]) {
20537 return false;
20538 }
20539
20540 var index = this.listeners[type].indexOf(listener); // TODO: which is better?
20541 // In Video.js we slice listener functions
20542 // on trigger so that it does not mess up the order
20543 // while we loop through.
20544 //
20545 // Here we slice on off so that the loop in trigger
20546 // can continue using its old reference to loop without
20547 // messing up the order.
20548
20549 this.listeners[type] = this.listeners[type].slice(0);
20550 this.listeners[type].splice(index, 1);
20551 return index > -1;
20552 }
20553 /**
20554 * Trigger an event of the specified type on this stream. Any additional
20555 * arguments to this function are passed as parameters to event listeners.
20556 *
20557 * @param {string} type the event name
20558 */
20559 ;
20560
20561 _proto.trigger = function trigger(type) {
20562 var callbacks = this.listeners[type];
20563
20564 if (!callbacks) {
20565 return;
20566 } // Slicing the arguments on every invocation of this method
20567 // can add a significant amount of overhead. Avoid the
20568 // intermediate object creation for the common case of a
20569 // single callback argument
20570
20571
20572 if (arguments.length === 2) {
20573 var length = callbacks.length;
20574
20575 for (var i = 0; i < length; ++i) {
20576 callbacks[i].call(this, arguments[1]);
20577 }
20578 } else {
20579 var args = Array.prototype.slice.call(arguments, 1);
20580 var _length = callbacks.length;
20581
20582 for (var _i = 0; _i < _length; ++_i) {
20583 callbacks[_i].apply(this, args);
20584 }
20585 }
20586 }
20587 /**
20588 * Destroys the stream and cleans up.
20589 */
20590 ;
20591
20592 _proto.dispose = function dispose() {
20593 this.listeners = {};
20594 }
20595 /**
20596 * Forwards all `data` events on this stream to the destination stream. The
20597 * destination stream should provide a method `push` to receive the data
20598 * events as they arrive.
20599 *
20600 * @param {Stream} destination the stream that will receive all `data` events
20601 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
20602 */
20603 ;
20604
20605 _proto.pipe = function pipe(destination) {
20606 this.on('data', function (data) {
20607 destination.push(data);
20608 });
20609 };
20610
20611 return Stream;
20612 }();
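 // Illustrative usage sketch (assumed event name and payload): a minimal
 // listener round-trip on the Stream class above:
 //
 //   var stream = new Stream();
 //   stream.on('data', function (chunk) { /* handle chunk */ });
 //   stream.trigger('data', 'payload'); // listener receives 'payload'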
20613 /*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
20614
20615 /**
20616 * Returns the subarray of a Uint8Array without PKCS#7 padding.
20617 *
20618 * @param padded {Uint8Array} unencrypted bytes that have been padded
20619 * @return {Uint8Array} the unpadded bytes
20620 * @see http://tools.ietf.org/html/rfc5652
20621 */
20622
20623
20624 function unpad(padded) {
20625 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
20626 }
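 // Illustrative sketch (assumed bytes): PKCS#7 fills the final block with N
 // copies of the pad length N, so the last byte tells us how much to trim:
 //
 //   unpad(new Uint8Array([104, 105, 3, 3, 3])); // => Uint8Array [104, 105]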
20627 /*! @name aes-decrypter @version 3.1.2 @license Apache-2.0 */
20628
20629 /**
20630 * @file aes.js
20631 *
20632 * This file contains an adaptation of the AES decryption algorithm
20633 * from the Stanford Javascript Cryptography Library. That work is
20634 * covered by the following copyright and permissions notice:
20635 *
20636 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
20637 * All rights reserved.
20638 *
20639 * Redistribution and use in source and binary forms, with or without
20640 * modification, are permitted provided that the following conditions are
20641 * met:
20642 *
20643 * 1. Redistributions of source code must retain the above copyright
20644 * notice, this list of conditions and the following disclaimer.
20645 *
20646 * 2. Redistributions in binary form must reproduce the above
20647 * copyright notice, this list of conditions and the following
20648 * disclaimer in the documentation and/or other materials provided
20649 * with the distribution.
20650 *
20651 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
20652 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
20653 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20654 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
20655 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
20656 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
20657 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
20658 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
20659 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
20660 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
20661 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
20662 *
20663 * The views and conclusions contained in the software and documentation
20664 * are those of the authors and should not be interpreted as representing
20665 * official policies, either expressed or implied, of the authors.
20666 */
20667
20668 /**
20669 * Expand the S-box tables.
20670 *
20671 * @private
20672 */
20673
20674
20675 var precompute = function precompute() {
20676 var tables = [[[], [], [], [], []], [[], [], [], [], []]];
20677 var encTable = tables[0];
20678 var decTable = tables[1];
20679 var sbox = encTable[4];
20680 var sboxInv = decTable[4];
20681 var i;
20682 var x;
20683 var xInv;
20684 var d = [];
20685 var th = [];
20686 var x2;
20687 var x4;
20688 var x8;
20689 var s;
20690 var tEnc;
20691 var tDec; // Compute double and third tables
20692
20693 for (i = 0; i < 256; i++) {
20694 th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
20695 }
20696
20697 for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
20698 // Compute sbox
20699 s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
20700 s = s >> 8 ^ s & 255 ^ 99;
20701 sbox[x] = s;
20702 sboxInv[s] = x; // Compute MixColumns
20703
20704 x8 = d[x4 = d[x2 = d[x]]];
20705 tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
20706 tEnc = d[s] * 0x101 ^ s * 0x1010100;
20707
20708 for (i = 0; i < 4; i++) {
20709 encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
20710 decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
20711 }
20712 } // Compactify. Considerable speedup on Firefox.
20713
20714
20715 for (i = 0; i < 5; i++) {
20716 encTable[i] = encTable[i].slice(0);
20717 decTable[i] = decTable[i].slice(0);
20718 }
20719
20720 return tables;
20721 };
20722
20723 var aesTables = null;
20724 /**
20725 * Schedule out an AES key for both encryption and decryption. This
20726 * is a low-level class. Use a cipher mode to do bulk encryption.
20727 *
20728 * @class AES
20729 * @param key {Array} The key as an array of 4, 6 or 8 words.
20730 */
20731
20732 var AES = /*#__PURE__*/function () {
20733 function AES(key) {
20734 /**
20735 * The expanded S-box and inverse S-box tables. These will be computed
20736 * on the client so that we don't have to send them down the wire.
20737 *
20738 * There are two tables, _tables[0] is for encryption and
20739 * _tables[1] is for decryption.
20740 *
20741 * The first 4 sub-tables are the expanded S-box with MixColumns. The
20742 * last (_tables[01][4]) is the S-box itself.
20743 *
20744 * @private
20745 */
20746 // if we have yet to precompute the S-box tables
20747 // do so now
20748 if (!aesTables) {
20749 aesTables = precompute();
20750 } // then make a copy of that object for use
20751
20752
20753 this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
20754 var i;
20755 var j;
20756 var tmp;
20757 var sbox = this._tables[0][4];
20758 var decTable = this._tables[1];
20759 var keyLen = key.length;
20760 var rcon = 1;
20761
20762 if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
20763 throw new Error('Invalid aes key size');
20764 }
20765
20766 var encKey = key.slice(0);
20767 var decKey = [];
20768 this._key = [encKey, decKey]; // schedule encryption keys
20769
20770 for (i = keyLen; i < 4 * keyLen + 28; i++) {
20771 tmp = encKey[i - 1]; // apply sbox
20772
20773 if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
20774 tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon
20775
20776 if (i % keyLen === 0) {
20777 tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
20778 rcon = rcon << 1 ^ (rcon >> 7) * 283;
20779 }
20780 }
20781
20782 encKey[i] = encKey[i - keyLen] ^ tmp;
20783 } // schedule decryption keys
20784
20785
20786 for (j = 0; i; j++, i--) {
20787 tmp = encKey[j & 3 ? i : i - 4];
20788
20789 if (i <= 4 || j < 4) {
20790 decKey[j] = tmp;
20791 } else {
20792 decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
20793 }
20794 }
20795 }
20796 /**
20797 * Decrypt 16 bytes, specified as four 32-bit words.
20798 *
20799 * @param {number} encrypted0 the first word to decrypt
20800 * @param {number} encrypted1 the second word to decrypt
20801 * @param {number} encrypted2 the third word to decrypt
20802 * @param {number} encrypted3 the fourth word to decrypt
20803 * @param {Int32Array} out the array to write the decrypted words
20804 * into
20805 * @param {number} offset the offset into the output array to start
20806 * writing results
20807 * @return {Array} The plaintext.
20808 */
20809
20810
20811 var _proto = AES.prototype;
20812
20813 _proto.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
20814 var key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data
20815
20816 var a = encrypted0 ^ key[0];
20817 var b = encrypted3 ^ key[1];
20818 var c = encrypted2 ^ key[2];
20819 var d = encrypted1 ^ key[3];
20820 var a2;
20821 var b2;
20822 var c2; // key.length === 2 ?
20823
20824 var nInnerRounds = key.length / 4 - 2;
20825 var i;
20826 var kIndex = 4;
20827 var table = this._tables[1]; // load up the tables
20828
20829 var table0 = table[0];
20830 var table1 = table[1];
20831 var table2 = table[2];
20832 var table3 = table[3];
20833 var sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.
20834
20835 for (i = 0; i < nInnerRounds; i++) {
20836 a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
20837 b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
20838 c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
20839 d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
20840 kIndex += 4;
20841 a = a2;
20842 b = b2;
20843 c = c2;
20844 } // Last round.
20845
20846
20847 for (i = 0; i < 4; i++) {
20848 out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
20849 a2 = a;
20850 a = b;
20851 b = c;
20852 c = d;
20853 d = a2;
20854 }
20855 };
20856
20857 return AES;
20858 }();
20859 /**
20860 * A wrapper around the Stream class to use setTimeout
20861 * and run stream "jobs" asynchronously
20862 *
20863 * @class AsyncStream
20864 * @extends Stream
20865 */
20866
20867
20868 var AsyncStream = /*#__PURE__*/function (_Stream) {
20869 inheritsLoose(AsyncStream, _Stream);
20870
20871 function AsyncStream() {
20872 var _this;
20873
20874 _this = _Stream.call(this, Stream) || this;
20875 _this.jobs = [];
20876 _this.delay = 1;
20877 _this.timeout_ = null;
20878 return _this;
20879 }
20880 /**
20881 * process an async job
20882 *
20883 * @private
20884 */
20885
20886
20887 var _proto = AsyncStream.prototype;
20888
20889 _proto.processJob_ = function processJob_() {
20890 this.jobs.shift()();
20891
20892 if (this.jobs.length) {
20893 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
20894 } else {
20895 this.timeout_ = null;
20896 }
20897 }
20898 /**
20899 * push a job into the stream
20900 *
20901 * @param {Function} job the job to push into the stream
20902 */
20903 ;
20904
20905 _proto.push = function push(job) {
20906 this.jobs.push(job);
20907
20908 if (!this.timeout_) {
20909 this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
20910 }
20911 };
20912
20913 return AsyncStream;
20914 }(Stream);
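 // Illustrative usage sketch: jobs pushed onto an AsyncStream run one at a
 // time on setTimeout ticks (a 1ms delay), keeping the thread responsive
 // between chunks:
 //
 //   var asyncStream = new AsyncStream();
 //   asyncStream.push(function () { /* first chunk of work */ });
 //   asyncStream.push(function () { /* runs on a later tick */ });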
20915 /**
20916 * Convert network-order (big-endian) bytes into their little-endian
20917 * representation.
20918 */
20919
20920
20921 var ntoh = function ntoh(word) {
20922 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
20923 };
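 // Illustrative sketch: the byte order of the 32-bit word is fully
 // reversed, e.g.
 //
 //   ntoh(0x01020304) === 0x04030201; // true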
20924 /**
20925 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
20926 *
20927 * @param {Uint8Array} encrypted the encrypted bytes
20928 * @param {Uint32Array} key the bytes of the decryption key
20929 * @param {Uint32Array} initVector the initialization vector (IV) to
20930 * use for the first round of CBC.
20931 * @return {Uint8Array} the decrypted bytes
20932 *
20933 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
20934 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
20935 * @see https://tools.ietf.org/html/rfc2315
20936 */
20937
20938
20939 var decrypt = function decrypt(encrypted, key, initVector) {
20940 // word-level access to the encrypted bytes
20941 var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
20942 var decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output
20943
20944 var decrypted = new Uint8Array(encrypted.byteLength);
20945 var decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
20946 // decrypted data
20947
20948 var init0;
20949 var init1;
20950 var init2;
20951 var init3;
20952 var encrypted0;
20953 var encrypted1;
20954 var encrypted2;
20955 var encrypted3; // iteration variable
20956
20957 var wordIx; // pull out the words of the IV to ensure we don't modify the
20958 // passed-in reference and to allow easier access
20959
20960 init0 = initVector[0];
20961 init1 = initVector[1];
20962 init2 = initVector[2];
20963 init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
20964 // to each decrypted block
20965
20966 for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
20967 // convert big-endian (network order) words into little-endian
20968 // (javascript order)
20969 encrypted0 = ntoh(encrypted32[wordIx]);
20970 encrypted1 = ntoh(encrypted32[wordIx + 1]);
20971 encrypted2 = ntoh(encrypted32[wordIx + 2]);
20972 encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block
20973
20974 decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
20975 // plaintext
20976
20977 decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
20978 decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
20979 decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
20980 decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round
20981
20982 init0 = encrypted0;
20983 init1 = encrypted1;
20984 init2 = encrypted2;
20985 init3 = encrypted3;
20986 }
20987
20988 return decrypted;
20989 };
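 // Illustrative usage sketch (assumed key, IV, and data): `encrypted` must
 // be a whole number of 16-byte blocks; the key and IV are each given as
 // 32-bit words:
 //
 //   var key = new Uint32Array([0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f]);
 //   var iv = new Uint32Array([0, 0, 0, 0]);
 //   var plaintext = decrypt(encryptedBytes, key, iv); // => Uint8Array
 //
 // `encryptedBytes` is an assumed Uint8Array of AES-128-CBC ciphertext.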
20990 /**
20991 * The `Decrypter` class that manages decryption of AES
20992 * data through `AsyncStream` objects and the `decrypt`
20993 * function
20994 *
20995 * @param {Uint8Array} encrypted the encrypted bytes
20996 * @param {Uint32Array} key the bytes of the decryption key
20997 * @param {Uint32Array} initVector the initialization vector (IV) to
20998 * @param {Function} done the function to run when done
20999 * @class Decrypter
21000 */
21001
21002
21003 var Decrypter = /*#__PURE__*/function () {
21004 function Decrypter(encrypted, key, initVector, done) {
21005 var step = Decrypter.STEP;
21006 var encrypted32 = new Int32Array(encrypted.buffer);
21007 var decrypted = new Uint8Array(encrypted.byteLength);
21008 var i = 0;
21009 this.asyncStream_ = new AsyncStream(); // split up the decryption job and do the individual chunks asynchronously
21010
21011 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
21012
21013 for (i = step; i < encrypted32.length; i += step) {
21014 initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
21015 this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
21016 } // invoke the done() callback when everything is finished
21017
21018
21019 this.asyncStream_.push(function () {
21020 // remove pkcs#7 padding from the decrypted bytes
21021 done(null, unpad(decrypted));
21022 });
21023 }
21024 /**
21025 * A getter for STEP, the maximum number of bytes to process at one time.
21026 *
21027 * @return {number} the value of STEP (32000)
21028 */
21029
21030
21031 var _proto = Decrypter.prototype;
21032 /**
21033 * @private
21034 */
21035
21036 _proto.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
21037 return function () {
21038 var bytes = decrypt(encrypted, key, initVector);
21039 decrypted.set(bytes, encrypted.byteOffset);
21040 };
21041 };
21042
21043 createClass(Decrypter, null, [{
21044 key: "STEP",
21045 get: function get() {
21046 // 4 * 8000;
21047 return 32000;
21048 }
21049 }]);
21050 return Decrypter;
21051 }();
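 // Illustrative usage sketch (assumed inputs): decryption is split into
 // fixed-size chunks (Decrypter.STEP) through the AsyncStream, and the
 // callback receives the unpadded plaintext once every chunk has run:
 //
 //   new Decrypter(encryptedBytes, keyWords, ivWords, function (err, bytes) {
 //     // `err` is always null here; `bytes` is a Uint8Array of plaintext
 //   });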
21052 /**
21053 * @file bin-utils.js
21054 */
21055
21056 /**
21057 * Creates an object for sending to a web worker modifying properties that are TypedArrays
21058 * into a new object with separated properties for the buffer, byteOffset, and byteLength.
21059 *
21060 * @param {Object} message
21061 * Object of properties and values to send to the web worker
21062 * @return {Object}
21063 * Modified message with TypedArray values expanded
21064 * @function createTransferableMessage
21065 */
21066
21067
21068 var createTransferableMessage = function createTransferableMessage(message) {
21069 var transferable = {};
21070 Object.keys(message).forEach(function (key) {
21071 var value = message[key];
21072
21073 if (ArrayBuffer.isView(value)) {
21074 transferable[key] = {
21075 bytes: value.buffer,
21076 byteOffset: value.byteOffset,
21077 byteLength: value.byteLength
21078 };
21079 } else {
21080 transferable[key] = value;
21081 }
21082 });
21083 return transferable;
21084 };
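 // Illustrative sketch (assumed message): typed-array values are expanded so
 // their underlying ArrayBuffers can be listed as transferables in
 // postMessage:
 //
 //   createTransferableMessage({ source: 'a', encrypted: new Uint8Array(4) });
 //   // => { source: 'a',
 //   //      encrypted: { bytes: ArrayBuffer(4), byteOffset: 0, byteLength: 4 } }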
21085 /* global self */
21086
21087 /**
21088 * Our web worker interface so that things can talk to aes-decrypter
21089 * that will be running in a web worker. The scope is passed to this by
21090 * webworkify.
21091 */
21092
21093
21094 self.onmessage = function (event) {
21095 var data = event.data;
21096 var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
21097 var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
21098 var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
21099 /* eslint-disable no-new, handle-callback-err */
21100
21101 new Decrypter(encrypted, key, iv, function (err, bytes) {
21102 self.postMessage(createTransferableMessage({
21103 source: data.source,
21104 decrypted: bytes
21105 }), [bytes.buffer]);
21106 });
21107 /* eslint-enable */
21108 };
21109}));
21110var Decrypter = factory(workerCode);
21111/* rollup-plugin-worker-factory end for worker!/Users/bcasey/Projects/videojs-http-streaming/src/decrypter-worker.js */
21112
21113/**
21114 * Convert the properties of an HLS track into an audioTrackKind.
21115 *
21116 * @private
21117 */
21118
21119var audioTrackKind_ = function audioTrackKind_(properties) {
21120 var kind = properties.default ? 'main' : 'alternative';
21121
21122 if (properties.characteristics && properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
21123 kind = 'main-desc';
21124 }
21125
21126 return kind;
21127};
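// Illustrative sketch (assumed HLS track properties):
//
//   audioTrackKind_({ default: true });  // => 'main'
//   audioTrackKind_({ default: false }); // => 'alternative'
//   audioTrackKind_({
//     default: true,
//     characteristics: 'public.accessibility.describes-video'
//   }); // => 'main-desc'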
21128/**
21129 * Pause provided segment loader and playlist loader if active
21130 *
21131 * @param {SegmentLoader} segmentLoader
21132 * SegmentLoader to pause
21133 * @param {Object} mediaType
21134 * Active media type
21135 * @function stopLoaders
21136 */
21137
21138
21139var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
21140 segmentLoader.abort();
21141 segmentLoader.pause();
21142
21143 if (mediaType && mediaType.activePlaylistLoader) {
21144 mediaType.activePlaylistLoader.pause();
21145 mediaType.activePlaylistLoader = null;
21146 }
21147};
21148/**
21149 * Start loading provided segment loader and playlist loader
21150 *
21151 * @param {PlaylistLoader} playlistLoader
21152 * PlaylistLoader to start loading
21153 * @param {Object} mediaType
21154 * Active media type
21155 * @function startLoaders
21156 */
21157
21158var startLoaders = function startLoaders(playlistLoader, mediaType) {
21159 // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
21160 // playlist loader
21161 mediaType.activePlaylistLoader = playlistLoader;
21162 playlistLoader.load();
21163};
21164/**
21165 * Returns a function to be called when the media group changes. It performs a
21166 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
21167 * change of group is merely a rendition switch of the same content at another encoding,
21168 * rather than a change of content, such as switching audio from English to Spanish.
21169 *
21170 * @param {string} type
21171 * MediaGroup type
21172 * @param {Object} settings
21173 * Object containing required information for media groups
21174 * @return {Function}
21175 * Handler for a non-destructive resync of SegmentLoader when the active media
21176 * group changes.
21177 * @function onGroupChanged
21178 */
21179
21180var onGroupChanged = function onGroupChanged(type, settings) {
21181 return function () {
21182 var _settings$segmentLoad = settings.segmentLoaders,
21183 segmentLoader = _settings$segmentLoad[type],
21184 mainSegmentLoader = _settings$segmentLoad.main,
21185 mediaType = settings.mediaTypes[type];
21186 var activeTrack = mediaType.activeTrack();
21187 var activeGroup = mediaType.getActiveGroup();
21188 var previousActiveLoader = mediaType.activePlaylistLoader;
21189 var lastGroup = mediaType.lastGroup_; // the group did not change, do nothing
21190
21191 if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
21192 return;
21193 }
21194
21195 mediaType.lastGroup_ = activeGroup;
21196 mediaType.lastTrack_ = activeTrack;
21197 stopLoaders(segmentLoader, mediaType);
21198
21199 if (!activeGroup || activeGroup.isMasterPlaylist) {
21200 // there is no active group, or the active group is a master playlist and won't change
21201 return;
21202 }
21203
21204 if (!activeGroup.playlistLoader) {
21205 if (previousActiveLoader) {
21206 // The previous group had a playlist loader but the new active group does not
21207 // this means we are switching from demuxed to muxed audio. In this case we want to
21208 // do a destructive reset of the main segment loader and not restart the audio
21209 // loaders.
21210 mainSegmentLoader.resetEverything();
21211 }
21212
21213 return;
21214 } // Non-destructive resync
21215
21216
21217 segmentLoader.resyncLoader();
21218 startLoaders(activeGroup.playlistLoader, mediaType);
21219 };
21220};
21221var onGroupChanging = function onGroupChanging(type, settings) {
21222 return function () {
21223 var segmentLoader = settings.segmentLoaders[type],
21224 mediaType = settings.mediaTypes[type];
21225 mediaType.lastGroup_ = null;
21226 segmentLoader.abort();
21227 segmentLoader.pause();
21228 };
21229};
21230/**
21231 * Returns a function to be called when the media track changes. It performs a
21232 * destructive reset of the SegmentLoader to ensure we start loading as close to
21233 * currentTime as possible.
21234 *
21235 * @param {string} type
21236 * MediaGroup type
21237 * @param {Object} settings
21238 * Object containing required information for media groups
21239 * @return {Function}
21240 * Handler for a destructive reset of SegmentLoader when the active media
21241 * track changes.
21242 * @function onTrackChanged
21243 */
21244
21245var onTrackChanged = function onTrackChanged(type, settings) {
21246 return function () {
21247 var masterPlaylistLoader = settings.masterPlaylistLoader,
21248 _settings$segmentLoad2 = settings.segmentLoaders,
21249 segmentLoader = _settings$segmentLoad2[type],
21250 mainSegmentLoader = _settings$segmentLoad2.main,
21251 mediaType = settings.mediaTypes[type];
21252 var activeTrack = mediaType.activeTrack();
21253 var activeGroup = mediaType.getActiveGroup();
21254 var previousActiveLoader = mediaType.activePlaylistLoader;
21255 var lastTrack = mediaType.lastTrack_; // track did not change, do nothing
21256
21257 if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
21258 return;
21259 }
21260
21261 mediaType.lastGroup_ = activeGroup;
21262 mediaType.lastTrack_ = activeTrack;
21263 stopLoaders(segmentLoader, mediaType);
21264
21265 if (!activeGroup) {
21266 // there is no group active so we do not want to restart loaders
21267 return;
21268 }
21269
21270 if (activeGroup.isMasterPlaylist) {
21271 // track did not change, do nothing
21272 if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
21273 return;
21274 }
21275
21276 var mpc = settings.vhs.masterPlaylistController_;
21277 var newPlaylist = mpc.selectPlaylist(); // media will not change, do nothing
21278
21279 if (mpc.media() === newPlaylist) {
21280 return;
21281 }
21282
21283 mediaType.logger_("track change. Switching master audio from " + lastTrack.id + " to " + activeTrack.id);
21284 masterPlaylistLoader.pause();
21285 mainSegmentLoader.resetEverything();
21286 mpc.fastQualityChange_(newPlaylist);
21287 return;
21288 }
21289
21290 if (type === 'AUDIO') {
21291 if (!activeGroup.playlistLoader) {
21292 // when switching from demuxed audio/video to muxed audio/video (noted by no
21293 // playlist loader for the audio group), we want to do a destructive reset of the
21294 // main segment loader and not restart the audio loaders
21295 mainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since
21296 // it should be stopped
21297
21298 mainSegmentLoader.resetEverything();
21299 return;
21300 } // although the segment loader is an audio segment loader, call the setAudio
21301 // function to ensure it is prepared to re-append the init segment (or handle other
21302 // config changes)
21303
21304
21305 segmentLoader.setAudio(true);
21306 mainSegmentLoader.setAudio(false);
21307 }
21308
21309 if (previousActiveLoader === activeGroup.playlistLoader) {
21310 // Nothing has actually changed. This can happen because track change events can fire
21311 // multiple times for a "single" change. One for enabling the new active track, and
21312 // one for disabling the track that was active
21313 startLoaders(activeGroup.playlistLoader, mediaType);
21314 return;
21315 }
21316
21317 if (segmentLoader.track) {
21318 // For WebVTT, set the new text track in the segmentloader
21319 segmentLoader.track(activeTrack);
21320 } // destructive reset
21321
21322
21323 segmentLoader.resetEverything();
21324 startLoaders(activeGroup.playlistLoader, mediaType);
21325 };
21326};
21327var onError = {
21328 /**
21329 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
21330 * an error.
21331 *
21332 * @param {string} type
21333 * MediaGroup type
21334 * @param {Object} settings
21335 * Object containing required information for media groups
21336 * @return {Function}
21337 * Error handler. Logs warning (or error if the playlist is blacklisted) to
21338 * console and switches back to default audio track.
21339 * @function onError.AUDIO
21340 */
21341 AUDIO: function AUDIO(type, settings) {
21342 return function () {
21343 var segmentLoader = settings.segmentLoaders[type],
21344 mediaType = settings.mediaTypes[type],
21345 blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;
21346 stopLoaders(segmentLoader, mediaType); // switch back to default audio track
21347
21348 var activeTrack = mediaType.activeTrack();
21349 var activeGroup = mediaType.activeGroup();
21350 var id = (activeGroup.filter(function (group) {
21351 return group.default;
21352 })[0] || activeGroup[0]).id;
21353 var defaultTrack = mediaType.tracks[id];
21354
21355 if (activeTrack === defaultTrack) {
21356 // Default track encountered an error. All we can do now is blacklist the current
21357 // rendition and hope another will switch audio groups
21358 blacklistCurrentPlaylist({
21359 message: 'Problem encountered loading the default audio track.'
21360 });
21361 return;
21362 }
21363
21364 videojs.log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');
21365
21366 for (var trackId in mediaType.tracks) {
21367 mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
21368 }
21369
21370 mediaType.onTrackChanged();
21371 };
21372 },
21373
21374 /**
21375 * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
21376 * an error.
21377 *
21378 * @param {string} type
21379 * MediaGroup type
21380 * @param {Object} settings
21381 * Object containing required information for media groups
21382 * @return {Function}
21383 * Error handler. Logs warning to console and disables the active subtitle track
21384 * @function onError.SUBTITLES
21385 */
21386 SUBTITLES: function SUBTITLES(type, settings) {
21387 return function () {
21388 var segmentLoader = settings.segmentLoaders[type],
21389 mediaType = settings.mediaTypes[type];
21390 videojs.log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');
21391 stopLoaders(segmentLoader, mediaType);
21392 var track = mediaType.activeTrack();
21393
21394 if (track) {
21395 track.mode = 'disabled';
21396 }
21397
21398 mediaType.onTrackChanged();
21399 };
21400 }
21401};
21402var setupListeners = {
21403 /**
21404 * Setup event listeners for audio playlist loader
21405 *
21406 * @param {string} type
21407 * MediaGroup type
21408 * @param {PlaylistLoader|null} playlistLoader
21409 * PlaylistLoader to register listeners on
21410 * @param {Object} settings
21411 * Object containing required information for media groups
21412 * @function setupListeners.AUDIO
21413 */
21414 AUDIO: function AUDIO(type, playlistLoader, settings) {
21415 if (!playlistLoader) {
21416 // no playlist loader means audio will be muxed with the video
21417 return;
21418 }
21419
21420 var tech = settings.tech,
21421 requestOptions = settings.requestOptions,
21422 segmentLoader = settings.segmentLoaders[type];
21423 playlistLoader.on('loadedmetadata', function () {
21424 var media = playlistLoader.media();
21425 segmentLoader.playlist(media, requestOptions); // if the video is already playing, or if this isn't a live video and preload
21426 // permits, start downloading segments
21427
21428 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
21429 segmentLoader.load();
21430 }
21431 });
21432 playlistLoader.on('loadedplaylist', function () {
21433 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
21434
21435 if (!tech.paused()) {
21436 segmentLoader.load();
21437 }
21438 });
21439 playlistLoader.on('error', onError[type](type, settings));
21440 },
21441
21442 /**
21443 * Setup event listeners for subtitle playlist loader
21444 *
21445 * @param {string} type
21446 * MediaGroup type
21447 * @param {PlaylistLoader|null} playlistLoader
21448 * PlaylistLoader to register listeners on
21449 * @param {Object} settings
21450 * Object containing required information for media groups
21451 * @function setupListeners.SUBTITLES
21452 */
21453 SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
21454 var tech = settings.tech,
21455 requestOptions = settings.requestOptions,
21456 segmentLoader = settings.segmentLoaders[type],
21457 mediaType = settings.mediaTypes[type];
21458 playlistLoader.on('loadedmetadata', function () {
21459 var media = playlistLoader.media();
21460 segmentLoader.playlist(media, requestOptions);
21461 segmentLoader.track(mediaType.activeTrack()); // if the video is already playing, or if this isn't a live video and preload
21462 // permits, start downloading segments
21463
21464 if (!tech.paused() || media.endList && tech.preload() !== 'none') {
21465 segmentLoader.load();
21466 }
21467 });
21468 playlistLoader.on('loadedplaylist', function () {
21469 segmentLoader.playlist(playlistLoader.media(), requestOptions); // If the player isn't paused, ensure that the segment loader is running
21470
21471 if (!tech.paused()) {
21472 segmentLoader.load();
21473 }
21474 });
21475 playlistLoader.on('error', onError[type](type, settings));
21476 }
21477};
21478var initialize = {
21479 /**
21480 * Setup PlaylistLoaders and AudioTracks for the audio groups
21481 *
21482 * @param {string} type
21483 * MediaGroup type
21484 * @param {Object} settings
21485 * Object containing required information for media groups
21486 * @function initialize.AUDIO
21487 */
21488 'AUDIO': function AUDIO(type, settings) {
21489 var vhs = settings.vhs,
21490 sourceType = settings.sourceType,
21491 segmentLoader = settings.segmentLoaders[type],
21492 requestOptions = settings.requestOptions,
21493 mediaGroups = settings.master.mediaGroups,
21494 _settings$mediaTypes$ = settings.mediaTypes[type],
21495 groups = _settings$mediaTypes$.groups,
21496 tracks = _settings$mediaTypes$.tracks,
21497 logger_ = _settings$mediaTypes$.logger_,
21498 masterPlaylistLoader = settings.masterPlaylistLoader;
21499 var audioOnlyMaster = isAudioOnly(masterPlaylistLoader.master); // force a default if we have none
21500
21501 if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
21502 mediaGroups[type] = {
21503 main: {
21504 default: {
21505 default: true
21506 }
21507 }
21508 };
21509
21510 if (audioOnlyMaster) {
21511 mediaGroups[type].main.default.playlists = masterPlaylistLoader.master.playlists;
21512 }
21513 }
21514
21515 for (var groupId in mediaGroups[type]) {
21516 if (!groups[groupId]) {
21517 groups[groupId] = [];
21518 }
21519
21520 for (var variantLabel in mediaGroups[type][groupId]) {
21521 var properties = mediaGroups[type][groupId][variantLabel];
21522 var playlistLoader = void 0;
21523
21524 if (audioOnlyMaster) {
21525 logger_("AUDIO group '" + groupId + "' label '" + variantLabel + "' is a master playlist");
21526 properties.isMasterPlaylist = true;
21527 playlistLoader = null; // if vhs-json was provided as the source, and the media playlist was resolved,
21528 // use the resolved media playlist object
21529 } else if (sourceType === 'vhs-json' && properties.playlists) {
21530 playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);
21531 } else if (properties.resolvedUri) {
21532 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions); // TODO: dash isn't the only type with properties.playlists
21533 // should we even have properties.playlists in this check?
21534 } else if (properties.playlists && sourceType === 'dash') {
21535 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
21536 } else {
21537 // no resolvedUri means the audio is muxed with the video when using this
21538 // audio track
21539 playlistLoader = null;
21540 }
21541
21542 properties = videojs.mergeOptions({
21543 id: variantLabel,
21544 playlistLoader: playlistLoader
21545 }, properties);
21546 setupListeners[type](type, properties.playlistLoader, settings);
21547 groups[groupId].push(properties);
21548
21549 if (typeof tracks[variantLabel] === 'undefined') {
21550 var track = new videojs.AudioTrack({
21551 id: variantLabel,
21552 kind: audioTrackKind_(properties),
21553 enabled: false,
21554 language: properties.language,
21555 default: properties.default,
21556 label: variantLabel
21557 });
21558 tracks[variantLabel] = track;
21559 }
21560 }
21561 } // set up a single error event handler for the segment loader
21562
21563
21564 segmentLoader.on('error', onError[type](type, settings));
21565 },
21566
21567 /**
21568 * Setup PlaylistLoaders and TextTracks for the subtitle groups
21569 *
21570 * @param {string} type
21571 * MediaGroup type
21572 * @param {Object} settings
21573 * Object containing required information for media groups
21574 * @function initialize.SUBTITLES
21575 */
21576 'SUBTITLES': function SUBTITLES(type, settings) {
21577 var tech = settings.tech,
21578 vhs = settings.vhs,
21579 sourceType = settings.sourceType,
21580 segmentLoader = settings.segmentLoaders[type],
21581 requestOptions = settings.requestOptions,
21582 mediaGroups = settings.master.mediaGroups,
21583 _settings$mediaTypes$2 = settings.mediaTypes[type],
21584 groups = _settings$mediaTypes$2.groups,
21585 tracks = _settings$mediaTypes$2.tracks,
21586 masterPlaylistLoader = settings.masterPlaylistLoader;
21587
21588 for (var groupId in mediaGroups[type]) {
21589 if (!groups[groupId]) {
21590 groups[groupId] = [];
21591 }
21592
21593 for (var variantLabel in mediaGroups[type][groupId]) {
21594 if (mediaGroups[type][groupId][variantLabel].forced) {
21595 // Subtitle playlists with the forced attribute are not selectable in Safari.
21596 // According to Apple's HLS Authoring Specification:
21597 // If content has forced subtitles and regular subtitles in a given language,
21598 // the regular subtitles track in that language MUST contain both the forced
21599 // subtitles and the regular subtitles for that language.
21600 // Because of this requirement, and because Safari does not add forced subtitles,
21601 // forced subtitles are skipped here to maintain a consistent experience across
21602 // all platforms
21603 continue;
21604 }
21605
21606 var properties = mediaGroups[type][groupId][variantLabel];
21607 var playlistLoader = void 0;
21608
21609 if (sourceType === 'hls') {
21610 playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
21611 } else if (sourceType === 'dash') {
21612 var playlists = properties.playlists.filter(function (p) {
21613 return p.excludeUntil !== Infinity;
21614 });
21615
21616 if (!playlists.length) {
21617 return;
21618 }
21619
21620 playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
21621 } else if (sourceType === 'vhs-json') {
21622 playlistLoader = new PlaylistLoader( // if the vhs-json object included the media playlist, use the media playlist
21623 // as provided, otherwise use the resolved URI to load the playlist
21624 properties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);
21625 }
21626
21627 properties = videojs.mergeOptions({
21628 id: variantLabel,
21629 playlistLoader: playlistLoader
21630 }, properties);
21631 setupListeners[type](type, properties.playlistLoader, settings);
21632 groups[groupId].push(properties);
21633
21634 if (typeof tracks[variantLabel] === 'undefined') {
21635 var track = tech.addRemoteTextTrack({
21636 id: variantLabel,
21637 kind: 'subtitles',
21638 default: properties.default && properties.autoselect,
21639 language: properties.language,
21640 label: variantLabel
21641 }, false).track;
21642 tracks[variantLabel] = track;
21643 }
21644 }
21645 } // set up a single error event handler for the segment loader
21646
21647
21648 segmentLoader.on('error', onError[type](type, settings));
21649 },
21650
21651 /**
21652 * Setup TextTracks for the closed-caption groups
21653 *
21654 * @param {string} type
21655 * MediaGroup type
21656 * @param {Object} settings
21657 * Object containing required information for media groups
21658 * @function initialize['CLOSED-CAPTIONS']
21659 */
21660 'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
21661 var tech = settings.tech,
21662 mediaGroups = settings.master.mediaGroups,
21663 _settings$mediaTypes$3 = settings.mediaTypes[type],
21664 groups = _settings$mediaTypes$3.groups,
21665 tracks = _settings$mediaTypes$3.tracks;
21666
21667 for (var groupId in mediaGroups[type]) {
21668 if (!groups[groupId]) {
21669 groups[groupId] = [];
21670 }
21671
21672 for (var variantLabel in mediaGroups[type][groupId]) {
21673 var properties = mediaGroups[type][groupId][variantLabel]; // Look for either 608 (CCn) or 708 (SERVICEn) caption services
21674
21675 if (!/^(?:CC|SERVICE)/.test(properties.instreamId)) {
21676 continue;
21677 }
21678
21679 var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
21680 var newProps = {
21681 label: variantLabel,
21682 language: properties.language,
21683 instreamId: properties.instreamId,
21684 default: properties.default && properties.autoselect
21685 };
21686
21687 if (captionServices[newProps.instreamId]) {
21688 newProps = videojs.mergeOptions(newProps, captionServices[newProps.instreamId]);
21689 }
21690
21691 if (newProps.default === undefined) {
21692 delete newProps.default;
21693 } // No PlaylistLoader is required for Closed-Captions because the captions are
21694 // embedded within the video stream
21695
21696
21697 groups[groupId].push(videojs.mergeOptions({
21698 id: variantLabel
21699 }, properties));
21700
21701 if (typeof tracks[variantLabel] === 'undefined') {
21702 var track = tech.addRemoteTextTrack({
21703 id: newProps.instreamId,
21704 kind: 'captions',
21705 default: newProps.default,
21706 language: newProps.language,
21707 label: newProps.label
21708 }, false).track;
21709 tracks[variantLabel] = track;
21710 }
21711 }
21712 }
21713 }
21714};
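// Configuration sketch (illustrative, not part of the library source): the
// captionServices lookup above merges player-provided overrides into the 608/708
// caption track properties. Assuming the documented videojs option shape, a
// player could relabel the CC1 service like so:
//
//   videojs('example-player', {
//     html5: {
//       vhs: {
//         captionServices: {
//           CC1: { label: 'English (burned-in)', language: 'en' }
//         }
//       }
//     }
//   });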
21715
21716var groupMatch = function groupMatch(list, media) {
21717 for (var i = 0; i < list.length; i++) {
21718 if (playlistMatch(media, list[i])) {
21719 return true;
21720 }
21721
21722 if (list[i].playlists && groupMatch(list[i].playlists, media)) {
21723 return true;
21724 }
21725 }
21726
21727 return false;
21728};
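// Illustrative example (commented out so it cannot affect the bundle):
// groupMatch recurses into nested `playlists` arrays, so a media playlist can
// match either a top-level entry or an entry inside a group's playlist list;
// the precise comparison rules live in playlistMatch (id, then resolvedUri/uri).
//
//   var media = { id: 'audio-en', resolvedUri: 'https://example.com/en.m3u8' };
//   groupMatch([{ playlists: [media] }], media); // => true
//   groupMatch([{ playlists: [] }], media);      // => false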
21729/**
21730 * Returns a function used to get the active group of the provided type
21731 *
21732 * @param {string} type
21733 * MediaGroup type
21734 * @param {Object} settings
21735 * Object containing required information for media groups
21736 * @return {Function}
21737 * Function that returns the active media group for the provided type. Takes an
21738 * optional parameter {TextTrack} track. If no track is provided, a list of all
21739 * variants in the group is returned; otherwise, the variant corresponding to
21740 * the provided track is returned.
21741 * @function activeGroup
21742 */
21743
21744
21745var activeGroup = function activeGroup(type, settings) {
21746 return function (track) {
21747 var masterPlaylistLoader = settings.masterPlaylistLoader,
21748 groups = settings.mediaTypes[type].groups;
21749 var media = masterPlaylistLoader.media();
21750
21751 if (!media) {
21752 return null;
21753 }
21754
21755 var variants = null; // set variants to the main media active group
21756
21757 if (media.attributes[type]) {
21758 variants = groups[media.attributes[type]];
21759 }
21760
21761 var groupKeys = Object.keys(groups);
21762
21763 if (!variants) {
21764 // find the masterPlaylistLoader media
21765 // that is in a media group if we are dealing
21766 // with audio only
21767 if (type === 'AUDIO' && groupKeys.length > 1 && isAudioOnly(settings.master)) {
21768 for (var i = 0; i < groupKeys.length; i++) {
21769 var groupPropertyList = groups[groupKeys[i]];
21770
21771 if (groupMatch(groupPropertyList, media)) {
21772 variants = groupPropertyList;
21773 break;
21774 }
21775 } // use the main group if it exists
21776
21777 } else if (groups.main) {
21778 variants = groups.main; // only one group, use that one
21779 } else if (groupKeys.length === 1) {
21780 variants = groups[groupKeys[0]];
21781 }
21782 }
21783
21784 if (typeof track === 'undefined') {
21785 return variants;
21786 }
21787
21788 if (track === null || !variants) {
21789 // An active track was specified so a corresponding group is expected. track === null
21790 // means no track is currently active so there is no corresponding group
21791 return null;
21792 }
21793
21794 return variants.filter(function (props) {
21795 return props.id === track.id;
21796 })[0] || null;
21797 };
21798};
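// Usage sketch for the getter returned above (illustrative; assumes a populated
// `settings` object and an existing `someTrack`):
//
//   var getAudioGroup = activeGroup('AUDIO', settings);
//   getAudioGroup();          // all variants in the active AUDIO group, or null
//   getAudioGroup(someTrack); // the single variant whose id matches someTrack.id
//   getAudioGroup(null);      // null -- no active track means no matching group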
21799var activeTrack = {
21800 /**
21801 * Returns a function used to get the active track of type provided
21802 *
21803 * @param {string} type
21804 * MediaGroup type
21805 * @param {Object} settings
21806 * Object containing required information for media groups
21807 * @return {Function}
21808 * Function that returns the active media track for the provided type. Returns
21809 * null if no track is active
21810 * @function activeTrack.AUDIO
21811 */
21812 AUDIO: function AUDIO(type, settings) {
21813 return function () {
21814 var tracks = settings.mediaTypes[type].tracks;
21815
21816 for (var id in tracks) {
21817 if (tracks[id].enabled) {
21818 return tracks[id];
21819 }
21820 }
21821
21822 return null;
21823 };
21824 },
21825
21826 /**
21827 * Returns a function used to get the active track of type provided
21828 *
21829 * @param {string} type
21830 * MediaGroup type
21831 * @param {Object} settings
21832 * Object containing required information for media groups
21833 * @return {Function}
21834 * Function that returns the active media track for the provided type. Returns
21835 * null if no track is active
21836 * @function activeTrack.SUBTITLES
21837 */
21838 SUBTITLES: function SUBTITLES(type, settings) {
21839 return function () {
21840 var tracks = settings.mediaTypes[type].tracks;
21841
21842 for (var id in tracks) {
21843 if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {
21844 return tracks[id];
21845 }
21846 }
21847
21848 return null;
21849 };
21850 }
21851};
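// Note the asymmetry between the two getters above: an audio track is active
// when `enabled` is true, while a text track counts as active when its mode is
// 'showing' or 'hidden' (hidden tracks still load and parse cues; they are just
// not rendered). Illustrative:
//
//   settings.mediaTypes.SUBTITLES.tracks.en.mode = 'hidden';
//   activeTrack.SUBTITLES('SUBTITLES', settings)(); // => the 'en' track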
21852var getActiveGroup = function getActiveGroup(type, _ref) {
21853 var mediaTypes = _ref.mediaTypes;
21854 return function () {
21855 var activeTrack_ = mediaTypes[type].activeTrack();
21856
21857 if (!activeTrack_) {
21858 return null;
21859 }
21860
21861 return mediaTypes[type].activeGroup(activeTrack_);
21862 };
21863};
21864/**
21865 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
21866 * Closed-Captions) specified in the master manifest.
21867 *
21868 * @param {Object} settings
21869 * Object containing required information for setting up the media groups
21870 * @param {Tech} settings.tech
21871 * The tech of the player
21872 * @param {Object} settings.requestOptions
21873 * XHR request options used by the segment loaders
21874 * @param {PlaylistLoader} settings.masterPlaylistLoader
21875 * PlaylistLoader for the master source
21876 * @param {VhsHandler} settings.vhs
21877 * VHS SourceHandler
21878 * @param {Object} settings.master
21879 * The parsed master manifest
21880 * @param {Object} settings.mediaTypes
21881 * Object to store the loaders, tracks, and utility methods for each media type
21882 * @param {Function} settings.blacklistCurrentPlaylist
21883 * Blacklists the current rendition and forces a rendition switch.
21884 * @function setupMediaGroups
21885 */
21886
21887var setupMediaGroups = function setupMediaGroups(settings) {
21888 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
21889 initialize[type](type, settings);
21890 });
21891 var mediaTypes = settings.mediaTypes,
21892 masterPlaylistLoader = settings.masterPlaylistLoader,
21893 tech = settings.tech,
21894 vhs = settings.vhs,
21895 _settings$segmentLoad3 = settings.segmentLoaders,
21896 audioSegmentLoader = _settings$segmentLoad3['AUDIO'],
21897 mainSegmentLoader = _settings$segmentLoad3.main; // set up active group and track getters and change event handlers
21898
21899 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
21900 mediaTypes[type].activeGroup = activeGroup(type, settings);
21901 mediaTypes[type].activeTrack = activeTrack[type](type, settings);
21902 mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
21903 mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
21904 mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
21905 mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
21906 }); // DO NOT enable the default subtitle or caption track.
21907 // DO enable the default audio track
21908
21909 var audioGroup = mediaTypes.AUDIO.activeGroup();
21910
21911 if (audioGroup) {
21912 var groupId = (audioGroup.filter(function (group) {
21913 return group.default;
21914 })[0] || audioGroup[0]).id;
21915 mediaTypes.AUDIO.tracks[groupId].enabled = true;
21916 mediaTypes.AUDIO.onGroupChanged();
21917 mediaTypes.AUDIO.onTrackChanged();
21918 var activeAudioGroup = mediaTypes.AUDIO.getActiveGroup(); // a similar check for handling setAudio on each loader is run again each time the
21919 // track is changed, but needs to be handled here since the track may not be considered
21920 // changed on the first call to onTrackChanged
21921
21922 if (!activeAudioGroup.playlistLoader) {
21923 // either audio is muxed with video or the stream is audio only
21924 mainSegmentLoader.setAudio(true);
21925 } else {
21926 // audio is demuxed
21927 mainSegmentLoader.setAudio(false);
21928 audioSegmentLoader.setAudio(true);
21929 }
21930 }
21931
21932 masterPlaylistLoader.on('mediachange', function () {
21933 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
21934 return mediaTypes[type].onGroupChanged();
21935 });
21936 });
21937 masterPlaylistLoader.on('mediachanging', function () {
21938 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
21939 return mediaTypes[type].onGroupChanging();
21940 });
21941 }); // custom audio track change event handler for usage event
21942
21943 var onAudioTrackChanged = function onAudioTrackChanged() {
21944 mediaTypes.AUDIO.onTrackChanged();
21945 tech.trigger({
21946 type: 'usage',
21947 name: 'vhs-audio-change'
21948 });
21949 tech.trigger({
21950 type: 'usage',
21951 name: 'hls-audio-change'
21952 });
21953 };
21954
21955 tech.audioTracks().addEventListener('change', onAudioTrackChanged);
21956 tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
21957 vhs.on('dispose', function () {
21958 tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
21959 tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
21960 }); // clear existing audio tracks and add the ones we just created
21961
21962 tech.clearTracks('audio');
21963
21964 for (var id in mediaTypes.AUDIO.tracks) {
21965 tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
21966 }
21967};
21968/**
21969 * Creates skeleton object used to store the loaders, tracks, and utility methods for each
21970 * media type
21971 *
21972 * @return {Object}
21973 * Object to store the loaders, tracks, and utility methods for each media type
21974 * @function createMediaTypes
21975 */
21976
21977var createMediaTypes = function createMediaTypes() {
21978 var mediaTypes = {};
21979 ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
21980 mediaTypes[type] = {
21981 groups: {},
21982 tracks: {},
21983 activePlaylistLoader: null,
21984 activeGroup: noop,
21985 activeTrack: noop,
21986 getActiveGroup: noop,
21987 onGroupChanged: noop,
21988 onTrackChanged: noop,
21989 lastTrack_: null,
21990 logger_: logger("MediaGroups[" + type + "]")
21991 };
21992 });
21993 return mediaTypes;
21994};
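// Shape sketch of the skeleton returned above (illustrative):
//
//   {
//     AUDIO: {
//       groups: {}, tracks: {}, activePlaylistLoader: null,
//       activeGroup: noop, activeTrack: noop, getActiveGroup: noop,
//       onGroupChanged: noop, onTrackChanged: noop,
//       lastTrack_: null, logger_: logger('MediaGroups[AUDIO]')
//     },
//     SUBTITLES: { /* same shape */ },
//     'CLOSED-CAPTIONS': { /* same shape */ }
//   }
//
// The noop placeholders are replaced with real implementations by
// setupMediaGroups once the master manifest has been parsed.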
21995
21996var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;
21997var Vhs$1; // SegmentLoader stats that need to have each loader's
21998// values summed to calculate the final value
21999
22000var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred', 'mediaAppends'];
22001
22002var sumLoaderStat = function sumLoaderStat(stat) {
22003 return this.audioSegmentLoader_[stat] + this.mainSegmentLoader_[stat];
22004};
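// Illustrative example: sumLoaderStat is bound per stat name in the
// MasterPlaylistController constructor, so each generated getter reads both
// segment loaders at call time:
//
//   this.mediaRequests_ = sumLoaderStat.bind(this, 'mediaRequests');
//   // with audioSegmentLoader_.mediaRequests === 2 and
//   // mainSegmentLoader_.mediaRequests === 5:
//   this.mediaRequests_(); // => 7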
22005
22006var shouldSwitchToMedia = function shouldSwitchToMedia(_ref) {
22007 var currentPlaylist = _ref.currentPlaylist,
22008 nextPlaylist = _ref.nextPlaylist,
22009 forwardBuffer = _ref.forwardBuffer,
22010 bufferLowWaterLine = _ref.bufferLowWaterLine,
22011 bufferHighWaterLine = _ref.bufferHighWaterLine,
22012 duration = _ref.duration,
22013 experimentalBufferBasedABR = _ref.experimentalBufferBasedABR,
22014 log = _ref.log;
22015
22016 // we have no other playlist to switch to
22017 if (!nextPlaylist) {
22018 videojs.log.warn('We received no playlist to switch to. Please check your stream.');
22019 return false;
22020 }
22021
22022 var sharedLogLine = "allowing switch " + (currentPlaylist && currentPlaylist.id || 'null') + " -> " + nextPlaylist.id;
22023
22024 if (!currentPlaylist) {
22025 log(sharedLogLine + " as current playlist is not set");
22026 return true;
22027 } // no need to switch if playlist is the same
22028
22029
22030 if (nextPlaylist.id === currentPlaylist.id) {
22031 return false;
22032 } // If the playlist is live, then we don't want to take the low water line into account.
22033 // This is because in live streams, the player plays 3 segments from the end of the
22034 // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration available
22035 // in those segments, a viewer will never experience a rendition upswitch.
22036
22037
22038 if (!currentPlaylist.endList) {
22039 log(sharedLogLine + " as current playlist is live");
22040 return true;
22041 }
22042
22043 var maxBufferLowWaterLine = experimentalBufferBasedABR ? Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE; // For the same reason as LIVE, we ignore the low water line when the VOD
22044 // duration is below the max potential low water line
22045
22046 if (duration < maxBufferLowWaterLine) {
22047 log(sharedLogLine + " as duration < max low water line (" + duration + " < " + maxBufferLowWaterLine + ")");
22048 return true;
22049 }
22050
22051 var nextBandwidth = nextPlaylist.attributes.BANDWIDTH;
22052 var currBandwidth = currentPlaylist.attributes.BANDWIDTH; // when switching down, if our buffer is lower than the high water line,
22053 // we can switch down
22054
22055 if (nextBandwidth < currBandwidth && (!experimentalBufferBasedABR || forwardBuffer < bufferHighWaterLine)) {
22056 var logLine = sharedLogLine + " as next bandwidth < current bandwidth (" + nextBandwidth + " < " + currBandwidth + ")";
22057
22058 if (experimentalBufferBasedABR) {
22059 logLine += " and forwardBuffer < bufferHighWaterLine (" + forwardBuffer + " < " + bufferHighWaterLine + ")";
22060 }
22061
22062 log(logLine);
22063 return true;
22064 } // and if our buffer is higher than the low water line,
22065 // we can switch up
22066
22067
22068 if ((!experimentalBufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {
22069 var _logLine = sharedLogLine + " as forwardBuffer >= bufferLowWaterLine (" + forwardBuffer + " >= " + bufferLowWaterLine + ")";
22070
22071 if (experimentalBufferBasedABR) {
22072 _logLine += " and next bandwidth > current bandwidth (" + nextBandwidth + " > " + currBandwidth + ")";
22073 }
22074
22075 log(_logLine);
22076 return true;
22077 }
22078
22079 log("not " + sharedLogLine + " as no switching criteria met");
22080 return false;
22081};
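// Worked example with illustrative values (not necessarily the library
// defaults): a VOD downswitch in bandwidth is allowed regardless of the forward
// buffer when buffer-based ABR is off:
//
//   shouldSwitchToMedia({
//     currentPlaylist: { id: 'a', endList: true, attributes: { BANDWIDTH: 4e6 } },
//     nextPlaylist: { id: 'b', attributes: { BANDWIDTH: 2e6 } },
//     forwardBuffer: 10,
//     bufferLowWaterLine: 16,
//     bufferHighWaterLine: 30,
//     duration: 600,
//     experimentalBufferBasedABR: false,
//     log: function() {}
//   }); // => true, logged as "next bandwidth < current bandwidth"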
22082/**
22083 * The master playlist controller controls all interactions
22084 * between playlists and segment loaders. At this time this mainly
22085 * involves a master playlist and a series of audio playlists
22086 * if they are available
22087 *
22088 * @class MasterPlaylistController
22089 * @extends videojs.EventTarget
22090 */
22091
22092
22093var MasterPlaylistController = /*#__PURE__*/function (_videojs$EventTarget) {
22094 _inheritsLoose(MasterPlaylistController, _videojs$EventTarget);
22095
22096 function MasterPlaylistController(options) {
22097 var _this;
22098
22099 _this = _videojs$EventTarget.call(this) || this;
22100 var src = options.src,
22101 handleManifestRedirects = options.handleManifestRedirects,
22102 withCredentials = options.withCredentials,
22103 tech = options.tech,
22104 bandwidth = options.bandwidth,
22105 externVhs = options.externVhs,
22106 useCueTags = options.useCueTags,
22107 blacklistDuration = options.blacklistDuration,
22108 enableLowInitialPlaylist = options.enableLowInitialPlaylist,
22109 sourceType = options.sourceType,
22110 cacheEncryptionKeys = options.cacheEncryptionKeys,
22111 experimentalBufferBasedABR = options.experimentalBufferBasedABR,
22112 experimentalLeastPixelDiffSelector = options.experimentalLeastPixelDiffSelector,
22113 captionServices = options.captionServices;
22114
22115 if (!src) {
22116 throw new Error('A non-empty playlist URL or JSON manifest string is required');
22117 }
22118
22119 var maxPlaylistRetries = options.maxPlaylistRetries;
22120
22121 if (maxPlaylistRetries === null || typeof maxPlaylistRetries === 'undefined') {
22122 maxPlaylistRetries = Infinity;
22123 }
22124
22125 Vhs$1 = externVhs;
22126 _this.experimentalBufferBasedABR = Boolean(experimentalBufferBasedABR);
22127 _this.experimentalLeastPixelDiffSelector = Boolean(experimentalLeastPixelDiffSelector);
22128 _this.withCredentials = withCredentials;
22129 _this.tech_ = tech;
22130 _this.vhs_ = tech.vhs;
22131 _this.sourceType_ = sourceType;
22132 _this.useCueTags_ = useCueTags;
22133 _this.blacklistDuration = blacklistDuration;
22134 _this.maxPlaylistRetries = maxPlaylistRetries;
22135 _this.enableLowInitialPlaylist = enableLowInitialPlaylist;
22136
22137 if (_this.useCueTags_) {
22138 _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
22139 _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
22140 }
22141
22142 _this.requestOptions_ = {
22143 withCredentials: withCredentials,
22144 handleManifestRedirects: handleManifestRedirects,
22145 maxPlaylistRetries: maxPlaylistRetries,
22146 timeout: null
22147 };
22148
22149 _this.on('error', _this.pauseLoading);
22150
22151 _this.mediaTypes_ = createMediaTypes();
22152 _this.mediaSource = new window.MediaSource();
22153 _this.handleDurationChange_ = _this.handleDurationChange_.bind(_assertThisInitialized(_this));
22154 _this.handleSourceOpen_ = _this.handleSourceOpen_.bind(_assertThisInitialized(_this));
22155 _this.handleSourceEnded_ = _this.handleSourceEnded_.bind(_assertThisInitialized(_this));
22156
22157 _this.mediaSource.addEventListener('durationchange', _this.handleDurationChange_); // load the media source into the player
22158
22159
22160 _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_);
22161
22162 _this.mediaSource.addEventListener('sourceended', _this.handleSourceEnded_); // we don't have to handle sourceclose since dispose will handle termination of
22163 // everything, and the MediaSource should not be detached without a proper disposal
22164
22165
22166 _this.seekable_ = videojs.createTimeRanges();
22167 _this.hasPlayed_ = false;
22168 _this.syncController_ = new SyncController(options);
22169 _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
22170 kind: 'metadata',
22171 label: 'segment-metadata'
22172 }, false).track;
22173 _this.decrypter_ = new Decrypter();
22174 _this.sourceUpdater_ = new SourceUpdater(_this.mediaSource);
22175 _this.inbandTextTracks_ = {};
22176 _this.timelineChangeController_ = new TimelineChangeController();
22177 var segmentLoaderSettings = {
22178 vhs: _this.vhs_,
22179 parse708captions: options.parse708captions,
22180 captionServices: captionServices,
22181 mediaSource: _this.mediaSource,
22182 currentTime: _this.tech_.currentTime.bind(_this.tech_),
22183 seekable: function seekable() {
22184 return _this.seekable();
22185 },
22186 seeking: function seeking() {
22187 return _this.tech_.seeking();
22188 },
22189 duration: function duration() {
22190 return _this.duration();
22191 },
22192 hasPlayed: function hasPlayed() {
22193 return _this.hasPlayed_;
22194 },
22195 goalBufferLength: function goalBufferLength() {
22196 return _this.goalBufferLength();
22197 },
22198 bandwidth: bandwidth,
22199 syncController: _this.syncController_,
22200 decrypter: _this.decrypter_,
22201 sourceType: _this.sourceType_,
22202 inbandTextTracks: _this.inbandTextTracks_,
22203 cacheEncryptionKeys: cacheEncryptionKeys,
22204 sourceUpdater: _this.sourceUpdater_,
22205 timelineChangeController: _this.timelineChangeController_,
22206 experimentalExactManifestTimings: options.experimentalExactManifestTimings
22207 }; // The source type check not only determines whether a special DASH playlist loader
22208 // should be used, but also covers the case where the provided src is a vhs-json
22209 // manifest object (instead of a URL). In the case of vhs-json, the default
22210 // PlaylistLoader should be used.
22211
22212 _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, _this.vhs_, _this.requestOptions_) : new PlaylistLoader(src, _this.vhs_, _this.requestOptions_);
22213
22214 _this.setupMasterPlaylistLoaderListeners_(); // set up segment loaders
22215 // combined audio/video or just video when alternate audio track is selected
22216
22217
22218 _this.mainSegmentLoader_ = new SegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
22219 segmentMetadataTrack: _this.segmentMetadataTrack_,
22220 loaderType: 'main'
22221 }), options); // alternate audio track
22222
22223 _this.audioSegmentLoader_ = new SegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
22224 loaderType: 'audio'
22225 }), options);
22226 _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs.mergeOptions(segmentLoaderSettings, {
22227 loaderType: 'vtt',
22228 featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks
22229 }), options);
22230
22231 _this.setupSegmentLoaderListeners_();
22232
22233 if (_this.experimentalBufferBasedABR) {
22234 _this.masterPlaylistLoader_.one('loadedplaylist', function () {
22235 return _this.startABRTimer_();
22236 });
22237
22238 _this.tech_.on('pause', function () {
22239 return _this.stopABRTimer_();
22240 });
22241
22242 _this.tech_.on('play', function () {
22243 return _this.startABRTimer_();
22244 });
22245 } // Create SegmentLoader stat-getters
22246 // mediaRequests_
22247 // mediaRequestsAborted_
22248 // mediaRequestsTimedout_
22249 // mediaRequestsErrored_
22250 // mediaTransferDuration_
22251 // mediaBytesTransferred_
22252 // mediaAppends_
22253
22254
22255 loaderStats.forEach(function (stat) {
22256 _this[stat + '_'] = sumLoaderStat.bind(_assertThisInitialized(_this), stat);
22257 });
22258 _this.logger_ = logger('MPC');
22259 _this.triggeredFmp4Usage = false;
22260
22261 if (_this.tech_.preload() === 'none') {
22262 _this.loadOnPlay_ = function () {
22263 _this.loadOnPlay_ = null;
22264
22265 _this.masterPlaylistLoader_.load();
22266 };
22267
22268 _this.tech_.one('play', _this.loadOnPlay_);
22269 } else {
22270 _this.masterPlaylistLoader_.load();
22271 }
22272
22273 _this.timeToLoadedData__ = -1;
22274 _this.mainAppendsToLoadedData__ = -1;
22275 _this.audioAppendsToLoadedData__ = -1;
22276 var event = _this.tech_.preload() === 'none' ? 'play' : 'loadstart'; // start the first frame timer on loadstart or play (for preload none)
22277
22278 _this.tech_.one(event, function () {
22279 var timeToLoadedDataStart = Date.now();
22280
22281 _this.tech_.one('loadeddata', function () {
22282 _this.timeToLoadedData__ = Date.now() - timeToLoadedDataStart;
22283 _this.mainAppendsToLoadedData__ = _this.mainSegmentLoader_.mediaAppends;
22284 _this.audioAppendsToLoadedData__ = _this.audioSegmentLoader_.mediaAppends;
22285 });
22286 });
22287
22288 return _this;
22289 }
22290
22291 var _proto = MasterPlaylistController.prototype;
22292
22293 _proto.mainAppendsToLoadedData_ = function mainAppendsToLoadedData_() {
22294 return this.mainAppendsToLoadedData__;
22295 };
22296
22297 _proto.audioAppendsToLoadedData_ = function audioAppendsToLoadedData_() {
22298 return this.audioAppendsToLoadedData__;
22299 };
22300
22301 _proto.appendsToLoadedData_ = function appendsToLoadedData_() {
22302 var main = this.mainAppendsToLoadedData_();
22303 var audio = this.audioAppendsToLoadedData_();
22304
22305 if (main === -1 || audio === -1) {
22306 return -1;
22307 }
22308
22309 return main + audio;
22310 };
22311
22312 _proto.timeToLoadedData_ = function timeToLoadedData_() {
22313 return this.timeToLoadedData__;
22314 }
22315 /**
22316 * Run selectPlaylist and switch to the new playlist if we should
22317 *
22318 * @private
22319 *
22320 */
22321 ;
22322
22323 _proto.checkABR_ = function checkABR_() {
22324 var nextPlaylist = this.selectPlaylist();
22325
22326 if (nextPlaylist && this.shouldSwitchToMedia_(nextPlaylist)) {
22327 this.switchMedia_(nextPlaylist, 'abr');
22328 }
22329 };
22330
22331 _proto.switchMedia_ = function switchMedia_(playlist, cause, delay) {
22332 var oldMedia = this.media();
22333 var oldId = oldMedia && (oldMedia.id || oldMedia.uri);
22334 var newId = playlist.id || playlist.uri;
22335
22336 if (oldId && oldId !== newId) {
22337 this.logger_("switch media " + oldId + " -> " + newId + " from " + cause);
22338 this.tech_.trigger({
22339 type: 'usage',
22340 name: "vhs-rendition-change-" + cause
22341 });
22342 }
22343
22344 this.masterPlaylistLoader_.media(playlist, delay);
22345 }
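// Observability sketch: every rendition change funnels through switchMedia_, so
// an integration can watch the usage events it triggers; the causes used in
// this file are 'initial', 'abr', 'bandwidthupdate' and 'fast-quality'
// (illustrative listener, assuming a standard videojs player):
//
//   player.tech(true).on('usage', function (event) {
//     if (/^vhs-rendition-change-/.test(event.name)) {
//       console.log('rendition switch caused by', event.name);
//     }
//   });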
22346 /**
22347 * Start a timer that periodically calls checkABR_
22348 *
22349 * @private
22350 */
22351 ;
22352
22353 _proto.startABRTimer_ = function startABRTimer_() {
22354 var _this2 = this;
22355
22356 this.stopABRTimer_();
22357 this.abrTimer_ = window.setInterval(function () {
22358 return _this2.checkABR_();
22359 }, 250);
22360 }
22361 /**
22362 * Stop the timer that periodically calls checkABR_
22363 *
22364 * @private
22365 */
22366 ;
22367
22368 _proto.stopABRTimer_ = function stopABRTimer_() {
22369 // if we're scrubbing, don't stop the ABR timer, since playback isn't truly paused.
22370 // The scrubbing getter will be added to Video.js in version 7.11.
22371 if (this.tech_.scrubbing && this.tech_.scrubbing()) {
22372 return;
22373 }
22374
22375 window.clearInterval(this.abrTimer_);
22376 this.abrTimer_ = null;
22377 }
22378 /**
22379 * Get a list of playlists for the currently selected audio playlist
22380 *
22381 * @return {Array} the array of audio playlists
22382 */
22383 ;
22384
22385 _proto.getAudioTrackPlaylists_ = function getAudioTrackPlaylists_() {
22386 var master = this.master();
22387 var defaultPlaylists = master && master.playlists || []; // if we don't have any audio groups then we can only
22388 // assume that the audio tracks are contained in the master's
22389 // playlists array; use that or an empty array.
22390
22391 if (!master || !master.mediaGroups || !master.mediaGroups.AUDIO) {
22392 return defaultPlaylists;
22393 }
22394
22395 var AUDIO = master.mediaGroups.AUDIO;
22396 var groupKeys = Object.keys(AUDIO);
22397 var track; // get the current active track
22398
22399 if (Object.keys(this.mediaTypes_.AUDIO.groups).length) {
22400 track = this.mediaTypes_.AUDIO.activeTrack(); // or get the default track from master if mediaTypes_ isn't set up yet
22401 } else {
22402 // default group is `main` or just the first group.
22403 var defaultGroup = AUDIO.main || groupKeys.length && AUDIO[groupKeys[0]];
22404
22405 for (var label in defaultGroup) {
22406 if (defaultGroup[label].default) {
22407 track = {
22408 label: label
22409 };
22410 break;
22411 }
22412 }
22413 } // no active track means no playlists.
22414
22415
22416 if (!track) {
22417 return defaultPlaylists;
22418 }
22419
22420 var playlists = []; // get all of the playlists that are possible for the
22421 // active track.
22422
22423 for (var group in AUDIO) {
22424 if (AUDIO[group][track.label]) {
22425 var properties = AUDIO[group][track.label];
22426
22427 if (properties.playlists && properties.playlists.length) {
22428 playlists.push.apply(playlists, properties.playlists);
22429 } else if (properties.uri) {
22430 playlists.push(properties);
22431 } else if (master.playlists.length) {
22432 // if an audio group does not have a uri
22433 // see if we have main playlists that use it as a group.
22434 // if we do then add those to the playlists list.
22435 for (var i = 0; i < master.playlists.length; i++) {
22436 var playlist = master.playlists[i];
22437
22438 if (playlist.attributes && playlist.attributes.AUDIO && playlist.attributes.AUDIO === group) {
22439 playlists.push(playlist);
22440 }
22441 }
22442 }
22443 }
22444 }
22445
22446 if (!playlists.length) {
22447 return defaultPlaylists;
22448 }
22449
22450 return playlists;
22451 }
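// Illustrative manifest shape for the lookup above: given
//
//   master.mediaGroups.AUDIO = {
//     main: { en: { default: true, playlists: [p1] } },
//     alt: { en: { uri: 'alt-en.m3u8' } }
//   };
//
// the active 'en' track collects [p1, { uri: 'alt-en.m3u8', ... }]. When a
// group entry has neither `playlists` nor `uri`, main playlists whose
// attributes.AUDIO names that group are collected instead.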
22452 /**
22453 * Register event handlers on the master playlist loader. A helper
22454 * function for construction time.
22455 *
22456 * @private
22457 */
22458 ;
22459
22460 _proto.setupMasterPlaylistLoaderListeners_ = function setupMasterPlaylistLoaderListeners_() {
22461 var _this3 = this;
22462
22463 this.masterPlaylistLoader_.on('loadedmetadata', function () {
22464 var media = _this3.masterPlaylistLoader_.media();
22465
22466 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
22467 // time out the request.
22468
22469 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
22470 _this3.requestOptions_.timeout = 0;
22471 } else {
22472 _this3.requestOptions_.timeout = requestTimeout;
22473 } // if this isn't a live video and preload permits, start
22474 // downloading segments
22475
22476
22477 if (media.endList && _this3.tech_.preload() !== 'none') {
22478 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
22479
22480 _this3.mainSegmentLoader_.load();
22481 }
22482
22483 setupMediaGroups({
22484 sourceType: _this3.sourceType_,
22485 segmentLoaders: {
22486 AUDIO: _this3.audioSegmentLoader_,
22487 SUBTITLES: _this3.subtitleSegmentLoader_,
22488 main: _this3.mainSegmentLoader_
22489 },
22490 tech: _this3.tech_,
22491 requestOptions: _this3.requestOptions_,
22492 masterPlaylistLoader: _this3.masterPlaylistLoader_,
22493 vhs: _this3.vhs_,
22494 master: _this3.master(),
22495 mediaTypes: _this3.mediaTypes_,
22496 blacklistCurrentPlaylist: _this3.blacklistCurrentPlaylist.bind(_this3)
22497 });
22498
22499 _this3.triggerPresenceUsage_(_this3.master(), media);
22500
22501 _this3.setupFirstPlay();
22502
22503 if (!_this3.mediaTypes_.AUDIO.activePlaylistLoader || _this3.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
22504 _this3.trigger('selectedinitialmedia');
22505 } else {
22506 // We must wait for the active audio playlist loader to
22507 // finish setting up before triggering this event so the
22508 // representations API and EME setup is correct
22509 _this3.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
22510 _this3.trigger('selectedinitialmedia');
22511 });
22512 }
22513 });
22514 this.masterPlaylistLoader_.on('loadedplaylist', function () {
22515 if (_this3.loadOnPlay_) {
22516 _this3.tech_.off('play', _this3.loadOnPlay_);
22517 }
22518
22519 var updatedPlaylist = _this3.masterPlaylistLoader_.media();
22520
22521 if (!updatedPlaylist) {
22522 // exclude any variants that are not supported by the browser before selecting
22523 // an initial media as the playlist selectors do not consider browser support
22524 _this3.excludeUnsupportedVariants_();
22525
22526 var selectedMedia;
22527
22528 if (_this3.enableLowInitialPlaylist) {
22529 selectedMedia = _this3.selectInitialPlaylist();
22530 }
22531
22532 if (!selectedMedia) {
22533 selectedMedia = _this3.selectPlaylist();
22534 }
22535
22536 if (!selectedMedia || !_this3.shouldSwitchToMedia_(selectedMedia)) {
22537 return;
22538 }
22539
22540 _this3.initialMedia_ = selectedMedia;
22541
22542 _this3.switchMedia_(_this3.initialMedia_, 'initial'); // In the standard case where a source URL is provided, loadedplaylist will
22543 // fire again since the playlist will be requested. In the case of vhs-json
22544 // (where the manifest object is provided as the source), when the media
22545 // playlist's `segments` list is already available, a media playlist won't be
22546 // requested, and loadedplaylist won't fire again, so the playlist handler must be
22547 // called on its own here.
22548
22549
22550 var haveJsonSource = _this3.sourceType_ === 'vhs-json' && _this3.initialMedia_.segments;
22551
22552 if (!haveJsonSource) {
22553 return;
22554 }
22555
22556 updatedPlaylist = _this3.initialMedia_;
22557 }
22558
22559 _this3.handleUpdatedMediaPlaylist(updatedPlaylist);
22560 });
22561 this.masterPlaylistLoader_.on('error', function () {
22562 _this3.blacklistCurrentPlaylist(_this3.masterPlaylistLoader_.error);
22563 });
22564 this.masterPlaylistLoader_.on('mediachanging', function () {
22565 _this3.mainSegmentLoader_.abort();
22566
22567 _this3.mainSegmentLoader_.pause();
22568 });
22569 this.masterPlaylistLoader_.on('mediachange', function () {
22570 var media = _this3.masterPlaylistLoader_.media();
22571
22572 var requestTimeout = media.targetDuration * 1.5 * 1000; // If we don't have any more available playlists, we don't want to
22573 // time out the request.
22574
22575 if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
22576 _this3.requestOptions_.timeout = 0;
22577 } else {
22578 _this3.requestOptions_.timeout = requestTimeout;
22579 } // TODO: Create a new event on the PlaylistLoader that signals
22580 // that the segments have changed in some way and use that to
22581 // update the SegmentLoader instead of doing it twice here and
22582 // on `loadedplaylist`
22583
22584
22585 _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);
22586
22587 _this3.mainSegmentLoader_.load();
22588
22589 _this3.tech_.trigger({
22590 type: 'mediachange',
22591 bubbles: true
22592 });
22593 });
22594 this.masterPlaylistLoader_.on('playlistunchanged', function () {
22595 var updatedPlaylist = _this3.masterPlaylistLoader_.media(); // ignore unchanged playlists that have already been
22596 // excluded for not-changing. We likely just have a really slowly updating
22597 // playlist.
22598
22599
22600 if (updatedPlaylist.lastExcludeReason_ === 'playlist-unchanged') {
22601 return;
22602 }
22603
22604 var playlistOutdated = _this3.stuckAtPlaylistEnd_(updatedPlaylist);
22605
22606 if (playlistOutdated) {
22607 // Playlist has stopped updating and we're stuck at its end. Try to
22608 // blacklist it and switch to another playlist in the hope that that
22609 // one is updating (and give the player a chance to re-adjust to the
22610 // safe live point).
22611 _this3.blacklistCurrentPlaylist({
22612 message: 'Playlist no longer updating.',
22613 reason: 'playlist-unchanged'
22614 }); // useful for monitoring QoS
22615
22616
22617 _this3.tech_.trigger('playliststuck');
22618 }
22619 });
22620 this.masterPlaylistLoader_.on('renditiondisabled', function () {
22621 _this3.tech_.trigger({
22622 type: 'usage',
22623 name: 'vhs-rendition-disabled'
22624 });
22625
22626 _this3.tech_.trigger({
22627 type: 'usage',
22628 name: 'hls-rendition-disabled'
22629 });
22630 });
22631 this.masterPlaylistLoader_.on('renditionenabled', function () {
22632 _this3.tech_.trigger({
22633 type: 'usage',
22634 name: 'vhs-rendition-enabled'
22635 });
22636
22637 _this3.tech_.trigger({
22638 type: 'usage',
22639 name: 'hls-rendition-enabled'
22640 });
22641 });
22642 }
22643 /**
22644 * Given an updated media playlist (whether it was loaded for the first time, or
22645 * refreshed for live playlists), update any relevant properties and state to reflect
22646 * changes in the media that should be accounted for (e.g., cues and duration).
22647 *
22648 * @param {Object} updatedPlaylist the updated media playlist object
22649 *
22650 * @private
22651 */
22652 ;
22653
22654 _proto.handleUpdatedMediaPlaylist = function handleUpdatedMediaPlaylist(updatedPlaylist) {
22655 if (this.useCueTags_) {
22656 this.updateAdCues_(updatedPlaylist);
22657 } // TODO: Create a new event on the PlaylistLoader that signals
22658 // that the segments have changed in some way and use that to
22659 // update the SegmentLoader instead of doing it twice here and
22660 // on `mediachange`
22661
22662
22663 this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);
22664 this.updateDuration(!updatedPlaylist.endList); // If the player isn't paused, ensure that the segment loader is running,
22665 // as it is possible that it was temporarily stopped while waiting for
22666 // a playlist (e.g., in case the playlist errored and we re-requested it).
22667
22668 if (!this.tech_.paused()) {
22669 this.mainSegmentLoader_.load();
22670
22671 if (this.audioSegmentLoader_) {
22672 this.audioSegmentLoader_.load();
22673 }
22674 }
22675 }
22676 /**
22677 * A helper function for triggering presence usage events once per source
22678 *
22679 * @private
22680 */
22681 ;
22682
22683 _proto.triggerPresenceUsage_ = function triggerPresenceUsage_(master, media) {
22684 var mediaGroups = master.mediaGroups || {};
22685 var defaultDemuxed = true;
22686 var audioGroupKeys = Object.keys(mediaGroups.AUDIO);
22687
22688 for (var mediaGroup in mediaGroups.AUDIO) {
22689 for (var label in mediaGroups.AUDIO[mediaGroup]) {
22690 var properties = mediaGroups.AUDIO[mediaGroup][label];
22691
22692 if (!properties.uri) {
22693 defaultDemuxed = false;
22694 }
22695 }
22696 }
22697
22698 if (defaultDemuxed) {
22699 this.tech_.trigger({
22700 type: 'usage',
22701 name: 'vhs-demuxed'
22702 });
22703 this.tech_.trigger({
22704 type: 'usage',
22705 name: 'hls-demuxed'
22706 });
22707 }
22708
22709 if (Object.keys(mediaGroups.SUBTITLES).length) {
22710 this.tech_.trigger({
22711 type: 'usage',
22712 name: 'vhs-webvtt'
22713 });
22714 this.tech_.trigger({
22715 type: 'usage',
22716 name: 'hls-webvtt'
22717 });
22718 }
22719
22720 if (Vhs$1.Playlist.isAes(media)) {
22721 this.tech_.trigger({
22722 type: 'usage',
22723 name: 'vhs-aes'
22724 });
22725 this.tech_.trigger({
22726 type: 'usage',
22727 name: 'hls-aes'
22728 });
22729 }
22730
22731 if (audioGroupKeys.length && Object.keys(mediaGroups.AUDIO[audioGroupKeys[0]]).length > 1) {
22732 this.tech_.trigger({
22733 type: 'usage',
22734 name: 'vhs-alternate-audio'
22735 });
22736 this.tech_.trigger({
22737 type: 'usage',
22738 name: 'hls-alternate-audio'
22739 });
22740 }
22741
22742 if (this.useCueTags_) {
22743 this.tech_.trigger({
22744 type: 'usage',
22745 name: 'vhs-playlist-cue-tags'
22746 });
22747 this.tech_.trigger({
22748 type: 'usage',
22749 name: 'hls-playlist-cue-tags'
22750 });
22751 }
22752 };
22753
22754 _proto.shouldSwitchToMedia_ = function shouldSwitchToMedia_(nextPlaylist) {
22755 var currentPlaylist = this.masterPlaylistLoader_.media();
22756 var buffered = this.tech_.buffered();
22757 var forwardBuffer = buffered.length ? buffered.end(buffered.length - 1) - this.tech_.currentTime() : 0;
22758 var bufferLowWaterLine = this.bufferLowWaterLine();
22759 var bufferHighWaterLine = this.bufferHighWaterLine();
22760 return shouldSwitchToMedia({
22761 currentPlaylist: currentPlaylist,
22762 nextPlaylist: nextPlaylist,
22763 forwardBuffer: forwardBuffer,
22764 bufferLowWaterLine: bufferLowWaterLine,
22765 bufferHighWaterLine: bufferHighWaterLine,
22766 duration: this.duration(),
22767 experimentalBufferBasedABR: this.experimentalBufferBasedABR,
22768 log: this.logger_
22769 });
22770 }
22771 /**
22772 * Register event handlers on the segment loaders. A helper function
22773 * for construction time.
22774 *
22775 * @private
22776 */
22777 ;
22778
22779 _proto.setupSegmentLoaderListeners_ = function setupSegmentLoaderListeners_() {
22780 var _this4 = this;
22781
22782 if (!this.experimentalBufferBasedABR) {
22783 this.mainSegmentLoader_.on('bandwidthupdate', function () {
22784 var nextPlaylist = _this4.selectPlaylist();
22785
22786 if (_this4.shouldSwitchToMedia_(nextPlaylist)) {
22787 _this4.switchMedia_(nextPlaylist, 'bandwidthupdate');
22788 }
22789
22790 _this4.tech_.trigger('bandwidthupdate');
22791 });
22792 this.mainSegmentLoader_.on('progress', function () {
22793 _this4.trigger('progress');
22794 });
22795 }
22796
22797 this.mainSegmentLoader_.on('error', function () {
22798 _this4.blacklistCurrentPlaylist(_this4.mainSegmentLoader_.error());
22799 });
22800 this.mainSegmentLoader_.on('appenderror', function () {
22801 _this4.error = _this4.mainSegmentLoader_.error_;
22802
22803 _this4.trigger('error');
22804 });
22805 this.mainSegmentLoader_.on('syncinfoupdate', function () {
22806 _this4.onSyncInfoUpdate_();
22807 });
22808 this.mainSegmentLoader_.on('timestampoffset', function () {
22809 _this4.tech_.trigger({
22810 type: 'usage',
22811 name: 'vhs-timestamp-offset'
22812 });
22813
22814 _this4.tech_.trigger({
22815 type: 'usage',
22816 name: 'hls-timestamp-offset'
22817 });
22818 });
22819 this.audioSegmentLoader_.on('syncinfoupdate', function () {
22820 _this4.onSyncInfoUpdate_();
22821 });
22822 this.audioSegmentLoader_.on('appenderror', function () {
22823 _this4.error = _this4.audioSegmentLoader_.error_;
22824
22825 _this4.trigger('error');
22826 });
22827 this.mainSegmentLoader_.on('ended', function () {
22828 _this4.logger_('main segment loader ended');
22829
22830 _this4.onEndOfStream();
22831 });
22832 this.mainSegmentLoader_.on('earlyabort', function (event) {
22833 // never try to early abort with the new ABR algorithm
22834 if (_this4.experimentalBufferBasedABR) {
22835 return;
22836 }
22837
22838 _this4.delegateLoaders_('all', ['abort']);
22839
22840 _this4.blacklistCurrentPlaylist({
22841 message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
22842 }, ABORT_EARLY_BLACKLIST_SECONDS);
22843 });
22844
22845 var updateCodecs = function updateCodecs() {
22846 if (!_this4.sourceUpdater_.hasCreatedSourceBuffers()) {
22847 return _this4.tryToCreateSourceBuffers_();
22848 }
22849
22850 var codecs = _this4.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
22851
22852
22853 if (!codecs) {
22854 return;
22855 }
22856
22857 _this4.sourceUpdater_.addOrChangeSourceBuffers(codecs);
22858 };
22859
22860 this.mainSegmentLoader_.on('trackinfo', updateCodecs);
22861 this.audioSegmentLoader_.on('trackinfo', updateCodecs);
22862 this.mainSegmentLoader_.on('fmp4', function () {
22863 if (!_this4.triggeredFmp4Usage) {
22864 _this4.tech_.trigger({
22865 type: 'usage',
22866 name: 'vhs-fmp4'
22867 });
22868
22869 _this4.tech_.trigger({
22870 type: 'usage',
22871 name: 'hls-fmp4'
22872 });
22873
22874 _this4.triggeredFmp4Usage = true;
22875 }
22876 });
22877 this.audioSegmentLoader_.on('fmp4', function () {
22878 if (!_this4.triggeredFmp4Usage) {
22879 _this4.tech_.trigger({
22880 type: 'usage',
22881 name: 'vhs-fmp4'
22882 });
22883
22884 _this4.tech_.trigger({
22885 type: 'usage',
22886 name: 'hls-fmp4'
22887 });
22888
22889 _this4.triggeredFmp4Usage = true;
22890 }
22891 });
22892 this.audioSegmentLoader_.on('ended', function () {
22893 _this4.logger_('audioSegmentLoader ended');
22894
22895 _this4.onEndOfStream();
22896 });
22897 };
22898
22899 _proto.mediaSecondsLoaded_ = function mediaSecondsLoaded_() {
22900 return this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded; // sum the seconds loaded by both loaders
22901 }
22902 /**
22903 * Call load on our SegmentLoaders
22904 */
22905 ;
22906
22907 _proto.load = function load() {
22908 this.mainSegmentLoader_.load();
22909
22910 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
22911 this.audioSegmentLoader_.load();
22912 }
22913
22914 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
22915 this.subtitleSegmentLoader_.load();
22916 }
22917 }
22918 /**
22919 * Re-tune playback quality level for the current player
22920 * conditions without performing destructive actions, like
22921 * removing already buffered content
22922 *
22923 * @private
22924 * @deprecated
22925 */
22926 ;
22927
22928 _proto.smoothQualityChange_ = function smoothQualityChange_(media) {
22929 if (media === void 0) {
22930 media = this.selectPlaylist();
22931 }
22932
22933 this.fastQualityChange_(media);
22934 }
22935 /**
22936 * Re-tune playback quality level for the current player
22937 * conditions. This method will perform destructive actions like removing
22938 * already buffered content in order to readjust the currently active
22939 * playlist quickly. This is good for manual quality changes
22940 *
22941 * @private
22942 */
22943 ;
22944
22945 _proto.fastQualityChange_ = function fastQualityChange_(media) {
22946 var _this5 = this;
22947
22948 if (media === void 0) {
22949 media = this.selectPlaylist();
22950 }
22951
22952 if (media === this.masterPlaylistLoader_.media()) {
22953 this.logger_('skipping fastQualityChange because new media is same as old');
22954 return;
22955 }
22956
22957 this.switchMedia_(media, 'fast-quality'); // Delete all buffered data to allow an immediate quality switch, then seek to give
22958 // the browser a kick to remove any cached frames from the previous rendition (.04 seconds
22959 // ahead is roughly the minimum that will accomplish this across a variety of content
22960 // in IE and Edge, but seeking in place is sufficient on all other browsers)
22961 // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
22962 // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904
22963
22964 this.mainSegmentLoader_.resetEverything(function () {
22965 // Since this is not a typical seek, we avoid the seekTo method which can cause segments
22966 // from the previously enabled rendition to load before the new playlist has finished loading
22967 if (videojs.browser.IE_VERSION || videojs.browser.IS_EDGE) {
22968 _this5.tech_.setCurrentTime(_this5.tech_.currentTime() + 0.04);
22969 } else {
22970 _this5.tech_.setCurrentTime(_this5.tech_.currentTime());
22971 }
22972 }); // don't need to reset audio as it is reset when media changes
22973 }
22974 /**
22975 * Begin playback.
22976 */
22977 ;
22978
22979 _proto.play = function play() {
22980 if (this.setupFirstPlay()) {
22981 return;
22982 }
22983
22984 if (this.tech_.ended()) {
22985 this.tech_.setCurrentTime(0);
22986 }
22987
22988 if (this.hasPlayed_) {
22989 this.load();
22990 }
22991
22992 var seekable = this.tech_.seekable(); // if the viewer has paused and we fell out of the live window,
22993 // seek forward to the live point
22994
22995 if (this.tech_.duration() === Infinity) {
22996 if (this.tech_.currentTime() < seekable.start(0)) {
22997 return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
22998 }
22999 }
23000 }
23001 /**
23002 * Seek to the latest media position if this is a live video and the
23003 * player and video are loaded and initialized.
23004 */
23005 ;
23006
23007 _proto.setupFirstPlay = function setupFirstPlay() {
23008 var _this6 = this;
23009
23010 var media = this.masterPlaylistLoader_.media(); // Check that everything is ready to begin buffering for the first call to play
23011 // If 1) there is no active media
23012 // 2) the player is paused
23013 // 3) the first play has already been setup
23014 // then exit early
23015
23016 if (!media || this.tech_.paused() || this.hasPlayed_) {
23017 return false;
23018 } // when the video is a live stream
23019
23020
23021 if (!media.endList) {
23022 var seekable = this.seekable();
23023
23024 if (!seekable.length) {
23025 // without a seekable range, the player cannot seek to begin buffering at the live
23026 // point
23027 return false;
23028 }
23029
23030 if (videojs.browser.IE_VERSION && this.tech_.readyState() === 0) {
23031 // IE11 throws an InvalidStateError if you try to set currentTime while the
23032 // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
23033 this.tech_.one('loadedmetadata', function () {
23034 _this6.trigger('firstplay');
23035
23036 _this6.tech_.setCurrentTime(seekable.end(0));
23037
23038 _this6.hasPlayed_ = true;
23039 });
23040 return false;
23041 } // trigger firstplay to inform the source handler to ignore the next seek event
23042
23043
23044 this.trigger('firstplay'); // seek to the live point
23045
23046 this.tech_.setCurrentTime(seekable.end(0));
23047 }
23048
23049 this.hasPlayed_ = true; // we can begin loading now that everything is ready
23050
23051 this.load();
23052 return true;
23053 }
23054 /**
23055 * handle the sourceopen event on the MediaSource
23056 *
23057 * @private
23058 */
23059 ;
23060
23061 _proto.handleSourceOpen_ = function handleSourceOpen_() {
23062 // Only attempt to create the source buffer if none already exist.
23063 // handleSourceOpen is also called when we are "re-opening" a source buffer
23064 // after `endOfStream` has been called (in response to a seek for instance)
23065 this.tryToCreateSourceBuffers_(); // if autoplay is enabled, begin playback. This is duplicative of
23066 // code in video.js but is required because play() must be invoked
23067 // *after* the media source has opened.
23068
23069 if (this.tech_.autoplay()) {
23070 var playPromise = this.tech_.play(); // Catch/silence error when a pause interrupts a play request
23071 // on browsers which return a promise
23072
23073 if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
23074 playPromise.then(null, function (e) {});
23075 }
23076 }
23077
23078 this.trigger('sourceopen');
23079 }
23080 /**
23081 * handle the sourceended event on the MediaSource
23082 *
23083 * @private
23084 */
23085 ;
23086
23087 _proto.handleSourceEnded_ = function handleSourceEnded_() {
23088 if (!this.inbandTextTracks_.metadataTrack_) {
23089 return;
23090 }
23091
23092 var cues = this.inbandTextTracks_.metadataTrack_.cues;
23093
23094 if (!cues || !cues.length) {
23095 return;
23096 }
23097
23098 var duration = this.duration();
23099 cues[cues.length - 1].endTime = isNaN(duration) || Math.abs(duration) === Infinity ? Number.MAX_VALUE : duration;
23100 }
23101 /**
23102 * handle the durationchange event on the MediaSource
23103 *
23104 * @private
23105 */
23106 ;
23107
23108 _proto.handleDurationChange_ = function handleDurationChange_() {
23109 this.tech_.trigger('durationchange');
23110 }
23111 /**
23112 * Calls endOfStream on the media source when all active stream types have called
23113 * endOfStream
23114 *
23115 * @param {string} streamType
23116 * Stream type of the segment loader that called endOfStream
23117 * @private
23118 */
23119 ;
23120
23121 _proto.onEndOfStream = function onEndOfStream() {
23122 var isEndOfStream = this.mainSegmentLoader_.ended_;
23123
23124 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
23125 var mainMediaInfo = this.mainSegmentLoader_.getCurrentMediaInfo_(); // if the audio playlist loader exists, then alternate audio is active
23126
23127 if (!mainMediaInfo || mainMediaInfo.hasVideo) {
23128 // if we do not know if the main segment loader contains video yet or if we
23129 // definitively know the main segment loader contains video, then we need to wait
23130 // for both main and audio segment loaders to call endOfStream
23131 isEndOfStream = isEndOfStream && this.audioSegmentLoader_.ended_;
23132 } else {
23133 // otherwise just rely on the audio loader
23134 isEndOfStream = this.audioSegmentLoader_.ended_;
23135 }
23136 }
23137
23138 if (!isEndOfStream) {
23139 return;
23140 }
23141
23142 this.stopABRTimer_();
23143 this.sourceUpdater_.endOfStream();
23144 }
23145 /**
23146 * Check if a playlist has stopped being updated
23147 *
23148 * @param {Object} playlist the media playlist object
23149 * @return {boolean} whether the playlist has stopped being updated or not
23150 */
23151 ;
23152
23153 _proto.stuckAtPlaylistEnd_ = function stuckAtPlaylistEnd_(playlist) {
23154 var seekable = this.seekable();
23155
23156 if (!seekable.length) {
23157 // playlist doesn't have enough information to determine whether we are stuck
23158 return false;
23159 }
23160
23161 var expired = this.syncController_.getExpiredTime(playlist, this.duration());
23162
23163 if (expired === null) {
23164 return false;
23165 } // does not use the safe live end to calculate playlist end, since we
23166 // don't want to say we are stuck while there is still content
23167
23168
23169 var absolutePlaylistEnd = Vhs$1.Playlist.playlistEnd(playlist, expired);
23170 var currentTime = this.tech_.currentTime();
23171 var buffered = this.tech_.buffered();
23172
23173 if (!buffered.length) {
23174 // return true if the playhead reached the absolute end of the playlist
23175 return absolutePlaylistEnd - currentTime <= SAFE_TIME_DELTA;
23176 }
23177
23178 var bufferedEnd = buffered.end(buffered.length - 1); // return true if there is too little buffer left and buffer has reached absolute
23179 // end of playlist
23180
23181 return bufferedEnd - currentTime <= SAFE_TIME_DELTA && absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA;
23182 }
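/*
 * Illustrative sketch (not part of the library source): how the stuck check
 * above resolves with concrete numbers. SAFE_TIME_DELTA is 3 * (1/30) = 0.1s;
 * the playlist end and buffer values below are hypothetical.
 *
 *   var SAFE_TIME_DELTA = 0.1;
 *   var absolutePlaylistEnd = 60;
 *   var currentTime = 59.95;
 *   var bufferedEnd = 59.98; // end of the last buffered range
 *
 *   // both deltas are within 0.1s, so the playhead is considered stuck:
 *   bufferedEnd - currentTime <= SAFE_TIME_DELTA &&
 *     absolutePlaylistEnd - bufferedEnd <= SAFE_TIME_DELTA; // => true
 */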
23183 /**
23184 * Blacklists a playlist when an error occurs for a set amount of time
23185 * making it unavailable for selection by the rendition selection algorithm
23186 * and then forces a new playlist (rendition) selection.
23187 *
23188 * @param {Object=} error an optional error that may include the playlist
23189 * to blacklist
23190 * @param {number=} blacklistDuration an optional number of seconds to blacklist the
23191 * playlist
23192 */
23193 ;
23194
23195 _proto.blacklistCurrentPlaylist = function blacklistCurrentPlaylist(error, blacklistDuration) {
23196 if (error === void 0) {
23197 error = {};
23198 }
23199
23200 // If the `error` was generated by the playlist loader, it will contain
23201 // the playlist we were trying to load (but failed) and that should be
23202 // blacklisted instead of the currently selected playlist which is likely
23203 // out-of-date in this scenario
23204 var currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();
23205 blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration; // If there is no current playlist, then an error occurred while we were
23206 // trying to load the master OR while we were disposing of the tech
23207
23208 if (!currentPlaylist) {
23209 this.error = error;
23210
23211 if (this.mediaSource.readyState !== 'open') {
23212 this.trigger('error');
23213 } else {
23214 this.sourceUpdater_.endOfStream('network');
23215 }
23216
23217 return;
23218 }
23219
23220 currentPlaylist.playlistErrors_++;
23221 var playlists = this.masterPlaylistLoader_.master.playlists;
23222 var enabledPlaylists = playlists.filter(isEnabled);
23223 var isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === currentPlaylist; // Don't blacklist the only playlist unless it was blacklisted
23224 // forever
23225
23226 if (playlists.length === 1 && blacklistDuration !== Infinity) {
23227 videojs.log.warn("Problem encountered with playlist " + currentPlaylist.id + ". " + 'Trying again since it is the only playlist.');
23228 this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delay
23229
23230 return this.masterPlaylistLoader_.load(isFinalRendition);
23231 }
23232
23233 if (isFinalRendition) {
23234 // Since we're on the final non-blacklisted playlist, and we're about to blacklist
23235 // it, instead of erring the player or retrying this playlist, clear out the current
23236 // blacklist. This allows other playlists to be attempted in case any have been
23237 // fixed.
23238 var reincluded = false;
23239 playlists.forEach(function (playlist) {
23240 // skip current playlist which is about to be blacklisted
23241 if (playlist === currentPlaylist) {
23242 return;
23243 }
23244
23245 var excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.
23246
23247 if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {
23248 reincluded = true;
23249 delete playlist.excludeUntil;
23250 }
23251 });
23252
23253 if (reincluded) {
23254 videojs.log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are re-attempting a
23255 // previously excluded playlist. This is needed for users relying on the retryplaylist event to catch a
23256 // case where the player might be stuck and looping through "dead" playlists.
23257
23258 this.tech_.trigger('retryplaylist');
23259 }
23260 } // Blacklist this playlist
23261
23262
23263 var excludeUntil;
23264
23265 if (currentPlaylist.playlistErrors_ > this.maxPlaylistRetries) {
23266 excludeUntil = Infinity;
23267 } else {
23268 excludeUntil = Date.now() + blacklistDuration * 1000;
23269 }
23270
23271 currentPlaylist.excludeUntil = excludeUntil;
23272
23273 if (error.reason) {
23274 currentPlaylist.lastExcludeReason_ = error.reason;
23275 }
23276
23277 this.tech_.trigger('blacklistplaylist');
23278 this.tech_.trigger({
23279 type: 'usage',
23280 name: 'vhs-rendition-blacklisted'
23281 });
23282 this.tech_.trigger({
23283 type: 'usage',
23284 name: 'hls-rendition-blacklisted'
23285 }); // TODO: should we select a new playlist if this blacklist wasn't for the currentPlaylist?
23286 // Would be something like media().id !== currentPlaylist.id and we would need something
23287 // like `pendingMedia` in playlist loaders to check against that too. This will prevent us
23288 // from loading a new playlist on any blacklist.
23289 // Select a new playlist
23290
23291 var nextPlaylist = this.selectPlaylist();
23292
23293 if (!nextPlaylist) {
23294 this.error = 'Playback cannot continue. No available working or supported playlists.';
23295 this.trigger('error');
23296 return;
23297 }
23298
23299 var logFn = error.internal ? this.logger_ : videojs.log.warn;
23300 var errorMessage = error.message ? ' ' + error.message : '';
23301 logFn((error.internal ? 'Internal problem' : 'Problem') + " encountered with playlist " + currentPlaylist.id + "." + (errorMessage + " Switching to playlist " + nextPlaylist.id + ".")); // if audio group changed reset audio loaders
23302
23303 if (nextPlaylist.attributes.AUDIO !== currentPlaylist.attributes.AUDIO) {
23304 this.delegateLoaders_('audio', ['abort', 'pause']);
23305 } // if subtitle group changed reset subtitle loaders
23306
23307
23308 if (nextPlaylist.attributes.SUBTITLES !== currentPlaylist.attributes.SUBTITLES) {
23309 this.delegateLoaders_('subtitle', ['abort', 'pause']);
23310 }
23311
23312 this.delegateLoaders_('main', ['abort', 'pause']);
23313 var delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;
23314 var shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay if this is the final rendition or if the last playlist refresh happened within half a targetDuration
23315
23316 return this.switchMedia_(nextPlaylist, 'exclude', isFinalRendition || shouldDelay);
23317 }
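/*
 * Illustrative sketch (not part of the library source): how `excludeUntil` is
 * derived above. The values 300 and 2 are hypothetical option settings.
 *
 *   var blacklistDuration = 300; // seconds
 *   var maxPlaylistRetries = 2;
 *
 *   // the playlist has failed more times than allowed: exclude it permanently
 *   //   playlist.playlistErrors_ = 3 -> playlist.excludeUntil = Infinity
 *
 *   // otherwise exclude it temporarily, here for five minutes:
 *   //   playlist.excludeUntil = Date.now() + 300 * 1000
 */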
23318 /**
23319 * Pause all segment/playlist loaders
23320 */
23321 ;
23322
23323 _proto.pauseLoading = function pauseLoading() {
23324 this.delegateLoaders_('all', ['abort', 'pause']);
23325 this.stopABRTimer_();
23326 }
23327 /**
23328 * Call a set of functions in order on playlist loaders, segment loaders,
23329 * or both types of loaders.
23330 *
23331 * @param {string} filter
23332 * Filter loaders that should call fnNames using a string. Can be:
23333 * * all - run on all loaders
23334 * * audio - run on all audio loaders
23335 * * subtitle - run on all subtitle loaders
23336 * * main - run on the main/master loaders
23337 *
23338 * @param {Array|string} fnNames
23339 * A string or array of function names to call.
23340 */
23341 ;
23342
23343 _proto.delegateLoaders_ = function delegateLoaders_(filter, fnNames) {
23344 var _this7 = this;
23345
23346 var loaders = [];
23347 var dontFilterPlaylist = filter === 'all';
23348
23349 if (dontFilterPlaylist || filter === 'main') {
23350 loaders.push(this.masterPlaylistLoader_);
23351 }
23352
23353 var mediaTypes = [];
23354
23355 if (dontFilterPlaylist || filter === 'audio') {
23356 mediaTypes.push('AUDIO');
23357 }
23358
23359 if (dontFilterPlaylist || filter === 'subtitle') {
23360 mediaTypes.push('CLOSED-CAPTIONS');
23361 mediaTypes.push('SUBTITLES');
23362 }
23363
23364 mediaTypes.forEach(function (mediaType) {
23365 var loader = _this7.mediaTypes_[mediaType] && _this7.mediaTypes_[mediaType].activePlaylistLoader;
23366
23367 if (loader) {
23368 loaders.push(loader);
23369 }
23370 });
23371 ['main', 'audio', 'subtitle'].forEach(function (name) {
23372 var loader = _this7[name + "SegmentLoader_"];
23373
23374 if (loader && (filter === name || filter === 'all')) {
23375 loaders.push(loader);
23376 }
23377 });
23378 loaders.forEach(function (loader) {
23379 return fnNames.forEach(function (fnName) {
23380 if (typeof loader[fnName] === 'function') {
23381 loader[fnName]();
23382 }
23383 });
23384 });
23385 }
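/*
 * Usage sketch (not part of the library source): `delegateLoaders_` is how the
 * controller fans a set of method calls out to loaders, e.g. the calls made in
 * `blacklistCurrentPlaylist` and `pauseLoading` above.
 *
 *   this.delegateLoaders_('audio', ['abort', 'pause']); // audio loaders only
 *   this.delegateLoaders_('all', ['abort', 'pause']);   // every loader
 */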
23386 /**
23387 * set the current time on all segment loaders
23388 *
23389 * @param {number} currentTime the current time to set
23390 * @return {number} the current time
23391 */
23392 ;
23393
23394 _proto.setCurrentTime = function setCurrentTime(currentTime) {
23395 var buffered = findRange(this.tech_.buffered(), currentTime);
23396
23397 if (!(this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media())) {
23398 // return immediately if the metadata is not ready yet
23399 return 0;
23400 } // it's clearly an edge case, but don't throw an error if asked to
23401 // seek within an empty playlist
23402
23403
23404 if (!this.masterPlaylistLoader_.media().segments) {
23405 return 0;
23406 } // if the seek location is already buffered, continue buffering as usual
23407
23408
23409 if (buffered && buffered.length) {
23410 return currentTime;
23411 } // cancel outstanding requests so we begin buffering at the new
23412 // location
23413
23414
23415 this.mainSegmentLoader_.resetEverything();
23416 this.mainSegmentLoader_.abort();
23417
23418 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
23419 this.audioSegmentLoader_.resetEverything();
23420 this.audioSegmentLoader_.abort();
23421 }
23422
23423 if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
23424 this.subtitleSegmentLoader_.resetEverything();
23425 this.subtitleSegmentLoader_.abort();
23426 } // start segment loader loading in case they are paused
23427
23428
23429 this.load();
23430 }
23431 /**
23432 * get the current duration
23433 *
23434 * @return {number} the duration
23435 */
23436 ;
23437
23438 _proto.duration = function duration() {
23439 if (!this.masterPlaylistLoader_) {
23440 return 0;
23441 }
23442
23443 var media = this.masterPlaylistLoader_.media();
23444
23445 if (!media) {
23446 // no playlists loaded yet, so can't determine a duration
23447 return 0;
23448 } // Don't rely on the media source for duration in the case of a live playlist since
23449 // setting the native MediaSource's duration to infinity ends up with consequences to
23450 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
23451 //
23452 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
23453 // however, few browsers have support for setLiveSeekableRange()
23454 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
23455 //
23456 // Until a time when the duration of the media source can be set to infinity, and a
23457 // seekable range specified across browsers, just return Infinity.
23458
23459
23460 if (!media.endList) {
23461 return Infinity;
23462 } // Since this is a VOD video, it is safe to rely on the media source's duration (if
23463 // available). If it's not available, fall back to a playlist-calculated estimate.
23464
23465
23466 if (this.mediaSource) {
23467 return this.mediaSource.duration;
23468 }
23469
23470 return Vhs$1.Playlist.duration(media);
23471 }
23472 /**
23473 * check the seekable range
23474 *
23475 * @return {TimeRanges} the seekable range
23476 */
23477 ;
23478
23479 _proto.seekable = function seekable() {
23480 return this.seekable_;
23481 };
23482
23483 _proto.onSyncInfoUpdate_ = function onSyncInfoUpdate_() {
23484 var audioSeekable;
23485
23486 if (!this.masterPlaylistLoader_) {
23487 return;
23488 }
23489
23490 var media = this.masterPlaylistLoader_.media();
23491
23492 if (!media) {
23493 return;
23494 }
23495
23496 var expired = this.syncController_.getExpiredTime(media, this.duration());
23497
23498 if (expired === null) {
23499 // not enough information to update seekable
23500 return;
23501 }
23502
23503 var master = this.masterPlaylistLoader_.master;
23504 var mainSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
23505
23506 if (mainSeekable.length === 0) {
23507 return;
23508 }
23509
23510 if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
23511 media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
23512 expired = this.syncController_.getExpiredTime(media, this.duration());
23513
23514 if (expired === null) {
23515 return;
23516 }
23517
23518 audioSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));
23519
23520 if (audioSeekable.length === 0) {
23521 return;
23522 }
23523 }
23524
23525 var oldEnd;
23526 var oldStart;
23527
23528 if (this.seekable_ && this.seekable_.length) {
23529 oldEnd = this.seekable_.end(0);
23530 oldStart = this.seekable_.start(0);
23531 }
23532
23533 if (!audioSeekable) {
23534 // seekable has been calculated based on buffering video data so it
23535 // can be returned directly
23536 this.seekable_ = mainSeekable;
23537 } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
23538 // seekables are pretty far off, rely on main
23539 this.seekable_ = mainSeekable;
23540 } else {
23541 this.seekable_ = videojs.createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
23542 } // seekable is the same as last time
23543
23544
23545 if (this.seekable_ && this.seekable_.length) {
23546 if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
23547 return;
23548 }
23549 }
23550
23551 this.logger_("seekable updated [" + printableRange(this.seekable_) + "]");
23552 this.tech_.trigger('seekablechanged');
23553 }
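/*
 * Illustrative sketch (not part of the library source): the seekable merge
 * above keeps the intersection of the main and audio ranges when they overlap.
 * The range values are hypothetical.
 *
 *   // mainSeekable  = [[10, 100]]
 *   // audioSeekable = [[12, 98]]
 *   // merged        = [[Math.max(10, 12), Math.min(100, 98)]] -> [[12, 98]]
 *   //
 *   // if the two ranges do not overlap at all, mainSeekable wins.
 */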
23554 /**
23555 * Update the player duration
23556 */
23557 ;
23558
23559 _proto.updateDuration = function updateDuration(isLive) {
23560 if (this.updateDuration_) {
23561 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
23562 this.updateDuration_ = null;
23563 }
23564
23565 if (this.mediaSource.readyState !== 'open') {
23566 this.updateDuration_ = this.updateDuration.bind(this, isLive);
23567 this.mediaSource.addEventListener('sourceopen', this.updateDuration_);
23568 return;
23569 }
23570
23571 if (isLive) {
23572 var seekable = this.seekable();
23573
23574 if (!seekable.length) {
23575 return;
23576 } // Even in the case of a live playlist, the native MediaSource's duration should not
23577 // be set to Infinity (even though this would be expected for a live playlist), since
23578 // setting the native MediaSource's duration to infinity ends up with consequences to
23579 // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
23580 //
23581 // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
23582 // however, few browsers have support for setLiveSeekableRange()
23583 // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
23584 //
23585 // Until a time when the duration of the media source can be set to infinity, and a
23586 // seekable range specified across browsers, the duration should be greater than or
23587 // equal to the last possible seekable value.
23588 // MediaSource duration starts as NaN
23589 // It is possible (and probable) that this case will never be reached for many
23590 // sources, since the MediaSource reports duration as the highest value without
23591 // accounting for timestamp offset. For example, if the timestamp offset is -100 and
23592 // we buffered times 0 to 100 with real times of 100 to 200, even though current
23593 // time will be between 0 and 100, the native media source may report the duration
23594 // as 200. However, since we report duration separate from the media source (as
23595 // Infinity), and as long as the native media source duration value is greater than
23596 // our reported seekable range, seeks will work as expected. The large number as
23597 // duration for live is actually a strategy used by some players to work around the
23598 // issue of live seekable ranges cited above.
23599
23600
23601 if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {
23602 this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));
23603 }
23604
23605 return;
23606 }
23607
23608 var buffered = this.tech_.buffered();
23609 var duration = Vhs$1.Playlist.duration(this.masterPlaylistLoader_.media());
23610
23611 if (buffered.length > 0) {
23612 duration = Math.max(duration, buffered.end(buffered.length - 1));
23613 }
23614
23615 if (this.mediaSource.duration !== duration) {
23616 this.sourceUpdater_.setDuration(duration);
23617 }
23618 }
23619 /**
23620 * dispose of the MasterPlaylistController and everything
23621 * that it controls
23622 */
23623 ;
23624
23625 _proto.dispose = function dispose() {
23626 var _this8 = this;
23627
23628 this.trigger('dispose');
23629 this.decrypter_.terminate();
23630 this.masterPlaylistLoader_.dispose();
23631 this.mainSegmentLoader_.dispose();
23632
23633 if (this.loadOnPlay_) {
23634 this.tech_.off('play', this.loadOnPlay_);
23635 }
23636
23637 ['AUDIO', 'SUBTITLES'].forEach(function (type) {
23638 var groups = _this8.mediaTypes_[type].groups;
23639
23640 for (var id in groups) {
23641 groups[id].forEach(function (group) {
23642 if (group.playlistLoader) {
23643 group.playlistLoader.dispose();
23644 }
23645 });
23646 }
23647 });
23648 this.audioSegmentLoader_.dispose();
23649 this.subtitleSegmentLoader_.dispose();
23650 this.sourceUpdater_.dispose();
23651 this.timelineChangeController_.dispose();
23652 this.stopABRTimer_();
23653
23654 if (this.updateDuration_) {
23655 this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
23656 }
23657
23658 this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_); // remove the remaining MediaSource event listeners
23659
23660 this.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);
23661 this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);
23662 this.off();
23663 }
23664 /**
23665 * return the master playlist object if we have one
23666 *
23667 * @return {Object} the master playlist object that we parsed
23668 */
23669 ;
23670
23671 _proto.master = function master() {
23672 return this.masterPlaylistLoader_.master;
23673 }
23674 /**
23675 * return the currently selected playlist
23676 *
23677 * @return {Object} the currently selected playlist object that we parsed
23678 */
23679 ;
23680
23681 _proto.media = function media() {
23682 // playlist loader will not return media if it has not been fully loaded
23683 return this.masterPlaylistLoader_.media() || this.initialMedia_;
23684 };
23685
23686 _proto.areMediaTypesKnown_ = function areMediaTypesKnown_() {
23687 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
23688 var hasMainMediaInfo = !!this.mainSegmentLoader_.getCurrentMediaInfo_(); // if we are not using an audio loader, then we have audio media info
23689 // otherwise check on the segment loader.
23690
23691 var hasAudioMediaInfo = !usingAudioLoader ? true : !!this.audioSegmentLoader_.getCurrentMediaInfo_(); // one or both loaders have not loaded sufficiently to get codecs
23692
23693 if (!hasMainMediaInfo || !hasAudioMediaInfo) {
23694 return false;
23695 }
23696
23697 return true;
23698 };
23699
23700 _proto.getCodecsOrExclude_ = function getCodecsOrExclude_() {
23701 var _this9 = this;
23702
23703 var media = {
23704 main: this.mainSegmentLoader_.getCurrentMediaInfo_() || {},
23705 audio: this.audioSegmentLoader_.getCurrentMediaInfo_() || {}
23706 }; // set "main" media equal to video
23707
23708 media.video = media.main;
23709 var playlistCodecs = codecsForPlaylist(this.master(), this.media());
23710 var codecs = {};
23711 var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
23712
23713 if (media.main.hasVideo) {
23714 codecs.video = playlistCodecs.video || media.main.videoCodec || DEFAULT_VIDEO_CODEC;
23715 }
23716
23717 if (media.main.isMuxed) {
23718 codecs.video += "," + (playlistCodecs.audio || media.main.audioCodec || DEFAULT_AUDIO_CODEC);
23719 }
23720
23721 if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {
23722 codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function below
23723
23724 media.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;
23725 } // no codecs, no playback.
23726
23727
23728 if (!codecs.audio && !codecs.video) {
23729 this.blacklistCurrentPlaylist({
23730 playlist: this.media(),
23731 message: 'Could not determine codecs for playlist.',
23732 blacklistDuration: Infinity
23733 });
23734 return;
23735 } // fmp4 relies on browser support, while ts relies on muxer support
23736
23737
23738 var supportFunction = function supportFunction(isFmp4, codec) {
23739 return isFmp4 ? browserSupportsCodec(codec) : muxerSupportsCodec(codec);
23740 };
23741
23742 var unsupportedCodecs = {};
23743 var unsupportedAudio;
23744 ['video', 'audio'].forEach(function (type) {
23745 if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {
23746 var supporter = media[type].isFmp4 ? 'browser' : 'muxer';
23747 unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];
23748 unsupportedCodecs[supporter].push(codecs[type]);
23749
23750 if (type === 'audio') {
23751 unsupportedAudio = supporter;
23752 }
23753 }
23754 });
23755
23756 if (usingAudioLoader && unsupportedAudio && this.media().attributes.AUDIO) {
23757 var audioGroup = this.media().attributes.AUDIO;
23758 this.master().playlists.forEach(function (variant) {
23759 var variantAudioGroup = variant.attributes && variant.attributes.AUDIO;
23760
23761 if (variantAudioGroup === audioGroup && variant !== _this9.media()) {
23762 variant.excludeUntil = Infinity;
23763 }
23764 });
23765 this.logger_("excluding audio group " + audioGroup + " as " + unsupportedAudio + " does not support codec(s): \"" + codecs.audio + "\"");
23766 } // if we have any unsupported codecs blacklist this playlist.
23767
23768
23769 if (Object.keys(unsupportedCodecs).length) {
23770 var message = Object.keys(unsupportedCodecs).reduce(function (acc, supporter) {
23771 if (acc) {
23772 acc += ', ';
23773 }
23774
23775 acc += supporter + " does not support codec(s): \"" + unsupportedCodecs[supporter].join(',') + "\"";
23776 return acc;
23777 }, '') + '.';
23778 this.blacklistCurrentPlaylist({
23779 playlist: this.media(),
23780 internal: true,
23781 message: message,
23782 blacklistDuration: Infinity
23783 });
23784 return;
23785 } // check if codec switching is happening
23786
23787
23788 if (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {
23789 var switchMessages = [];
23790 ['video', 'audio'].forEach(function (type) {
23791 var newCodec = (parseCodecs(_this9.sourceUpdater_.codecs[type] || '')[0] || {}).type;
23792 var oldCodec = (parseCodecs(codecs[type] || '')[0] || {}).type;
23793
23794 if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {
23795 switchMessages.push("\"" + _this9.sourceUpdater_.codecs[type] + "\" -> \"" + codecs[type] + "\"");
23796 }
23797 });
23798
23799 if (switchMessages.length) {
23800 this.blacklistCurrentPlaylist({
23801 playlist: this.media(),
23802 message: "Codec switching not supported: " + switchMessages.join(', ') + ".",
23803 blacklistDuration: Infinity,
23804 internal: true
23805 });
23806 return;
23807 }
23808 } // TODO: when using the muxer shouldn't we just return
23809 // the codecs that the muxer outputs?
23810
23811
23812 return codecs;
23813 }
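/*
 * Illustrative sketch (not part of the library source): a typical return value
 * of `getCodecsOrExclude_` and how `tryToCreateSourceBuffers_` below flattens
 * it into a codec string. The codec values are hypothetical.
 *
 *   var codecs = { video: 'avc1.4d400d', audio: 'mp4a.40.2' };
 *   var codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
 *   // -> 'avc1.4d400d,mp4a.40.2'
 */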
23814 /**
23815 * Create source buffers and exclude any incompatible renditions.
23816 *
23817 * @private
23818 */
23819 ;
23820
23821 _proto.tryToCreateSourceBuffers_ = function tryToCreateSourceBuffers_() {
23822 // media source is not ready yet or sourceBuffers are already
23823 // created.
23824 if (this.mediaSource.readyState !== 'open' || this.sourceUpdater_.hasCreatedSourceBuffers()) {
23825 return;
23826 }
23827
23828 if (!this.areMediaTypesKnown_()) {
23829 return;
23830 }
23831
23832 var codecs = this.getCodecsOrExclude_(); // no codecs means that the playlist was excluded
23833
23834 if (!codecs) {
23835 return;
23836 }
23837
23838 this.sourceUpdater_.createSourceBuffers(codecs);
23839 var codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
23840 this.excludeIncompatibleVariants_(codecString);
23841 }
23842 /**
23843 * Excludes playlists with codecs that are unsupported by the muxer and browser.
23844 */
23845 ;
23846
23847 _proto.excludeUnsupportedVariants_ = function excludeUnsupportedVariants_() {
23848 var _this10 = this;
23849
23850 var playlists = this.master().playlists;
23851 var ids = []; // TODO: why don't we have a property to loop through all
23852 // playlists? Why did we ever mix indexes and keys?
23853
23854 Object.keys(playlists).forEach(function (key) {
23855 var variant = playlists[key]; // check if we already processed this playlist.
23856
23857 if (ids.indexOf(variant.id) !== -1) {
23858 return;
23859 }
23860
23861 ids.push(variant.id);
23862 var codecs = codecsForPlaylist(_this10.master(), variant); // master is a method and must be invoked
23863 var unsupported = [];
23864
23865 if (codecs.audio && !muxerSupportsCodec(codecs.audio) && !browserSupportsCodec(codecs.audio)) {
23866 unsupported.push("audio codec " + codecs.audio);
23867 }
23868
23869 if (codecs.video && !muxerSupportsCodec(codecs.video) && !browserSupportsCodec(codecs.video)) {
23870 unsupported.push("video codec " + codecs.video);
23871 }
23872
23873 if (codecs.text && codecs.text === 'stpp.ttml.im1t') {
23874 unsupported.push("text codec " + codecs.text);
23875 }
23876
23877 if (unsupported.length) {
23878 variant.excludeUntil = Infinity;
23879
23880 _this10.logger_("excluding " + variant.id + " for unsupported: " + unsupported.join(', '));
23881 }
23882 });
23883 }
23884 /**
23885 * Blacklist playlists that are known to be codec or
23886 * stream-incompatible with the SourceBuffer configuration. For
23887 * instance, Media Source Extensions would cause the video element to
23888 * stall waiting for video data if you switched from a variant with
23889 * video and audio to an audio-only one.
23890 *
23891 * @param {Object} media a media playlist compatible with the current
23892 * set of SourceBuffers. Variants in the current master playlist that
23893 * do not appear to have compatible codec or stream configurations
23894 * will be excluded from the default playlist selection algorithm
23895 * indefinitely.
23896 * @private
23897 */
23898 ;
23899
23900 _proto.excludeIncompatibleVariants_ = function excludeIncompatibleVariants_(codecString) {
23901 var _this11 = this;
23902
23903 var ids = [];
23904 var playlists = this.master().playlists;
23905 var codecs = unwrapCodecList(parseCodecs(codecString));
23906 var codecCount_ = codecCount(codecs);
23907 var videoDetails = codecs.video && parseCodecs(codecs.video)[0] || null;
23908 var audioDetails = codecs.audio && parseCodecs(codecs.audio)[0] || null;
23909 Object.keys(playlists).forEach(function (key) {
23910 var variant = playlists[key]; // check if we already processed this playlist.
23911 // or if it is already excluded forever.
23912
23913 if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {
23914 return;
23915 }
23916
23917 ids.push(variant.id);
23918 var blacklistReasons = []; // get codecs from the playlist for this variant
23919
23920 var variantCodecs = codecsForPlaylist(_this11.masterPlaylistLoader_.master, variant);
23921 var variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this
23922 // variant is incompatible. Wait for mux.js to probe
23923
23924 if (!variantCodecs.audio && !variantCodecs.video) {
23925 return;
23926 } // TODO: we can support this by removing the
23927 // old media source and creating a new one, but it will take some work.
23928 // The number of streams cannot change
23929
23930
23931 if (variantCodecCount !== codecCount_) {
23932 blacklistReasons.push("codec count \"" + variantCodecCount + "\" !== \"" + codecCount_ + "\"");
23933 } // only exclude playlists by codec change, if codecs cannot switch
23934 // during playback.
23935
23936
23937 if (!_this11.sourceUpdater_.canChangeType()) {
23938 var variantVideoDetails = variantCodecs.video && parseCodecs(variantCodecs.video)[0] || null;
23939 var variantAudioDetails = variantCodecs.audio && parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot change
23940
23941 if (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {
23942 blacklistReasons.push("video codec \"" + variantVideoDetails.type + "\" !== \"" + videoDetails.type + "\"");
23943 } // the audio codec cannot change
23944
23945
23946 if (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {
23947 blacklistReasons.push("audio codec \"" + variantAudioDetails.type + "\" !== \"" + audioDetails.type + "\"");
23948 }
23949 }
23950
23951 if (blacklistReasons.length) {
23952 variant.excludeUntil = Infinity;
23953
23954 _this11.logger_("blacklisting " + variant.id + ": " + blacklistReasons.join(' && '));
23955 }
23956 });
23957 };
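/*
 * Illustrative sketch (not part of the library source): the codec-count check
 * above. An audio+video variant has a count of 2 and an audio-only variant has
 * a count of 1, so switching between them is rejected when the source buffers
 * cannot change type. The codec values are hypothetical.
 *
 *   // current: { video: 'avc1.4d400d', audio: 'mp4a.40.2' } -> count 2
 *   // variant: { audio: 'mp4a.40.2' }                       -> count 1
 *   // 1 !== 2 -> variant.excludeUntil = Infinity
 */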
23958
23959 _proto.updateAdCues_ = function updateAdCues_(media) {
23960 var offset = 0;
23961 var seekable = this.seekable();
23962
23963 if (seekable.length) {
23964 offset = seekable.start(0);
23965 }
23966
23967 updateAdCues(media, this.cueTagsTrack_, offset);
23968 }
23969 /**
23970 * Calculates the desired forward buffer length based on current time
23971 *
23972 * @return {number} Desired forward buffer length in seconds
23973 */
23974 ;
23975
23976 _proto.goalBufferLength = function goalBufferLength() {
23977 var currentTime = this.tech_.currentTime();
23978 var initial = Config.GOAL_BUFFER_LENGTH;
23979 var rate = Config.GOAL_BUFFER_LENGTH_RATE;
23980 var max = Math.max(initial, Config.MAX_GOAL_BUFFER_LENGTH);
23981 return Math.min(initial + currentTime * rate, max);
23982 }
23983 /**
23984 * Calculates the desired buffer low water line based on current time
23985 *
23986 * @return {number} Desired buffer low water line in seconds
23987 */
23988 ;
23989
23990 _proto.bufferLowWaterLine = function bufferLowWaterLine() {
23991 var currentTime = this.tech_.currentTime();
23992 var initial = Config.BUFFER_LOW_WATER_LINE;
23993 var rate = Config.BUFFER_LOW_WATER_LINE_RATE;
23994 var max = Math.max(initial, Config.MAX_BUFFER_LOW_WATER_LINE);
23995 var newMax = Math.max(initial, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE);
23996 return Math.min(initial + currentTime * rate, this.experimentalBufferBasedABR ? newMax : max);
23997 };
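/*
 * Illustrative sketch (not part of the library source): both water lines above
 * grow linearly with playback time and are clamped at a maximum. Assuming
 * hypothetical Config values of initial = 30, rate = 1 and max = 60 for the
 * goal buffer length:
 *
 *   // at currentTime 0:  Math.min(30 + 0 * 1, 60)  -> 30 seconds
 *   // at currentTime 15: Math.min(30 + 15 * 1, 60) -> 45 seconds
 *   // at currentTime 60: Math.min(30 + 60 * 1, 60) -> 60 seconds (clamped)
 */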
23998
23999 _proto.bufferHighWaterLine = function bufferHighWaterLine() {
24000 return Config.BUFFER_HIGH_WATER_LINE;
24001 };
24002
24003 return MasterPlaylistController;
24004}(videojs.EventTarget);
24005
24006/**
24007 * Returns a function that acts as the Enable/disable playlist function.
24008 *
24009 * @param {PlaylistLoader} loader - The master playlist loader
24010 * @param {string} playlistID - id of the playlist
24011 * @param {Function} changePlaylistFn - A function to be called after a
24012 * playlist's enabled-state has been changed. Will NOT be called if a
24013 * playlist's enabled-state is unchanged
24014 * @param {boolean=} enable - Value to set the playlist enabled-state to;
24015 * if undefined, returns the current enabled-state for the playlist
24016 * @return {Function} Function for setting/getting enabled
24017 */
24018
24019var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
24020 return function (enable) {
24021 var playlist = loader.master.playlists[playlistID];
24022 var incompatible = isIncompatible(playlist);
24023 var currentlyEnabled = isEnabled(playlist);
24024
24025 if (typeof enable === 'undefined') {
24026 return currentlyEnabled;
24027 }
24028
24029 if (enable) {
24030 delete playlist.disabled;
24031 } else {
24032 playlist.disabled = true;
24033 }
24034
24035 if (enable !== currentlyEnabled && !incompatible) {
24036 // Ensure the outside world knows about our changes
24037 changePlaylistFn();
24038
24039 if (enable) {
24040 loader.trigger('renditionenabled');
24041 } else {
24042 loader.trigger('renditiondisabled');
24043 }
24044 }
24045
24046 return enable;
24047 };
24048};
24049/**
24050 * The representation object encapsulates the publicly visible information
24051 * in a media playlist along with a setter/getter-type function (enabled)
24052 * for changing the enabled-state of a particular playlist entry
24053 *
24054 * @class Representation
24055 */
24056
24057
24058var Representation = function Representation(vhsHandler, playlist, id) {
24059 var mpc = vhsHandler.masterPlaylistController_,
24060 smoothQualityChange = vhsHandler.options_.smoothQualityChange; // Get a reference to a bound version of the quality change function
24061
24062 var changeType = smoothQualityChange ? 'smooth' : 'fast';
24063 var qualityChangeFunction = mpc[changeType + "QualityChange_"].bind(mpc); // some playlist attributes are optional
24064
24065 if (playlist.attributes) {
24066 var resolution = playlist.attributes.RESOLUTION;
24067 this.width = resolution && resolution.width;
24068 this.height = resolution && resolution.height;
24069 this.bandwidth = playlist.attributes.BANDWIDTH;
24070 }
24071
24072 this.codecs = codecsForPlaylist(mpc.master(), playlist);
24073 this.playlist = playlist; // The id is simply the ordinality of the media playlist
24074 // within the master playlist
24075
24076 this.id = id; // Partially-apply the enableFunction to create a playlist-
24077 // specific variant
24078
24079 this.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);
24080};
24081/**
24082 * A mixin function that adds the `representations` api to an instance
24083 * of the VhsHandler class
24084 *
24085 * @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
24086 * representation API into
24087 */
24088
24089
24090var renditionSelectionMixin = function renditionSelectionMixin(vhsHandler) {
24091 // Add a single API-specific function to the VhsHandler instance
24092 vhsHandler.representations = function () {
24093 var master = vhsHandler.masterPlaylistController_.master();
24094 var playlists = isAudioOnly(master) ? vhsHandler.masterPlaylistController_.getAudioTrackPlaylists_() : master.playlists;
24095
24096 if (!playlists) {
24097 return [];
24098 }
24099
24100 return playlists.filter(function (media) {
24101 return !isIncompatible(media);
24102 }).map(function (e, i) {
24103 return new Representation(vhsHandler, e, e.id);
24104 });
24105 };
24106};
24107
24108/**
24109 * @file playback-watcher.js
24110 *
24111 * Playback starts, and now my watch begins. It shall not end until my death. I shall
24112 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
24113 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
24114 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
24115 * my life and honor to the Playback Watch, for this Player and all the Players to come.
24116 */
24117
24118var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
24119/**
24120 * @class PlaybackWatcher
24121 */
24122
24123var PlaybackWatcher = /*#__PURE__*/function () {
24124 /**
24125 * Represents a PlaybackWatcher object.
24126 *
24127 * @class
24128 * @param {Object} options an object that includes the tech and settings
24129 */
24130 function PlaybackWatcher(options) {
24131 var _this = this;
24132
24133 this.masterPlaylistController_ = options.masterPlaylistController;
24134 this.tech_ = options.tech;
24135 this.seekable = options.seekable;
24136 this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
24137 this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
24138 this.media = options.media;
24139 this.consecutiveUpdates = 0;
24140 this.lastRecordedTime = null;
24141 this.timer_ = null;
24142 this.checkCurrentTimeTimeout_ = null;
24143 this.logger_ = logger('PlaybackWatcher');
24144 this.logger_('initialize');
24145
24146 var playHandler = function playHandler() {
24147 return _this.monitorCurrentTime_();
24148 };
24149
24150 var canPlayHandler = function canPlayHandler() {
24151 return _this.monitorCurrentTime_();
24152 };
24153
24154 var waitingHandler = function waitingHandler() {
24155 return _this.techWaiting_();
24156 };
24157
24158 var cancelTimerHandler = function cancelTimerHandler() {
24159 return _this.cancelTimer_();
24160 };
24161
24162 var mpc = this.masterPlaylistController_;
24163 var loaderTypes = ['main', 'subtitle', 'audio'];
24164 var loaderChecks = {};
24165 loaderTypes.forEach(function (type) {
24166 loaderChecks[type] = {
24167 reset: function reset() {
24168 return _this.resetSegmentDownloads_(type);
24169 },
24170 updateend: function updateend() {
24171 return _this.checkSegmentDownloads_(type);
24172 }
24173 };
24174 mpc[type + "SegmentLoader_"].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer
24175 // isn't changing we want to reset. We cannot assume that the new rendition
24176 // will also be stalled, until after new appends.
24177
24178 mpc[type + "SegmentLoader_"].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.
24179 // This prevents one-segment playlists (single vtt or single segment content)
24180 // from being detected as stalling. As the buffer will not change in those cases, since
24181 // the buffer is the entire video duration.
24182
24183 _this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
24184 });
24185 /**
24186 * We check if a seek was into a gap through the following steps:
24187 * 1. We get a seeking event and we do not get a seeked event. This means that
24188 * a seek was attempted but not completed.
24189 * 2. We run `fixesBadSeeks_` on segment loader appends. This means that we already
24190 * removed everything from our buffer and appended a segment, and should be ready
24191 * to check for gaps.
24192 */
24193
24194 var setSeekingHandlers = function setSeekingHandlers(fn) {
24195 ['main', 'audio'].forEach(function (type) {
24196 mpc[type + "SegmentLoader_"][fn]('appended', _this.seekingAppendCheck_);
24197 });
24198 };
24199
24200 this.seekingAppendCheck_ = function () {
24201 if (_this.fixesBadSeeks_()) {
24202 _this.consecutiveUpdates = 0;
24203 _this.lastRecordedTime = _this.tech_.currentTime();
24204 setSeekingHandlers('off');
24205 }
24206 };
24207
24208 this.clearSeekingAppendCheck_ = function () {
24209 return setSeekingHandlers('off');
24210 };
24211
24212 this.watchForBadSeeking_ = function () {
24213 _this.clearSeekingAppendCheck_();
24214
24215 setSeekingHandlers('on');
24216 };
24217
24218 this.tech_.on('seeked', this.clearSeekingAppendCheck_);
24219 this.tech_.on('seeking', this.watchForBadSeeking_);
24220 this.tech_.on('waiting', waitingHandler);
24221 this.tech_.on(timerCancelEvents, cancelTimerHandler);
24222 this.tech_.on('canplay', canPlayHandler);
24223 /*
24224 An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
24225 is surfaced in one of two ways:
24226 1) The `waiting` event is fired before the player has buffered content, making it impossible
24227 to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
24228 we can check if playback is stalled due to a gap, and skip the gap if necessary.
24229 2) A source with a gap at the beginning of the stream is loaded programmatically while the player
24230 is in a playing state. To catch this case, it's important that our one-time play listener is setup
24231 even if the player is in a playing state
24232 */
24233
24234 this.tech_.one('play', playHandler); // Define the dispose function to clean up our events
24235
24236 this.dispose = function () {
24237 _this.clearSeekingAppendCheck_();
24238
24239 _this.logger_('dispose');
24240
24241 _this.tech_.off('waiting', waitingHandler);
24242
24243 _this.tech_.off(timerCancelEvents, cancelTimerHandler);
24244
24245 _this.tech_.off('canplay', canPlayHandler);
24246
24247 _this.tech_.off('play', playHandler);
24248
24249 _this.tech_.off('seeking', _this.watchForBadSeeking_);
24250
24251 _this.tech_.off('seeked', _this.clearSeekingAppendCheck_);
24252
24253 loaderTypes.forEach(function (type) {
24254 mpc[type + "SegmentLoader_"].off('appendsdone', loaderChecks[type].updateend);
24255 mpc[type + "SegmentLoader_"].off('playlistupdate', loaderChecks[type].reset);
24256
24257 _this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
24258 });
24259
24260 if (_this.checkCurrentTimeTimeout_) {
24261 window.clearTimeout(_this.checkCurrentTimeTimeout_);
24262 }
24263
24264 _this.cancelTimer_();
24265 };
24266 }
24267 /**
24268 * Periodically check current time to see if playback stopped
24269 *
24270 * @private
24271 */
24272
24273
24274 var _proto = PlaybackWatcher.prototype;
24275
24276 _proto.monitorCurrentTime_ = function monitorCurrentTime_() {
24277 this.checkCurrentTime_();
24278
24279 if (this.checkCurrentTimeTimeout_) {
24280 window.clearTimeout(this.checkCurrentTimeTimeout_);
24281 } // poll every 250ms: 250ms is what WebKit uses; Firefox uses 15ms; 42ms would be ~24fps
24282
24283
24284 this.checkCurrentTimeTimeout_ = window.setTimeout(this.monitorCurrentTime_.bind(this), 250);
24285 }
24286 /**
24287 * Reset stalled download stats for a specific type of loader
24288 *
24289 * @param {string} type
24290 * The segment loader type to check.
24291 *
24292 * @listens SegmentLoader#playlistupdate
24293 * @listens Tech#seeking
24294 * @listens Tech#seeked
24295 */
24296 ;
24297
24298 _proto.resetSegmentDownloads_ = function resetSegmentDownloads_(type) {
24299 var loader = this.masterPlaylistController_[type + "SegmentLoader_"];
24300
24301 if (this[type + "StalledDownloads_"] > 0) {
24302 this.logger_("resetting possible stalled download count for " + type + " loader");
24303 }
24304
24305 this[type + "StalledDownloads_"] = 0;
24306 this[type + "Buffered_"] = loader.buffered_();
24307 }
24308 /**
24309 * Checks on every segment `appendsdone` to see
24310 * if segment appends are making progress. If they are not,
24311 * and we are still downloading bytes, we blacklist the playlist.
24312 *
24313 * @param {string} type
24314 * The segment loader type to check.
24315 *
24316 * @listens SegmentLoader#appendsdone
24317 */
24318 ;
24319
24320 _proto.checkSegmentDownloads_ = function checkSegmentDownloads_(type) {
24321 var mpc = this.masterPlaylistController_;
24322 var loader = mpc[type + "SegmentLoader_"];
24323 var buffered = loader.buffered_();
24324 var isBufferedDifferent = isRangeDifferent(this[type + "Buffered_"], buffered);
24325 this[type + "Buffered_"] = buffered; // if another watcher is going to fix the issue or
24326 // the buffered value for this loader changed
24327 // appends are working
24328
24329 if (isBufferedDifferent) {
24330 this.resetSegmentDownloads_(type);
24331 return;
24332 }
24333
24334 this[type + "StalledDownloads_"]++;
24335 this.logger_("found #" + this[type + "StalledDownloads_"] + " " + type + " appends that did not increase buffer (possible stalled download)", {
24336 playlistId: loader.playlist_ && loader.playlist_.id,
24337 buffered: timeRangesToArray(buffered)
24338 }); // after 10 possibly stalled appends with no reset, exclude
24339
24340 if (this[type + "StalledDownloads_"] < 10) {
24341 return;
24342 }
24343
24344 this.logger_(type + " loader stalled download exclusion");
24345 this.resetSegmentDownloads_(type);
24346 this.tech_.trigger({
24347 type: 'usage',
24348 name: "vhs-" + type + "-download-exclusion"
24349 });
24350
24351 if (type === 'subtitle') {
24352 return;
24353 } // TODO: should we exclude audio tracks rather than main tracks
24354 // when type is audio?
24355
24356
24357 mpc.blacklistCurrentPlaylist({
24358 message: "Excessive " + type + " segment downloading detected."
24359 }, Infinity);
24360 }
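/*
 * Illustrative sketch (not part of the library source): the stalled-download
 * counter above. Each `appendsdone` that leaves the buffered ranges unchanged
 * increments the count; any change to the buffered ranges resets it.
 *
 *   // appends 1..9 with identical buffered ranges -> count climbs to 9
 *   // append 10 with identical buffered ranges    -> count 10, playlist is
 *   //   excluded via blacklistCurrentPlaylist(..., Infinity)
 */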
24361 /**
24362 * The purpose of this function is to emulate the "waiting" event on
24363 * browsers that do not emit it when they are waiting for more
24364 * data to continue playback
24365 *
24366 * @private
24367 */
24368 ;
24369
24370 _proto.checkCurrentTime_ = function checkCurrentTime_() {
24371 if (this.tech_.paused() || this.tech_.seeking()) {
24372 return;
24373 }
24374
24375 var currentTime = this.tech_.currentTime();
24376 var buffered = this.tech_.buffered();
24377
24378 if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
24379 // If current time is at the end of the final buffered region, then any playback
24380 // stall is most likely caused by buffering in a low bandwidth environment. The tech
24381 // should fire a `waiting` event in this scenario, but due to browser and tech
24382 // inconsistencies. Calling `techWaiting_` here allows us to simulate
24383 // responding to a native `waiting` event when the tech fails to emit one.
24384 return this.techWaiting_();
24385 }
24386
24387 if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
24388 this.consecutiveUpdates++;
24389 this.waiting_();
24390 } else if (currentTime === this.lastRecordedTime) {
24391 this.consecutiveUpdates++;
24392 } else {
24393 this.consecutiveUpdates = 0;
24394 this.lastRecordedTime = currentTime;
24395 }
24396 }
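/*
 * Illustrative walkthrough (not part of the library source): how
 * `consecutiveUpdates` detects a mid-buffer stall. Suppose currentTime stays
 * at 12.5 across the 250ms polls while more buffer remains ahead:
 *
 *   // polls 1..5: currentTime === lastRecordedTime -> consecutiveUpdates++
 *   // poll 6:     consecutiveUpdates >= 5          -> waiting_() is invoked
 *   // any poll where currentTime moves resets consecutiveUpdates to 0
 */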
24397 /**
24398 * Cancels any pending timers and resets the 'timeupdate' mechanism
24399 * designed to detect that we are stalled
24400 *
24401 * @private
24402 */
24403 ;
24404
24405 _proto.cancelTimer_ = function cancelTimer_() {
24406 this.consecutiveUpdates = 0;
24407
24408 if (this.timer_) {
24409 this.logger_('cancelTimer_');
24410 clearTimeout(this.timer_);
24411 }
24412
24413 this.timer_ = null;
24414 }
24415 /**
24416 * Fixes situations where there's a bad seek
24417 *
24418 * @return {boolean} whether an action was taken to fix the seek
24419 * @private
24420 */
24421 ;
24422
24423 _proto.fixesBadSeeks_ = function fixesBadSeeks_() {
24424 var seeking = this.tech_.seeking();
24425
24426 if (!seeking) {
24427 return false;
24428 } // TODO: It's possible that these seekable checks should be moved out of this function
24429 // and into a function that runs on seekablechange. It's also possible that we only need
24430 // afterSeekableWindow as the buffered check at the bottom is good enough to handle before
24431 // seekable range.
24432
24433
24434 var seekable = this.seekable();
24435 var currentTime = this.tech_.currentTime();
24436 var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
24437 var seekTo;
24438
24439 if (isAfterSeekableRange) {
24440 var seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)
24441
24442 seekTo = seekableEnd;
24443 }
24444
24445 if (this.beforeSeekableWindow_(seekable, currentTime)) {
24446 var seekableStart = seekable.start(0); // sync to the beginning of the live window
24447 // provide a buffer of .1 seconds to handle rounding/imprecise numbers
24448
24449 seekTo = seekableStart + ( // if the playlist is too short and the seekable range is an exact time (can
24450 // happen in live with a 3 segment playlist), then don't use a time delta
24451 seekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);
24452 }
24453
24454 if (typeof seekTo !== 'undefined') {
24455 this.logger_("Trying to seek outside of seekable at time " + currentTime + " with " + ("seekable range " + printableRange(seekable) + ". Seeking to ") + (seekTo + "."));
24456 this.tech_.setCurrentTime(seekTo);
24457 return true;
24458 }
24459
24460 var sourceUpdater = this.masterPlaylistController_.sourceUpdater_;
24461 var buffered = this.tech_.buffered();
24462 var audioBuffered = sourceUpdater.audioBuffer ? sourceUpdater.audioBuffered() : null;
24463 var videoBuffered = sourceUpdater.videoBuffer ? sourceUpdater.videoBuffered() : null; // verify that at least two segment durations have been
24464 // appended before checking for a gap.
24465
24466 var twoSegmentDurations = (this.media().targetDuration - TIME_FUDGE_FACTOR) * 2;
24467 var bufferedToCheck = [audioBuffered, videoBuffered];
24468
24469 for (var i = 0; i < bufferedToCheck.length; i++) {
24470 // skip null buffered
24471 if (!bufferedToCheck[i]) {
24472 continue;
24473 }
24474
24475 var timeAhead = timeAheadOf(bufferedToCheck[i], currentTime); // if we are less than two video/audio segment durations behind,
24476 // we haven't appended enough to call this a bad seek.
24477
24478 if (timeAhead < twoSegmentDurations) {
24479 return false;
24480 }
24481 }
24482
24483 var nextRange = findNextRange(buffered, currentTime); // we have appended enough content, but we don't have anything buffered
24484 // to seek over the gap
24485
24486 if (nextRange.length === 0) {
24487 return false;
24488 }
24489
24490 seekTo = nextRange.start(0) + SAFE_TIME_DELTA;
24491 this.logger_("Buffered region starts (" + nextRange.start(0) + ") " + (" just beyond seek point (" + currentTime + "). Seeking to " + seekTo + "."));
24492 this.tech_.setCurrentTime(seekTo);
24493 return true;
24494 }
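/*
 * Illustrative sketch (not part of the library source): the gap-skip at the
 * end of `fixesBadSeeks_`, with a hypothetical buffered range starting just
 * beyond the seek target (and enough content already appended):
 *
 *   // currentTime = 30, buffered = [[30.5, 40]], SAFE_TIME_DELTA = 0.1
 *   // nextRange.start(0) = 30.5
 *   // seekTo = 30.5 + 0.1 = 30.6 -> this.tech_.setCurrentTime(30.6)
 */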
24495 /**
24496 * Handler for situations when we determine the player is waiting.
24497 *
24498 * @private
24499 */
24500 ;
24501
24502 _proto.waiting_ = function waiting_() {
24503 if (this.techWaiting_()) {
24504 return;
24505 } // All tech waiting checks failed. Use last resort correction
24506
24507
24508 var currentTime = this.tech_.currentTime();
24509 var buffered = this.tech_.buffered();
24510 var currentRange = findRange(buffered, currentTime); // Sometimes the player can stall for unknown reasons within a contiguous buffered
24511 // region with no indication that anything is amiss (seen in Firefox). Seeking to
24512 // currentTime is usually enough to kickstart the player. This checks that the player
24513 // is currently within a buffered region before attempting a corrective seek.
24514 // Chrome does not appear to continue `timeupdate` events after a `waiting` event
24515 // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
24516 // make sure there is ~3 seconds of forward buffer before taking any corrective action
24517 // to avoid triggering an `unknownwaiting` event when the network is slow.
24518
24519 if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
24520 this.cancelTimer_();
24521 this.tech_.setCurrentTime(currentTime);
24522 this.logger_("Stopped at " + currentTime + " while inside a buffered region " + ("[" + currentRange.start(0) + " -> " + currentRange.end(0) + "]. Attempting to resume ") + 'playback by seeking to the current time.'); // unknown waiting corrections may be useful for monitoring QoS
24523
24524 this.tech_.trigger({
24525 type: 'usage',
24526 name: 'vhs-unknown-waiting'
24527 });
24528 this.tech_.trigger({
24529 type: 'usage',
24530 name: 'hls-unknown-waiting'
24531 });
24532 return;
24533 }
24534 }
24535 /**
24536 * Handler for situations when the tech fires a `waiting` event
24537 *
24538 * @return {boolean}
24539 * True if an action was taken (or none was needed) to correct the waiting. False if no
24540 * checks passed
24541 * @private
24542 */
24543 ;
24544
24545 _proto.techWaiting_ = function techWaiting_() {
24546 var seekable = this.seekable();
24547 var currentTime = this.tech_.currentTime();
24548
24549 if (this.tech_.seeking() || this.timer_ !== null) {
24550 // Tech is seeking or already waiting on another action, no action needed
24551 return true;
24552 }
24553
24554 if (this.beforeSeekableWindow_(seekable, currentTime)) {
24555 var livePoint = seekable.end(seekable.length - 1);
24556 this.logger_("Fell out of live window at time " + currentTime + ". Seeking to " + ("live point (seekable end) " + livePoint));
24557 this.cancelTimer_();
24558 this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoS
24559
24560 this.tech_.trigger({
24561 type: 'usage',
24562 name: 'vhs-live-resync'
24563 });
24564 this.tech_.trigger({
24565 type: 'usage',
24566 name: 'hls-live-resync'
24567 });
24568 return true;
24569 }
24570
24571 var sourceUpdater = this.tech_.vhs.masterPlaylistController_.sourceUpdater_;
24572 var buffered = this.tech_.buffered();
24573 var videoUnderflow = this.videoUnderflow_({
24574 audioBuffered: sourceUpdater.audioBuffered(),
24575 videoBuffered: sourceUpdater.videoBuffered(),
24576 currentTime: currentTime
24577 });
24578
24579 if (videoUnderflow) {
24580 // Even though the video underflowed and was stuck in a gap, the audio overplayed
24581 // the gap, leading currentTime into a buffered range. Seeking to currentTime
24582 // allows the video to catch up to the audio position without losing any audio
24583 // (only suffering ~3 seconds of frozen video and a pause in audio playback).
24584 this.cancelTimer_();
24585 this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoS
24586
24587 this.tech_.trigger({
24588 type: 'usage',
24589 name: 'vhs-video-underflow'
24590 });
24591 this.tech_.trigger({
24592 type: 'usage',
24593 name: 'hls-video-underflow'
24594 });
24595 return true;
24596 }
24597
24598 var nextRange = findNextRange(buffered, currentTime); // check for gap
24599
24600 if (nextRange.length > 0) {
24601 var difference = nextRange.start(0) - currentTime;
24602 this.logger_("Stopped at " + currentTime + ", setting timer for " + difference + ", seeking " + ("to " + nextRange.start(0)));
24603 this.cancelTimer_();
24604 this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
24605 return true;
24606 } // All checks failed. Returning false to indicate failure to correct waiting
24607
24608
24609 return false;
24610 };
24611
24612 _proto.afterSeekableWindow_ = function afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow) {
24613 if (allowSeeksWithinUnsafeLiveWindow === void 0) {
24614 allowSeeksWithinUnsafeLiveWindow = false;
24615 }
24616
24617 if (!seekable.length) {
24618 // we can't make a solid case if there's no seekable, default to false
24619 return false;
24620 }
24621
24622 var allowedEnd = seekable.end(seekable.length - 1) + SAFE_TIME_DELTA;
24623 var isLive = !playlist.endList;
24624
24625 if (isLive && allowSeeksWithinUnsafeLiveWindow) {
24626 allowedEnd = seekable.end(seekable.length - 1) + playlist.targetDuration * 3;
24627 }
24628
24629 if (currentTime > allowedEnd) {
24630 return true;
24631 }
24632
24633 return false;
24634 };
24635
24636 _proto.beforeSeekableWindow_ = function beforeSeekableWindow_(seekable, currentTime) {
24637 if (seekable.length && // currentTime can't fall before 0, and a seekable start of 0 identifies a VOD stream
24638 seekable.start(0) > 0 && currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
24639 return true;
24640 }
24641
24642 return false;
24643 };
24644
24645 _proto.videoUnderflow_ = function videoUnderflow_(_ref) {
24646 var videoBuffered = _ref.videoBuffered,
24647 audioBuffered = _ref.audioBuffered,
24648 currentTime = _ref.currentTime;
24649
24650 // audio only content will not have video underflow :)
24651 if (!videoBuffered) {
24652 return;
24653 }
24654
24655 var gap; // find a gap in demuxed content.
24656
24657 if (videoBuffered.length && audioBuffered.length) {
24658 // in Chrome, audio will continue to play for ~3s after we run out of video,
24659 // so we have to check that the video buffer actually had data in the recent
24660 // past.
24661 var lastVideoRange = findRange(videoBuffered, currentTime - 3);
24662 var videoRange = findRange(videoBuffered, currentTime);
24663 var audioRange = findRange(audioBuffered, currentTime);
24664
24665 if (audioRange.length && !videoRange.length && lastVideoRange.length) {
24666 gap = {
24667 start: lastVideoRange.end(0),
24668 end: audioRange.end(0)
24669 };
24670 } // find a gap in muxed content.
24671
24672 } else {
24673 var nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are
24674 // stuck in a gap due to video underflow.
24675
24676 if (!nextRange.length) {
24677 gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
24678 }
24679 }
24680
24681 if (gap) {
24682 this.logger_("Encountered a gap in video from " + gap.start + " to " + gap.end + ". " + ("Seeking to current time " + currentTime));
24683 return true;
24684 }
24685
24686 return false;
24687 }
24688 /**
24689 * Timer callback. If playback still has not proceeded, then we seek
24690 * to the start of the next buffered region.
24691 *
24692 * @private
24693 */
24694 ;
24695
24696 _proto.skipTheGap_ = function skipTheGap_(scheduledCurrentTime) {
24697 var buffered = this.tech_.buffered();
24698 var currentTime = this.tech_.currentTime();
24699 var nextRange = findNextRange(buffered, currentTime);
24700 this.cancelTimer_();
24701
24702 if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
24703 return;
24704 }
24705
24706 this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0)); // only seek if we still have not played
24707
24708 this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);
24709 this.tech_.trigger({
24710 type: 'usage',
24711 name: 'vhs-gap-skip'
24712 });
24713 this.tech_.trigger({
24714 type: 'usage',
24715 name: 'hls-gap-skip'
24716 });
24717 };
24718
24719 _proto.gapFromVideoUnderflow_ = function gapFromVideoUnderflow_(buffered, currentTime) {
24720 // At least in Chrome, if there is a gap in the video buffer, the audio will continue
24721 // playing for ~3 seconds after the video gap starts. This is done to account for
24722 // video buffer underflow/underrun (note that this is not done when there is audio
24723 // buffer underflow/underrun -- in that case the video will stop as soon as it
24724 // encounters the gap, as audio stalls are more noticeable/jarring to a user than
24725 // video stalls). The player's time will reflect the playthrough of audio, so the
24726 // time will appear as if we are in a buffered region, even if we are stuck in a
24727 // "gap."
24728 //
24729 // Example:
24730 // video buffer: 0 => 10.1, 10.2 => 20
24731 // audio buffer: 0 => 20
24732 // overall buffer: 0 => 10.1, 10.2 => 20
24733 // current time: 13
24734 //
24735 // Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
24736 // however, the audio continued playing until it reached ~3 seconds past the gap
24737 // (13 seconds), at which point it stops as well. Since current time is past the
24738 // gap, findNextRange will return no ranges.
24739 //
24740 // To check for this issue, we see if there is a gap that starts somewhere within
24741 // a 3 second range (3 seconds +/- 1 second) back from our current time.
24742 var gaps = findGaps(buffered);
24743
24744 for (var i = 0; i < gaps.length; i++) {
24745 var start = gaps.start(i);
24746 var end = gaps.end(i); // gap starts no more than 4 seconds back
24747
24748 if (currentTime - start < 4 && currentTime - start > 2) {
24749 return {
24750 start: start,
24751 end: end
24752 };
24753 }
24754 }
24755
24756 return null;
24757 };
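// Worked example (illustrative) of the heuristic above, using the buffers from
// the comment inside gapFromVideoUnderflow_: the video gap starts at 10.1 and
// currentTime froze at 13, so currentTime - start === 2.9, which falls inside
// the (2, 4) window, and { start: 10.1, end: 10.2 } is reported as the
// underflow gap.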
24758
24759 return PlaybackWatcher;
24760}();
24761
24762var defaultOptions = {
24763 errorInterval: 30,
24764 getSource: function getSource(next) {
24765 var tech = this.tech({
24766 IWillNotUseThisInPlugins: true
24767 });
24768 var sourceObj = tech.currentSource_ || this.currentSource();
24769 return next(sourceObj);
24770 }
24771};
24772/**
24773 * Main entry point for the plugin
24774 *
24775 * @param {Player} player a reference to a videojs Player instance
24776 * @param {Object} [options] an object with plugin options
24777 * @private
24778 */
24779
24780var initPlugin = function initPlugin(player, options) {
24781 var lastCalled = 0;
24782 var seekTo = 0;
24783 var localOptions = videojs.mergeOptions(defaultOptions, options);
24784 player.ready(function () {
24785 player.trigger({
24786 type: 'usage',
24787 name: 'vhs-error-reload-initialized'
24788 });
24789 player.trigger({
24790 type: 'usage',
24791 name: 'hls-error-reload-initialized'
24792 });
24793 });
24794 /**
24795 * Player modifications to perform that must wait until `loadedmetadata`
24796 * has been triggered
24797 *
24798 * @private
24799 */
24800
24801 var loadedMetadataHandler = function loadedMetadataHandler() {
24802 if (seekTo) {
24803 player.currentTime(seekTo);
24804 }
24805 };
24806 /**
24807 * Set the source on the player element, play, and seek if necessary
24808 *
24809 * @param {Object} sourceObj An object specifying the source url and mime-type to play
24810 * @private
24811 */
24812
24813
24814 var setSource = function setSource(sourceObj) {
24815 if (sourceObj === null || sourceObj === undefined) {
24816 return;
24817 }
24818
24819 seekTo = player.duration() !== Infinity && player.currentTime() || 0;
24820 player.one('loadedmetadata', loadedMetadataHandler);
24821 player.src(sourceObj);
24822 player.trigger({
24823 type: 'usage',
24824 name: 'vhs-error-reload'
24825 });
24826 player.trigger({
24827 type: 'usage',
24828 name: 'hls-error-reload'
24829 });
24830 player.play();
24831 };
24832 /**
24833 * Attempt to get a source from either the built-in getSource function
24834 * or a custom function provided via the options
24835 *
24836 * @private
24837 */
24838
24839
24840 var errorHandler = function errorHandler() {
24841 // Do not attempt to reload the source if a source-reload occurred before
24842 // 'errorInterval' time has elapsed since the last source-reload
24843 if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
24844 player.trigger({
24845 type: 'usage',
24846 name: 'vhs-error-reload-canceled'
24847 });
24848 player.trigger({
24849 type: 'usage',
24850 name: 'hls-error-reload-canceled'
24851 });
24852 return;
24853 }
24854
24855 if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
24856 videojs.log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
24857 return;
24858 }
24859
24860 lastCalled = Date.now();
24861 return localOptions.getSource.call(player, setSource);
24862 };
24863 /**
24864 * Unbind any event handlers that were bound by the plugin
24865 *
24866 * @private
24867 */
24868
24869
24870 var cleanupEvents = function cleanupEvents() {
24871 player.off('loadedmetadata', loadedMetadataHandler);
24872 player.off('error', errorHandler);
24873 player.off('dispose', cleanupEvents);
24874 };
24875 /**
24876 * Cleanup before re-initializing the plugin
24877 *
24878 * @param {Object} [newOptions] an object with plugin options
24879 * @private
24880 */
24881
24882
24883 var reinitPlugin = function reinitPlugin(newOptions) {
24884 cleanupEvents();
24885 initPlugin(player, newOptions);
24886 };
24887
24888 player.on('error', errorHandler);
24889 player.on('dispose', cleanupEvents); // Overwrite the plugin function so that we can correctly cleanup before
24890 // initializing the plugin
24891
24892 player.reloadSourceOnError = reinitPlugin;
24893};
24894/**
24895 * Reload the source when an error is detected as long as there
24896 * wasn't an error previously within the last 30 seconds
24897 *
24898 * @param {Object} [options] an object with plugin options
24899 */
24900
24901
24902var reloadSourceOnError = function reloadSourceOnError(options) {
24903 initPlugin(this, options);
24904};
24905
24906var version$4 = "2.11.1";
24907
24908var version$3 = "5.14.1";
24909
24910var version$2 = "0.19.2";
24911
24912var version$1 = "4.7.0";
24913
24914var version = "3.1.2";
24915
24916var Vhs = {
24917 PlaylistLoader: PlaylistLoader,
24918 Playlist: Playlist,
24919 utils: utils,
24920 STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
24921 INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
24922 lastBandwidthSelector: lastBandwidthSelector,
24923 movingAverageBandwidthSelector: movingAverageBandwidthSelector,
24924 comparePlaylistBandwidth: comparePlaylistBandwidth,
24925 comparePlaylistResolution: comparePlaylistResolution,
24926 xhr: xhrFactory()
24927}; // Define getter/setters for config properties
24928
24929Object.keys(Config).forEach(function (prop) {
24930 Object.defineProperty(Vhs, prop, {
24931 get: function get() {
24932 videojs.log.warn("using Vhs." + prop + " is UNSAFE be sure you know what you are doing");
24933 return Config[prop];
24934 },
24935 set: function set(value) {
24936 videojs.log.warn("using Vhs." + prop + " is UNSAFE be sure you know what you are doing");
24937
24938 if (typeof value !== 'number' || value < 0) {
24939 videojs.log.warn("value of Vhs." + prop + " must be greater than or equal to 0");
24940 return;
24941 }
24942
24943 Config[prop] = value;
24944 }
24945 });
24946});
24947var LOCAL_STORAGE_KEY = 'videojs-vhs';
24948/**
24949 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.
24950 *
24951 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
24952 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
24953 * @function handleVhsMediaChange
24954 */
24955
24956var handleVhsMediaChange = function handleVhsMediaChange(qualityLevels, playlistLoader) {
24957 var newPlaylist = playlistLoader.media();
24958 var selectedIndex = -1;
24959
24960 for (var i = 0; i < qualityLevels.length; i++) {
24961 if (qualityLevels[i].id === newPlaylist.id) {
24962 selectedIndex = i;
24963 break;
24964 }
24965 }
24966
24967 qualityLevels.selectedIndex_ = selectedIndex;
24968 qualityLevels.trigger({
24969 selectedIndex: selectedIndex,
24970 type: 'change'
24971 });
24972};
24973/**
24974 * Adds quality levels to list once playlist metadata is available
24975 *
24976 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
24977 * @param {Object} vhs Vhs object to listen to for media events.
24978 * @function handleVhsLoadedMetadata
24979 */
24980
24981
24982var handleVhsLoadedMetadata = function handleVhsLoadedMetadata(qualityLevels, vhs) {
24983 vhs.representations().forEach(function (rep) {
24984 qualityLevels.addQualityLevel(rep);
24985 });
24986 handleVhsMediaChange(qualityLevels, vhs.playlists);
24987}; // HLS is a source handler, not a tech. Make sure attempts to use it
24988// as one do not cause exceptions.
24989
24990
24991Vhs.canPlaySource = function () {
24992 return videojs.log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
24993};
24994
24995var emeKeySystems = function emeKeySystems(keySystemOptions, mainPlaylist, audioPlaylist) {
24996 if (!keySystemOptions) {
24997 return keySystemOptions;
24998 }
24999
25000 var codecs = {};
25001
25002 if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {
25003 codecs = unwrapCodecList(parseCodecs(mainPlaylist.attributes.CODECS));
25004 }
25005
25006 if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {
25007 codecs.audio = audioPlaylist.attributes.CODECS;
25008 }
25009
25010 var videoContentType = getMimeForCodec(codecs.video);
25011 var audioContentType = getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlist
25012
25013 var keySystemContentTypes = {};
25014
25015 for (var keySystem in keySystemOptions) {
25016 keySystemContentTypes[keySystem] = {};
25017
25018 if (audioContentType) {
25019 keySystemContentTypes[keySystem].audioContentType = audioContentType;
25020 }
25021
25022 if (videoContentType) {
25023 keySystemContentTypes[keySystem].videoContentType = videoContentType;
25024 } // Default to using the video playlist's PSSH even though they may be different, as
25025 // videojs-contrib-eme will only accept one in the options.
25026 //
25027 // This shouldn't be an issue for most cases as early initialization will handle all
25028 // unique PSSH values, and if any aren't handled, then encrypted events should have the
25029 // specific information needed for the unique license.
25030
25031
25032 if (mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {
25033 keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;
25034 } // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
25035 // so we need to prevent overwriting the URL entirely
25036
25037
25038 if (typeof keySystemOptions[keySystem] === 'string') {
25039 keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
25040 }
25041 }
25042
25043 return videojs.mergeOptions(keySystemOptions, keySystemContentTypes);
25044};
25045/**
25046 * @typedef {Object} KeySystems
25047 *
25048 * keySystems configuration for https://github.com/videojs/videojs-contrib-eme
25049 * Note: not all options are listed here.
25050 *
25051 * @property {Uint8Array} [pssh]
25052 * Protection System Specific Header
25053 */
25054
25055/**
25056 * Goes through all the playlists and collects an array of KeySystems options objects
25057 * containing each playlist's keySystems and their pssh values, if available.
25058 *
25059 * @param {Object[]} playlists
25060 * The playlists to look through
25061 * @param {string[]} keySystems
25062 * The keySystems to collect pssh values for
25063 *
25064 * @return {KeySystems[]}
25065 * An array of KeySystems objects containing available key systems and their
25066 * pssh values
25067 */
25068
25069
25070var getAllPsshKeySystemsOptions = function getAllPsshKeySystemsOptions(playlists, keySystems) {
25071 return playlists.reduce(function (keySystemsArr, playlist) {
25072 if (!playlist.contentProtection) {
25073 return keySystemsArr;
25074 }
25075
25076 var keySystemsOptions = keySystems.reduce(function (keySystemsObj, keySystem) {
25077 var keySystemOptions = playlist.contentProtection[keySystem];
25078
25079 if (keySystemOptions && keySystemOptions.pssh) {
25080 keySystemsObj[keySystem] = {
25081 pssh: keySystemOptions.pssh
25082 };
25083 }
25084
25085 return keySystemsObj;
25086 }, {});
25087
25088 if (Object.keys(keySystemsOptions).length) {
25089 keySystemsArr.push(keySystemsOptions);
25090 }
25091
25092 return keySystemsArr;
25093 }, []);
25094};
25095/**
25096 * Returns a promise that waits for the
25097 * [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
25098 *
25099 * Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
25100 * browsers.
25101 *
25102 * As per the above ticket, this is particularly important for Chrome, where, if
25103 * unencrypted content is appended before encrypted content and the key session has not
25104 * been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
25105 * during playback.
25106 *
25107 * @param {Object} player
25108 * The player instance
25109 * @param {Object[]} sourceKeySystems
25110 * The key systems options from the player source
25111 * @param {Object} [audioMedia]
25112 * The active audio media playlist (optional)
25113 * @param {Object[]} mainPlaylists
25114 * The playlists found on the master playlist object
25115 *
25116 * @return {Object}
25117 * Promise that resolves when the key session has been created
25118 */
25119
25120
25121var waitForKeySessionCreation = function waitForKeySessionCreation(_ref) {
25122 var player = _ref.player,
25123 sourceKeySystems = _ref.sourceKeySystems,
25124 audioMedia = _ref.audioMedia,
25125 mainPlaylists = _ref.mainPlaylists;
25126
25127 if (!player.eme.initializeMediaKeys) {
25128 return Promise.resolve();
25129 } // TODO should all audio PSSH values be initialized for DRM?
25130 //
25131 // All unique video rendition pssh values are initialized for DRM, but here only
25132 // the initial audio playlist license is initialized. In theory, an encrypted
25133 // event should be fired if the user switches to an alternative audio playlist
25134 // where a license is required, but this case hasn't yet been tested. In addition, there
25135 // may be many alternate audio playlists unlikely to be used (e.g., multiple different
25136 // languages).
25137
25138
25139 var playlists = audioMedia ? mainPlaylists.concat([audioMedia]) : mainPlaylists;
25140 var keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
25141 var initializationFinishedPromises = [];
25142 var keySessionCreatedPromises = []; // Since PSSH values are interpreted as initData, EME will dedupe any duplicates. The
25143 // only place where it should not be deduped is for ms-prefixed APIs, but the early
25144 // return for IE11 above, and the existence of modern EME APIs in addition to
25145 // ms-prefixed APIs on Edge should prevent this from being a concern.
25146 // initializeMediaKeys also won't use the webkit-prefixed APIs.
25147
25148 keySystemsOptionsArr.forEach(function (keySystemsOptions) {
25149 keySessionCreatedPromises.push(new Promise(function (resolve, reject) {
25150 player.tech_.one('keysessioncreated', resolve);
25151 }));
25152 initializationFinishedPromises.push(new Promise(function (resolve, reject) {
25153 player.eme.initializeMediaKeys({
25154 keySystems: keySystemsOptions
25155 }, function (err) {
25156 if (err) {
25157 reject(err);
25158 return;
25159 }
25160
25161 resolve();
25162 });
25163 }));
25164 }); // The reasons Promise.race is chosen over Promise.any:
25165 //
25166 // * Promise.any is only available in Safari 14+.
25167 // * None of these promises are expected to reject. If they do reject, it might be
25168 // better here for the race to surface the rejection, rather than mask it by using
25169 // Promise.any.
25170
25171 return Promise.race([// If a session was previously created, these will all finish resolving without
25172 // creating a new session, otherwise it will take until the end of all license
25173 // requests, which is why the key session check is used (to make setup much faster).
25174 Promise.all(initializationFinishedPromises), // Once a single session is created, the browser knows DRM will be used.
25175 Promise.race(keySessionCreatedPromises)]);
25176};
25177/**
25178 * If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
25179 * there are keySystems on the source, sets up source options to prepare the source for
25180 * eme.
25181 *
25182 * @param {Object} player
25183 * The player instance
25184 * @param {Object[]} sourceKeySystems
25185 * The key systems options from the player source
25186 * @param {Object} media
25187 * The active media playlist
25188 * @param {Object} [audioMedia]
25189 * The active audio media playlist (optional)
25190 *
25191 * @return {boolean}
25192 * Whether or not options were configured and EME is available
25193 */
25194
25195var setupEmeOptions = function setupEmeOptions(_ref2) {
25196 var player = _ref2.player,
25197 sourceKeySystems = _ref2.sourceKeySystems,
25198 media = _ref2.media,
25199 audioMedia = _ref2.audioMedia;
25200 var sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);
25201
25202 if (!sourceOptions) {
25203 return false;
25204 }
25205
25206 player.currentSource().keySystems = sourceOptions; // eme handles the rest of the setup, so if it is missing
25207 // do nothing.
25208
25209 if (sourceOptions && !player.eme) {
25210 videojs.log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
25211 return false;
25212 }
25213
25214 return true;
25215};
25216
25217var getVhsLocalStorage = function getVhsLocalStorage() {
25218 if (!window.localStorage) {
25219 return null;
25220 }
25221
25222 var storedObject = window.localStorage.getItem(LOCAL_STORAGE_KEY);
25223
25224 if (!storedObject) {
25225 return null;
25226 }
25227
25228 try {
25229 return JSON.parse(storedObject);
25230 } catch (e) {
25231 // someone may have tampered with the value
25232 return null;
25233 }
25234};
25235
25236var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
25237 if (!window.localStorage) {
25238 return false;
25239 }
25240
25241 var objectToStore = getVhsLocalStorage();
25242 objectToStore = objectToStore ? videojs.mergeOptions(objectToStore, options) : options;
25243
25244 try {
25245 window.localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
25246 } catch (e) {
25247 // Throws if storage is full (e.g., always on iOS 5+ Safari private mode, where
25248 // storage is set to 0).
25249 // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
25250 // No need to perform any operation.
25251 return false;
25252 }
25253
25254 return objectToStore;
25255};
25256/**
25257 * Parses VHS-supported media types from data URIs. See
25258 * https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
25259 * for information on data URIs.
25260 *
25261 * @param {string} dataUri
25262 * The data URI
25263 *
25264 * @return {string|Object}
25265 * The parsed object/string, or the original string if no supported media type
25266 * was found
25267 */
25268
25269
25270var expandDataUri = function expandDataUri(dataUri) {
25271 if (dataUri.toLowerCase().indexOf('data:application/vnd.videojs.vhs+json,') === 0) {
25272 return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
25273 } // no known case for this data URI, return the string as-is
25274
25275
25276 return dataUri;
25277};
25278/**
25279 * Whether the browser has built-in HLS support.
25280 */
25281
25282
25283Vhs.supportsNativeHls = function () {
25284 if (!document || !document.createElement) {
25285 return false;
25286 }
25287
25288 var video = document.createElement('video'); // native HLS is definitely not supported if HTML5 video isn't
25289
25290 if (!videojs.getTech('Html5').isSupported()) {
25291 return false;
25292 } // HLS manifests can go by many mime-types
25293
25294
25295 var canPlay = [// Apple sanctioned
25296 'application/vnd.apple.mpegurl', // Apple sanctioned for backwards compatibility
25297 'audio/mpegurl', // Very common
25298 'audio/x-mpegurl', // Very common
25299 'application/x-mpegurl', // Included for completeness
25300 'video/x-mpegurl', 'video/mpegurl', 'application/mpegurl'];
25301 return canPlay.some(function (canItPlay) {
25302 return /maybe|probably/i.test(video.canPlayType(canItPlay));
25303 });
25304}();
25305
25306Vhs.supportsNativeDash = function () {
25307 if (!document || !document.createElement || !videojs.getTech('Html5').isSupported()) {
25308 return false;
25309 }
25310
25311 return /maybe|probably/i.test(document.createElement('video').canPlayType('application/dash+xml'));
25312}();
25313
25314Vhs.supportsTypeNatively = function (type) {
25315 if (type === 'hls') {
25316 return Vhs.supportsNativeHls;
25317 }
25318
25319 if (type === 'dash') {
25320 return Vhs.supportsNativeDash;
25321 }
25322
25323 return false;
25324};
25325/**
25326 * HLS is a source handler, not a tech. Make sure attempts to use it
25327 * as one do not cause exceptions.
25328 */
25329
25330
25331Vhs.isSupported = function () {
25332 return videojs.log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
25333};
25334
25335var Component = videojs.getComponent('Component');
25336/**
25337 * The Vhs Handler object, where we orchestrate all of the parts
25338 * of HLS to interact with video.js
25339 *
25340 * @class VhsHandler
25341 * @extends videojs.Component
25342 * @param {Object} source the source object
25343 * @param {Tech} tech the parent tech object
25344 * @param {Object} options the handler options, both optional and required
25345 */
25346
25347var VhsHandler = /*#__PURE__*/function (_Component) {
25348 _inheritsLoose(VhsHandler, _Component);
25349
25350 function VhsHandler(source, tech, options) {
25351 var _this;
25352
25353 _this = _Component.call(this, tech, videojs.mergeOptions(options.hls, options.vhs)) || this;
25354
25355 if (options.hls && Object.keys(options.hls).length) {
25356 videojs.log.warn('Using hls options is deprecated. Use vhs instead.');
25357 } // if a tech level `initialBandwidth` option was passed
25358 // use that over the VHS level `bandwidth` option
25359
25360
25361 if (typeof options.initialBandwidth === 'number') {
25362 _this.options_.bandwidth = options.initialBandwidth;
25363 }
25364
25365 _this.logger_ = logger('VhsHandler'); // tech.player() is deprecated but setup a reference to HLS for
25366 // backwards-compatibility
25367
25368 if (tech.options_ && tech.options_.playerId) {
25369 var _player = videojs(tech.options_.playerId);
25370
25371 if (!_player.hasOwnProperty('hls')) {
25372 Object.defineProperty(_player, 'hls', {
25373 get: function get() {
25374 videojs.log.warn('player.hls is deprecated. Use player.tech().vhs instead.');
25375 tech.trigger({
25376 type: 'usage',
25377 name: 'hls-player-access'
25378 });
25379 return _assertThisInitialized(_this);
25380 },
25381 configurable: true
25382 });
25383 }
25384
25385 if (!_player.hasOwnProperty('vhs')) {
25386 Object.defineProperty(_player, 'vhs', {
25387 get: function get() {
25388 videojs.log.warn('player.vhs is deprecated. Use player.tech().vhs instead.');
25389 tech.trigger({
25390 type: 'usage',
25391 name: 'vhs-player-access'
25392 });
25393 return _assertThisInitialized(_this);
25394 },
25395 configurable: true
25396 });
25397 }
25398
25399 if (!_player.hasOwnProperty('dash')) {
25400 Object.defineProperty(_player, 'dash', {
25401 get: function get() {
25402 videojs.log.warn('player.dash is deprecated. Use player.tech().vhs instead.');
25403 return _assertThisInitialized(_this);
25404 },
25405 configurable: true
25406 });
25407 }
25408
25409 _this.player_ = _player;
25410 }
25411
25412 _this.tech_ = tech;
25413 _this.source_ = source;
25414 _this.stats = {};
25415 _this.ignoreNextSeekingEvent_ = false;
25416
25417 _this.setOptions_();
25418
25419 if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
25420 tech.overrideNativeAudioTracks(true);
25421 tech.overrideNativeVideoTracks(true);
25422 } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
25423 // overriding native HLS only works if audio tracks have been emulated
25424 // error early if we're misconfigured
25425 throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
25426 } // listen for fullscreenchange events for this player so that we
25427 // can adjust our quality selection quickly
25428
25429
25430 _this.on(document, ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
25431 var fullscreenElement = document.fullscreenElement || document.webkitFullscreenElement || document.mozFullScreenElement || document.msFullscreenElement;
25432
25433 if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
25434 _this.masterPlaylistController_.fastQualityChange_();
25435 } else {
25436 // When leaving fullscreen, since the in-page pixel dimensions should be smaller
25437 // than full screen, see if there should be a rendition switch down to preserve
25438 // bandwidth.
25439 _this.masterPlaylistController_.checkABR_();
25440 }
25441 });
25442
25443 _this.on(_this.tech_, 'seeking', function () {
25444 if (this.ignoreNextSeekingEvent_) {
25445 this.ignoreNextSeekingEvent_ = false;
25446 return;
25447 }
25448
25449 this.setCurrentTime(this.tech_.currentTime());
25450 });
25451
25452 _this.on(_this.tech_, 'error', function () {
25453 // verify that the error was real and we are loaded
25454 // enough to have mpc loaded.
25455 if (this.tech_.error() && this.masterPlaylistController_) {
25456 this.masterPlaylistController_.pauseLoading();
25457 }
25458 });
25459
25460 _this.on(_this.tech_, 'play', _this.play);
25461
25462 return _this;
25463 }
25464
25465 var _proto = VhsHandler.prototype;
25466
25467 _proto.setOptions_ = function setOptions_() {
25468 var _this2 = this;
25469
25470 // defaults
25471 this.options_.withCredentials = this.options_.withCredentials || false;
25472 this.options_.handleManifestRedirects = this.options_.handleManifestRedirects !== false;
25473 this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions !== false;
25474 this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
25475 this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
25476 this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
25477 this.options_.customTagParsers = this.options_.customTagParsers || [];
25478 this.options_.customTagMappers = this.options_.customTagMappers || [];
25479 this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;
25480
25481 if (typeof this.options_.blacklistDuration !== 'number') {
25482 this.options_.blacklistDuration = 5 * 60;
25483 }
25484
25485 if (typeof this.options_.bandwidth !== 'number') {
25486 if (this.options_.useBandwidthFromLocalStorage) {
25487 var storedObject = getVhsLocalStorage();
25488
25489 if (storedObject && storedObject.bandwidth) {
25490 this.options_.bandwidth = storedObject.bandwidth;
25491 this.tech_.trigger({
25492 type: 'usage',
25493 name: 'vhs-bandwidth-from-local-storage'
25494 });
25495 this.tech_.trigger({
25496 type: 'usage',
25497 name: 'hls-bandwidth-from-local-storage'
25498 });
25499 }
25500
25501 if (storedObject && storedObject.throughput) {
25502 this.options_.throughput = storedObject.throughput;
25503 this.tech_.trigger({
25504 type: 'usage',
25505 name: 'vhs-throughput-from-local-storage'
25506 });
25507 this.tech_.trigger({
25508 type: 'usage',
25509 name: 'hls-throughput-from-local-storage'
25510 });
25511 }
25512 }
25513 } // if bandwidth was not set by options or pulled from local storage, start playlist
25514 // selection at a reasonable bandwidth
25515
25516
25517 if (typeof this.options_.bandwidth !== 'number') {
25518 this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
25519 } // If the bandwidth number is unchanged from the initial setting
25520 // then this takes precedence over the enableLowInitialPlaylist option
25521
25522
25523 this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src
25524
25525 ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys', 'playlistSelector', 'initialPlaylistSelector', 'experimentalBufferBasedABR', 'liveRangeSafeTimeDelta', 'experimentalLLHLS', 'experimentalExactManifestTimings', 'experimentalLeastPixelDiffSelector'].forEach(function (option) {
25526 if (typeof _this2.source_[option] !== 'undefined') {
25527 _this2.options_[option] = _this2.source_[option];
25528 }
25529 });
25530 this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
25531 this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
25532 }
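// Example (illustrative): any option name in the list above can also be set
// per-source and takes precedence over handler-level defaults:
//
//   player.src({
//     src: 'https://example.com/master.m3u8',
//     type: 'application/x-mpegURL',
//     withCredentials: true,
//     bandwidth: 5000000
//   });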
25533 /**
25534 * called when player.src gets called, handle a new source
25535 *
25536 * @param {Object} src the source object to handle
25537 */
25538 ;
25539
25540 _proto.src = function src(_src, type) {
25541 var _this3 = this;
25542
25543 // do nothing if the src is falsy
25544 if (!_src) {
25545 return;
25546 }
25547
25548 this.setOptions_(); // add master playlist controller options
25549
25550 this.options_.src = expandDataUri(this.source_.src);
25551 this.options_.tech = this.tech_;
25552 this.options_.externVhs = Vhs;
25553 this.options_.sourceType = simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech
25554
25555 this.options_.seekTo = function (time) {
25556 _this3.tech_.setCurrentTime(time);
25557 };
25558
25559 if (this.options_.smoothQualityChange) {
25560 videojs.log.warn('smoothQualityChange is deprecated and will be removed in the next major version');
25561 }
25562
25563 this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
25564 var playbackWatcherOptions = videojs.mergeOptions({
25565 liveRangeSafeTimeDelta: SAFE_TIME_DELTA
25566 }, this.options_, {
25567 seekable: function seekable() {
25568 return _this3.seekable();
25569 },
25570 media: function media() {
25571 return _this3.masterPlaylistController_.media();
25572 },
25573 masterPlaylistController: this.masterPlaylistController_
25574 });
25575 this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
25576 this.masterPlaylistController_.on('error', function () {
25577 var player = videojs.players[_this3.tech_.options_.playerId];
25578 var error = _this3.masterPlaylistController_.error;
25579
25580 if (typeof error === 'object' && !error.code) {
25581 error.code = 3;
25582 } else if (typeof error === 'string') {
25583 error = {
25584 message: error,
25585 code: 3
25586 };
25587 }
25588
25589 player.error(error);
25590 });
25591 var defaultSelector = this.options_.experimentalBufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
25592 // compatibility with < v2
25593
25594 this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
25595 this.masterPlaylistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2
25596
25597 this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
25598 this.mediaSource = this.masterPlaylistController_.mediaSource; // Proxy assignment of some properties to the master playlist
25599 // controller. Using a custom property for backwards compatibility
25600 // with < v2
25601
25602 Object.defineProperties(this, {
25603 selectPlaylist: {
25604 get: function get() {
25605 return this.masterPlaylistController_.selectPlaylist;
25606 },
25607 set: function set(selectPlaylist) {
25608 this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
25609 }
25610 },
25611 throughput: {
25612 get: function get() {
25613 return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
25614 },
25615 set: function set(throughput) {
25616 this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
25617 // for the cumulative average
25618
25619 this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
25620 }
25621 },
25622 bandwidth: {
25623 get: function get() {
25624 return this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
25625 },
25626 set: function set(bandwidth) {
25627 this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
25628 // `count` is set to zero so that the current value of `rate` isn't included
25629 // in the cumulative average
25630
25631 this.masterPlaylistController_.mainSegmentLoader_.throughput = {
25632 rate: 0,
25633 count: 0
25634 };
25635 }
25636 },
25637
25638 /**
25639 * `systemBandwidth` is a combination of two serial processes bit-rates. The first
25640 * is the network bitrate provided by `bandwidth` and the second is the bitrate of
25641 * the entire process after that - decryption, transmuxing, and appending - provided
25642 * by `throughput`.
25643 *
25644 * Since the two process are serial, the overall system bandwidth is given by:
25645 * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
25646 */
25647 systemBandwidth: {
25648 get: function get() {
25649 var invBandwidth = 1 / (this.bandwidth || 1);
25650 var invThroughput;
25651
25652 if (this.throughput > 0) {
25653 invThroughput = 1 / this.throughput;
25654 } else {
25655 invThroughput = 0;
25656 }
25657
25658 var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
25659 return systemBitrate;
25660 },
25661 set: function set() {
25662 videojs.log.error('The "systemBandwidth" property is read-only');
25663 }
25664 }
25665 });
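// Worked example (illustrative) for the systemBandwidth getter above: with a
// measured network bandwidth of 10e6 bits/s and a post-download throughput of
// 40e6 bits/s, the serial combination is
// 1 / (1 / 10e6 + 1 / 40e6) = 8e6, i.e. roughly 8 Mbps overall.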
25666
25667 if (this.options_.bandwidth) {
25668 this.bandwidth = this.options_.bandwidth;
25669 }
25670
25671 if (this.options_.throughput) {
25672 this.throughput = this.options_.throughput;
25673 }
25674
25675 Object.defineProperties(this.stats, {
25676 bandwidth: {
25677 get: function get() {
25678 return _this3.bandwidth || 0;
25679 },
25680 enumerable: true
25681 },
25682 mediaRequests: {
25683 get: function get() {
25684 return _this3.masterPlaylistController_.mediaRequests_() || 0;
25685 },
25686 enumerable: true
25687 },
25688 mediaRequestsAborted: {
25689 get: function get() {
25690 return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
25691 },
25692 enumerable: true
25693 },
25694 mediaRequestsTimedout: {
25695 get: function get() {
25696 return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
25697 },
25698 enumerable: true
25699 },
25700 mediaRequestsErrored: {
25701 get: function get() {
25702 return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
25703 },
25704 enumerable: true
25705 },
25706 mediaTransferDuration: {
25707 get: function get() {
25708 return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
25709 },
25710 enumerable: true
25711 },
25712 mediaBytesTransferred: {
25713 get: function get() {
25714 return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
25715 },
25716 enumerable: true
25717 },
25718 mediaSecondsLoaded: {
25719 get: function get() {
25720 return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
25721 },
25722 enumerable: true
25723 },
25724 mediaAppends: {
25725 get: function get() {
25726 return _this3.masterPlaylistController_.mediaAppends_() || 0;
25727 },
25728 enumerable: true
25729 },
25730 mainAppendsToLoadedData: {
25731 get: function get() {
25732 return _this3.masterPlaylistController_.mainAppendsToLoadedData_() || 0;
25733 },
25734 enumerable: true
25735 },
25736 audioAppendsToLoadedData: {
25737 get: function get() {
25738 return _this3.masterPlaylistController_.audioAppendsToLoadedData_() || 0;
25739 },
25740 enumerable: true
25741 },
25742 appendsToLoadedData: {
25743 get: function get() {
25744 return _this3.masterPlaylistController_.appendsToLoadedData_() || 0;
25745 },
25746 enumerable: true
25747 },
25748 timeToLoadedData: {
25749 get: function get() {
25750 return _this3.masterPlaylistController_.timeToLoadedData_() || 0;
25751 },
25752 enumerable: true
25753 },
25754 buffered: {
25755 get: function get() {
25756 return timeRangesToArray(_this3.tech_.buffered());
25757 },
25758 enumerable: true
25759 },
25760 currentTime: {
25761 get: function get() {
25762 return _this3.tech_.currentTime();
25763 },
25764 enumerable: true
25765 },
25766 currentSource: {
25767 get: function get() {
25768 return _this3.tech_.currentSource_;
25769 },
25770 enumerable: true
25771 },
25772 currentTech: {
25773 get: function get() {
25774 return _this3.tech_.name_;
25775 },
25776 enumerable: true
25777 },
25778 duration: {
25779 get: function get() {
25780 return _this3.tech_.duration();
25781 },
25782 enumerable: true
25783 },
25784 master: {
25785 get: function get() {
25786 return _this3.playlists.master;
25787 },
25788 enumerable: true
25789 },
25790 playerDimensions: {
25791 get: function get() {
25792 return _this3.tech_.currentDimensions();
25793 },
25794 enumerable: true
25795 },
25796 seekable: {
25797 get: function get() {
25798 return timeRangesToArray(_this3.tech_.seekable());
25799 },
25800 enumerable: true
25801 },
25802 timestamp: {
25803 get: function get() {
25804 return Date.now();
25805 },
25806 enumerable: true
25807 },
25808 videoPlaybackQuality: {
25809 get: function get() {
25810 return _this3.tech_.getVideoPlaybackQuality();
25811 },
25812 enumerable: true
25813 }
25814 });
25815 this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
25816 this.tech_.on('bandwidthupdate', function () {
25817 if (_this3.options_.useBandwidthFromLocalStorage) {
25818 updateVhsLocalStorage({
25819 bandwidth: _this3.bandwidth,
25820 throughput: Math.round(_this3.throughput)
25821 });
25822 }
25823 });
25824 this.masterPlaylistController_.on('selectedinitialmedia', function () {
25825 // Add the manual rendition mix-in to VhsHandler
25826 renditionSelectionMixin(_this3);
25827 });
25828 this.masterPlaylistController_.sourceUpdater_.on('createdsourcebuffers', function () {
25829 _this3.setupEme_();
25830 }); // the bandwidth of the primary segment loader is our best
25831 // estimate of overall bandwidth
25832
25833 this.on(this.masterPlaylistController_, 'progress', function () {
25834 this.tech_.trigger('progress');
25835 }); // In the live case, we need to ignore the very first `seeking` event since
25836 // that will be the result of the seek-to-live behavior
25837
25838 this.on(this.masterPlaylistController_, 'firstplay', function () {
25839 this.ignoreNextSeekingEvent_ = true;
25840 });
25841 this.setupQualityLevels_(); // do nothing if the tech has been disposed already
25842 // this can occur if someone sets the src in player.ready(), for instance
25843
25844 if (!this.tech_.el()) {
25845 return;
25846 }
25847
25848 this.mediaSourceUrl_ = window.URL.createObjectURL(this.masterPlaylistController_.mediaSource);
25849 this.tech_.src(this.mediaSourceUrl_);
25850 }
25851 /**
25852 * If necessary and EME is available, sets up EME options and waits for key session
25853 * creation.
25854 *
25855 * This function also updates the source updater so that it can be used, as for some
25856 * browsers, EME must be configured before content is appended (if appending unencrypted
25857 * content before encrypted content).
25858 */
25859 ;
25860
25861 _proto.setupEme_ = function setupEme_() {
25862 var _this4 = this;
25863
25864 var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
25865 var didSetupEmeOptions = setupEmeOptions({
25866 player: this.player_,
25867 sourceKeySystems: this.source_.keySystems,
25868 media: this.playlists.media(),
25869 audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
25870 });
25871 this.player_.tech_.on('keystatuschange', function (e) {
25872 if (e.status === 'output-restricted') {
25873 _this4.masterPlaylistController_.blacklistCurrentPlaylist({
25874 playlist: _this4.masterPlaylistController_.media(),
25875 message: "DRM keystatus changed to " + e.status + ". Playlist will fail to play. Check for HDCP content.",
25876 blacklistDuration: Infinity
25877 });
25878 }
25879 }); // In IE11 this is too early to initialize media keys, and IE11 does not support
25880 // promises.
25881
25882 if (videojs.browser.IE_VERSION === 11 || !didSetupEmeOptions) {
25883 // If EME options were not set up, we've done all we could to initialize EME.
25884 this.masterPlaylistController_.sourceUpdater_.initializedEme();
25885 return;
25886 }
25887
25888 this.logger_('waiting for EME key session creation');
25889 waitForKeySessionCreation({
25890 player: this.player_,
25891 sourceKeySystems: this.source_.keySystems,
25892 audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),
25893 mainPlaylists: this.playlists.master.playlists
25894 }).then(function () {
25895 _this4.logger_('created EME key session');
25896
25897 _this4.masterPlaylistController_.sourceUpdater_.initializedEme();
25898 }).catch(function (err) {
25899 _this4.logger_('error while creating EME key session', err);
25900
25901 _this4.player_.error({
25902 message: 'Failed to initialize media keys for EME',
25903 code: 3
25904 });
25905 });
25906 }
25907 /**
25908 * Initializes the quality levels and sets listeners to update them.
25909 *
25910 * @method setupQualityLevels_
25911 * @private
25912 */
25913 ;
25914
25915 _proto.setupQualityLevels_ = function setupQualityLevels_() {
25916 var _this5 = this;
25917
25918 var player = videojs.players[this.tech_.options_.playerId]; // if there isn't a player or there isn't a qualityLevels plugin
25919 // or qualityLevels_ listeners have already been setup, do nothing.
25920
25921 if (!player || !player.qualityLevels || this.qualityLevels_) {
25922 return;
25923 }
25924
25925 this.qualityLevels_ = player.qualityLevels();
25926 this.masterPlaylistController_.on('selectedinitialmedia', function () {
25927 handleVhsLoadedMetadata(_this5.qualityLevels_, _this5);
25928 });
25929 this.playlists.on('mediachange', function () {
25930 handleVhsMediaChange(_this5.qualityLevels_, _this5.playlists);
25931 });
25932 }
25933 /**
25934 * return the version
25935 */
25936 ;
25937
25938 VhsHandler.version = function version$5() {
25939 return {
25940 '@videojs/http-streaming': version$4,
25941 'mux.js': version$3,
25942 'mpd-parser': version$2,
25943 'm3u8-parser': version$1,
25944 'aes-decrypter': version
25945 };
25946 }
25947 /**
25948 * return the version
25949 */
25950 ;
25951
25952 _proto.version = function version() {
25953 return this.constructor.version();
25954 };
25955
25956 _proto.canChangeType = function canChangeType() {
25957 return SourceUpdater.canChangeType();
25958 }
25959 /**
25960 * Begin playing the video.
25961 */
25962 ;
25963
25964 _proto.play = function play() {
25965 this.masterPlaylistController_.play();
25966 }
25967 /**
25968 * a wrapper around the function in MasterPlaylistController
25969 */
25970 ;
25971
25972 _proto.setCurrentTime = function setCurrentTime(currentTime) {
25973 this.masterPlaylistController_.setCurrentTime(currentTime);
25974 }
25975 /**
25976 * a wrapper around the function in MasterPlaylistController
25977 */
25978 ;
25979
25980 _proto.duration = function duration() {
25981 return this.masterPlaylistController_.duration();
25982 }
25983 /**
25984 * a wrapper around the function in MasterPlaylistController
25985 */
25986 ;
25987
25988 _proto.seekable = function seekable() {
25989 return this.masterPlaylistController_.seekable();
25990 }
25991 /**
25992 * Abort all outstanding work and cleanup.
25993 */
25994 ;
25995
25996 _proto.dispose = function dispose() {
25997 if (this.playbackWatcher_) {
25998 this.playbackWatcher_.dispose();
25999 }
26000
26001 if (this.masterPlaylistController_) {
26002 this.masterPlaylistController_.dispose();
26003 }
26004
26005 if (this.qualityLevels_) {
26006 this.qualityLevels_.dispose();
26007 }
26008
26009 if (this.player_) {
26010 delete this.player_.vhs;
26011 delete this.player_.dash;
26012 delete this.player_.hls;
26013 }
26014
26015 if (this.tech_ && this.tech_.vhs) {
26016 delete this.tech_.vhs;
26017 } // don't check this.tech_.hls as it will log a deprecation warning
26018
26019
26020 if (this.tech_) {
26021 delete this.tech_.hls;
26022 }
26023
26024 if (this.mediaSourceUrl_ && window.URL.revokeObjectURL) {
26025 window.URL.revokeObjectURL(this.mediaSourceUrl_);
26026 this.mediaSourceUrl_ = null;
26027 }
26028
26029 _Component.prototype.dispose.call(this);
26030 };
26031
26032 _proto.convertToProgramTime = function convertToProgramTime(time, callback) {
26033 return getProgramTime({
26034 playlist: this.masterPlaylistController_.media(),
26035 time: time,
26036 callback: callback
26037 });
26038 } // the player must be playing before calling this
26039 ;
26040
26041 _proto.seekToProgramTime = function seekToProgramTime$1(programTime, callback, pauseAfterSeek, retryCount) {
26042 if (pauseAfterSeek === void 0) {
26043 pauseAfterSeek = true;
26044 }
26045
26046 if (retryCount === void 0) {
26047 retryCount = 2;
26048 }
26049
26050 return seekToProgramTime({
26051 programTime: programTime,
26052 playlist: this.masterPlaylistController_.media(),
26053 retryCount: retryCount,
26054 pauseAfterSeek: pauseAfterSeek,
26055 seekTo: this.options_.seekTo,
26056 tech: this.options_.tech,
26057 callback: callback
26058 });
26059 };
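// Example (illustrative): both program-time helpers operate on the currently
// active media playlist; the callback signature shown is an assumption based
// on the wrapped getProgramTime/seekToProgramTime utilities:
//
//   player.play(); // seekToProgramTime requires playback to have started
//   var vhs = player.tech({ IWillNotUseThisInPlugins: true }).vhs;
//   vhs.seekToProgramTime('2021-06-01T00:00:30.000Z', function(err, newTime) {
//     // err is set when the program time cannot be mapped into the stream
//   });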
26060
26061 return VhsHandler;
26062}(Component);
26063/**
26064 * The Source Handler object, which informs video.js what additional
26065 * MIME types are supported and sets up playback. It is registered
26066 * automatically to the appropriate tech based on the capabilities of
26067 * the browser it is running in. It is not necessary to use or modify
26068 * this object in normal usage.
26069 */
26070
26071
26072var VhsSourceHandler = {
26073 name: 'videojs-http-streaming',
26074 VERSION: version$4,
26075 canHandleSource: function canHandleSource(srcObj, options) {
26076 if (options === void 0) {
26077 options = {};
26078 }
26079
26080 var localOptions = videojs.mergeOptions(videojs.options, options);
26081 return VhsSourceHandler.canPlayType(srcObj.type, localOptions);
26082 },
26083 handleSource: function handleSource(source, tech, options) {
26084 if (options === void 0) {
26085 options = {};
26086 }
26087
26088 var localOptions = videojs.mergeOptions(videojs.options, options);
26089 tech.vhs = new VhsHandler(source, tech, localOptions);
26090
26091 if (!videojs.hasOwnProperty('hls')) {
26092 Object.defineProperty(tech, 'hls', {
26093 get: function get() {
26094 videojs.log.warn('player.tech().hls is deprecated. Use player.tech().vhs instead.');
26095 return tech.vhs;
26096 },
26097 configurable: true
26098 });
26099 }
26100
26101 tech.vhs.xhr = xhrFactory();
26102 tech.vhs.src(source.src, source.type);
26103 return tech.vhs;
26104 },
26105 canPlayType: function canPlayType(type, options) {
26106 if (options === void 0) {
26107 options = {};
26108 }
26109
26110 var _videojs$mergeOptions = videojs.mergeOptions(videojs.options, options),
26111 _videojs$mergeOptions2 = _videojs$mergeOptions.vhs.overrideNative,
26112 overrideNative = _videojs$mergeOptions2 === void 0 ? !videojs.browser.IS_ANY_SAFARI : _videojs$mergeOptions2;
26113
26114 var supportedType = simpleTypeFromSourceType(type);
26115 var canUseMsePlayback = supportedType && (!Vhs.supportsTypeNatively(supportedType) || overrideNative);
26116 return canUseMsePlayback ? 'maybe' : '';
26117 }
26118};
26119/**
26120 * Check to see if the native MediaSource object exists and supports
26121 * an MP4 container with both H.264 video and AAC-LC audio.
26122 *
26123 * @return {boolean} if native media sources are supported
26124 */
26125
26126var supportsNativeMediaSources = function supportsNativeMediaSources() {
26127 return browserSupportsCodec('avc1.4d400d,mp4a.40.2');
26128}; // register source handlers with the appropriate techs
26129
26130
26131if (supportsNativeMediaSources()) {
26132 videojs.getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
26133}
26134
26135videojs.VhsHandler = VhsHandler;
26136Object.defineProperty(videojs, 'HlsHandler', {
26137 get: function get() {
26138 videojs.log.warn('videojs.HlsHandler is deprecated. Use videojs.VhsHandler instead.');
26139 return VhsHandler;
26140 },
26141 configurable: true
26142});
26143videojs.VhsSourceHandler = VhsSourceHandler;
26144Object.defineProperty(videojs, 'HlsSourceHandler', {
26145 get: function get() {
26146 videojs.log.warn('videojs.HlsSourceHandler is deprecated. ' + 'Use videojs.VhsSourceHandler instead.');
26147 return VhsSourceHandler;
26148 },
26149 configurable: true
26150});
26151videojs.Vhs = Vhs;
26152Object.defineProperty(videojs, 'Hls', {
26153 get: function get() {
26154 videojs.log.warn('videojs.Hls is deprecated. Use videojs.Vhs instead.');
26155 return Vhs;
26156 },
26157 configurable: true
26158});
26159
26160if (!videojs.use) {
26161 videojs.registerComponent('Hls', Vhs);
26162 videojs.registerComponent('Vhs', Vhs);
26163}
26164
26165videojs.options.vhs = videojs.options.vhs || {};
26166videojs.options.hls = videojs.options.hls || {};
26167
26168if (!videojs.getPlugin || !videojs.getPlugin('reloadSourceOnError')) {
26169 var registerPlugin = videojs.registerPlugin || videojs.plugin;
26170 registerPlugin('reloadSourceOnError', reloadSourceOnError);
26171}
26172
26173export { LOCAL_STORAGE_KEY, Vhs, VhsHandler, VhsSourceHandler, emeKeySystems, expandDataUri, getAllPsshKeySystemsOptions, setupEmeOptions, waitForKeySessionCreation };