/* Extracted from the UNPKG raw view of @videojs/http-streaming (~881 kB bundle). */
1/*! @name @videojs/http-streaming @version 2.11.1 @license Apache-2.0 */
2'use strict';
3
4Object.defineProperty(exports, '__esModule', { value: true });
5
6var _assertThisInitialized = require('@babel/runtime/helpers/assertThisInitialized');
7var _inheritsLoose = require('@babel/runtime/helpers/inheritsLoose');
8var document = require('global/document');
9var window = require('global/window');
10var _resolveUrl = require('@videojs/vhs-utils/cjs/resolve-url.js');
11var videojs = require('video.js');
12var _extends = require('@babel/runtime/helpers/extends');
13var m3u8Parser = require('m3u8-parser');
14var codecs_js = require('@videojs/vhs-utils/cjs/codecs.js');
15var mediaTypes_js = require('@videojs/vhs-utils/cjs/media-types.js');
16var mpdParser = require('mpd-parser');
17var parseSidx = require('mux.js/lib/tools/parse-sidx');
18var id3Helpers = require('@videojs/vhs-utils/cjs/id3-helpers');
19var containers = require('@videojs/vhs-utils/cjs/containers');
20var byteHelpers = require('@videojs/vhs-utils/cjs/byte-helpers');
21var clock = require('mux.js/lib/utils/clock');
22
function _interopDefaultLegacy (e) {
  // Objects that already look like ES modules (carry a `default` export) are
  // passed through; every other value is wrapped so that consumers can always
  // read `.default`.
  if (e && typeof e === 'object' && 'default' in e) {
    return e;
  }

  return { 'default': e };
}
24
25var _assertThisInitialized__default = /*#__PURE__*/_interopDefaultLegacy(_assertThisInitialized);
26var _inheritsLoose__default = /*#__PURE__*/_interopDefaultLegacy(_inheritsLoose);
27var document__default = /*#__PURE__*/_interopDefaultLegacy(document);
28var window__default = /*#__PURE__*/_interopDefaultLegacy(window);
29var _resolveUrl__default = /*#__PURE__*/_interopDefaultLegacy(_resolveUrl);
30var videojs__default = /*#__PURE__*/_interopDefaultLegacy(videojs);
31var _extends__default = /*#__PURE__*/_interopDefaultLegacy(_extends);
32var parseSidx__default = /*#__PURE__*/_interopDefaultLegacy(parseSidx);
33
/**
 * @file resolve-url.js - Handling how URLs are resolved and manipulated
 */
// Local alias for the vhs-utils URL resolver's default export.
var resolveUrl = _resolveUrl__default["default"];
/**
 * Checks whether an xhr request was redirected and returns the correct url
 * depending on the `handleManifestRedirects` option.
 *
 * @api private
 *
 * @param {boolean} handleManifestRedirect - whether redirect urls should be honored
 * @param {string} url - the url that was originally requested
 * @param {XMLHttpRequest} req - the xhr request result
 *
 * @return {string} the redirected url when enabled and present, otherwise `url`
 */

var resolveManifestRedirect = function resolveManifestRedirect(handleManifestRedirect, url, req) {
  // To understand how responseURL is set and generated:
  // - https://fetch.spec.whatwg.org/#concept-response-url
  // - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
  var wasRedirected = req && req.responseURL && url !== req.responseURL;

  if (handleManifestRedirect && wasRedirected) {
    return req.responseURL;
  }

  return url;
};
60
// Build a per-source debug logger prefixed with "VHS:"; when video.js exposes
// no `log.debug`, return a no-op so call sites stay unconditional.
var logger = function logger(source) {
  var log = videojs__default["default"].log;

  if (!log.debug) {
    return function () {};
  }

  return log.debug.bind(videojs__default["default"], 'VHS:', source + " >");
};
68
/**
 * ranges
 *
 * Utilities for working with TimeRanges.
 *
 */

// Comparisons between time values such as current time and the end of the
// buffered range can be misleading because of precision differences or when
// the current media has poorly aligned audio and video, which can cause values
// to be slightly off from what you would expect. This value (one frame at
// 30fps) is what we consider to be safe to use in such comparisons to account
// for these scenarios.
var TIME_FUDGE_FACTOR = 1 / 30;

// A wider (3x) tolerance used where more slop is acceptable.
var SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
83
// Return a new TimeRanges containing only the [start, end] pairs of
// `timeRanges` for which `predicate(start, end)` holds.
var filterRanges = function filterRanges(timeRanges, predicate) {
  var results = [];

  if (timeRanges && timeRanges.length) {
    // Search for ranges that match the predicate.
    for (var i = 0; i < timeRanges.length; i++) {
      var start = timeRanges.start(i);
      var end = timeRanges.end(i);

      if (predicate(start, end)) {
        results.push([start, end]);
      }
    }
  }

  return videojs__default["default"].createTimeRanges(results);
};
/**
 * Attempts to find the buffered TimeRange that contains the specified
 * time.
 *
 * @param {TimeRanges} buffered - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @return {TimeRanges} a new TimeRanges object
 */


var findRange = function findRange(buffered, time) {
  // A range matches when `time` lies within [start - delta, end + delta].
  return filterRanges(buffered, function (start, end) {
    return time >= start - SAFE_TIME_DELTA && time <= end + SAFE_TIME_DELTA;
  });
};
/**
 * Returns the TimeRanges that begin later than the specified time.
 *
 * @param {TimeRanges} timeRanges - the TimeRanges object to query
 * @param {number} time - the time to filter on.
 * @return {TimeRanges} a new TimeRanges object.
 */

var findNextRange = function findNextRange(timeRanges, time) {
  // Keep only ranges whose start is at or beyond `time`, with fudge applied.
  return filterRanges(timeRanges, function (start) {
    return time <= start - TIME_FUDGE_FACTOR;
  });
};
/**
 * Returns gaps within a list of TimeRanges
 *
 * @param {TimeRanges} buffered - the TimeRanges object
 * @return {TimeRanges} a TimeRanges object of gaps
 */

var findGaps = function findGaps(buffered) {
  // Fewer than two ranges means there is nothing between them.
  if (buffered.length < 2) {
    return videojs__default["default"].createTimeRanges();
  }

  var ranges = [];

  for (var i = 1; i < buffered.length; i++) {
    // Each gap spans from the end of one range to the start of the next.
    ranges.push([buffered.end(i - 1), buffered.start(i)]);
  }

  return videojs__default["default"].createTimeRanges(ranges);
};
/**
 * Calculate the intersection of two TimeRanges.
 *
 * Sweep-line approach: collect every start/end boundary from both buffers,
 * sort them, and track how many ranges currently cover the sweep point
 * (`arity`). Stretches covered by two ranges at once belong to the
 * intersection.
 *
 * @param {TimeRanges} bufferA
 * @param {TimeRanges} bufferB
 * @return {TimeRanges} The intersection of `bufferA` with `bufferB`
 */

var bufferIntersection = function bufferIntersection(bufferA, bufferB) {
  var start = null;   // start of the overlap currently being tracked
  var end = null;     // end of the overlap currently being tracked
  var arity = 0;      // how many ranges cover the current sweep point
  var extents = [];   // all boundary points from both buffers
  var ranges = [];    // completed [start, end] overlaps

  // With either buffer missing or empty there is nothing to intersect.
  if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
    return videojs__default["default"].createTimeRange();
  } // Handle the case where we have both buffers and create an
  // intersection of the two


  var count = bufferA.length; // A) Gather up all start and end times

  while (count--) {
    extents.push({
      time: bufferA.start(count),
      type: 'start'
    });
    extents.push({
      time: bufferA.end(count),
      type: 'end'
    });
  }

  count = bufferB.length;

  while (count--) {
    extents.push({
      time: bufferB.start(count),
      type: 'start'
    });
    extents.push({
      time: bufferB.end(count),
      type: 'end'
    });
  } // B) Sort them by time


  extents.sort(function (a, b) {
    return a.time - b.time;
  }); // C) Go along one by one incrementing arity for start and decrementing
  // arity for ends

  for (count = 0; count < extents.length; count++) {
    if (extents[count].type === 'start') {
      arity++; // D) If arity is ever incremented to 2 we are entering an
      // overlapping range

      if (arity === 2) {
        start = extents[count].time;
      }
    } else if (extents[count].type === 'end') {
      arity--; // E) If arity is ever decremented to 1 we are leaving an
      // overlapping range

      if (arity === 1) {
        end = extents[count].time;
      }
    } // F) Record overlapping ranges once both endpoints are known


    if (start !== null && end !== null) {
      ranges.push([start, end]);
      start = null;
      end = null;
    }
  }

  return videojs__default["default"].createTimeRanges(ranges);
};
/**
 * Gets a human readable string for a TimeRange
 *
 * @param {TimeRange} range
 * @return {string} a human readable string
 */

var printableRange = function printableRange(range) {
  // Missing or empty ranges render as an empty string.
  if (!range || !range.length) {
    return '';
  }

  var pieces = [];

  for (var i = 0; i < range.length; i++) {
    pieces.push(range.start(i) + ' => ' + range.end(i));
  }

  return pieces.join(', ');
};
/**
 * Calculates the amount of time left in seconds until the player hits the end
 * of the buffer and causes a rebuffer
 *
 * @param {TimeRange} buffered
 *        The state of the buffer
 * @param {number} currentTime
 *        The current time of the player
 * @param {number} playbackRate
 *        The current playback rate of the player. Defaults to 1.
 * @return {number}
 *         Time until the player has to start rebuffering in seconds.
 * @function timeUntilRebuffer
 */

var timeUntilRebuffer = function timeUntilRebuffer(buffered, currentTime, playbackRate) {
  if (playbackRate === void 0) {
    playbackRate = 1;
  }

  // With nothing buffered, treat the buffer edge as time 0.
  var bufferedEnd = 0;

  if (buffered.length) {
    bufferedEnd = buffered.end(buffered.length - 1);
  }

  return (bufferedEnd - currentTime) / playbackRate;
};
/**
 * Converts a TimeRanges object into an array representation
 *
 * @param {TimeRanges} timeRanges
 * @return {Array} array of {start, end} objects, one per range
 */

var timeRangesToArray = function timeRangesToArray(timeRanges) {
  var list = [];

  for (var i = 0; i < timeRanges.length; i++) {
    list.push({
      start: timeRanges.start(i),
      end: timeRanges.end(i)
    });
  }

  return list;
};
/**
 * Determines if two time range objects are different.
 *
 * @param {TimeRange} a
 *        the first time range object to check
 *
 * @param {TimeRange} b
 *        the second time range object to check
 *
 * @return {Boolean}
 *         Whether the time range objects differ
 */

var isRangeDifferent = function isRangeDifferent(a, b) {
  // Identical references are trivially the same.
  if (a === b) {
    return false;
  }

  // Exactly one of the two is missing.
  if (!a && b || !b && a) {
    return true;
  }

  // Differing range counts means different.
  if (a.length !== b.length) {
    return true;
  }

  // Compare each start/end pair.
  for (var i = 0; i < a.length; i++) {
    if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
      return true;
    }
  }

  // Same length and every pair matches: same time range.
  return false;
};
// Return the end time of the final range in `a`, or undefined when `a` is
// missing, empty, or not TimeRanges-shaped (no `end` method).
var lastBufferedEnd = function lastBufferedEnd(a) {
  if (!a || !a.length || !a.end) {
    return;
  }

  return a.end(a.length - 1);
};
/**
 * A utility function to add up the amount of time in a timeRange
 * after a specified startTime.
 * ie:[[0, 10], [20, 40], [50, 60]] with a startTime 0
 *     would return 40 as there are 40s seconds after 0 in the timeRange
 *
 * @param {TimeRange} range
 *        The range to check against
 * @param {number} startTime
 *        The time in the time range that you should start counting from
 *
 * @return {number}
 *          The number of seconds in the buffer passed the specified time.
 */

var timeAheadOf = function timeAheadOf(range, startTime) {
  var total = 0;

  if (!range || !range.length) {
    return total;
  }

  for (var i = 0; i < range.length; i++) {
    var start = range.start(i);
    var end = range.end(i);

    if (startTime > end) {
      // Range lies entirely before startTime; contributes nothing.
      continue;
    }

    if (startTime > start && startTime <= end) {
      // startTime lands inside this range; count only the remainder.
      total += end - startTime;
      continue;
    }

    // Range lies entirely after startTime; count all of it.
    total += end - start;
  }

  return total;
};
380
/**
 * @file playlist.js
 *
 * Playlist related utilities.
 */
// Local alias for video.js's TimeRange factory.
var createTimeRange = videojs__default["default"].createTimeRange;
/**
 * A function to get a combined list of parts and segments with durations
 * and indexes.
 *
 * @param {Playlist} playlist the playlist to get the list for.
 *
 * @return {Array} The part/segment list.
 */

var getPartsAndSegments = function getPartsAndSegments(playlist) {
  var list = [];
  var segments = playlist.segments || [];

  segments.forEach(function (segment, si) {
    if (!segment.parts) {
      // Whole segments carry a null partIndex/part.
      list.push({
        duration: segment.duration,
        segmentIndex: si,
        partIndex: null,
        segment: segment,
        part: null
      });
      return;
    }

    // LL-HLS segments expand into one entry per part.
    segment.parts.forEach(function (part, pi) {
      list.push({
        duration: part.duration,
        segmentIndex: si,
        partIndex: pi,
        part: part,
        segment: segment
      });
    });
  });

  return list;
};
// Return the `parts` array of the final segment in `media`, or an empty
// array when there is no last segment or it has no parts.
var getLastParts = function getLastParts(media) {
  var segments = media.segments;
  var lastSegment = segments && segments.length && segments[segments.length - 1];

  return lastSegment && lastSegment.parts || [];
};
// Count the known parts of a preload segment: loaded `parts` plus any
// announced-but-unloaded PART preload hints. Returns undefined when there is
// no preload segment at all.
var getKnownPartCount = function getKnownPartCount(_ref) {
  var preloadSegment = _ref.preloadSegment;

  if (!preloadSegment) {
    return;
  }

  var parts = preloadSegment.parts;
  var preloadHints = preloadSegment.preloadHints;
  var partCount = 0;

  // Each PART hint represents one more part that is known to exist.
  (preloadHints || []).forEach(function (hint) {
    if (hint.type === 'PART') {
      partCount++;
    }
  });

  if (parts && parts.length) {
    partCount += parts.length;
  }

  return partCount;
};
/**
 * Get the number of seconds to delay from the end of a
 * live playlist.
 *
 * @param {Playlist} master the master playlist
 * @param {Playlist} media the media playlist
 * @return {number} the hold back in seconds.
 */

var liveEdgeDelay = function liveEdgeDelay(master, media) {
  // VOD playlists have no live edge.
  if (media.endList) {
    return 0;
  }

  // DASH suggestedPresentationDelay trumps everything.
  if (master && master.suggestedPresentationDelay) {
    return master.suggestedPresentationDelay;
  }

  var hasParts = getLastParts(media).length > 0;

  // Look for LL-HLS "part" delays first.
  if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
    return media.serverControl.partHoldBack;
  }

  if (hasParts && media.partTargetDuration) {
    return media.partTargetDuration * 3;
  }

  // Finally fall back to full-segment delays.
  if (media.serverControl && media.serverControl.holdBack) {
    return media.serverControl.holdBack;
  }

  if (media.targetDuration) {
    return media.targetDuration * 3;
  }

  return 0;
};
/**
 * walk backward until we find a duration we can use
 * or return a failure
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {Number} endSequence the mediaSequence to stop walking on
 * @return {{result: number, precise: boolean}} the duration estimate and
 *         whether it came from real timeline information
 */

var backwardDuration = function backwardDuration(playlist, endSequence) {
  var result = 0;
  var i = endSequence - playlist.mediaSequence;

  // If timing information is available for the segment immediately
  // following the interval, it pins the interval end precisely.
  var segment = playlist.segments[i];

  if (segment) {
    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start,
        precise: true
      };
    }

    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - segment.duration,
        precise: true
      };
    }
  }

  // Walk backward until we find the latest segment with timeline
  // information that is earlier than endSequence.
  while (i--) {
    segment = playlist.segments[i];

    if (typeof segment.end !== 'undefined') {
      return {
        result: result + segment.end,
        precise: true
      };
    }

    result += segment.duration;

    if (typeof segment.start !== 'undefined') {
      return {
        result: result + segment.start,
        precise: true
      };
    }
  }

  // No timeline information found; result is a duration-sum estimate.
  return {
    result: result,
    precise: false
  };
};
/**
 * walk forward until we find a duration we can use
 * or return a failure
 *
 * @param {Playlist} playlist the playlist to walk through
 * @param {number} endSequence the mediaSequence to stop walking on
 * @return {{result: number, precise: boolean}} the duration estimate, or
 *         {result: -1, precise: false} when no timeline info was found
 */


var forwardDuration = function forwardDuration(playlist, endSequence) {
  var result = 0;
  var i = endSequence - playlist.mediaSequence;

  // Walk forward until we find the earliest segment with timeline
  // information.
  for (; i < playlist.segments.length; i++) {
    var segment = playlist.segments[i];

    if (typeof segment.start !== 'undefined') {
      return {
        result: segment.start - result,
        precise: true
      };
    }

    result += segment.duration;

    if (typeof segment.end !== 'undefined') {
      return {
        result: segment.end - result,
        precise: true
      };
    }
  }

  // Indicate we didn't find a useful duration estimate.
  return {
    result: -1,
    precise: false
  };
};
/**
 * Calculate the media duration from the segments associated with a
 * playlist. The duration of a subinterval of the available segments
 * may be calculated by specifying an end index.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper boundary
 * for the playlist.  Defaults to playlist length.
 * @param {number} expired the amount of time that has dropped
 * off the front of the playlist in a live scenario
 * @return {number} the duration between the first available segment
 * and end index.
 */


var intervalDuration = function intervalDuration(playlist, endSequence, expired) {
  if (typeof endSequence === 'undefined') {
    endSequence = playlist.mediaSequence + playlist.segments.length;
  }

  if (endSequence < playlist.mediaSequence) {
    return 0;
  }

  // Prefer the backward walk: precise timing there comes straight from the
  // Media Source.
  var backward = backwardDuration(playlist, endSequence);

  if (backward.precise) {
    return backward.result;
  }

  // Otherwise try a forward walk for a precise estimate.
  var forward = forwardDuration(playlist, endSequence);

  if (forward.precise) {
    // A buffered segment pins this position precisely.
    return forward.result;
  }

  // Fall back to the less-precise, playlist-based duration estimate.
  return backward.result + expired;
};
/**
 * Calculates the duration of a playlist. If a start and end index
 * are specified, the duration will be for the subset of the media
 * timeline between those two indices. The total duration for live
 * playlists is always Infinity.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} endSequence an exclusive upper
 * boundary for the playlist. Defaults to the playlist media
 * sequence number plus its length.
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @return {number} the duration between the start index and end
 * index.
 */


var duration = function duration(playlist, endSequence, expired) {
  if (!playlist) {
    return 0;
  }

  if (typeof expired !== 'number') {
    expired = 0;
  }

  // When no subinterval is requested, playlist-level duration indicators
  // can short-circuit the segment walk.
  if (typeof endSequence === 'undefined') {
    if (playlist.totalDuration) {
      return playlist.totalDuration;
    }

    // Live playlists (no endList) have an unbounded duration.
    if (!playlist.endList) {
      return window__default["default"].Infinity;
    }
  }

  // Otherwise total up the segment durations.
  return intervalDuration(playlist, endSequence, expired);
};
/**
 * Calculate the time between two indexes in the current playlist.
 * Neither the start- nor the end-index need to be within the current
 * playlist, in which case the defaultDuration is used
 * to approximate the durations of the missing entries.
 *
 * @param {Array} options.durationList list to iterate over for durations.
 * @param {number} options.defaultDuration duration to use for elements before or after the durationList
 * @param {number} options.startIndex partsAndSegments index to start
 * @param {number} options.endIndex partsAndSegments index to end.
 * @return {number} the number of seconds between startIndex and endIndex
 */

var sumDurations = function sumDurations(_ref2) {
  var defaultDuration = _ref2.defaultDuration;
  var durationList = _ref2.durationList;
  var startIndex = _ref2.startIndex;
  var endIndex = _ref2.endIndex;
  var durations = 0;

  // Normalize so that startIndex <= endIndex.
  if (startIndex > endIndex) {
    var tmp = startIndex;
    startIndex = endIndex;
    endIndex = tmp;
  }

  // Indices before the list contribute the default duration each.
  if (startIndex < 0) {
    for (var i = startIndex; i < Math.min(0, endIndex); i++) {
      durations += defaultDuration;
    }

    startIndex = 0;
  }

  for (var _i = startIndex; _i < endIndex; _i++) {
    durations += durationList[_i].duration;
  }

  return durations;
};
/**
 * Calculates the playlist end time
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @param {boolean|false} useSafeLiveEnd a boolean value indicating whether or not the
 * playlist end calculation should consider the safe live end
 * (truncate the playlist end by three segments). This is normally
 * used for calculating the end of the playlist's seekable range.
 * This takes into account the value of liveEdgePadding.
 * Setting liveEdgePadding to 0 is equivalent to setting this to false.
 * @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
 * If this is provided, it is used in the safe live end calculation.
 * Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
 * Corresponds to suggestedPresentationDelay in DASH manifests.
 * @return {number} the end time of playlist
 * @function playlistEnd
 */

var playlistEnd = function playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
  if (!playlist || !playlist.segments) {
    return null;
  }

  // VOD playlists end at their total duration.
  if (playlist.endList) {
    return duration(playlist);
  }

  if (expired === null) {
    return null;
  }

  expired = expired || 0;

  var lastSegmentTime = intervalDuration(playlist, playlist.mediaSequence + playlist.segments.length, expired);

  if (useSafeLiveEnd) {
    // Back off from the live edge by the supplied padding, or by a computed
    // hold-back when none was given.
    liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
    lastSegmentTime -= liveEdgePadding;
  }

  // Never report a time earlier than zero.
  return Math.max(0, lastSegmentTime);
};
/**
 * Calculates the interval of time that is currently seekable in a
 * playlist. The returned time ranges are relative to the earliest
 * moment in the specified playlist that is still available. A full
 * seekable implementation for live streams would need to offset
 * these values by the duration of content that has expired from the
 * stream.
 *
 * @param {Object} playlist a media playlist object
 * @param {number=} expired the amount of time that has
 * dropped off the front of the playlist in a live scenario
 * @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
 * Corresponds to suggestedPresentationDelay in DASH manifests.
 * @return {TimeRanges} the periods of time that are valid targets
 * for seeking
 */

var seekable = function seekable(playlist, expired, liveEdgePadding) {
  var start = expired || 0;
  // Always apply the safe live-end truncation when computing seekable.
  var end = playlistEnd(playlist, expired, true, liveEdgePadding);

  // A null end means the seekable range cannot be determined yet.
  return end === null ? createTimeRange() : createTimeRange(start, end);
};
/**
 * Determine the index and estimated starting time of the segment that
 * contains a specified playback position in a media playlist.
 *
 * The walk is anchored at the entry matching startingSegmentIndex (and,
 * when given, startingPartIndex), then moves backward or forward through
 * the combined part/segment list depending on the sign of
 * (currentTime - startTime).
 *
 * @param {Object} options.playlist the media playlist to query
 * @param {number} options.currentTime The number of seconds since the earliest
 * possible position to determine the containing segment for
 * @param {number} options.startTime the time when the segment/part starts
 * @param {number} options.startingSegmentIndex the segment index to start looking at.
 * @param {number?} [options.startingPartIndex] the part index to look at within the segment.
 *
 * @return {Object} an object with partIndex, segmentIndex, and startTime.
 */

var getMediaInfoForTime = function getMediaInfoForTime(_ref4) {
  var playlist = _ref4.playlist,
      currentTime = _ref4.currentTime,
      startingSegmentIndex = _ref4.startingSegmentIndex,
      startingPartIndex = _ref4.startingPartIndex,
      startTime = _ref4.startTime,
      experimentalExactManifestTimings = _ref4.experimentalExactManifestTimings;
  // Offset of the requested time relative to the reference start time;
  // negative means the target lies before the anchor entry.
  var time = currentTime - startTime;
  var partsAndSegments = getPartsAndSegments(playlist);
  var startIndex = 0;

  // Locate the combined-list entry matching the starting segment (and part,
  // when given) to anchor the walks below.
  for (var i = 0; i < partsAndSegments.length; i++) {
    var partAndSegment = partsAndSegments[i];

    if (startingSegmentIndex !== partAndSegment.segmentIndex) {
      continue;
    } // skip this if part index does not match.


    if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {
      continue;
    }

    startIndex = i;
    break;
  }

  if (time < 0) {
    // Walk backward from startIndex in the playlist, adding durations
    // until we find a segment that contains `time` and return it
    if (startIndex > 0) {
      for (var _i2 = startIndex - 1; _i2 >= 0; _i2--) {
        var _partAndSegment = partsAndSegments[_i2];
        time += _partAndSegment.duration;

        // Exact timings require time to land strictly inside the entry;
        // otherwise a fudge factor absorbs manifest imprecision.
        if (experimentalExactManifestTimings) {
          if (time < 0) {
            continue;
          }
        } else if (time + TIME_FUDGE_FACTOR <= 0) {
          continue;
        }

        return {
          partIndex: _partAndSegment.partIndex,
          segmentIndex: _partAndSegment.segmentIndex,
          startTime: startTime - sumDurations({
            defaultDuration: playlist.targetDuration,
            durationList: partsAndSegments,
            startIndex: startIndex,
            endIndex: _i2
          })
        };
      }
    } // We were unable to find a good segment within the playlist
    // so select the first segment


    return {
      partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
      segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
      startTime: currentTime
    };
  } // When startIndex is negative, we first walk forward to first segment
  // adding target durations. If we "run out of time" before getting to
  // the first segment, return the first segment
  // NOTE(review): startIndex is initialized to 0 and only ever assigned a
  // non-negative loop index above, so this branch looks defensive/dead —
  // confirm before relying on it.


  if (startIndex < 0) {
    for (var _i3 = startIndex; _i3 < 0; _i3++) {
      time -= playlist.targetDuration;

      if (time < 0) {
        return {
          partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
          segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
          startTime: currentTime
        };
      }
    }

    startIndex = 0;
  } // Walk forward from startIndex in the playlist, subtracting durations
  // until we find a segment that contains `time` and return it


  for (var _i4 = startIndex; _i4 < partsAndSegments.length; _i4++) {
    var _partAndSegment2 = partsAndSegments[_i4];
    time -= _partAndSegment2.duration;

    if (experimentalExactManifestTimings) {
      if (time > 0) {
        continue;
      }
    } else if (time - TIME_FUDGE_FACTOR >= 0) {
      continue;
    }

    return {
      partIndex: _partAndSegment2.partIndex,
      segmentIndex: _partAndSegment2.segmentIndex,
      startTime: startTime + sumDurations({
        defaultDuration: playlist.targetDuration,
        durationList: partsAndSegments,
        startIndex: startIndex,
        endIndex: _i4
      })
    };
  } // We are out of possible candidates so load the last one...


  return {
    segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
    partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
    startTime: currentTime
  };
};
/**
 * Check whether the playlist is blacklisted or not.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is blacklisted or not
 * @function isBlacklisted
 */

var isBlacklisted = function isBlacklisted(playlist) {
  var excludeUntil = playlist.excludeUntil;

  // Excluded only while the exclusion deadline is still in the future.
  return excludeUntil && excludeUntil > Date.now();
};
/**
 * Check whether the playlist is compatible with current playback configuration or has
 * been blacklisted permanently for being incompatible.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is incompatible or not
 * @function isIncompatible
 */

var isIncompatible = function isIncompatible(playlist) {
  var excludeUntil = playlist.excludeUntil;

  // A permanent (Infinity) exclusion marks the playlist incompatible.
  return excludeUntil && excludeUntil === Infinity;
};
/**
 * Check whether the playlist is enabled or not.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is enabled or not
 * @function isEnabled
 */

var isEnabled = function isEnabled(playlist) {
  // Enabled means neither manually disabled nor currently blacklisted.
  return !playlist.disabled && !isBlacklisted(playlist);
};
/**
 * Check whether the playlist has been manually disabled through the representations api.
 *
 * @param {Object} playlist the media playlist object
 * @return {boolean} whether the playlist is disabled manually or not
 * @function isDisabled
 */

var isDisabled = function isDisabled(playlist) {
  // Reflects manual disabling only; blacklisting is tracked separately.
  return playlist.disabled;
};
/**
 * Returns whether the current playlist is an AES encrypted HLS stream
 *
 * @param {Object} media the media playlist object
 * @return {boolean} true if it's an AES encrypted HLS stream
 */

var isAes = function isAes(media) {
  var segments = media.segments;

  // Any keyed segment marks the stream as encrypted.
  for (var i = 0; i < segments.length; i++) {
    if (segments[i].key) {
      return true;
    }
  }

  return false;
};
/**
 * Checks if the playlist has a value for the specified attribute
 *
 * @param {string} attr
 *        Attribute to check for
 * @param {Object} playlist
 *        The media playlist object
 * @return {boolean}
 *         Whether the playlist contains a value for the attribute or not
 * @function hasAttribute
 */

var hasAttribute = function hasAttribute(attr, playlist) {
  var attributes = playlist.attributes;

  // Truthy only when the attributes map exists and carries a value for attr.
  return attributes && attributes[attr];
};
/**
 * Estimates the time required to complete a segment download from the specified playlist
 *
 * @param {number} segmentDuration
 *        Duration of requested segment
 * @param {number} bandwidth
 *        Current measured bandwidth of the player
 * @param {Object} playlist
 *        The media playlist object
 * @param {number=} bytesReceived
 *        Number of bytes already received for the request. Defaults to 0
 * @return {number|NaN}
 *         The estimated time to request the segment. NaN if bandwidth information for
 *         the given playlist is unavailable
 * @function estimateSegmentRequestTime
 */

var estimateSegmentRequestTime = function estimateSegmentRequestTime(segmentDuration, bandwidth, playlist, bytesReceived) {
  if (bytesReceived === void 0) {
    bytesReceived = 0;
  }

  // Without a BANDWIDTH attribute no estimate is possible.
  if (!hasAttribute('BANDWIDTH', playlist)) {
    return NaN;
  }

  // Estimated total size in bits, minus the bits already received.
  var sizeInBits = segmentDuration * playlist.attributes.BANDWIDTH;

  return (sizeInBits - bytesReceived * 8) / bandwidth;
};
/*
 * Returns whether the current playlist is the lowest rendition
 *
 * @return {Boolean} true if on lowest rendition
 */

var isLowestEnabledRendition = function isLowestEnabledRendition(master, media) {
  // A single-playlist master is trivially the lowest rendition.
  if (master.playlists.length === 1) {
    return true;
  }

  var currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;

  // Lowest when no other enabled playlist advertises a smaller bandwidth.
  var lowerEnabled = master.playlists.filter(function (playlist) {
    return isEnabled(playlist) && (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
  });

  return lowerEnabled.length === 0;
};
var playlistMatch = function playlistMatch(a, b) {
  // no match unless both playlists are non-null
  if (!a || !b) {
    return false;
  }

  // the same object reference is an immediate match
  if (a === b) {
    return true;
  }

  // otherwise compare identifiers from most to least reliable:
  // id, then resolvedUri, then the (possibly relative) uri
  var idsMatch = a.id && b.id && a.id === b.id;
  var resolvedUrisMatch = a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri;
  var urisMatch = a.uri && b.uri && a.uri === b.uri;

  return !!(idsMatch || resolvedUrisMatch || urisMatch);
};
1063
var someAudioVariant = function someAudioVariant(master, callback) {
  var AUDIO = master && master.mediaGroups && master.mediaGroups.AUDIO || {};

  // walk every group -> label variant until the callback reports a match
  for (var groupName in AUDIO) {
    var group = AUDIO[groupName];

    for (var label in group) {
      if (callback(group[label])) {
        return true;
      }
    }
  }

  return false;
};
1084
var isAudioOnly = function isAudioOnly(master) {
  // with no main playlists, the master is audio only when some audio
  // media group variant has real playlists or a uri of its own
  if (!master || !master.playlists || !master.playlists.length) {
    return someAudioVariant(master, function (variant) {
      return variant.playlists && variant.playlists.length || variant.uri;
    });
  }

  // otherwise the master is audio only when every playlist is audio
  for (var i = 0; i < master.playlists.length; i++) {
    var playlist = master.playlists[i];
    var CODECS = playlist.attributes && playlist.attributes.CODECS;

    // playlist is audio when all of its declared codecs are audio codecs
    if (CODECS && CODECS.split(',').every(function (c) {
      return codecs_js.isAudioCodec(c);
    })) {
      continue;
    }

    // ...or when it appears inside an audio media group
    // (someAudioVariant invokes the callback synchronously, so capturing
    // the loop's `playlist` here is safe)
    var inAudioGroup = someAudioVariant(master, function (variant) {
      return playlistMatch(playlist, variant);
    });

    if (inAudioGroup) {
      continue;
    }

    // this playlist is not audio, so the master is not audio only
    return false;
  }

  // every playlist was audio only
  return true;
}; // exports
1135
// Aggregate export of the playlist helper functions defined above, so
// consumers can access them through a single `Playlist` namespace object.
var Playlist = {
  liveEdgeDelay: liveEdgeDelay,
  duration: duration,
  seekable: seekable,
  getMediaInfoForTime: getMediaInfoForTime,
  isEnabled: isEnabled,
  isDisabled: isDisabled,
  isBlacklisted: isBlacklisted,
  isIncompatible: isIncompatible,
  playlistEnd: playlistEnd,
  isAes: isAes,
  hasAttribute: hasAttribute,
  estimateSegmentRequestTime: estimateSegmentRequestTime,
  isLowestEnabledRendition: isLowestEnabledRendition,
  isAudioOnly: isAudioOnly,
  playlistMatch: playlistMatch
};
1153
// shorthand for video.js's logger
var log = videojs__default["default"].log;
// builds the id VHS uses to key playlists: "<index>-<uri>"
var createPlaylistID = function createPlaylistID(index, uri) {
  return index + "-" + uri;
};
1158/**
1159 * Parses a given m3u8 playlist
1160 *
1161 * @param {Function} [onwarn]
1162 * a function to call when the parser triggers a warning event.
1163 * @param {Function} [oninfo]
1164 * a function to call when the parser triggers an info event.
1165 * @param {string} manifestString
1166 * The downloaded manifest string
1167 * @param {Object[]} [customTagParsers]
1168 * An array of custom tag parsers for the m3u8-parser instance
1169 * @param {Object[]} [customTagMappers]
1170 * An array of custom tag mappers for the m3u8-parser instance
1171 * @param {boolean} [experimentalLLHLS=false]
1172 * Whether to keep ll-hls features in the manifest after parsing.
1173 * @return {Object}
1174 * The manifest object
1175 */
1176
var parseManifest = function parseManifest(_ref) {
  var onwarn = _ref.onwarn,
      oninfo = _ref.oninfo,
      manifestString = _ref.manifestString,
      _ref$customTagParsers = _ref.customTagParsers,
      customTagParsers = _ref$customTagParsers === void 0 ? [] : _ref$customTagParsers,
      _ref$customTagMappers = _ref.customTagMappers,
      customTagMappers = _ref$customTagMappers === void 0 ? [] : _ref$customTagMappers,
      experimentalLLHLS = _ref.experimentalLLHLS;
  var parser = new m3u8Parser.Parser();

  // forward parser diagnostics to the caller-supplied handlers
  if (onwarn) {
    parser.on('warn', onwarn);
  }

  if (oninfo) {
    parser.on('info', oninfo);
  }

  // register any caller-provided custom tag handling before parsing
  customTagParsers.forEach(function (customParser) {
    return parser.addParser(customParser);
  });
  customTagMappers.forEach(function (mapper) {
    return parser.addTagMapper(mapper);
  });
  parser.push(manifestString);
  parser.end();
  var manifest = parser.manifest; // remove llhls features from the parsed manifest
  // if we don't want llhls support.

  if (!experimentalLLHLS) {
    ['preloadSegment', 'skip', 'serverControl', 'renditionReports', 'partInf', 'partTargetDuration'].forEach(function (k) {
      if (manifest.hasOwnProperty(k)) {
        delete manifest[k];
      }
    });

    if (manifest.segments) {
      manifest.segments.forEach(function (segment) {
        ['parts', 'preloadHints'].forEach(function (k) {
          if (segment.hasOwnProperty(k)) {
            delete segment[k];
          }
        });
      });
    }
  }

  // missing EXT-X-TARGETDURATION: fall back to the longest segment
  // duration, or 10 seconds when there are no segments at all
  if (!manifest.targetDuration) {
    var targetDuration = 10;

    if (manifest.segments && manifest.segments.length) {
      targetDuration = manifest.segments.reduce(function (acc, s) {
        return Math.max(acc, s.duration);
      }, 0);
    }

    if (onwarn) {
      onwarn("manifest has no targetDuration defaulting to " + targetDuration);
    }

    manifest.targetDuration = targetDuration;
  }

  var parts = getLastParts(manifest);

  // LL-HLS: parts present but EXT-X-PART-INF:PART-TARGET missing —
  // approximate the part target with the longest last-segment part
  if (parts.length && !manifest.partTargetDuration) {
    var partTargetDuration = parts.reduce(function (acc, p) {
      return Math.max(acc, p.duration);
    }, 0);

    if (onwarn) {
      onwarn("manifest has no partTargetDuration defaulting to " + partTargetDuration);
      log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. Playback is not guaranteed.');
    }

    manifest.partTargetDuration = partTargetDuration;
  }

  return manifest;
};
1258/**
1259 * Loops through all supported media groups in master and calls the provided
1260 * callback for each group
1261 *
1262 * @param {Object} master
1263 * The parsed master manifest object
1264 * @param {Function} callback
1265 * Callback to call for each media group
1266 */
1267
var forEachMediaGroup = function forEachMediaGroup(master, callback) {
  if (!master.mediaGroups) {
    return;
  }

  // only AUDIO and SUBTITLES groups contain alternate media playlists
  ['AUDIO', 'SUBTITLES'].forEach(function (mediaType) {
    var groups = master.mediaGroups[mediaType];

    if (!groups) {
      return;
    }

    for (var groupKey in groups) {
      var group = groups[groupKey];

      for (var labelKey in group) {
        callback(group[labelKey], mediaType, groupKey, labelKey);
      }
    }
  });
};
1286/**
1287 * Adds properties and attributes to the playlist to keep consistent functionality for
1288 * playlists throughout VHS.
1289 *
1290 * @param {Object} config
1291 * Arguments object
1292 * @param {Object} config.playlist
1293 * The media playlist
1294 * @param {string} [config.uri]
1295 * The uri to the media playlist (if media playlist is not from within a master
1296 * playlist)
1297 * @param {string} id
1298 * ID to use for the playlist
1299 */
1300
var setupMediaPlaylist = function setupMediaPlaylist(_ref2) {
  var playlist = _ref2.playlist,
      uri = _ref2.uri,
      id = _ref2.id;
  playlist.id = id;
  // counts consecutive load failures for this playlist
  playlist.playlistErrors_ = 0;

  // HLS media playlists do not contain their own source URI, so attach the
  // one we loaded them from for consistency throughout VHS
  if (uri) {
    playlist.uri = uri;
  }

  // guarantee an attributes object so the rest of the project can read
  // playlist.attributes without undefined-reference checks (m3u8-parser
  // does not attach one to media playlists, and master playlists may be
  // missing attributes that the spec says are required)
  if (!playlist.attributes) {
    playlist.attributes = {};
  }
};
1324/**
1325 * Adds ID, resolvedUri, and attributes properties to each playlist of the master, where
1326 * necessary. In addition, creates playlist IDs for each playlist and adds playlist ID to
1327 * playlist references to the playlists array.
1328 *
1329 * @param {Object} master
1330 * The master playlist
1331 */
1332
var setupMediaPlaylists = function setupMediaPlaylists(master) {
  // iterate from the end of the list toward the start (matches the
  // original traversal order, which determines warning order)
  for (var i = master.playlists.length - 1; i >= 0; i--) {
    var playlist = master.playlists[i];

    setupMediaPlaylist({
      playlist: playlist,
      id: createPlaylistID(i, playlist.uri)
    });
    playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
    // index playlists by id and — for backwards compatibility — by uri
    master.playlists[playlist.id] = playlist;
    master.playlists[playlist.uri] = playlist;

    // the spec says #EXT-X-STREAM-INF MUST carry BANDWIDTH, but playback
    // still works without it; warn so the manifest can be fixed
    if (!playlist.attributes.BANDWIDTH) {
      log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
    }
  }
};
1355/**
1356 * Adds resolvedUri properties to each media group.
1357 *
1358 * @param {Object} master
1359 * The master playlist
1360 */
1361
var resolveMediaGroupUris = function resolveMediaGroupUris(master) {
  forEachMediaGroup(master, function (properties) {
    // only media groups that reference an external playlist get a
    // resolvedUri
    if (!properties.uri) {
      return;
    }

    properties.resolvedUri = resolveUrl(master.uri, properties.uri);
  });
};
1369/**
1370 * Creates a master playlist wrapper to insert a sole media playlist into.
1371 *
1372 * @param {Object} media
1373 * Media playlist
1374 * @param {string} uri
1375 * The media URI
1376 *
1377 * @return {Object}
1378 * Master playlist
1379 */
1380
var masterForMedia = function masterForMedia(media, uri) {
  var id = createPlaylistID(0, uri);
  // m3u8-parser does not attach an attributes property to media playlists,
  // so attach an empty one to avoid undefined reference errors
  var playlist = {
    uri: uri,
    id: id,
    resolvedUri: uri,
    attributes: {}
  };
  var master = {
    mediaGroups: {
      'AUDIO': {},
      'VIDEO': {},
      'CLOSED-CAPTIONS': {},
      'SUBTITLES': {}
    },
    uri: window__default["default"].location.href,
    resolvedUri: window__default["default"].location.href,
    playlists: [playlist]
  };

  // index the sole playlist by id, and by uri for backwards compatibility
  master.playlists[id] = playlist;
  master.playlists[uri] = playlist;
  return master;
};
1407/**
1408 * Does an in-place update of the master manifest to add updated playlist URI references
1409 * as well as other properties needed by VHS that aren't included by the parser.
1410 *
1411 * @param {Object} master
1412 * Master manifest object
1413 * @param {string} uri
1414 * The source URI
1415 */
1416
var addPropertiesToMaster = function addPropertiesToMaster(master, uri) {
  master.uri = uri;

  for (var i = 0; i < master.playlists.length; i++) {
    if (!master.playlists[i].uri) {
      // Set up phony URIs for the playlists since playlists are referenced by their URIs
      // throughout VHS, but some formats (e.g., DASH) don't have external URIs
      // TODO: consider adding dummy URIs in mpd-parser
      var phonyUri = "placeholder-uri-" + i;
      master.playlists[i].uri = phonyUri;
    }
  }

  var audioOnlyMaster = isAudioOnly(master);
  forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
    var groupId = "placeholder-uri-" + mediaType + "-" + groupKey + "-" + labelKey; // add a playlist array under properties

    if (!properties.playlists || !properties.playlists.length) {
      // If the manifest is audio only and this media group does not have a uri, check
      // if the media group is located in the main list of playlists. If it is, don't add
      // placeholder properties as it shouldn't be considered an alternate audio track.
      if (audioOnlyMaster && mediaType === 'AUDIO' && !properties.uri) {
        for (var _i = 0; _i < master.playlists.length; _i++) {
          var p = master.playlists[_i];

          if (p.attributes && p.attributes.AUDIO && p.attributes.AUDIO === groupKey) {
            return;
          }
        }
      }

      // no real playlists: wrap a shallow copy of the group's own
      // properties so it can be treated like a playlist downstream
      properties.playlists = [_extends__default["default"]({}, properties)];
    }

    properties.playlists.forEach(function (p, i) {
      var id = createPlaylistID(i, groupId);

      if (p.uri) {
        p.resolvedUri = p.resolvedUri || resolveUrl(master.uri, p.uri);
      } else {
        // DEPRECATED, this has been added to prevent a breaking change.
        // previously we only ever had a single media group playlist, so
        // we mark the first playlist uri without prepending the index as we used to
        // ideally we would do all of the playlists the same way.
        p.uri = i === 0 ? groupId : id; // don't resolve a placeholder uri to an absolute url, just use
        // the placeholder again

        p.resolvedUri = p.uri;
      }

      p.id = p.id || id; // add an empty attributes object, all playlists are
      // expected to have this.

      p.attributes = p.attributes || {}; // setup ID and URI references (URI for backwards compatibility)

      master.playlists[p.id] = p;
      master.playlists[p.uri] = p;
    });
  });
  setupMediaPlaylists(master);
  resolveMediaGroupUris(master);
};
1479
// local aliases for the video.js utilities used by the playlist loader below
var mergeOptions$2 = videojs__default["default"].mergeOptions,
    EventTarget$1 = videojs__default["default"].EventTarget;
1482
/**
 * Appends the LL-HLS delivery directive query parameters (_HLS_msn,
 * _HLS_part, _HLS_skip) to a media playlist uri when the playlist's
 * EXT-X-SERVER-CONTROL advertises support for blocking reloads / skips.
 * Live (non-endList) playlists only.
 *
 * @param {string} uri the playlist uri to decorate
 * @param {Object} media the parsed media playlist
 * @return {string} the uri, possibly extended with delivery directives
 */
var addLLHLSQueryDirectives = function addLLHLSQueryDirectives(uri, media) {
  if (media.endList || !media.serverControl) {
    return uri;
  }

  var parameters = {};

  if (media.serverControl.canBlockReload) {
    var preloadSegment = media.preloadSegment; // next msn is a zero based value, length is not.

    var nextMSN = media.mediaSequence + media.segments.length; // If preload segment has parts then it is likely
    // that we are going to request a part of that preload segment.
    // the logic below is used to determine that.

    if (preloadSegment) {
      var parts = preloadSegment.parts || []; // _HLS_part is a zero based index

      var nextPart = getKnownPartCount(media) - 1; // if nextPart is > -1 and not equal to just the
      // length of parts, then we know we had part preload hints
      // and we need to add the _HLS_part= query

      if (nextPart > -1 && nextPart !== parts.length - 1) {
        // add existing parts to our preload hints
        // eslint-disable-next-line
        parameters._HLS_part = nextPart;
      } // this if statement makes sure that we request the msn
      // of the preload segment if:
      // 1. the preload segment had parts (and was not yet a full segment)
      //    but was added to our segments array
      // 2. the preload segment had preload hints for parts that are not in
      //    the manifest yet.
      // in all other cases we want the segment after the preload segment
      // which will be given by using media.segments.length because it is 1 based
      // rather than 0 based.


      if (nextPart > -1 || parts.length) {
        nextMSN--;
      }
    } // add _HLS_msn= in front of any _HLS_part query
    // eslint-disable-next-line


    parameters._HLS_msn = nextMSN;
  }

  if (media.serverControl && media.serverControl.canSkipUntil) {
    // add _HLS_skip= infront of all other queries.
    // 'v2' requests that daterange skipping also be applied.
    // eslint-disable-next-line
    parameters._HLS_skip = media.serverControl.canSkipDateranges ? 'v2' : 'YES';
  }

  if (Object.keys(parameters).length) {
    var parsedUri = new window__default["default"].URL(uri);
    ['_HLS_skip', '_HLS_msn', '_HLS_part'].forEach(function (name) {
      if (!parameters.hasOwnProperty(name)) {
        return;
      }

      parsedUri.searchParams.set(name, parameters[name]);
    });
    uri = parsedUri.toString();
  }

  return uri;
};
1549/**
1550 * Returns a new segment object with properties and
1551 * the parts array merged.
1552 *
1553 * @param {Object} a the old segment
1554 * @param {Object} b the new segment
1555 *
1556 * @return {Object} the merged segment
1557 */
1558
1559
var updateSegment = function updateSegment(a, b) {
  // no old segment to merge from; the new segment stands alone
  if (!a) {
    return b;
  }

  var result = mergeOptions$2(a, b);

  // preload hints that only the old segment carried are now stale
  if (a.preloadHints && !b.preloadHints) {
    delete result.preloadHints;
  }

  if (a.parts && !b.parts) {
    // parts present only on the old segment are no longer valid
    delete result.parts;
  } else if (a.parts && b.parts) {
    // both segments have parts: fold old part properties into the new ones
    for (var partIndex = 0; partIndex < b.parts.length; partIndex++) {
      if (a.parts && a.parts[partIndex]) {
        result.parts[partIndex] = mergeOptions$2(a.parts[partIndex], b.parts[partIndex]);
      }
    }
  }

  // a segment we already had full information for is not skipped, even if
  // the newer (delta) playlist marked it as such
  if (!a.skipped && b.skipped) {
    result.skipped = false;
  }

  // a segment that gained information in the newer playlist is no longer
  // preload-only
  if (a.preload && !b.preload) {
    result.preload = false;
  }

  return result;
};
1600/**
1601 * Returns a new array of segments that is the result of merging
1602 * properties from an older list of segments onto an updated
1603 * list. No properties on the updated playlist will be ovewritten.
1604 *
1605 * @param {Array} original the outdated list of segments
1606 * @param {Array} update the updated list of segments
1607 * @param {number=} offset the index of the first update
1608 * segment in the original segment list. For non-live playlists,
1609 * this should always be zero and does not need to be
1610 * specified. For live playlists, it should be the difference
1611 * between the media sequence numbers in the original and updated
1612 * playlists.
1613 * @return {Array} a list of merged segment objects
1614 */
1615
var updateSegments = function updateSegments(original, update, offset) {
  var oldSegments = original.slice();
  var newSegments = update.slice();
  offset = offset || 0;
  var result = [];
  var currentMap;

  newSegments.forEach(function (newSegment, index) {
    var oldSegment = oldSegments[index + offset];

    if (oldSegment) {
      // remember the most recent init segment so it can be carried forward
      currentMap = oldSegment.map || currentMap;
      result.push(updateSegment(oldSegment, newSegment));
      return;
    }

    // no matching old segment: carry the last known init segment over
    if (currentMap && !newSegment.map) {
      newSegment.map = currentMap;
    }

    result.push(newSegment);
  });

  return result;
};
var resolveSegmentUris = function resolveSegmentUris(segment, baseUri) {
  // a preloadSegment will not have a uri at all, as the segment isn't
  // actually in the manifest yet — only its parts are
  if (!segment.resolvedUri && segment.uri) {
    segment.resolvedUri = resolveUrl(baseUri, segment.uri);
  }

  if (segment.key && !segment.key.resolvedUri) {
    segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);
  }

  if (segment.map && !segment.map.resolvedUri) {
    segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);
  }

  if (segment.map && segment.map.key && !segment.map.key.resolvedUri) {
    segment.map.key.resolvedUri = resolveUrl(baseUri, segment.map.key.uri);
  }

  // parts and preload hints are resolved the same way
  [segment.parts, segment.preloadHints].forEach(function (list) {
    if (!list || !list.length) {
      return;
    }

    list.forEach(function (p) {
      if (!p.resolvedUri) {
        p.resolvedUri = resolveUrl(baseUri, p.uri);
      }
    });
  });
};
1681
var getAllSegments = function getAllSegments(media) {
  var segments = media.segments || [];
  var preloadSegment = media.preloadSegment;

  // a preloadSegment is only usable once it has at least one part; a
  // preloadSegment with nothing but preload hints cannot be downloaded yet
  if (!preloadSegment || !preloadSegment.parts || !preloadSegment.parts.length) {
    return segments;
  }

  // a MAP preload hint means the init segment is going to change, so none
  // of this preload segment's parts can be used
  if (preloadSegment.preloadHints) {
    var hints = preloadSegment.preloadHints;

    for (var hintIndex = 0; hintIndex < hints.length; hintIndex++) {
      if (hints[hintIndex].type === 'MAP') {
        return segments;
      }
    }
  }

  // include the preload segment as a regular segment, assuming it will
  // fill out to a full target duration
  preloadSegment.duration = media.targetDuration;
  preloadSegment.preload = true;
  segments.push(preloadSegment);
  return segments;
}; // consider the playlist unchanged if the playlist object is the same or
1708// the number of segments is equal, the media sequence number is unchanged,
1709// and this playlist hasn't become the end of the playlist
1710
1711
var isPlaylistUnchanged = function isPlaylistUnchanged(a, b) {
  // the very same playlist object is trivially unchanged
  if (a === b) {
    return true;
  }

  // otherwise unchanged when both have segment lists of equal length, the
  // same media sequence number, and the playlist hasn't become ended
  return a.segments && b.segments &&
    a.segments.length === b.segments.length &&
    a.endList === b.endList &&
    a.mediaSequence === b.mediaSequence;
};
1715/**
1716 * Returns a new master playlist that is the result of merging an
1717 * updated media playlist into the original version. If the
1718 * updated media playlist does not match any of the playlist
1719 * entries in the original master playlist, null is returned.
1720 *
1721 * @param {Object} master a parsed master M3U8 object
1722 * @param {Object} media a parsed media M3U8 object
1723 * @return {Object} a new object that represents the original
1724 * master playlist with the updated media playlist merged in, or
1725 * null if the merge produced no change.
1726 */
1727
var updateMaster$1 = function updateMaster(master, newMedia, unchangedCheck) {
  if (unchangedCheck === void 0) {
    unchangedCheck = isPlaylistUnchanged;
  }

  // deep copy the master so the caller can compare old vs. new state
  var result = mergeOptions$2(master, {});
  var oldMedia = result.playlists[newMedia.id];

  // the updated playlist doesn't match any entry in the master
  if (!oldMedia) {
    return null;
  }

  // no change relative to the previous refresh
  if (unchangedCheck(oldMedia, newMedia)) {
    return null;
  }

  newMedia.segments = getAllSegments(newMedia);
  var mergedPlaylist = mergeOptions$2(oldMedia, newMedia); // always use the new media's preload segment

  if (mergedPlaylist.preloadSegment && !newMedia.preloadSegment) {
    delete mergedPlaylist.preloadSegment;
  } // if the update could overlap existing segment information, merge the two segment lists


  if (oldMedia.segments) {
    if (newMedia.skip) {
      newMedia.segments = newMedia.segments || []; // add back in objects for skipped segments, so that we merge
      // old properties into the new segments

      for (var i = 0; i < newMedia.skip.skippedSegments; i++) {
        newMedia.segments.unshift({
          skipped: true
        });
      }
    }

    mergedPlaylist.segments = updateSegments(oldMedia.segments, newMedia.segments, newMedia.mediaSequence - oldMedia.mediaSequence);
  } // resolve any segment URIs to prevent us from having to do it later


  mergedPlaylist.segments.forEach(function (segment) {
    resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
  }); // TODO Right now in the playlists array there are two references to each playlist, one
  // that is referenced by index, and one by URI. The index reference may no longer be
  // necessary.

  for (var _i = 0; _i < result.playlists.length; _i++) {
    if (result.playlists[_i].id === newMedia.id) {
      result.playlists[_i] = mergedPlaylist;
    }
  }

  result.playlists[newMedia.id] = mergedPlaylist; // URI reference added for backwards compatibility

  result.playlists[newMedia.uri] = mergedPlaylist; // update media group playlist references.

  forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
    if (!properties.playlists) {
      return;
    }

    for (var _i2 = 0; _i2 < properties.playlists.length; _i2++) {
      if (newMedia.id === properties.playlists[_i2].id) {
        properties.playlists[_i2] = newMedia;
      }
    }
  });
  return result;
};
1797/**
1798 * Calculates the time to wait before refreshing a live playlist
1799 *
1800 * @param {Object} media
1801 * The current media
1802 * @param {boolean} update
1803 * True if there were any updates from the last refresh, false otherwise
1804 * @return {number}
1805 * The time in ms to wait before refreshing the live playlist
1806 */
1807
var refreshDelay = function refreshDelay(media, update) {
  var segments = media.segments || [];
  var lastSegment = segments.length ? segments[segments.length - 1] : undefined;
  var parts = lastSegment && lastSegment.parts;
  var lastPart = parts && parts.length ? parts[parts.length - 1] : undefined;
  var lastDuration;

  // prefer the final part's duration (LL-HLS) over the final segment's
  if (lastPart && lastPart.duration) {
    lastDuration = lastPart.duration;
  } else if (lastSegment && lastSegment.duration) {
    lastDuration = lastSegment.duration;
  }

  // when the last refresh produced an update, poll again after one
  // last-segment/part duration
  if (update && lastDuration) {
    return lastDuration * 1000;
  }

  // if the playlist is unchanged since the last reload or last segment
  // duration cannot be determined, try again after half the target duration
  return (media.partTargetDuration || media.targetDuration || 10) * 500;
};
1822/**
1823 * Load a playlist from a remote location
1824 *
1825 * @class PlaylistLoader
1826 * @extends Stream
1827 * @param {string|Object} src url or object of manifest
1828 * @param {boolean} withCredentials the withCredentials xhr option
1829 * @class
1830 */
1831
1832var PlaylistLoader = /*#__PURE__*/function (_EventTarget) {
1833 _inheritsLoose__default["default"](PlaylistLoader, _EventTarget);
1834
  // Loads and refreshes HLS playlists; src may be a manifest url or an
  // already-parsed manifest object.
  function PlaylistLoader(src, vhs, options) {
    var _this;

    if (options === void 0) {
      options = {};
    }

    _this = _EventTarget.call(this) || this;

    if (!src) {
      throw new Error('A non-empty playlist URL or object is required');
    }

    _this.logger_ = logger('PlaylistLoader');
    var _options = options,
        _options$withCredenti = _options.withCredentials,
        withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
        _options$handleManife = _options.handleManifestRedirects,
        handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
    _this.src = src;
    _this.vhs_ = vhs;
    _this.withCredentials = withCredentials;
    _this.handleManifestRedirects = handleManifestRedirects;
    var vhsOptions = vhs.options_;
    _this.customTagParsers = vhsOptions && vhsOptions.customTagParsers || [];
    _this.customTagMappers = vhsOptions && vhsOptions.customTagMappers || [];
    _this.experimentalLLHLS = vhsOptions && vhsOptions.experimentalLLHLS || false; // force experimentalLLHLS off for IE 11, which cannot support it

    if (videojs__default["default"].browser.IE_VERSION) {
      _this.experimentalLLHLS = false;
    } // initialize the loader state


    _this.state = 'HAVE_NOTHING'; // live playlist staleness timeout

    _this.handleMediaupdatetimeout_ = _this.handleMediaupdatetimeout_.bind(_assertThisInitialized__default["default"](_this));

    _this.on('mediaupdatetimeout', _this.handleMediaupdatetimeout_);

    return _this;
  }
1876
1877 var _proto = PlaylistLoader.prototype;
1878
  // Re-requests the active media playlist when the live refresh timer
  // fires, optionally appending LL-HLS blocking query directives.
  _proto.handleMediaupdatetimeout_ = function handleMediaupdatetimeout_() {
    var _this2 = this;

    if (this.state !== 'HAVE_METADATA') {
      // only refresh the media playlist if no other activity is going on
      return;
    }

    var media = this.media();
    var uri = resolveUrl(this.master.uri, media.uri);

    if (this.experimentalLLHLS) {
      uri = addLLHLSQueryDirectives(uri, media);
    }

    this.state = 'HAVE_CURRENT_METADATA';
    this.request = this.vhs_.xhr({
      uri: uri,
      withCredentials: this.withCredentials
    }, function (error, req) {
      // disposed
      if (!_this2.request) {
        return;
      }

      if (error) {
        // restore the pre-request state so a retry can happen
        return _this2.playlistRequestError(_this2.request, _this2.media(), 'HAVE_METADATA');
      }

      _this2.haveMetadata({
        playlistString: _this2.request.responseText,
        url: _this2.media().uri,
        id: _this2.media().id
      });
    });
  };
1915
  // Records a playlist request failure on this.error and emits 'error';
  // optionally rewinds the loader to startingState first.
  _proto.playlistRequestError = function playlistRequestError(xhr, playlist, startingState) {
    var uri = playlist.uri,
        id = playlist.id; // any in-flight request is now finished

    this.request = null;

    if (startingState) {
      this.state = startingState;
    }

    this.error = {
      playlist: this.master.playlists[id],
      status: xhr.status,
      message: "HLS playlist request error at URL: " + uri + ".",
      responseText: xhr.responseText,
      // 5xx responses are reported as code 4, everything else as code 2
      code: xhr.status >= 500 ? 4 : 2
    };
    this.trigger('error');
  };
1935
  // Parses a raw manifest string with this loader's configured custom tag
  // parsers/mappers, routing m3u8-parser warn/info events to the logger.
  _proto.parseManifest_ = function parseManifest_(_ref) {
    var _this3 = this;

    var url = _ref.url,
        manifestString = _ref.manifestString;
    return parseManifest({
      onwarn: function onwarn(_ref2) {
        var message = _ref2.message;
        return _this3.logger_("m3u8-parser warn for " + url + ": " + message);
      },
      oninfo: function oninfo(_ref3) {
        var message = _ref3.message;
        return _this3.logger_("m3u8-parser info for " + url + ": " + message);
      },
      manifestString: manifestString,
      customTagParsers: this.customTagParsers,
      customTagMappers: this.customTagMappers,
      experimentalLLHLS: this.experimentalLLHLS
    });
  }
1956 /**
1957 * Update the playlist loader's state in response to a new or updated playlist.
1958 *
1959 * @param {string} [playlistString]
1960 * Playlist string (if playlistObject is not provided)
1961 * @param {Object} [playlistObject]
1962 * Playlist object (if playlistString is not provided)
1963 * @param {string} url
1964 * URL of playlist
1965 * @param {string} id
1966 * ID to use for playlist
1967 */
1968 ;
1969
/**
 * Update the loader's state in response to a new or updated media playlist:
 * parse it (unless a pre-parsed object is supplied), merge it into the
 * master, refresh the target duration and re-arm the live refresh timer.
 */
_proto.haveMetadata = function haveMetadata(_ref4) {
  var playlistString = _ref4.playlistString,
      playlistObject = _ref4.playlistObject,
      url = _ref4.url,
      id = _ref4.id;
  // any in-flight request is now finished
  this.request = null;
  this.state = 'HAVE_METADATA';
  // accept either a pre-parsed playlist object or a raw m3u8 string
  var playlist = playlistObject || this.parseManifest_({
    url: url,
    manifestString: playlistString
  });
  playlist.lastRequest = Date.now();
  setupMediaPlaylist({
    playlist: playlist,
    uri: url,
    id: id
  }); // merge this playlist into the master

  var update = updateMaster$1(this.master, playlist);
  // prefer the low-latency part target duration when present
  this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;

  if (update) {
    this.master = update;
    this.media_ = this.master.playlists[id];
  } else {
    // nothing changed since the last refresh
    this.trigger('playlistunchanged');
  }

  // reschedule the live refresh timer (refreshDelay grows when unchanged)
  this.updateMediaUpdateTimeout_(refreshDelay(this.media(), !!update));
  this.trigger('loadedplaylist');
}
2002 /**
2003 * Abort any outstanding work and clean up.
2004 */
2005 ;
2006
/**
 * Abort any outstanding work and clean up: notify listeners, cancel the
 * in-flight request, clear both pending timers and remove all listeners.
 */
_proto.dispose = function dispose() {
  var win = window__default["default"];
  this.trigger('dispose');
  this.stopRequest();
  win.clearTimeout(this.mediaUpdateTimeout);
  win.clearTimeout(this.finalRenditionTimeout);
  this.off();
};
2014
/**
 * Cancel any in-flight playlist request. The loader's request reference is
 * cleared before aborting so abort-triggered callbacks see no active request.
 */
_proto.stopRequest = function stopRequest() {
  if (!this.request) {
    return;
  }

  var pendingRequest = this.request;
  this.request = null;
  pendingRequest.onreadystatechange = null;
  pendingRequest.abort();
}
2023 /**
2024 * When called without any arguments, returns the currently
2025 * active media playlist. When called with a single argument,
2026 * triggers the playlist loader to asynchronously switch to the
2027 * specified media playlist. Calling this method while the
2028 * loader is in the HAVE_NOTHING causes an error to be emitted
2029 * but otherwise has no effect.
2030 *
2031 * @param {Object=} playlist the parsed media playlist
2032 * object to switch to
2033 * @param {boolean=} shouldDelay whether we should delay the request by half target duration
2034 *
2035 * @return {Playlist} the current loaded media
2036 */
2037 ;
2038
/**
 * When called without arguments, returns the currently active media
 * playlist. When called with a playlist (object or URI string), switches to
 * it: fully loaded (endList) playlists are activated synchronously, live
 * playlists are requested. Throws when used as a setter in HAVE_NOTHING.
 */
_proto.media = function media(playlist, shouldDelay) {
  var _this4 = this;

  // getter
  if (!playlist) {
    return this.media_;
  } // setter


  if (this.state === 'HAVE_NOTHING') {
    throw new Error('Cannot switch media playlist from ' + this.state);
  } // find the playlist object if the target playlist has been
  // specified by URI


  if (typeof playlist === 'string') {
    if (!this.master.playlists[playlist]) {
      throw new Error('Unknown playlist URI: ' + playlist);
    }

    playlist = this.master.playlists[playlist];
  }

  window__default["default"].clearTimeout(this.finalRenditionTimeout);

  if (shouldDelay) {
    // delay by half the (part) target duration, defaulting to 5 seconds
    var delay = (playlist.partTargetDuration || playlist.targetDuration) / 2 * 1000 || 5 * 1000;
    this.finalRenditionTimeout = window__default["default"].setTimeout(this.media.bind(this, playlist, false), delay);
    return;
  }

  var startingState = this.state;
  var mediaChange = !this.media_ || playlist.id !== this.media_.id;
  var masterPlaylistRef = this.master.playlists[playlist.id]; // switch to fully loaded playlists immediately

  if (masterPlaylistRef && masterPlaylistRef.endList || // handle the case of a playlist object (e.g., if using vhs-json with a resolved
  // media playlist or, for the case of demuxed audio, a resolved audio media group)
  playlist.endList && playlist.segments.length) {
    // abort outstanding playlist requests
    if (this.request) {
      this.request.onreadystatechange = null;
      this.request.abort();
      this.request = null;
    }

    this.state = 'HAVE_METADATA';
    this.media_ = playlist; // trigger media change if the active media has been updated

    if (mediaChange) {
      this.trigger('mediachanging');

      if (startingState === 'HAVE_MASTER') {
        // The initial playlist was a master manifest, and the first media selected was
        // also provided (in the form of a resolved playlist object) as part of the
        // source object (rather than just a URL). Therefore, since the media playlist
        // doesn't need to be requested, loadedmetadata won't trigger as part of the
        // normal flow, and needs an explicit trigger here.
        this.trigger('loadedmetadata');
      } else {
        this.trigger('mediachange');
      }
    }

    return;
  } // We update/set the timeout here so that live playlists
  // that are not a media change will "start" the loader as expected.
  // We expect that this function will start the media update timeout
  // cycle again. This also prevents a playlist switch failure from
  // causing us to stall during live.


  this.updateMediaUpdateTimeout_(refreshDelay(playlist, true)); // switching to the active playlist is a no-op

  if (!mediaChange) {
    return;
  }

  this.state = 'SWITCHING_MEDIA'; // there is already an outstanding playlist request

  if (this.request) {
    if (playlist.resolvedUri === this.request.url) {
      // requesting to switch to the same playlist multiple times
      // has no effect after the first
      return;
    }

    this.request.onreadystatechange = null;
    this.request.abort();
    this.request = null;
  } // request the new playlist


  if (this.media_) {
    this.trigger('mediachanging');
  }

  this.request = this.vhs_.xhr({
    uri: playlist.resolvedUri,
    withCredentials: this.withCredentials
  }, function (error, req) {
    // disposed
    if (!_this4.request) {
      return;
    }

    playlist.lastRequest = Date.now();
    // follow any redirect so later relative-URI resolution uses the final URL
    playlist.resolvedUri = resolveManifestRedirect(_this4.handleManifestRedirects, playlist.resolvedUri, req);

    if (error) {
      return _this4.playlistRequestError(_this4.request, playlist, startingState);
    }

    _this4.haveMetadata({
      playlistString: req.responseText,
      url: playlist.uri,
      id: playlist.id
    }); // fire loadedmetadata the first time a media playlist is loaded


    if (startingState === 'HAVE_MASTER') {
      _this4.trigger('loadedmetadata');
    } else {
      _this4.trigger('mediachange');
    }
  });
}
2165 /**
2166 * pause loading of the playlist
2167 */
2168 ;
2169
/**
 * Pause loading of the playlist: cancel the refresh timer and any in-flight
 * request, then normalize the loader state so a later load() resumes
 * cleanly.
 */
_proto.pause = function pause() {
  if (this.mediaUpdateTimeout) {
    window__default["default"].clearTimeout(this.mediaUpdateTimeout);
    this.mediaUpdateTimeout = null;
  }

  this.stopRequest();

  if (this.state === 'HAVE_NOTHING') {
    // If we pause the loader before any data has been retrieved, its as if we never
    // started, so reset to an unstarted state.
    this.started = false;
  } // Need to restore state now that no activity is happening


  if (this.state === 'SWITCHING_MEDIA') {
    // if the loader was in the process of switching media, it should either return to
    // HAVE_MASTER or HAVE_METADATA depending on if the loader has loaded a media
    // playlist yet. This is determined by the existence of loader.media_
    if (this.media_) {
      this.state = 'HAVE_METADATA';
    } else {
      this.state = 'HAVE_MASTER';
    }
  } else if (this.state === 'HAVE_CURRENT_METADATA') {
    this.state = 'HAVE_METADATA';
  }
}
2198 /**
2199 * start loading of the playlist
2200 */
2201 ;
2202
/**
 * Start or resume loading of the playlist, optionally after a delay of half
 * the target duration. Live playlists re-enter the refresh cycle via the
 * `mediaupdatetimeout` event; completed (endList) playlists just re-announce
 * `loadedplaylist`.
 *
 * @param {boolean} [shouldDelay] whether to delay the (re)load
 */
_proto.load = function load(shouldDelay) {
  var _this5 = this;

  if (this.mediaUpdateTimeout) {
    window__default["default"].clearTimeout(this.mediaUpdateTimeout);
    this.mediaUpdateTimeout = null;
  }

  var media = this.media();

  if (shouldDelay) {
    // half the (part) target duration, or 5 seconds when no media is loaded
    var delay = media ? (media.partTargetDuration || media.targetDuration) / 2 * 1000 : 5 * 1000;
    this.mediaUpdateTimeout = window__default["default"].setTimeout(function () {
      _this5.mediaUpdateTimeout = null;

      _this5.load();
    }, delay);
    return;
  }

  if (!this.started) {
    this.start();
    return;
  }

  if (media && !media.endList) {
    this.trigger('mediaupdatetimeout');
  } else {
    this.trigger('loadedplaylist');
  }
};
2234
/**
 * (Re)arm the recurring refresh timer used for live playlists. Clears any
 * existing timer first; does nothing when there is no media playlist or the
 * playlist is complete (endList).
 *
 * @param {number} delay milliseconds between refreshes
 */
_proto.updateMediaUpdateTimeout_ = function updateMediaUpdateTimeout_(delay) {
  var _this6 = this;

  if (this.mediaUpdateTimeout) {
    window__default["default"].clearTimeout(this.mediaUpdateTimeout);
    this.mediaUpdateTimeout = null;
  } // we only have use mediaupdatetimeout for live playlists.


  if (!this.media() || this.media().endList) {
    return;
  }

  this.mediaUpdateTimeout = window__default["default"].setTimeout(function () {
    _this6.mediaUpdateTimeout = null;

    _this6.trigger('mediaupdatetimeout');

    // keep the refresh cycle going with the same delay
    _this6.updateMediaUpdateTimeout_(delay);
  }, delay);
}
2256 /**
2257 * start loading of the playlist
2258 */
2259 ;
2260
/**
 * Start loading of the playlist. For a pre-parsed manifest object source,
 * defaults/normalizes its uri and (asynchronously, to mirror the usual
 * request-based flow) hands it to setupInitialPlaylist. For a URL source,
 * requests and parses the manifest first.
 */
_proto.start = function start() {
  var _this7 = this;

  this.started = true;

  if (typeof this.src === 'object') {
    // in the case of an entirely constructed manifest object (meaning there's no actual
    // manifest on a server), default the uri to the page's href
    if (!this.src.uri) {
      this.src.uri = window__default["default"].location.href;
    } // resolvedUri is added on internally after the initial request. Since there's no
    // request for pre-resolved manifests, add on resolvedUri here.


    this.src.resolvedUri = this.src.uri; // Since a manifest object was passed in as the source (instead of a URL), the first
    // request can be skipped (since the top level of the manifest, at a minimum, is
    // already available as a parsed manifest object). However, if the manifest object
    // represents a master playlist, some media playlists may need to be resolved before
    // the starting segment list is available. Therefore, go directly to setup of the
    // initial playlist, and let the normal flow continue from there.
    //
    // Note that the call to setup is asynchronous, as other sections of VHS may assume
    // that the first request is asynchronous.

    setTimeout(function () {
      _this7.setupInitialPlaylist(_this7.src);
    }, 0);
    return;
  } // request the specified URL


  this.request = this.vhs_.xhr({
    uri: this.src,
    withCredentials: this.withCredentials
  }, function (error, req) {
    // disposed
    if (!_this7.request) {
      return;
    } // clear the loader's request reference


    _this7.request = null;

    if (error) {
      _this7.error = {
        status: req.status,
        message: "HLS playlist request error at URL: " + _this7.src + ".",
        responseText: req.responseText,
        // MEDIA_ERR_NETWORK
        code: 2
      };

      // an error before any data was retrieved means we never really started
      if (_this7.state === 'HAVE_NOTHING') {
        _this7.started = false;
      }

      return _this7.trigger('error');
    }

    // follow any redirect so later relative-URI resolution uses the final URL
    _this7.src = resolveManifestRedirect(_this7.handleManifestRedirects, _this7.src, req);

    var manifest = _this7.parseManifest_({
      manifestString: req.responseText,
      url: _this7.src
    });

    _this7.setupInitialPlaylist(manifest);
  });
};
2330
/**
 * The URI this loader was created with: `src` itself when it is a string,
 * otherwise the `uri` property of the manifest object passed as `src`.
 *
 * @return {string} the source URI
 */
_proto.srcUri = function srcUri() {
  if (typeof this.src === 'string') {
    return this.src;
  }

  return this.src.uri;
}
2334 /**
2335 * Given a manifest object that's either a master or media playlist, trigger the proper
2336 * events and set the state of the playlist loader.
2337 *
2338 * If the manifest object represents a master playlist, `loadedplaylist` will be
2339 * triggered to allow listeners to select a playlist. If none is selected, the loader
2340 * will default to the first one in the playlists array.
2341 *
2342 * If the manifest object represents a media playlist, `loadedplaylist` will be
2343 * triggered followed by `loadedmetadata`, as the only available playlist is loaded.
2344 *
2345 * In the case of a media playlist, a master playlist object wrapper with one playlist
2346 * will be created so that all logic can handle playlists in the same fashion (as an
2347 * assumed manifest object schema).
2348 *
2349 * @param {Object} manifest
2350 * The parsed manifest object
2351 */
2352 ;
2353
/**
 * Given a manifest object that's either a master or media playlist, trigger
 * the proper events and set the loader state. A master manifest fires
 * `loadedplaylist` and defaults to its first playlist; a lone media playlist
 * is wrapped in a synthetic master so downstream logic handles both shapes
 * uniformly, then fires `loadedplaylist` followed by `loadedmetadata`.
 *
 * @param {Object} manifest The parsed manifest object
 */
_proto.setupInitialPlaylist = function setupInitialPlaylist(manifest) {
  this.state = 'HAVE_MASTER';

  // a `playlists` array marks this as a master manifest
  if (manifest.playlists) {
    this.master = manifest;
    addPropertiesToMaster(this.master, this.srcUri()); // If the initial master playlist has playlists wtih segments already resolved,
    // then resolve URIs in advance, as they are usually done after a playlist request,
    // which may not happen if the playlist is resolved.

    manifest.playlists.forEach(function (playlist) {
      playlist.segments = getAllSegments(playlist);
      playlist.segments.forEach(function (segment) {
        resolveSegmentUris(segment, playlist.resolvedUri);
      });
    });
    this.trigger('loadedplaylist');

    if (!this.request) {
      // no media playlist was specifically selected so start
      // from the first listed one
      this.media(this.master.playlists[0]);
    }

    return;
  } // In order to support media playlists passed in as vhs-json, the case where the uri
  // is not provided as part of the manifest should be considered, and an appropriate
  // default used.


  var uri = this.srcUri() || window__default["default"].location.href;
  // wrap the lone media playlist in a master playlist shell
  this.master = masterForMedia(manifest, uri);
  this.haveMetadata({
    playlistObject: manifest,
    url: uri,
    id: this.master.playlists[0].id
  });
  this.trigger('loadedmetadata');
};
2392
2393 return PlaylistLoader;
2394}(EventTarget$1);
2395
2396/**
2397 * @file xhr.js
2398 */
// Cache video.js's stock xhr implementation and its option-merging helper;
// the xhr factory below falls back to these.
var videojsXHR = videojs__default["default"].xhr,
    mergeOptions$1 = videojs__default["default"].mergeOptions;
2401
/**
 * Normalize the result of a videojs.xhr request before handing it to the
 * caller's callback.
 *
 * On success, stamps timing/bandwidth stats onto the request object. Copies
 * any response headers onto the request, translates videojs.xhr's
 * 'ETIMEDOUT' error code into the legacy `request.timedout` flag, and
 * synthesizes an Error for status codes that videojs.xhr no longer treats
 * as errors itself (only 200, 206 for byterange responses, and 0 for file
 * uris are accepted).
 *
 * @param {Object} request the xhr request object (mutated with stats)
 * @param {Error|null} error error reported by videojs.xhr, if any
 * @param {Object} response the videojs.xhr response object
 * @param {Function} callback called with (error, request)
 */
var callbackWrapper = function callbackWrapper(request, error, response, callback) {
  var isArrayBuffer = request.responseType === 'arraybuffer';
  var reqResponse = isArrayBuffer ? request.response : request.responseText;

  if (!error && reqResponse) {
    request.responseTime = Date.now();
    request.roundTripTime = request.responseTime - request.requestTime;
    request.bytesReceived = reqResponse.byteLength || reqResponse.length;

    if (!request.bandwidth) {
      // bits per second, derived from bytes received over the round trip
      request.bandwidth = Math.floor(request.bytesReceived / request.roundTripTime * 8 * 1000);
    }
  }

  if (response.headers) {
    request.responseHeaders = response.headers;
  }

  // videojs.xhr now uses a specific code on the error object to signal that
  // a request has timed out instead of setting a boolean on the request
  if (error && error.code === 'ETIMEDOUT') {
    request.timedout = true;
  }

  // videojs.xhr no longer considers status codes outside of 200 and 0 (for
  // file uris) to be errors, but the old XHR did, so emulate that behavior.
  // Status 206 may be used in response to byterange requests.
  var okStatus = response.statusCode === 200 || response.statusCode === 206 || response.statusCode === 0;

  if (!error && !request.aborted && !okStatus) {
    error = new Error('XHR Failed with a response of: ' + (request && (reqResponse || request.responseText)));
  }

  callback(error, request);
};
2435
/**
 * Build the xhr function used throughout VHS. The returned function applies
 * a default 45-second timeout, runs an optional `beforeRequest` hook that
 * may replace the options, dispatches through either video.js's stock xhr
 * or a user-supplied override on `videojs.Vhs.xhr`, and tracks abort state
 * and request timing on the returned request object.
 *
 * @return {Function} the xhr function (tagged with `original = true`)
 */
var xhrFactory = function xhrFactory() {
  var xhr = function XhrFunction(options, callback) {
    // Add a default timeout
    options = mergeOptions$1({
      timeout: 45e3
    }, options); // Allow an optional user-specified function to modify the option
    // object before we construct the xhr request

    var beforeRequest = XhrFunction.beforeRequest || videojs__default["default"].Vhs.xhr.beforeRequest;

    if (beforeRequest && typeof beforeRequest === 'function') {
      var newOptions = beforeRequest(options);

      if (newOptions) {
        options = newOptions;
      }
    } // Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overriden
    // TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11


    var xhrMethod = videojs__default["default"].Vhs.xhr.original === true ? videojsXHR : videojs__default["default"].Vhs.xhr;
    var request = xhrMethod(options, function (error, response) {
      return callbackWrapper(request, error, response, callback);
    });
    var originalAbort = request.abort;

    // wrap abort so downstream code can tell deliberate aborts
    // (request.aborted) apart from real failures
    request.abort = function () {
      request.aborted = true;
      return originalAbort.apply(request, arguments);
    };

    request.uri = options.uri;
    request.requestTime = Date.now();
    return request;
  };

  xhr.original = true;
  return xhr;
};
2475/**
2476 * Turns segment byterange into a string suitable for use in
2477 * HTTP Range requests
2478 *
2479 * @param {Object} byterange - an object with two values defining the start and end
2480 * of a byte-range
2481 */
2482
2483
/**
 * Turns a segment byterange into a string suitable for use in an HTTP Range
 * request header.
 *
 * @param {Object} byterange object with `offset` and `length` properties
 * @return {string} e.g. "bytes=0-1023"
 */
var byterangeStr = function byterangeStr(byterange) {
  var start = byterange.offset;
  // the end of an HTTP Range header is inclusive, so subtract one
  var end = start + byterange.length - 1;
  return 'bytes=' + start + '-' + end;
};
2491/**
2492 * Defines headers for use in the xhr request for a particular segment.
2493 *
2494 * @param {Object} segment - a simplified copy of the segmentInfo object
2495 * from SegmentLoader
2496 */
2497
2498
/**
 * Defines headers for the xhr request of a particular segment: a Range
 * header when the segment has a byterange, otherwise no headers at all.
 *
 * @param {Object} segment simplified segmentInfo object from SegmentLoader
 * @return {Object} header map for the request
 */
var segmentXhrHeaders = function segmentXhrHeaders(segment) {
  if (!segment.byterange) {
    return {};
  }

  return {
    Range: byterangeStr(segment.byterange)
  };
};
2508
2509/**
2510 * @file bin-utils.js
2511 */
2512
2513/**
2514 * convert a TimeRange to text
2515 *
2516 * @param {TimeRange} range the timerange to use for conversion
2517 * @param {number} i the iterator on the range to convert
2518 * @return {string} the range in string format
2519 */
/**
 * convert one entry of a TimeRanges-like object to "start-end" text
 *
 * @param {TimeRange} range the timerange to use for conversion
 * @param {number} i the iterator on the range to convert
 * @return {string} the range in string format
 */
var textRange = function textRange(range, i) {
  var startValue = range.start(i);
  var endValue = range.end(i);
  return startValue + '-' + endValue;
};
2523/**
2524 * format a number as hex string
2525 *
2526 * @param {number} e The number
2527 * @param {number} i the iterator
2528 * @return {string} the hex formatted number as a string
2529 */
2530
2531
/**
 * Format a byte as a two-digit, zero-padded hex string; a trailing space is
 * appended after every odd-indexed byte so hex output groups into pairs.
 *
 * @param {number} e The number
 * @param {number} i the iterator
 * @return {string} the hex formatted number as a string
 */
var formatHexString = function formatHexString(e, i) {
  var hex = e.toString(16);

  if (hex.length < 2) {
    hex = '0' + hex;
  }

  return i % 2 ? hex + ' ' : hex;
};
2536
/**
 * Map a byte to its printable ASCII character, or '.' for anything outside
 * the printable range (0x20-0x7d inclusive).
 *
 * @param {number} e the byte value
 * @return {string} a single-character string
 */
var formatAsciiString = function formatAsciiString(e) {
  var isPrintable = e >= 0x20 && e < 0x7e;
  return isPrintable ? String.fromCharCode(e) : '.';
};
2544/**
2545 * Creates an object for sending to a web worker modifying properties that are TypedArrays
2546 * into a new object with seperated properties for the buffer, byteOffset, and byteLength.
2547 *
2548 * @param {Object} message
2549 * Object of properties and values to send to the web worker
2550 * @return {Object}
2551 * Modified message with TypedArray values expanded
2552 * @function createTransferableMessage
2553 */
2554
2555
/**
 * Creates an object for sending to a web worker, replacing any TypedArray
 * property values with plain objects holding the underlying buffer plus its
 * byteOffset/byteLength (so the buffer can be listed as transferable).
 *
 * @param {Object} message
 *        Object of properties and values to send to the web worker
 * @return {Object}
 *         Modified message with TypedArray values expanded
 * @function createTransferableMessage
 */
var createTransferableMessage = function createTransferableMessage(message) {
  return Object.keys(message).reduce(function (transferable, key) {
    var value = message[key];

    transferable[key] = ArrayBuffer.isView(value) ? {
      bytes: value.buffer,
      byteOffset: value.byteOffset,
      byteLength: value.byteLength
    } : value;

    return transferable;
  }, {});
};
2573/**
2574 * Returns a unique string identifier for a media initialization
2575 * segment.
2576 *
2577 * @param {Object} initSegment
2578 * the init segment object.
2579 *
2580 * @return {string} the generated init segment id
2581 */
2582
/**
 * Returns a unique string identifier for a media initialization segment,
 * built from its byterange (defaulting to the whole resource) and URI.
 *
 * @param {Object} initSegment
 *        the init segment object.
 *
 * @return {string} the generated init segment id
 */
var initSegmentId = function initSegmentId(initSegment) {
  var byterange = initSegment.byterange;

  if (!byterange) {
    // no byterange means the whole resource
    byterange = {
      length: Infinity,
      offset: 0
    };
  }

  return [byterange.length, byterange.offset, initSegment.resolvedUri].join(',');
};
2590/**
2591 * Returns a unique string identifier for a media segment key.
2592 *
2593 * @param {Object} key the encryption key
2594 * @return {string} the unique id for the media segment key.
2595 */
2596
/**
 * Returns a unique string identifier for a media segment key.
 *
 * @param {Object} key the encryption key
 * @return {string} the unique id for the media segment key.
 */
var segmentKeyId = function segmentKeyId(key) {
  var resolvedUri = key.resolvedUri;
  return resolvedUri;
};
2600/**
2601 * utils to help dump binary data to the console
2602 *
2603 * @param {Array|TypedArray} data
2604 * data to dump to a string
2605 *
2606 * @return {string} the data as a hex string.
2607 */
2608
/**
 * utils to help dump binary data to the console: renders 16 bytes per line
 * as zero-padded hex pairs followed by their printable-ASCII rendering.
 *
 * @param {Array|TypedArray} data
 *        data to dump to a string
 *
 * @return {string} the data as a hex string.
 */
var hexDump = function hexDump(data) {
  var STEP = 16;
  var bytes = Array.prototype.slice.call(data);
  var lines = [];

  for (var offset = 0; offset < bytes.length; offset += STEP) {
    var row = bytes.slice(offset, offset + STEP);
    var hex = row.map(formatHexString).join('');
    var ascii = row.map(formatAsciiString).join('');
    lines.push(hex + ' ' + ascii + '\n');
  }

  return lines.join('');
};
/**
 * Dump the byte payload of a parsed tag as a hex string.
 *
 * @param {Object} _ref tag object with a `bytes` property
 * @return {string} the tag bytes as a hex dump
 */
var tagDump = function tagDump(_ref) {
  return hexDump(_ref.bytes);
};
/**
 * Render every entry of a TimeRanges-like object as "start-end " text.
 *
 * @param {TimeRanges} ranges the ranges to render
 * @return {string} space-terminated list of range strings
 */
var textRanges = function textRanges(ranges) {
  var result = '';

  for (var idx = 0; idx < ranges.length; idx++) {
    result += textRange(ranges, idx) + ' ';
  }

  return result;
};
2638
// Frozen namespace re-exporting the binary/debug helpers above (mirrors an
// ES module namespace object produced by the bundler).
var utils = /*#__PURE__*/Object.freeze({
  __proto__: null,
  createTransferableMessage: createTransferableMessage,
  initSegmentId: initSegmentId,
  segmentKeyId: segmentKeyId,
  hexDump: hexDump,
  tagDump: tagDump,
  textRanges: textRanges
});
2648
// TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
// 25% was arbitrarily chosen, and may need to be refined over time.
// Fraction of a segment's duration added as slack when estimating where the
// last segment (and therefore the stream) ends.
var SEGMENT_END_FUDGE_PERCENT = 0.25;
2653/**
2654 * Converts a player time (any time that can be gotten/set from player.currentTime(),
2655 * e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
2656 * program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
2657 *
2658 * The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
2659 * point" (a point where we have a mapping from program time to player time, with player
2660 * time being the post transmux start of the segment).
2661 *
2662 * For more details, see [this doc](../../docs/program-time-from-player-time.md).
2663 *
2664 * @param {number} playerTime the player time
2665 * @param {Object} segment the segment which contains the player time
2666 * @return {Date} program time
2667 */
2668
/**
 * Converts a player time (any time settable via player.currentTime()) to a
 * program time (real-world time, e.g., EXT-X-PROGRAM-DATE-TIME).
 *
 * The segment's EXT-X-PROGRAM-DATE-TIME serves as the "anchor point" mapping
 * program time to player time, with player time being the post-transmux
 * start of the segment. For more details, see
 * [this doc](../../docs/program-time-from-player-time.md).
 *
 * @param {number} playerTime the player time
 * @param {Object} segment the segment which contains the player time
 * @return {Date|null} program time, or null when the segment has no
 *         dateTimeObject anchor
 */
var playerTimeToProgramTime = function playerTimeToProgramTime(playerTime, segment) {
  if (!segment.dateTimeObject) {
    // Can't convert without an "anchor point" for the program time (i.e., a
    // time that can be used to map the start of a segment with a real world
    // time).
    return null;
  }

  var timingInfo = segment.videoTimingInfo;
  // the start of this segment's own content, i.e., after any old content the
  // transmuxer prepended
  var startOfSegment = timingInfo.transmuxedPresentationStart + timingInfo.transmuxerPrependedSeconds;
  var offsetFromSegmentStart = playerTime - startOfSegment;
  return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
};
/**
 * Duration of the segment's own video content, excluding any content the
 * transmuxer prepended from a previous segment.
 *
 * @param {Object} videoTimingInfo post-transmux timing info for a segment
 * @return {number} duration in seconds
 */
var originalSegmentVideoDuration = function originalSegmentVideoDuration(videoTimingInfo) {
  var transmuxedDuration = videoTimingInfo.transmuxedPresentationEnd - videoTimingInfo.transmuxedPresentationStart;
  return transmuxedDuration - videoTimingInfo.transmuxerPrependedSeconds;
};
2686/**
2687 * Finds a segment that contains the time requested given as an ISO-8601 string. The
2688 * returned segment might be an estimate or an accurate match.
2689 *
2690 * @param {string} programTime The ISO-8601 programTime to find a match for
2691 * @param {Object} playlist A playlist object to search within
2692 */
2693
/**
 * Finds a segment that contains the time requested given as an ISO-8601
 * string. The returned segment might be an estimate or an accurate match.
 *
 * @param {string} programTime The ISO-8601 programTime to find a match for
 * @param {Object} playlist A playlist object to search within
 * @return {Object|null} { segment, estimatedStart, type } or null when the
 *         requested time falls before the stream start or after its
 *         (estimated) end
 */
var findSegmentForProgramTime = function findSegmentForProgramTime(programTime, playlist) {
  // Assumptions:
  // - verifyProgramDateTimeTags has already been run
  // - live streams have been started
  var dateTimeObject;

  try {
    // NOTE(review): new Date(...) yields an Invalid Date rather than
    // throwing on unparseable strings, so this catch likely never fires —
    // confirm whether invalid programTime input needs explicit handling
    dateTimeObject = new Date(programTime);
  } catch (e) {
    return null;
  }

  if (!playlist || !playlist.segments || playlist.segments.length === 0) {
    return null;
  }

  var segment = playlist.segments[0];

  if (dateTimeObject < segment.dateTimeObject) {
    // Requested time is before stream start.
    return null;
  }

  // walk forward until the next segment starts after the requested time
  for (var i = 0; i < playlist.segments.length - 1; i++) {
    segment = playlist.segments[i];
    var nextSegmentStart = playlist.segments[i + 1].dateTimeObject;

    if (dateTimeObject < nextSegmentStart) {
      break;
    }
  }

  var lastSegment = playlist.segments[playlist.segments.length - 1];
  var lastSegmentStart = lastSegment.dateTimeObject;
  // prefer the accurate post-transmux duration; otherwise pad the manifest
  // duration by SEGMENT_END_FUDGE_PERCENT to approximate the stream end
  var lastSegmentDuration = lastSegment.videoTimingInfo ? originalSegmentVideoDuration(lastSegment.videoTimingInfo) : lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
  var lastSegmentEnd = new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);

  if (dateTimeObject > lastSegmentEnd) {
    // Beyond the end of the stream, or our best guess of the end of the stream.
    return null;
  }

  if (dateTimeObject > lastSegmentStart) {
    segment = lastSegment;
  }

  return {
    segment: segment,
    // fall back to the manifest-derived duration sum (Playlist.duration)
    // when the segment hasn't been transmuxed yet
    estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : Playlist.duration(playlist, playlist.mediaSequence + playlist.segments.indexOf(segment)),
    // Although, given that all segments have accurate date time objects, the segment
    // selected should be accurate, unless the video has been transmuxed at some point
    // (determined by the presence of the videoTimingInfo object), the segment's "player
    // time" (the start time in the player) can't be considered accurate.
    type: segment.videoTimingInfo ? 'accurate' : 'estimate'
  };
};
2750/**
2751 * Finds a segment that contains the given player time(in seconds).
2752 *
2753 * @param {number} time The player time to find a match for
2754 * @param {Object} playlist A playlist object to search within
2755 */
2756
/**
 * Finds a segment that contains the given player time (in seconds).
 *
 * Walks the playlist accumulating each segment's end time — using the
 * accurate post-transmux end when videoTimingInfo is available, otherwise
 * adding the manifest duration — and stops at the first segment whose end
 * is at or beyond the requested time. Times slightly past the final
 * segment's estimated end (within SEGMENT_END_FUDGE_PERCENT of its
 * duration) are attributed to that final segment; anything further yields
 * null.
 *
 * @param {number} time The player time to find a match for
 * @param {Object} playlist A playlist object to search within
 * @return {Object|null} { segment, estimatedStart, type } or null
 */
var findSegmentForPlayerTime = function findSegmentForPlayerTime(time, playlist) {
  // Assumptions:
  // - there will always be a segment.duration
  // - we can start from zero
  // - segments are in time order
  if (!playlist || !playlist.segments || playlist.segments.length === 0) {
    return null;
  }

  var segments = playlist.segments;
  var segmentEnd = 0;
  var segment;

  for (var idx = 0; idx < segments.length; idx++) {
    segment = segments[idx];

    // videoTimingInfo is set after the segment is downloaded and transmuxed
    // and holds the most accurate player times we have for the segment; fall
    // back to accumulating the (inaccurate) manifest durations otherwise.
    if (segment.videoTimingInfo) {
      segmentEnd = segment.videoTimingInfo.transmuxedPresentationEnd;
    } else {
      segmentEnd += segment.duration;
    }

    if (time <= segmentEnd) {
      break;
    }
  }

  var lastSegment = segments[segments.length - 1];

  if (lastSegment.videoTimingInfo && lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
    // The time requested is beyond the stream end.
    return null;
  }

  if (time > segmentEnd) {
    // The time is within or beyond the last segment; reject it when it is
    // beyond a reasonable guess of the end of the stream (the duration value
    // is only an estimate, so a little slack is allowed).
    if (time > segmentEnd + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT) {
      return null;
    }

    segment = lastSegment;
  }

  return {
    segment: segment,
    estimatedStart: segment.videoTimingInfo ? segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
    // Because videoTimingInfo is only set after transmux, it is the only way
    // to get accurate timing values.
    type: segment.videoTimingInfo ? 'accurate' : 'estimate'
  };
};
2813/**
2814 * Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
2815 * If the offset returned is positive, the programTime occurs after the
2816 * comparisonTimestamp.
2817 * If the offset is negative, the programTime occurs before the comparisonTimestamp.
2818 *
2819 * @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
2820 * @param {string} programTime The programTime as an ISO-8601 string
2821 * @return {number} offset
2822 */
2823
/**
 * Gives the offset of the comparisonTimestamp from the programTime timestamp
 * in seconds: positive when the programTime occurs after the
 * comparisonTimestamp, negative when it occurs before.
 *
 * @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
 * @param {string} programTime The programTime as an ISO-8601 string
 * @return {number} offset
 */
var getOffsetFromTimestamp = function getOffsetFromTimestamp(comparisonTimeStamp, programTime) {
  var segmentDateTime;
  var programDateTime;

  try {
    segmentDateTime = new Date(comparisonTimeStamp);
    programDateTime = new Date(programTime);
  } catch (e) {
    // TODO handle error
    // NOTE(review): new Date(...) returns an Invalid Date rather than
    // throwing, so unparseable input yields NaN below — confirm intent
  }

  var deltaMs = programDateTime.getTime() - segmentDateTime.getTime();
  return deltaMs / 1000;
};
2838/**
2839 * Checks that all segments in this playlist have programDateTime tags.
2840 *
2841 * @param {Object} playlist A playlist object
2842 */
2843
/**
 * Checks that all segments in this playlist have programDateTime tags
 * (i.e., a dateTimeObject on every segment).
 *
 * @param {Object} playlist A playlist object
 * @return {boolean} true when every segment carries a dateTimeObject
 */
var verifyProgramDateTimeTags = function verifyProgramDateTimeTags(playlist) {
  if (!playlist.segments || playlist.segments.length === 0) {
    return false;
  }

  return playlist.segments.every(function (segment) {
    return Boolean(segment.dateTimeObject);
  });
};
2859/**
2860 * Returns the programTime of the media given a playlist and a playerTime.
2861 * The playlist must have programDateTime tags for a programDateTime tag to be returned.
2862 * If the segments containing the time requested have not been buffered yet, an estimate
2863 * may be returned to the callback.
2864 *
2865 * @param {Object} args
2866 * @param {Object} args.playlist A playlist object to search within
2867 * @param {number} time A playerTime in seconds
2868 * @param {Function} callback(err, programTime)
2869 * @return {string} err.message A detailed error message
2870 * @return {Object} programTime
2871 * @return {number} programTime.mediaSeconds The streamTime in seconds
2872 * @return {string} programTime.programDateTime The programTime as an ISO-8601 String
2873 */
2874
/**
 * Returns the programTime of the media given a playlist and a playerTime.
 * The playlist must have programDateTime tags for a programDateTime tag to
 * be returned. If the segment containing the requested time has not been
 * transmuxed yet, an error object with a suggested seekTime is passed to
 * the callback instead.
 *
 * @param {Object} _ref
 * @param {Object} _ref.playlist A playlist object to search within
 * @param {number} _ref.time A playerTime in seconds
 * @param {Function} _ref.callback callback(err, programTime)
 * @throws {Error} when no callback is provided
 */
var getProgramTime = function getProgramTime(_ref) {
  var playlist = _ref.playlist,
      _ref$time = _ref.time,
      time = _ref$time === void 0 ? undefined : _ref$time,
      callback = _ref.callback;

  if (!callback) {
    throw new Error('getProgramTime: callback must be provided');
  }

  if (!playlist || time === undefined) {
    return callback({
      message: 'getProgramTime: playlist and time must be provided'
    });
  }

  var matchedSegment = findSegmentForPlayerTime(time, playlist);

  if (!matchedSegment) {
    return callback({
      message: 'valid programTime was not found'
    });
  }

  if (matchedSegment.type === 'estimate') {
    // the segment hasn't been transmuxed yet, so only an estimated start is
    // known; ask the caller to seek there first and retry
    return callback({
      message: 'Accurate programTime could not be determined.' + ' Please seek to e.seekTime and try again',
      seekTime: matchedSegment.estimatedStart
    });
  }

  var programTimeObject = {
    mediaSeconds: time
  };
  var programTime = playerTimeToProgramTime(time, matchedSegment.segment);

  if (programTime) {
    programTimeObject.programDateTime = programTime.toISOString();
  }

  return callback(null, programTimeObject);
};
2917/**
2918 * Seeks in the player to a time that matches the given programTime ISO-8601 string.
2919 *
2920 * @param {Object} args
2921 * @param {string} args.programTime A programTime to seek to as an ISO-8601 String
2922 * @param {Object} args.playlist A playlist to look within
2923 * @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
2924 * @param {Function} args.seekTo A method to perform a seek
2925 * @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
2926 * @param {Object} args.tech The tech to seek on
2927 * @param {Function} args.callback(err, newTime) A callback to return the new time to
2928 * @return {string} err.message A detailed error message
2929 * @return {number} newTime The exact time that was seeked to in seconds
2930 */
2931
var seekToProgramTime = function seekToProgramTime(_ref2) {
  var programTime = _ref2.programTime,
      playlist = _ref2.playlist,
      _ref2$retryCount = _ref2.retryCount,
      retryCount = _ref2$retryCount === void 0 ? 2 : _ref2$retryCount,
      seekTo = _ref2.seekTo,
      _ref2$pauseAfterSeek = _ref2.pauseAfterSeek,
      pauseAfterSeek = _ref2$pauseAfterSeek === void 0 ? true : _ref2$pauseAfterSeek,
      tech = _ref2.tech,
      callback = _ref2.callback;

  // the callback is the only channel for results/errors, so it is mandatory
  if (!callback) {
    throw new Error('seekToProgramTime: callback must be provided');
  }

  if (typeof programTime === 'undefined' || !playlist || !seekTo) {
    return callback({
      message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
    });
  }

  // live streams only have usable timing info once playback has started
  if (!playlist.endList && !tech.hasStarted_) {
    return callback({
      message: 'player must be playing a live stream to start buffering'
    });
  }

  // mapping programTime -> playerTime requires every segment to carry a
  // programDateTime tag
  if (!verifyProgramDateTimeTags(playlist)) {
    return callback({
      message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
    });
  }

  var matchedSegment = findSegmentForProgramTime(programTime, playlist); // no match

  if (!matchedSegment) {
    return callback({
      message: programTime + " was not found in the stream"
    });
  }

  var segment = matchedSegment.segment;
  // seconds between the segment's programDateTime and the requested programTime
  var mediaOffset = getOffsetFromTimestamp(segment.dateTimeObject, programTime);

  if (matchedSegment.type === 'estimate') {
    // we've run out of retries
    if (retryCount === 0) {
      return callback({
        message: programTime + " is not buffered yet. Try again"
      });
    }

    // seek to the estimate; after the seek completes, segment timing should be
    // more accurate, so recurse with one fewer retry remaining
    seekTo(matchedSegment.estimatedStart + mediaOffset);
    tech.one('seeked', function () {
      seekToProgramTime({
        programTime: programTime,
        playlist: playlist,
        retryCount: retryCount - 1,
        seekTo: seekTo,
        pauseAfterSeek: pauseAfterSeek,
        tech: tech,
        callback: callback
      });
    });
    return;
  } // Since the segment.start value is determined from the buffered end or ending time
  // of the prior segment, the seekToTime doesn't need to account for any transmuxer
  // modifications.


  var seekToTime = segment.start + mediaOffset;

  // report the actual post-seek time back to the caller
  var seekedCallback = function seekedCallback() {
    return callback(null, tech.currentTime());
  }; // listen for seeked event


  tech.one('seeked', seekedCallback); // pause before seeking as video.js will restore this state

  if (pauseAfterSeek) {
    tech.pause();
  }

  seekTo(seekToTime);
};
3017
3018// which will only happen if the request is complete.
3019
var callbackOnCompleted = function callbackOnCompleted(request, cb) {
  // XMLHttpRequest.DONE === 4; anything earlier means more data may arrive
  if (request.readyState !== 4) {
    return;
  }

  return cb();
};
3027
// Requests `uri` as raw text and sniffs the accumulating bytes on each
// progress event to detect the container format ('ts', 'mp4', ...). As soon
// as a confident detection (or an error) occurs, the request is aborted and
// `cb(err, request, type, bytes)` is invoked exactly once.
var containerRequest = function containerRequest(uri, xhr, cb) {
  var bytes = [];
  var id3Offset;
  var finished = false;

  // aborts the in-flight request, marks the sniff as done, and reports result
  var endRequestAndCallback = function endRequestAndCallback(err, req, type, _bytes) {
    req.abort();
    finished = true;
    return cb(err, req, type, _bytes);
  };

  var progressListener = function progressListener(error, request) {
    // ignore late events after we've already called back
    if (finished) {
      return;
    }

    if (error) {
      return endRequestAndCallback(error, request, '', bytes);
    } // grap the new part of content that was just downloaded


    var newPart = request.responseText.substring(bytes && bytes.byteLength || 0, request.responseText.length); // add that onto bytes

    bytes = byteHelpers.concatTypedArrays(bytes, byteHelpers.stringToBytes(newPart, true));
    id3Offset = id3Offset || id3Helpers.getId3Offset(bytes); // we need at least 10 bytes to determine a type
    // or we need at least two bytes after an id3Offset

    if (bytes.length < 10 || id3Offset && bytes.length < id3Offset + 2) {
      // not enough data yet; only call back now if the request already ended
      return callbackOnCompleted(request, function () {
        return endRequestAndCallback(error, request, '', bytes);
      });
    }

    var type = containers.detectContainerForBytes(bytes); // if this looks like a ts segment but we don't have enough data
    // to see the second sync byte, wait until we have enough data
    // before declaring it ts

    if (type === 'ts' && bytes.length < 188) {
      return callbackOnCompleted(request, function () {
        return endRequestAndCallback(error, request, '', bytes);
      });
    } // this may be an unsynced ts segment
    // wait for 376 bytes before detecting no container


    if (!type && bytes.length < 376) {
      return callbackOnCompleted(request, function () {
        return endRequestAndCallback(error, request, '', bytes);
      });
    }

    return endRequestAndCallback(null, request, type, bytes);
  };

  var options = {
    uri: uri,
    beforeSend: function beforeSend(request) {
      // this forces the browser to pass the bytes to us unprocessed
      request.overrideMimeType('text/plain; charset=x-user-defined');
      request.addEventListener('progress', function (_ref) {
        // transpiler residue of a ({total, loaded}) destructure; values unused
        _ref.total;
        _ref.loaded;
        return callbackWrapper(request, null, {
          statusCode: request.status
        }, progressListener);
      });
    }
  };
  var request = xhr(options, function (error, response) {
    return callbackWrapper(request, error, response, progressListener);
  });
  return request;
};
3101
// pull the pieces of video.js this section uses off the (interop'd) export
var EventTarget = videojs__default["default"].EventTarget,
    mergeOptions = videojs__default["default"].mergeOptions;
3104
/**
 * Determines whether two dash media playlists should be considered unchanged.
 *
 * @param {Object} a the previous media playlist
 * @param {Object} b the new media playlist
 * @return {boolean} true when nothing meaningful changed between `a` and `b`
 */
var dashPlaylistUnchanged = function dashPlaylistUnchanged(a, b) {
  if (!isPlaylistUnchanged(a, b)) {
    return false;
  } // for dash the above check will often return true in scenarios where
  // the playlist actually has changed because mediaSequence isn't a
  // dash thing, and we often set it to 1. So if the playlists have the same amount
  // of segments we return true.
  // So for dash we need to make sure that the underlying segments are different.
  // if sidx changed then the playlists are different.


  if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
    return false;
  } else if (!a.sidx && b.sidx || a.sidx && !b.sidx) {
    return false;
  } // one or the other does not have segments
  // there was a change.


  if (a.segments && !b.segments || !a.segments && b.segments) {
    return false;
  } // neither has segments nothing changed


  if (!a.segments && !b.segments) {
    return true;
  } // a differing segment count is always a change. This also guards the
  // index-matched loop below: without it, a longer `a` would read `.uri`
  // off an undefined `b` segment (TypeError), and a longer `b` would have
  // its extra segments silently ignored (falsely reporting "unchanged").


  if (a.segments.length !== b.segments.length) {
    return false;
  } // check segments themselves


  for (var i = 0; i < a.segments.length; i++) {
    var aSegment = a.segments[i];
    var bSegment = b.segments[i]; // if uris are different between segments there was a change

    if (aSegment.uri !== bSegment.uri) {
      return false;
    } // neither segment has a byterange, there will be no byterange change.


    if (!aSegment.byterange && !bSegment.byterange) {
      continue;
    }

    var aByterange = aSegment.byterange;
    var bByterange = bSegment.byterange; // if byterange only exists on one of the segments, there was a change.

    if (aByterange && !bByterange || !aByterange && bByterange) {
      return false;
    } // if both segments have byterange with different offsets, there was a change.


    if (aByterange.offset !== bByterange.offset || aByterange.length !== bByterange.length) {
      return false;
    }
  } // if everything was the same with segments, this is the same playlist.


  return true;
};
3163/**
3164 * Parses the master XML string and updates playlist URI references.
3165 *
3166 * @param {Object} config
3167 * Object of arguments
3168 * @param {string} config.masterXml
3169 * The mpd XML
3170 * @param {string} config.srcUrl
3171 * The mpd URL
3172 * @param {Date} config.clientOffset
3173 * A time difference between server and client
3174 * @param {Object} config.sidxMapping
3175 * SIDX mappings for moof/mdat URIs and byte ranges
3176 * @return {Object}
3177 * The parsed mpd manifest object
3178 */
3179
3180
var parseMasterXml = function parseMasterXml(_ref) {
  // parse the mpd XML into a manifest object, then decorate it with the
  // derived properties (ids, resolved URIs, attributes) the rest of VHS expects
  var master = mpdParser.parse(_ref.masterXml, {
    manifestUri: _ref.srcUrl,
    clientOffset: _ref.clientOffset,
    sidxMapping: _ref.sidxMapping
  });

  addPropertiesToMaster(master, _ref.srcUrl);

  return master;
};
3194/**
3195 * Returns a new master manifest that is the result of merging an updated master manifest
3196 * into the original version.
3197 *
3198 * @param {Object} oldMaster
3199 * The old parsed mpd object
3200 * @param {Object} newMaster
3201 * The updated parsed mpd object
3202 * @return {Object}
3203 * A new object representing the original master manifest with the updated media
3204 * playlists merged in
3205 */
3206
var updateMaster = function updateMaster(oldMaster, newMaster, sidxMapping) {
  // sidxMapping: (optional) previously fetched sidx entries keyed by sidx key,
  // used to re-attach sidx-derived segments onto updated playlists
  var noChanges = true;
  var update = mergeOptions(oldMaster, {
    // These are top level properties that can be updated
    duration: newMaster.duration,
    minimumUpdatePeriod: newMaster.minimumUpdatePeriod
  }); // First update the playlists in playlist list

  for (var i = 0; i < newMaster.playlists.length; i++) {
    var playlist = newMaster.playlists[i];

    if (playlist.sidx) {
      var sidxKey = mpdParser.generateSidxKey(playlist.sidx); // add sidx segments to the playlist if we have all the sidx info already

      if (sidxMapping && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx) {
        mpdParser.addSidxSegmentsToPlaylist(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
      }
    }

    var playlistUpdate = updateMaster$1(update, playlist, dashPlaylistUnchanged);

    if (playlistUpdate) {
      update = playlistUpdate;
      noChanges = false;
    }
  } // Then update media group playlists


  forEachMediaGroup(newMaster, function (properties, type, group, label) {
    if (properties.playlists && properties.playlists.length) {
      var id = properties.playlists[0].id;

      var _playlistUpdate = updateMaster$1(update, properties.playlists[0], dashPlaylistUnchanged);

      if (_playlistUpdate) {
        update = _playlistUpdate; // update the playlist reference within media groups

        update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
        noChanges = false;
      }
    }
  });

  // a changed minimumUpdatePeriod counts as a change even when no playlist changed
  if (newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
    noChanges = false;
  }

  // null tells the caller the existing master can be kept as-is
  if (noChanges) {
    return null;
  }

  return update;
};
3260// If the SIDXs have maps, the two maps should match,
3261// both `a` and `b` missing SIDXs is considered matching.
3262// If `a` or `b` but not both have a map, they aren't matching.
3263
var equivalentSidx = function equivalentSidx(a, b) {
  // the maps match when both are absent, or when both exist and describe
  // the identical byterange
  var mapsMatch;

  if (!a.map && !b.map) {
    mapsMatch = true;
  } else {
    mapsMatch = Boolean(a.map && b.map &&
      a.map.byterange.offset === b.map.byterange.offset &&
      a.map.byterange.length === b.map.byterange.length);
  }

  // beyond the map, the sidx URI and byterange must be identical
  return mapsMatch &&
    a.uri === b.uri &&
    a.byterange.offset === b.byterange.offset &&
    a.byterange.length === b.byterange.length;
};
3269
3270
var compareSidxEntry = function compareSidxEntry(playlists, oldSidxMapping) {
  var newSidxMapping = {};

  for (var id in playlists) {
    var currentSidxInfo = playlists[id].sidx;

    // playlists without sidx info don't participate in the comparison
    if (!currentSidxInfo) {
      continue;
    }

    var key = mpdParser.generateSidxKey(currentSidxInfo);

    // a key missing from the old mapping means the sidx set changed;
    // stop comparing entirely
    if (!oldSidxMapping[key]) {
      break;
    }

    var savedSidxInfo = oldSidxMapping[key].sidxInfo;

    // carry an old entry forward only while it still describes the same sidx
    if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
      newSidxMapping[key] = oldSidxMapping[key];
    }
  }

  return newSidxMapping;
};
3295/**
3296 * A function that filters out changed items as they need to be requested separately.
3297 *
3298 * The method is exported for testing
3299 *
3300 * @param {Object} master the parsed mpd XML returned via mpd-parser
3301 * @param {Object} oldSidxMapping the SIDX to compare against
3302 */
3303
var filterChangedSidxMappings = function filterChangedSidxMappings(master, oldSidxMapping) {
  // start from the main (video) playlists...
  var mediaGroupSidx = compareSidxEntry(master.playlists, oldSidxMapping);

  // ...then fold in the unchanged entries from every media-group playlist
  forEachMediaGroup(master, function (properties, mediaType, groupKey, labelKey) {
    if (properties.playlists && properties.playlists.length) {
      mediaGroupSidx = mergeOptions(mediaGroupSidx, compareSidxEntry(properties.playlists, oldSidxMapping));
    }
  });

  return mediaGroupSidx;
};
3315
var DashPlaylistLoader = /*#__PURE__*/function (_EventTarget) {
  _inheritsLoose__default["default"](DashPlaylistLoader, _EventTarget);

  // DashPlaylistLoader must accept either a src url or a playlist because subsequent
  // playlist loader setups from media groups will expect to be able to pass a playlist
  // (since there aren't external URLs to media playlists with DASH)
  function DashPlaylistLoader(srcUrlOrPlaylist, vhs, options, masterPlaylistLoader) {
    var _this;

    if (options === void 0) {
      options = {};
    }

    _this = _EventTarget.call(this) || this;

    // when no master loader is supplied, this loader IS the master loader
    _this.masterPlaylistLoader_ = masterPlaylistLoader || _assertThisInitialized__default["default"](_this);

    if (!masterPlaylistLoader) {
      _this.isMaster_ = true;
    }

    var _options = options,
        _options$withCredenti = _options.withCredentials,
        withCredentials = _options$withCredenti === void 0 ? false : _options$withCredenti,
        _options$handleManife = _options.handleManifestRedirects,
        handleManifestRedirects = _options$handleManife === void 0 ? false : _options$handleManife;
    _this.vhs_ = vhs;
    _this.withCredentials = withCredentials;
    _this.handleManifestRedirects = handleManifestRedirects;

    if (!srcUrlOrPlaylist) {
      throw new Error('A non-empty playlist URL or object is required');
    } // event naming?


    _this.on('minimumUpdatePeriod', function () {
      _this.refreshXml_();
    }); // live playlist staleness timeout


    _this.on('mediaupdatetimeout', function () {
      _this.refreshMedia_(_this.media().id);
    });

    _this.state = 'HAVE_NOTHING';
    _this.loadedPlaylists_ = {};
    _this.logger_ = logger('DashPlaylistLoader'); // initialize the loader state
    // The masterPlaylistLoader will be created with a string

    if (_this.isMaster_) {
      _this.masterPlaylistLoader_.srcUrl = srcUrlOrPlaylist; // TODO: reset sidxMapping between period changes
      // once multi-period is refactored

      _this.masterPlaylistLoader_.sidxMapping_ = {};
    } else {
      // child loaders are handed the playlist object directly
      _this.childPlaylist_ = srcUrlOrPlaylist;
    }

    return _this;
  }

  var _proto = DashPlaylistLoader.prototype;
3377
  // Clears the pending request and, when an error occurred, records it on
  // `this.error` (optionally restoring `startingState`) and triggers 'error'.
  // Returns true when the caller should bail out (loader disposed or errored),
  // undefined otherwise.
  _proto.requestErrored_ = function requestErrored_(err, request, startingState) {
    // disposed
    if (!this.request) {
      return true;
    } // pending request is cleared


    this.request = null;

    if (err) {
      // use the provided error object or create one
      // based on the request/response
      this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
        status: request.status,
        message: 'DASH request error at URL: ' + request.uri,
        response: request.response,
        // MEDIA_ERR_NETWORK
        code: 2
      };

      if (startingState) {
        this.state = startingState;
      }

      this.trigger('error');
      return true;
    }
  }
3406 /**
3407 * Verify that the container of the sidx segment can be parsed
3408 * and if it can, get and parse that segment.
3409 */
3410 ;
3411
  _proto.addSidxSegments_ = function addSidxSegments_(playlist, startingState, cb) {
    var _this2 = this;

    var sidxKey = playlist.sidx && mpdParser.generateSidxKey(playlist.sidx); // playlist lacks sidx or sidx segments were added to this playlist already.

    if (!playlist.sidx || !sidxKey || this.masterPlaylistLoader_.sidxMapping_[sidxKey]) {
      // keep this function async
      this.mediaRequest_ = window__default["default"].setTimeout(function () {
        return cb(false);
      }, 0);
      return;
    } // resolve the segment URL relative to the playlist


    var uri = resolveManifestRedirect(this.handleManifestRedirects, playlist.sidx.resolvedUri);

    // parses the sidx box out of `request.response`, caches it in the shared
    // sidxMapping_, attaches the derived segments, and reports success via cb
    var fin = function fin(err, request) {
      if (_this2.requestErrored_(err, request, startingState)) {
        return;
      }

      var sidxMapping = _this2.masterPlaylistLoader_.sidxMapping_;
      var sidx;

      try {
        // skip the 8-byte box header to get at the sidx payload
        sidx = parseSidx__default["default"](byteHelpers.toUint8(request.response).subarray(8));
      } catch (e) {
        // sidx parsing failed.
        _this2.requestErrored_(e, request, startingState);

        return;
      }

      sidxMapping[sidxKey] = {
        sidxInfo: playlist.sidx,
        sidx: sidx
      };
      mpdParser.addSidxSegmentsToPlaylist(playlist, sidx, playlist.sidx.resolvedUri);
      return cb(true);
    };

    this.request = containerRequest(uri, this.vhs_.xhr, function (err, request, container, bytes) {
      if (err) {
        return fin(err, request);
      }

      // only an mp4 container can hold a usable sidx box
      if (!container || container !== 'mp4') {
        return fin({
          status: request.status,
          message: "Unsupported " + (container || 'unknown') + " container type for sidx segment at URL: " + uri,
          // response is just bytes in this case
          // but we really don't want to return that.
          response: '',
          playlist: playlist,
          internal: true,
          blacklistDuration: Infinity,
          // MEDIA_ERR_NETWORK
          code: 2
        }, request);
      } // if we already downloaded the sidx bytes in the container request, use them


      var _playlist$sidx$bytera = playlist.sidx.byterange,
          offset = _playlist$sidx$bytera.offset,
          length = _playlist$sidx$bytera.length;

      if (bytes.length >= length + offset) {
        return fin(err, {
          response: bytes.subarray(offset, offset + length),
          status: request.status,
          uri: request.uri
        });
      } // otherwise request sidx bytes


      _this2.request = _this2.vhs_.xhr({
        uri: uri,
        responseType: 'arraybuffer',
        headers: segmentXhrHeaders({
          byterange: playlist.sidx.byterange
        })
      }, fin);
    });
  };
3496
  // Tears down the loader: announces disposal, aborts any in-flight request,
  // clears all pending timeouts/caches, and removes every event listener.
  _proto.dispose = function dispose() {
    this.trigger('dispose');
    this.stopRequest();
    this.loadedPlaylists_ = {};
    window__default["default"].clearTimeout(this.minimumUpdatePeriodTimeout_);
    window__default["default"].clearTimeout(this.mediaRequest_);
    window__default["default"].clearTimeout(this.mediaUpdateTimeout);
    this.mediaUpdateTimeout = null;
    this.mediaRequest_ = null;
    this.minimumUpdatePeriodTimeout_ = null;

    if (this.masterPlaylistLoader_.createMupOnMedia_) {
      this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
      this.masterPlaylistLoader_.createMupOnMedia_ = null;
    }

    this.off();
  };
3515
  // Truthy when an xhr (`request`) or a queued async media task
  // (`mediaRequest_`) is still outstanding. Note: returns the value itself,
  // not a boolean.
  _proto.hasPendingRequest = function hasPendingRequest() {
    return this.request || this.mediaRequest_;
  };
3519
3520 _proto.stopRequest = function stopRequest() {
3521 if (this.request) {
3522 var oldRequest = this.request;
3523 this.request = null;
3524 oldRequest.onreadystatechange = null;
3525 oldRequest.abort();
3526 }
3527 };
3528
  // Getter/setter for the active media playlist. With no argument, returns the
  // current media playlist; with a playlist object or URI string, switches to
  // it, fetching sidx segments asynchronously first when needed.
  _proto.media = function media(playlist) {
    var _this3 = this;

    // getter
    if (!playlist) {
      return this.media_;
    } // setter


    if (this.state === 'HAVE_NOTHING') {
      throw new Error('Cannot switch media playlist from ' + this.state);
    }

    var startingState = this.state; // find the playlist object if the target playlist has been specified by URI

    if (typeof playlist === 'string') {
      if (!this.masterPlaylistLoader_.master.playlists[playlist]) {
        throw new Error('Unknown playlist URI: ' + playlist);
      }

      playlist = this.masterPlaylistLoader_.master.playlists[playlist];
    }

    var mediaChange = !this.media_ || playlist.id !== this.media_.id; // switch to previously loaded playlists immediately

    if (mediaChange && this.loadedPlaylists_[playlist.id] && this.loadedPlaylists_[playlist.id].endList) {
      this.state = 'HAVE_METADATA';
      this.media_ = playlist; // trigger media change if the active media has been updated

      if (mediaChange) {
        this.trigger('mediachanging');
        this.trigger('mediachange');
      }

      return;
    } // switching to the active playlist is a no-op


    if (!mediaChange) {
      return;
    } // switching from an already loaded playlist


    if (this.media_) {
      this.trigger('mediachanging');
    }

    // fetch/attach sidx segments (async) before declaring metadata available
    this.addSidxSegments_(playlist, startingState, function (sidxChanged) {
      // everything is ready just continue to haveMetadata
      _this3.haveMetadata({
        startingState: startingState,
        playlist: playlist
      });
    });
  };
3584
3585 _proto.haveMetadata = function haveMetadata(_ref2) {
3586 var startingState = _ref2.startingState,
3587 playlist = _ref2.playlist;
3588 this.state = 'HAVE_METADATA';
3589 this.loadedPlaylists_[playlist.id] = playlist;
3590 this.mediaRequest_ = null; // This will trigger loadedplaylist
3591
3592 this.refreshMedia_(playlist.id); // fire loadedmetadata the first time a media playlist is loaded
3593 // to resolve setup of media groups
3594
3595 if (startingState === 'HAVE_MASTER') {
3596 this.trigger('loadedmetadata');
3597 } else {
3598 // trigger media change if the active media has been updated
3599 this.trigger('mediachange');
3600 }
3601 };
3602
  // Stops loader activity: cancels the mup-on-media hook, aborts requests,
  // and clears refresh timeouts. A loader paused before any data arrived is
  // reset to unstarted.
  _proto.pause = function pause() {
    if (this.masterPlaylistLoader_.createMupOnMedia_) {
      this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
      this.masterPlaylistLoader_.createMupOnMedia_ = null;
    }

    this.stopRequest();
    window__default["default"].clearTimeout(this.mediaUpdateTimeout);
    this.mediaUpdateTimeout = null;

    // only the master loader owns the minimumUpdatePeriod timeout
    if (this.isMaster_) {
      window__default["default"].clearTimeout(this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_);
      this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;
    }

    if (this.state === 'HAVE_NOTHING') {
      // If we pause the loader before any data has been retrieved, its as if we never
      // started, so reset to an unstarted state.
      this.started = false;
    }
  };
3624
  // (Re)starts loading. For a final rendition, defers a retry; for an
  // unstarted loader, kicks off start(); otherwise refreshes live media,
  // re-arming the minimumUpdatePeriod timeout when pause() had cleared it.
  _proto.load = function load(isFinalRendition) {
    var _this4 = this;

    window__default["default"].clearTimeout(this.mediaUpdateTimeout);
    this.mediaUpdateTimeout = null;
    var media = this.media();

    if (isFinalRendition) {
      // retry after half a target duration (or 5s when no media is selected)
      var delay = media ? media.targetDuration / 2 * 1000 : 5 * 1000;
      this.mediaUpdateTimeout = window__default["default"].setTimeout(function () {
        return _this4.load();
      }, delay);
      return;
    } // because the playlists are internal to the manifest, load should either load the
    // main manifest, or do nothing but trigger an event


    if (!this.started) {
      this.start();
      return;
    }

    if (media && !media.endList) {
      // Check to see if this is the master loader and the MUP was cleared (this happens
      // when the loader was paused). `media` should be set at this point since one is always
      // set during `start()`.
      if (this.isMaster_ && !this.minimumUpdatePeriodTimeout_) {
        // Trigger minimumUpdatePeriod to refresh the master manifest
        this.trigger('minimumUpdatePeriod'); // Since there was no prior minimumUpdatePeriodTimeout it should be recreated

        this.updateMinimumUpdatePeriodTimeout_();
      }

      this.trigger('mediaupdatetimeout');
    } else {
      this.trigger('loadedplaylist');
    }
  };
3663
  // Begins loading: child loaders go straight to haveMaster_ (async, to match
  // the xhr behavior below); the master loader requests the mpd and
  // auto-selects the first playlist when nothing else has chosen one.
  _proto.start = function start() {
    var _this5 = this;

    this.started = true; // We don't need to request the master manifest again
    // Call this asynchronously to match the xhr request behavior below

    if (!this.isMaster_) {
      this.mediaRequest_ = window__default["default"].setTimeout(function () {
        return _this5.haveMaster_();
      }, 0);
      return;
    }

    this.requestMaster_(function (req, masterChanged) {
      _this5.haveMaster_();

      if (!_this5.hasPendingRequest() && !_this5.media_) {
        _this5.media(_this5.masterPlaylistLoader_.master.playlists[0]);
      }
    });
  };
3685
  // Requests the master mpd, records the raw xml and its load time, follows
  // manifest redirects, and (when the xml changed) re-parses the master and
  // syncs the client/server clock. `cb` receives (request, masterChanged).
  _proto.requestMaster_ = function requestMaster_(cb) {
    var _this6 = this;

    this.request = this.vhs_.xhr({
      uri: this.masterPlaylistLoader_.srcUrl,
      withCredentials: this.withCredentials
    }, function (error, req) {
      if (_this6.requestErrored_(error, req)) {
        // an error before anything loaded means we never really started
        if (_this6.state === 'HAVE_NOTHING') {
          _this6.started = false;
        }

        return;
      }

      // only re-parse when the raw xml actually changed
      var masterChanged = req.responseText !== _this6.masterPlaylistLoader_.masterXml_;
      _this6.masterPlaylistLoader_.masterXml_ = req.responseText;

      // prefer the server Date header as the load timestamp; fall back to local time
      if (req.responseHeaders && req.responseHeaders.date) {
        _this6.masterLoaded_ = Date.parse(req.responseHeaders.date);
      } else {
        _this6.masterLoaded_ = Date.now();
      }

      _this6.masterPlaylistLoader_.srcUrl = resolveManifestRedirect(_this6.handleManifestRedirects, _this6.masterPlaylistLoader_.srcUrl, req);

      if (masterChanged) {
        _this6.handleMaster_();

        _this6.syncClientServerClock_(function () {
          return cb(req, masterChanged);
        });

        return;
      }

      return cb(req, masterChanged);
    });
  }
3725 /**
3726 * Parses the master xml for UTCTiming node to sync the client clock to the server
3727 * clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
3728 *
3729 * @param {Function} done
3730 * Function to call when clock sync has completed
3731 */
3732 ;
3733
  _proto.syncClientServerClock_ = function syncClientServerClock_(done) {
    var _this7 = this;

    var utcTiming = mpdParser.parseUTCTiming(this.masterPlaylistLoader_.masterXml_); // No UTCTiming element found in the mpd. Use Date header from mpd request as the
    // server clock

    if (utcTiming === null) {
      this.masterPlaylistLoader_.clientOffset_ = this.masterLoaded_ - Date.now();
      return done();
    }

    // DIRECT: the mpd carries the server time inline; no extra request needed
    if (utcTiming.method === 'DIRECT') {
      this.masterPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
      return done();
    }

    // HEAD/GET: fetch the timing URL and derive the server time from the response
    this.request = this.vhs_.xhr({
      uri: resolveUrl(this.masterPlaylistLoader_.srcUrl, utcTiming.value),
      method: utcTiming.method,
      withCredentials: this.withCredentials
    }, function (error, req) {
      // disposed
      if (!_this7.request) {
        return;
      }

      if (error) {
        // sync request failed, fall back to using date header from mpd
        // TODO: log warning
        _this7.masterPlaylistLoader_.clientOffset_ = _this7.masterLoaded_ - Date.now();
        return done();
      }

      var serverTime;

      if (utcTiming.method === 'HEAD') {
        if (!req.responseHeaders || !req.responseHeaders.date) {
          // expected date header not preset, fall back to using date header from mpd
          // TODO: log warning
          serverTime = _this7.masterLoaded_;
        } else {
          serverTime = Date.parse(req.responseHeaders.date);
        }
      } else {
        serverTime = Date.parse(req.responseText);
      }

      _this7.masterPlaylistLoader_.clientOffset_ = serverTime - Date.now();
      done();
    });
  };
3785
3786 _proto.haveMaster_ = function haveMaster_() {
3787 this.state = 'HAVE_MASTER';
3788
3789 if (this.isMaster_) {
3790 // We have the master playlist at this point, so
3791 // trigger this to allow MasterPlaylistController
3792 // to make an initial playlist selection
3793 this.trigger('loadedplaylist');
3794 } else if (!this.media_) {
3795 // no media playlist was specifically selected so select
3796 // the one the child playlist loader was created with
3797 this.media(this.childPlaylist_);
3798 }
3799 };
3800
  // Parses the stored master xml into a manifest, merges it with the previous
  // master (if any), honors mpd <Location> redirects, and re-arms the
  // minimumUpdatePeriod timeout when needed. Returns true when the master changed.
  _proto.handleMaster_ = function handleMaster_() {
    // clear media request
    this.mediaRequest_ = null;
    var newMaster = parseMasterXml({
      masterXml: this.masterPlaylistLoader_.masterXml_,
      srcUrl: this.masterPlaylistLoader_.srcUrl,
      clientOffset: this.masterPlaylistLoader_.clientOffset_,
      sidxMapping: this.masterPlaylistLoader_.sidxMapping_
    });
    var oldMaster = this.masterPlaylistLoader_.master; // if we have an old master to compare the new master against

    if (oldMaster) {
      newMaster = updateMaster(oldMaster, newMaster, this.masterPlaylistLoader_.sidxMapping_);
    } // only update master if we have a new master


    this.masterPlaylistLoader_.master = newMaster ? newMaster : oldMaster;
    var location = this.masterPlaylistLoader_.master.locations && this.masterPlaylistLoader_.master.locations[0];

    // a Location element points future refreshes at a new manifest URL
    if (location && location !== this.masterPlaylistLoader_.srcUrl) {
      this.masterPlaylistLoader_.srcUrl = location;
    }

    if (!oldMaster || newMaster && newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
      this.updateMinimumUpdatePeriodTimeout_();
    }

    return Boolean(newMaster);
  };
3830
  // Re-arms the minimumUpdatePeriod refresh timeout from the current master,
  // handling the special mup === 0 case (use target duration, or defer until
  // media is available) and skipping invalid (non-number / <= 0) values.
  _proto.updateMinimumUpdatePeriodTimeout_ = function updateMinimumUpdatePeriodTimeout_() {
    var mpl = this.masterPlaylistLoader_; // cancel any pending creation of mup on media
    // a new one will be added if needed.

    if (mpl.createMupOnMedia_) {
      mpl.off('loadedmetadata', mpl.createMupOnMedia_);
      mpl.createMupOnMedia_ = null;
    } // clear any pending timeouts


    if (mpl.minimumUpdatePeriodTimeout_) {
      window__default["default"].clearTimeout(mpl.minimumUpdatePeriodTimeout_);
      mpl.minimumUpdatePeriodTimeout_ = null;
    }

    var mup = mpl.master && mpl.master.minimumUpdatePeriod; // If the minimumUpdatePeriod has a value of 0, that indicates that the current
    // MPD has no future validity, so a new one will need to be acquired when new
    // media segments are to be made available. Thus, we use the target duration
    // in this case

    if (mup === 0) {
      if (mpl.media()) {
        mup = mpl.media().targetDuration * 1000;
      } else {
        // no media yet; retry once the first media playlist has loaded
        mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;
        mpl.one('loadedmetadata', mpl.createMupOnMedia_);
      }
    } // if minimumUpdatePeriod is invalid or <= zero, which
    // can happen when a live video becomes VOD. skip timeout
    // creation.


    if (typeof mup !== 'number' || mup <= 0) {
      if (mup < 0) {
        this.logger_("found invalid minimumUpdatePeriod of " + mup + ", not setting a timeout");
      }

      return;
    }

    this.createMUPTimeout_(mup);
  };
3873
/**
 * Schedules a self-rescheduling timeout that fires a 'minimumUpdatePeriod'
 * event on the master playlist loader every `mup` milliseconds.
 *
 * @param {number} mup - the minimumUpdatePeriod, in milliseconds
 */
_proto.createMUPTimeout_ = function createMUPTimeout_(mup) {
  var loader = this.masterPlaylistLoader_;

  var onMupExpired = function onMupExpired() {
    loader.minimumUpdatePeriodTimeout_ = null;
    loader.trigger('minimumUpdatePeriod');
    loader.createMUPTimeout_(mup);
  };

  loader.minimumUpdatePeriodTimeout_ = window__default["default"].setTimeout(onMupExpired, mup);
}
/**
 * Sends request to refresh the master xml and updates the parsed master manifest
 */
;
3886
_proto.refreshXml_ = function refreshXml_() {
  var _this8 = this;

  this.requestMaster_(function (req, masterChanged) {
    // nothing to do when the re-fetched master produced no changes
    if (!masterChanged) {
      return;
    }

    if (_this8.media_) {
      // re-resolve our current media reference against the freshly
      // parsed master's playlists
      _this8.media_ = _this8.masterPlaylistLoader_.master.playlists[_this8.media_.id];
    } // This will filter out updated sidx info from the mapping


    _this8.masterPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(_this8.masterPlaylistLoader_.master, _this8.masterPlaylistLoader_.sidxMapping_);

    _this8.addSidxSegments_(_this8.media(), _this8.state, function (sidxChanged) {
      // TODO: do we need to reload the current playlist?
      _this8.refreshMedia_(_this8.media().id);
    });
  });
}
/**
 * Refreshes the media playlist by re-parsing the master xml and updating playlist
 * references. If this is an alternate loader, the updated parsed manifest is retrieved
 * from the master loader.
 */
;
3914
/**
 * @param {string} mediaID - id of the playlist to refresh (required)
 * @throws {Error} when no media id is provided
 */
_proto.refreshMedia_ = function refreshMedia_(mediaID) {
  var _this9 = this;

  if (!mediaID) {
    throw new Error('refreshMedia_ must take a media id');
  } // for master we have to reparse the master xml
  // to re-create segments based on current timing values
  // which may change media. We only skip updating master
  // if this is the first time this.media_ is being set.
  // as master was just parsed in that case.


  if (this.media_ && this.isMaster_) {
    this.handleMaster_();
  }

  var playlists = this.masterPlaylistLoader_.master.playlists;
  var mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];

  if (mediaChanged) {
    this.media_ = playlists[mediaID];
  } else {
    this.trigger('playlistunchanged');
  }

  if (!this.mediaUpdateTimeout) {
    // self-rescheduling refresh loop; it stops once the playlist reports
    // endList (i.e. the stream has become VOD)
    var createMediaUpdateTimeout = function createMediaUpdateTimeout() {
      if (_this9.media().endList) {
        return;
      }

      _this9.mediaUpdateTimeout = window__default["default"].setTimeout(function () {
        _this9.trigger('mediaupdatetimeout');

        createMediaUpdateTimeout();
      }, refreshDelay(_this9.media(), Boolean(mediaChanged)));
    };

    createMediaUpdateTimeout();
  }

  this.trigger('loadedplaylist');
};
3958
3959 return DashPlaylistLoader;
3960}(EventTarget);
3961
/**
 * Tunable buffering / ABR constants used throughout VHS.
 * Lengths are in seconds, bandwidth in bits per second.
 */
var Config = {
  GOAL_BUFFER_LENGTH: 30,
  MAX_GOAL_BUFFER_LENGTH: 60,
  BACK_BUFFER_LENGTH: 30,
  GOAL_BUFFER_LENGTH_RATE: 1,
  INITIAL_BANDWIDTH: 4194304, // 0.5 MB/s starting estimate
  BANDWIDTH_VARIANCE: 1.2, // fudge factor applied to advertised playlist bitrates
  // to account for temporary flucations in client bandwidth
  BUFFER_LOW_WATER_LINE: 0, // how much buffer must be filled before upswitching
  MAX_BUFFER_LOW_WATER_LINE: 30,
  EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16, // TODO: remove with experimentalBufferBasedABR
  BUFFER_LOW_WATER_LINE_RATE: 1,
  BUFFER_HIGH_WATER_LINE: 30 // above this buffer level we won't switch down
};
3981
/**
 * Encodes a string into an ArrayBuffer, one byte per character
 * (each byte is the character's code unit truncated to 8 bits).
 *
 * @param {string} string - the string to encode
 * @return {ArrayBuffer} buffer of length string.length
 */
var stringToArrayBuffer = function stringToArrayBuffer(string) {
  var bytes = new Uint8Array(string.length);
  var i = string.length;

  while (i--) {
    bytes[i] = string.charCodeAt(i);
  }

  return bytes.buffer;
};
3991
/* global Blob, BlobBuilder, Worker */
// unify worker interface
var browserWorkerPolyFill = function browserWorkerPolyFill(workerObj) {
  // node only supports on/off, so alias the DOM-style listener methods
  workerObj.off = workerObj.removeEventListener;
  workerObj.on = workerObj.addEventListener;
  return workerObj;
};
4000
/**
 * Creates an object URL for the given JavaScript source string so it can be
 * loaded as a Worker.
 *
 * @param {string} str - worker source code
 * @return {string} an object URL referencing a Blob containing `str`
 */
var createObjectURL = function createObjectURL(str) {
  try {
    return URL.createObjectURL(new Blob([str], {
      type: 'application/javascript'
    }));
  } catch (e) {
    // fallback when the Blob constructor throws — presumably legacy
    // engines that only expose the deprecated BlobBuilder API
    var blob = new BlobBuilder();
    blob.append(str);
    return URL.createObjectURL(blob.getBlob());
  }
};
4012
/**
 * Returns a zero-argument constructor for Workers built from `code`.
 * Each call creates a fresh object URL and Worker; terminate() revokes
 * the URL before delegating to the native terminate.
 *
 * @param {string} code - worker source code
 * @return {Function} a factory that instantiates configured Workers
 */
var factory = function factory(code) {
  return function () {
    var objectUrl = createObjectURL(code);
    var worker = browserWorkerPolyFill(new Worker(objectUrl));
    worker.objURL = objectUrl;
    var terminate = worker.terminate;
    // NOTE(review): browserWorkerPolyFill already assigned on/off above;
    // these reassignments appear redundant but are harmless
    worker.on = worker.addEventListener;
    worker.off = worker.removeEventListener;

    // revoke the object URL so the Blob can be garbage collected
    worker.terminate = function () {
      URL.revokeObjectURL(objectUrl);
      return terminate.call(this);
    };

    return worker;
  };
};
/**
 * Prepends a stringified copy of browserWorkerPolyFill to `code` and invokes
 * it on the worker's global scope (self), so worker code can use on/off.
 *
 * @param {string} code - worker source code
 * @return {string} the wrapped worker source
 */
var transform = function transform(code) {
  var preamble = "var browserWorkerPolyFill = " + browserWorkerPolyFill.toString() + ";\n" + 'browserWorkerPolyFill(self);\n';
  return preamble + code;
};
4033
/**
 * Extracts the body of a function as a source string by stripping the
 * `function (...) {` header and the final closing brace from fn.toString().
 *
 * @param {Function} fn - a classic (non-arrow) function
 * @return {string} the function body's source text
 */
var getWorkerString = function getWorkerString(fn) {
  var source = fn.toString();
  return source.replace(/^function.+?{/, '').slice(0, -1);
};
4037
4038/* rollup-plugin-worker-factory start for worker!/Users/bcasey/Projects/videojs-http-streaming/src/transmuxer-worker.js */
4039var workerCode$1 = transform(getWorkerString(function () {
4040 /**
4041 * mux.js
4042 *
4043 * Copyright (c) Brightcove
4044 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4045 *
4046 * A lightweight readable stream implemention that handles event dispatching.
4047 * Objects that inherit from streams should call init in their constructors.
4048 */
4049
/**
 * A lightweight readable stream implementation that handles event
 * dispatching. Objects that inherit from streams should call init in their
 * constructors.
 */
var Stream = function Stream() {
  this.init = function () {
    var listeners = {};
    /**
     * Add a listener for a specified event type.
     * @param type {string} the event name
     * @param listener {function} the callback to be invoked when an event of
     * the specified type occurs
     */

    this.on = function (type, listener) {
      if (!listeners[type]) {
        listeners[type] = [];
      }

      listeners[type] = listeners[type].concat(listener);
    };
    /**
     * Remove a listener for a specified event type.
     * @param type {string} the event name
     * @param listener {function} a function previously registered for this
     * type of event through `on`
     * @return {boolean} whether the listener was found and removed
     */


    this.off = function (type, listener) {
      var index;

      if (!listeners[type]) {
        return false;
      }

      index = listeners[type].indexOf(listener); // copy the list before mutating so a dispatch in progress
      // (which iterates the old array) is unaffected

      listeners[type] = listeners[type].slice();

      // Fix: only splice when the listener is actually registered. The
      // previous code called splice(index, 1) unconditionally, and
      // splice(-1, 1) for an unknown listener incorrectly removed the
      // most recently added listener.
      if (index > -1) {
        listeners[type].splice(index, 1);
      }

      return index > -1;
    };
    /**
     * Trigger an event of the specified type on this stream. Any additional
     * arguments to this function are passed as parameters to event listeners.
     * @param type {string} the event name
     */


    this.trigger = function (type) {
      var callbacks, i, length, args;
      callbacks = listeners[type];

      if (!callbacks) {
        return;
      } // Slicing the arguments on every invocation of this method
      // can add a significant amount of overhead. Avoid the
      // intermediate object creation for the common case of a
      // single callback argument


      if (arguments.length === 2) {
        length = callbacks.length;

        for (i = 0; i < length; ++i) {
          callbacks[i].call(this, arguments[1]);
        }
      } else {
        args = [];

        for (i = 1; i < arguments.length; ++i) {
          args.push(arguments[i]);
        }

        length = callbacks.length;

        for (i = 0; i < length; ++i) {
          callbacks[i].apply(this, args);
        }
      }
    };
    /**
     * Destroys the stream and cleans up.
     */


    this.dispose = function () {
      listeners = {};
    };
  };
};
4137 /**
4138 * Forwards all `data` events on this stream to the destination stream. The
4139 * destination stream should provide a method `push` to receive the data
4140 * events as they arrive.
4141 * @param destination {stream} the stream that will receive all `data` events
4142 * @param autoFlush {boolean} if false, we will not call `flush` on the destination
4143 * when the current stream emits a 'done' event
4144 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
4145 */
4146
4147
Stream.prototype.pipe = function (destination) {
  this.on('data', function (data) {
    destination.push(data);
  });
  this.on('done', function (flushSource) {
    destination.flush(flushSource);
  });
  this.on('partialdone', function (flushSource) {
    destination.partialFlush(flushSource);
  });
  this.on('endedtimeline', function (flushSource) {
    destination.endTimeline(flushSource);
  });
  this.on('reset', function (flushSource) {
    destination.reset(flushSource);
  });
  // return the destination so pipe calls can be chained
  return destination;
}; // Default stream functions that are expected to be overridden to perform
// actual work. These are provided by the prototype as a sort of no-op
// implementation so that we don't have to check for their existence in the
// `pipe` function above.
4170
// re-emit pushed data; subclasses override this to do real processing
Stream.prototype.push = function (data) {
  this.trigger('data', data);
};

// signal that the source has no more data for now
Stream.prototype.flush = function (flushSource) {
  this.trigger('done', flushSource);
};

// signal a partial flush (used by partial-segment pipelines)
Stream.prototype.partialFlush = function (flushSource) {
  this.trigger('partialdone', flushSource);
};

// signal that the current timeline has ended
Stream.prototype.endTimeline = function (flushSource) {
  this.trigger('endedtimeline', flushSource);
};

// reset internal state for a new timeline/discontinuity
Stream.prototype.reset = function (flushSource) {
  this.trigger('reset', flushSource);
};
4190
var stream = Stream;
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 *
 * Functions that generate fragmented MP4s suitable for use with Media
 * Source Extensions.
 */

// largest unsigned 32-bit value; used when splitting 64-bit quantities
var UINT32_MAX = Math.pow(2, 32) - 1;
var box, dinf, esds, ftyp, mdat, mfhd, minf, moof, moov, mvex, mvhd, trak, tkhd, mdia, mdhd, hdlr, sdtp, stbl, stsd, traf, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS; // pre-calculate constants
4204
// Pre-computes the 4-byte box type codes and the constant box payloads
// (handler, data-reference, sample-table stubs, etc.) used by the box
// generator functions below.
(function () {
  var i;
  types = {
    avc1: [],
    // codingname
    avcC: [],
    btrt: [],
    dinf: [],
    dref: [],
    esds: [],
    ftyp: [],
    hdlr: [],
    mdat: [],
    mdhd: [],
    mdia: [],
    mfhd: [],
    minf: [],
    moof: [],
    moov: [],
    mp4a: [],
    // codingname
    mvex: [],
    mvhd: [],
    pasp: [],
    sdtp: [],
    smhd: [],
    stbl: [],
    stco: [],
    stsc: [],
    stsd: [],
    stsz: [],
    stts: [],
    styp: [],
    tfdt: [],
    tfhd: [],
    traf: [],
    trak: [],
    trun: [],
    trex: [],
    tkhd: [],
    vmhd: []
  }; // In environments where Uint8Array is undefined (e.g., IE8), skip set up so that we
  // don't throw an error

  if (typeof Uint8Array === 'undefined') {
    return;
  }

  // turn each box name into its 4-byte ASCII code
  for (i in types) {
    if (types.hasOwnProperty(i)) {
      types[i] = [i.charCodeAt(0), i.charCodeAt(1), i.charCodeAt(2), i.charCodeAt(3)];
    }
  }

  MAJOR_BRAND = new Uint8Array(['i'.charCodeAt(0), 's'.charCodeAt(0), 'o'.charCodeAt(0), 'm'.charCodeAt(0)]);
  AVC1_BRAND = new Uint8Array(['a'.charCodeAt(0), 'v'.charCodeAt(0), 'c'.charCodeAt(0), '1'.charCodeAt(0)]);
  MINOR_VERSION = new Uint8Array([0, 0, 0, 1]);
  VIDEO_HDLR = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x00, // pre_defined
  0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, // reserved
  0x56, 0x69, 0x64, 0x65, 0x6f, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
  ]);
  AUDIO_HDLR = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x00, // pre_defined
  0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, // reserved
  0x53, 0x6f, 0x75, 0x6e, 0x64, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
  ]);
  HDLR_TYPES = {
    video: VIDEO_HDLR,
    audio: AUDIO_HDLR
  };
  DREF = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x01, // entry_count
  0x00, 0x00, 0x00, 0x0c, // entry_size
  0x75, 0x72, 0x6c, 0x20, // 'url' type
  0x00, // version 0
  0x00, 0x00, 0x01 // entry_flags
  ]);
  SMHD = new Uint8Array([0x00, // version
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, // balance, 0 means centered
  0x00, 0x00 // reserved
  ]);
  STCO = new Uint8Array([0x00, // version
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x00 // entry_count
  ]);
  STSC = STCO; // same empty full-box payload shape as stco
  STSZ = new Uint8Array([0x00, // version
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x00, // sample_size
  0x00, 0x00, 0x00, 0x00 // sample_count
  ]);
  STTS = STCO; // same empty full-box payload shape as stco
  VMHD = new Uint8Array([0x00, // version
  0x00, 0x00, 0x01, // flags
  0x00, 0x00, // graphicsmode
  0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // opcolor
  ]);
})();
4314
/**
 * Serializes an MP4 box: a 32-bit big-endian size, the 4-byte type code,
 * then each payload argument in order.
 *
 * @param {Array|Uint8Array} type - 4-byte box type code
 * @param {...Uint8Array} payload - zero or more payload byte arrays
 * @return {Uint8Array} the serialized box
 */
box = function box(type) {
  var payload = [],
      size = 0,
      i,
      result,
      view;

  for (i = 1; i < arguments.length; i++) {
    payload.push(arguments[i]);
  }

  i = payload.length; // calculate the total size we need to allocate

  while (i--) {
    size += payload[i].byteLength;
  }

  result = new Uint8Array(size + 8);
  view = new DataView(result.buffer, result.byteOffset, result.byteLength);
  view.setUint32(0, result.byteLength);
  result.set(type, 4); // copy the payload into the result

  for (i = 0, size = 8; i < payload.length; i++) {
    result.set(payload[i], size);
    size += payload[i].byteLength;
  }

  return result;
};
4344
// data information box: a constant dref declaring in-file media data
dinf = function dinf() {
  return box(types.dinf, box(types.dref, DREF));
};
4348
/**
 * Elementary stream descriptor box for AAC audio; the final bytes encode the
 * AudioSpecificConfig from the track's object type, sampling frequency index
 * and channel count.
 */
esds = function esds(track) {
  return box(types.esds, new Uint8Array([0x00, // version
  0x00, 0x00, 0x00, // flags
  // ES_Descriptor
  0x03, // tag, ES_DescrTag
  0x19, // length
  0x00, 0x00, // ES_ID
  0x00, // streamDependenceFlag, URL_flag, reserved, streamPriority
  // DecoderConfigDescriptor
  0x04, // tag, DecoderConfigDescrTag
  0x11, // length
  0x40, // object type
  0x15, // streamType
  0x00, 0x06, 0x00, // bufferSizeDB
  0x00, 0x00, 0xda, 0xc0, // maxBitrate
  0x00, 0x00, 0xda, 0xc0, // avgBitrate
  // DecoderSpecificInfo
  0x05, // tag, DecoderSpecificInfoTag
  0x02, // length
  // ISO/IEC 14496-3, AudioSpecificConfig
  // for samplingFrequencyIndex see ISO/IEC 13818-7:2006, 8.1.3.2.2, Table 35
  track.audioobjecttype << 3 | track.samplingfrequencyindex >>> 1, track.samplingfrequencyindex << 7 | track.channelcount << 3, 0x06, 0x01, 0x02 // GASpecificConfig
  ]));
};
4373
// file type box: isom major brand, avc1 compatible brand
ftyp = function ftyp() {
  return box(types.ftyp, MAJOR_BRAND, MINOR_VERSION, MAJOR_BRAND, AVC1_BRAND);
};
4377
// handler box for 'video' or 'audio' (see HDLR_TYPES)
hdlr = function hdlr(type) {
  return box(types.hdlr, HDLR_TYPES[type]);
};
4381
// media data box wrapping the raw sample bytes
mdat = function mdat(data) {
  return box(types.mdat, data);
};
4385
/**
 * Media header box: 90kHz timescale by default, overwritten in place with
 * the track's sample rate when one is known.
 */
mdhd = function mdhd(track) {
  var result = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x02, // creation_time
  0x00, 0x00, 0x00, 0x03, // modification_time
  0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
  track.duration >>> 24 & 0xFF, track.duration >>> 16 & 0xFF, track.duration >>> 8 & 0xFF, track.duration & 0xFF, // duration
  0x55, 0xc4, // 'und' language (undetermined)
  0x00, 0x00]); // Use the sample rate from the track metadata, when it is
  // defined. The sample rate can be parsed out of an ADTS header, for
  // instance.

  if (track.samplerate) {
    // bytes 12-15 are the timescale field initialized above
    result[12] = track.samplerate >>> 24 & 0xFF;
    result[13] = track.samplerate >>> 16 & 0xFF;
    result[14] = track.samplerate >>> 8 & 0xFF;
    result[15] = track.samplerate & 0xFF;
  }

  return box(types.mdhd, result);
};
4407
// media box: header, handler and media information for one track
mdia = function mdia(track) {
  return box(types.mdia, mdhd(track), hdlr(track.type), minf(track));
};
4411
// movie fragment header box carrying the 32-bit fragment sequence number
mfhd = function mfhd(sequenceNumber) {
  return box(types.mfhd, new Uint8Array([0x00, 0x00, 0x00, 0x00, // flags
  (sequenceNumber & 0xFF000000) >> 24, (sequenceNumber & 0xFF0000) >> 16, (sequenceNumber & 0xFF00) >> 8, sequenceNumber & 0xFF // sequence_number
  ]));
};
4417
// media information box: vmhd for video / smhd for audio, plus dinf and stbl
minf = function minf(track) {
  return box(types.minf, track.type === 'video' ? box(types.vmhd, VMHD) : box(types.smhd, SMHD), dinf(), stbl(track));
};
4421
// movie fragment box: fragment header plus one traf per track
moof = function moof(sequenceNumber, tracks) {
  var trackFragments = [],
      i = tracks.length; // build traf boxes for each track fragment

  while (i--) {
    trackFragments[i] = traf(tracks[i]);
  }

  return box.apply(null, [types.moof, mfhd(sequenceNumber)].concat(trackFragments));
};
4432 /**
4433 * Returns a movie box.
4434 * @param tracks {array} the tracks associated with this movie
4435 * @see ISO/IEC 14496-12:2012(E), section 8.2.1
4436 */
4437
4438
moov = function moov(tracks) {
  var i = tracks.length,
      boxes = [];

  // one trak box per track
  while (i--) {
    boxes[i] = trak(tracks[i]);
  }

  // 0xffffffff duration signals "unknown" for fragmented content
  return box.apply(null, [types.moov, mvhd(0xffffffff)].concat(boxes).concat(mvex(tracks)));
};
4449
// movie extends box: one trex (default sample settings) per track
mvex = function mvex(tracks) {
  var i = tracks.length,
      boxes = [];

  while (i--) {
    boxes[i] = trex(tracks[i]);
  }

  return box.apply(null, [types.mvex].concat(boxes));
};
4460
/**
 * Movie header box with a 90kHz timescale and the given 32-bit duration.
 */
mvhd = function mvhd(duration) {
  var bytes = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x01, // creation_time
  0x00, 0x00, 0x00, 0x02, // modification_time
  0x00, 0x01, 0x5f, 0x90, // timescale, 90,000 "ticks" per second
  (duration & 0xFF000000) >> 24, (duration & 0xFF0000) >> 16, (duration & 0xFF00) >> 8, duration & 0xFF, // duration
  0x00, 0x01, 0x00, 0x00, // 1.0 rate
  0x01, 0x00, // 1.0 volume
  0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
  0xff, 0xff, 0xff, 0xff // next_track_ID
  ]);
  return box(types.mvhd, bytes);
};
4479
/**
 * Independent and disposable samples box: one byte of dependency flags per
 * sample, after a zeroed 4-byte full-box header.
 */
sdtp = function sdtp(track) {
  var samples = track.samples || [],
      bytes = new Uint8Array(4 + samples.length),
      flags,
      i; // leave the full box header (4 bytes) all zero
  // write the sample table

  for (i = 0; i < samples.length; i++) {
    flags = samples[i].flags;
    bytes[i + 4] = flags.dependsOn << 4 | flags.isDependedOn << 2 | flags.hasRedundancy;
  }

  return box(types.sdtp, bytes);
};
4494
// sample table box; all sub-tables are empty stubs since samples live in
// movie fragments, except stsd which carries the codec configuration
stbl = function stbl(track) {
  return box(types.stbl, stsd(track), box(types.stts, STTS), box(types.stsc, STSC), box(types.stsz, STSZ), box(types.stco, STCO));
};
4498
// Defines stsd (sample description) and its per-codec sample entry
// builders: avc1 (+avcC/btrt/pasp) for video, mp4a (+esds) for audio.
(function () {
  var videoSample, audioSample;

  stsd = function stsd(track) {
    return box(types.stsd, new Uint8Array([0x00, // version 0
    0x00, 0x00, 0x00, // flags
    0x00, 0x00, 0x00, 0x01]), track.type === 'video' ? videoSample(track) : audioSample(track));
  };

  videoSample = function videoSample(track) {
    var sps = track.sps || [],
        pps = track.pps || [],
        sequenceParameterSets = [],
        pictureParameterSets = [],
        i,
        avc1Box; // assemble the SPSs

    for (i = 0; i < sps.length; i++) {
      sequenceParameterSets.push((sps[i].byteLength & 0xFF00) >>> 8);
      sequenceParameterSets.push(sps[i].byteLength & 0xFF); // sequenceParameterSetLength

      sequenceParameterSets = sequenceParameterSets.concat(Array.prototype.slice.call(sps[i])); // SPS
    } // assemble the PPSs


    for (i = 0; i < pps.length; i++) {
      pictureParameterSets.push((pps[i].byteLength & 0xFF00) >>> 8);
      pictureParameterSets.push(pps[i].byteLength & 0xFF);
      pictureParameterSets = pictureParameterSets.concat(Array.prototype.slice.call(pps[i]));
    }

    avc1Box = [types.avc1, new Uint8Array([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x01, // data_reference_index
    0x00, 0x00, // pre_defined
    0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // pre_defined
    (track.width & 0xff00) >> 8, track.width & 0xff, // width
    (track.height & 0xff00) >> 8, track.height & 0xff, // height
    0x00, 0x48, 0x00, 0x00, // horizresolution
    0x00, 0x48, 0x00, 0x00, // vertresolution
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x01, // frame_count
    0x13, 0x76, 0x69, 0x64, 0x65, 0x6f, 0x6a, 0x73, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62, 0x2d, 0x68, 0x6c, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // compressorname
    0x00, 0x18, // depth = 24
    0x11, 0x11 // pre_defined = -1
    ]), box(types.avcC, new Uint8Array([0x01, // configurationVersion
    track.profileIdc, // AVCProfileIndication
    track.profileCompatibility, // profile_compatibility
    track.levelIdc, // AVCLevelIndication
    0xff // lengthSizeMinusOne, hard-coded to 4 bytes
    ].concat([sps.length], // numOfSequenceParameterSets
    sequenceParameterSets, // "SPS"
    [pps.length], // numOfPictureParameterSets
    pictureParameterSets // "PPS"
    ))), box(types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
    0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
    0x00, 0x2d, 0xc6, 0xc0 // avgBitrate
    ]))];

    // pixel aspect ratio box, only when the track declares one
    if (track.sarRatio) {
      var hSpacing = track.sarRatio[0],
          vSpacing = track.sarRatio[1];
      avc1Box.push(box(types.pasp, new Uint8Array([(hSpacing & 0xFF000000) >> 24, (hSpacing & 0xFF0000) >> 16, (hSpacing & 0xFF00) >> 8, hSpacing & 0xFF, (vSpacing & 0xFF000000) >> 24, (vSpacing & 0xFF0000) >> 16, (vSpacing & 0xFF00) >> 8, vSpacing & 0xFF])));
    }

    return box.apply(null, avc1Box);
  };

  audioSample = function audioSample(track) {
    return box(types.mp4a, new Uint8Array([// SampleEntry, ISO/IEC 14496-12
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x01, // data_reference_index
    // AudioSampleEntry, ISO/IEC 14496-12
    0x00, 0x00, 0x00, 0x00, // reserved
    0x00, 0x00, 0x00, 0x00, // reserved
    (track.channelcount & 0xff00) >> 8, track.channelcount & 0xff, // channelcount
    (track.samplesize & 0xff00) >> 8, track.samplesize & 0xff, // samplesize
    0x00, 0x00, // pre_defined
    0x00, 0x00, // reserved
    (track.samplerate & 0xff00) >> 8, track.samplerate & 0xff, 0x00, 0x00 // samplerate, 16.16
    // MP4AudioSampleEntry, ISO/IEC 14496-14
    ]), esds(track));
  };
})();
4583
/**
 * Track header box carrying the track id, duration and presentation size.
 */
tkhd = function tkhd(track) {
  var result = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x07, // flags
  0x00, 0x00, 0x00, 0x00, // creation_time
  0x00, 0x00, 0x00, 0x00, // modification_time
  (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
  0x00, 0x00, 0x00, 0x00, // reserved
  (track.duration & 0xFF000000) >> 24, (track.duration & 0xFF0000) >> 16, (track.duration & 0xFF00) >> 8, track.duration & 0xFF, // duration
  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // reserved
  0x00, 0x00, // layer
  0x00, 0x00, // alternate_group
  0x01, 0x00, // non-audio track volume
  0x00, 0x00, // reserved
  0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
  (track.width & 0xFF00) >> 8, track.width & 0xFF, 0x00, 0x00, // width
  (track.height & 0xFF00) >> 8, track.height & 0xFF, 0x00, 0x00 // height
  ]);
  return box(types.tkhd, result);
};
4603 /**
4604 * Generate a track fragment (traf) box. A traf box collects metadata
4605 * about tracks in a movie fragment (moof) box.
4606 */
4607
4608
traf = function traf(track) {
  var trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable, dataOffset, upperWordBaseMediaDecodeTime, lowerWordBaseMediaDecodeTime;
  trackFragmentHeader = box(types.tfhd, new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x3a, // flags
  (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
  0x00, 0x00, 0x00, 0x01, // sample_description_index
  0x00, 0x00, 0x00, 0x00, // default_sample_duration
  0x00, 0x00, 0x00, 0x00, // default_sample_size
  0x00, 0x00, 0x00, 0x00 // default_sample_flags
  ]));
  // split the 64-bit baseMediaDecodeTime into two 32-bit words for tfdt v1
  upperWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime / (UINT32_MAX + 1));
  lowerWordBaseMediaDecodeTime = Math.floor(track.baseMediaDecodeTime % (UINT32_MAX + 1));
  trackFragmentDecodeTime = box(types.tfdt, new Uint8Array([0x01, // version 1
  0x00, 0x00, 0x00, // flags
  // baseMediaDecodeTime
  upperWordBaseMediaDecodeTime >>> 24 & 0xFF, upperWordBaseMediaDecodeTime >>> 16 & 0xFF, upperWordBaseMediaDecodeTime >>> 8 & 0xFF, upperWordBaseMediaDecodeTime & 0xFF, lowerWordBaseMediaDecodeTime >>> 24 & 0xFF, lowerWordBaseMediaDecodeTime >>> 16 & 0xFF, lowerWordBaseMediaDecodeTime >>> 8 & 0xFF, lowerWordBaseMediaDecodeTime & 0xFF])); // the data offset specifies the number of bytes from the start of
  // the containing moof to the first payload byte of the associated
  // mdat

  dataOffset = 32 + // tfhd
  20 + // tfdt
  8 + // traf header
  16 + // mfhd
  8 + // moof header
  8; // mdat header
  // audio tracks require less metadata

  if (track.type === 'audio') {
    trackFragmentRun = trun$1(track, dataOffset);
    return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun);
  } // video tracks should contain an independent and disposable samples
  // box (sdtp)
  // generate one and adjust offsets to match


  sampleDependencyTable = sdtp(track);
  trackFragmentRun = trun$1(track, sampleDependencyTable.length + dataOffset);
  return box(types.traf, trackFragmentHeader, trackFragmentDecodeTime, trackFragmentRun, sampleDependencyTable);
};
4648 /**
4649 * Generate a track box.
4650 * @param track {object} a track definition
4651 * @return {Uint8Array} the track box
4652 */
4653
4654
trak = function trak(track) {
  // 0xffffffff duration means "unknown" for fragmented content
  track.duration = track.duration || 0xffffffff;
  return box(types.trak, tkhd(track), mdia(track));
};
4659
/**
 * Track extends box with per-track default sample settings.
 */
trex = function trex(track) {
  var result = new Uint8Array([0x00, // version 0
  0x00, 0x00, 0x00, // flags
  (track.id & 0xFF000000) >> 24, (track.id & 0xFF0000) >> 16, (track.id & 0xFF00) >> 8, track.id & 0xFF, // track_ID
  0x00, 0x00, 0x00, 0x01, // default_sample_description_index
  0x00, 0x00, 0x00, 0x00, // default_sample_duration
  0x00, 0x00, 0x00, 0x00, // default_sample_size
  0x00, 0x01, 0x00, 0x01 // default_sample_flags
  ]); // the last two bytes of default_sample_flags is the sample
  // degradation priority, a hint about the importance of this sample
  // relative to others. Lower the degradation priority for all sample
  // types other than video.

  if (track.type !== 'video') {
    result[result.length - 1] = 0x00;
  }

  return box(types.trex, result);
};
4679
4680 (function () {
4681 var audioTrun, videoTrun, trunHeader; // This method assumes all samples are uniform. That is, if a
4682 // duration is present for the first sample, it will be present for
4683 // all subsequent samples.
4684 // see ISO/IEC 14496-12:2012, Section 8.8.8.1
4685
4686 trunHeader = function trunHeader(samples, offset) {
4687 var durationPresent = 0,
4688 sizePresent = 0,
4689 flagsPresent = 0,
4690 compositionTimeOffset = 0; // trun flag constants
4691
4692 if (samples.length) {
4693 if (samples[0].duration !== undefined) {
4694 durationPresent = 0x1;
4695 }
4696
4697 if (samples[0].size !== undefined) {
4698 sizePresent = 0x2;
4699 }
4700
4701 if (samples[0].flags !== undefined) {
4702 flagsPresent = 0x4;
4703 }
4704
4705 if (samples[0].compositionTimeOffset !== undefined) {
4706 compositionTimeOffset = 0x8;
4707 }
4708 }
4709
4710 return [0x00, // version 0
4711 0x00, durationPresent | sizePresent | flagsPresent | compositionTimeOffset, 0x01, // flags
4712 (samples.length & 0xFF000000) >>> 24, (samples.length & 0xFF0000) >>> 16, (samples.length & 0xFF00) >>> 8, samples.length & 0xFF, // sample_count
4713 (offset & 0xFF000000) >>> 24, (offset & 0xFF0000) >>> 16, (offset & 0xFF00) >>> 8, offset & 0xFF // data_offset
4714 ];
4715 };
4716
/**
 * Generate a trun (track fragment run) box payload for a video track.
 *
 * Writes sample_duration, sample_size, sample_flags and
 * sample_composition_time_offset (16 bytes) per sample.
 *
 * @param {Object} track - track metadata; `track.samples` holds per-sample
 *   duration, size, flags and compositionTimeOffset entries
 * @param {Number} offset - byte offset from the enclosing moof to the first
 *   byte of sample data; the size of the trun itself (8-byte box header +
 *   12-byte trun header + 16 bytes per sample) is added here
 * @return {Uint8Array} the serialized trun box
 */
videoTrun = function videoTrun(track, offset) {
  var bytesOffest, bytes, header, samples, sample, i;
  samples = track.samples || [];
  offset += 8 + 12 + 16 * samples.length;
  header = trunHeader(samples, offset);
  bytes = new Uint8Array(header.length + samples.length * 16);
  bytes.set(header);
  bytesOffest = header.length;

  for (i = 0; i < samples.length; i++) {
    sample = samples[i]; // sample_duration (32-bit big-endian)

    bytes[bytesOffest++] = (sample.duration & 0xFF000000) >>> 24;
    bytes[bytesOffest++] = (sample.duration & 0xFF0000) >>> 16;
    bytes[bytesOffest++] = (sample.duration & 0xFF00) >>> 8;
    bytes[bytesOffest++] = sample.duration & 0xFF; // sample_size (32-bit big-endian)

    bytes[bytesOffest++] = (sample.size & 0xFF000000) >>> 24;
    bytes[bytesOffest++] = (sample.size & 0xFF0000) >>> 16;
    bytes[bytesOffest++] = (sample.size & 0xFF00) >>> 8;
    bytes[bytesOffest++] = sample.size & 0xFF; // sample_flags

    bytes[bytesOffest++] = sample.flags.isLeading << 2 | sample.flags.dependsOn;
    bytes[bytesOffest++] = sample.flags.isDependedOn << 6 | sample.flags.hasRedundancy << 4 | sample.flags.paddingValue << 1 | sample.flags.isNonSyncSample;
    // degradation_priority is a 16-bit field. The previous expression
    // `dp & 0xF0 << 8` always produced 0 for the high byte (`<<` binds
    // tighter than `&`, and the Uint8Array store truncates 0xF000-masked
    // values to 0) and `dp & 0x0F` dropped bits 4-7 of the low byte;
    // serialize the full 16-bit value instead.
    bytes[bytesOffest++] = (sample.flags.degradationPriority & 0xFF00) >>> 8;
    bytes[bytesOffest++] = sample.flags.degradationPriority & 0xFF; // sample_flags (degradation_priority)

    bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF000000) >>> 24;
    bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF0000) >>> 16;
    bytes[bytesOffest++] = (sample.compositionTimeOffset & 0xFF00) >>> 8;
    bytes[bytesOffest++] = sample.compositionTimeOffset & 0xFF; // sample_composition_time_offset
  }

  return box(types.trun, bytes);
};
4751
/**
 * Generate a trun (track fragment run) box payload for an audio track.
 * Audio runs carry only sample_duration and sample_size (8 bytes/sample).
 *
 * @param {Object} track - track metadata; `track.samples` holds per-sample
 *   duration and size entries
 * @param {Number} offset - byte offset from the enclosing moof to the first
 *   byte of sample data
 * @return {Uint8Array} the serialized trun box
 */
audioTrun = function audioTrun(track, offset) {
  var samples = track.samples || [];
  // 8 (box header) + 12 (trun header) + 8 bytes per sample
  var header = trunHeader(samples, offset + (8 + 12 + 8 * samples.length));
  var bytes = new Uint8Array(header.length + samples.length * 8);
  var cursor = header.length;
  var sample;
  var i;

  bytes.set(header);

  for (i = 0; i < samples.length; i++) {
    sample = samples[i];
    // sample_duration (32-bit big-endian)
    bytes[cursor] = (sample.duration >>> 24) & 0xFF;
    bytes[cursor + 1] = (sample.duration >>> 16) & 0xFF;
    bytes[cursor + 2] = (sample.duration >>> 8) & 0xFF;
    bytes[cursor + 3] = sample.duration & 0xFF;
    // sample_size (32-bit big-endian)
    bytes[cursor + 4] = (sample.size >>> 24) & 0xFF;
    bytes[cursor + 5] = (sample.size >>> 16) & 0xFF;
    bytes[cursor + 6] = (sample.size >>> 8) & 0xFF;
    bytes[cursor + 7] = sample.size & 0xFF;
    cursor += 8;
  }

  return box(types.trun, bytes);
};
4776
// Dispatch to the audio or video trun serializer based on track type.
trun$1 = function trun(track, offset) {
  return track.type === 'audio' ? audioTrun(track, offset) : videoTrun(track, offset);
};
4784 })();
4785
/**
 * Public mp4 box-generation API: individual box builders plus a helper that
 * concatenates an ftyp box and a moov box into a complete init segment.
 */
var mp4Generator = {
  ftyp: ftyp,
  mdat: mdat,
  moof: moof,
  moov: moov,
  initSegment: function initSegment(tracks) {
    var fileType = ftyp();
    var movie = moov(tracks);
    var result = new Uint8Array(fileType.byteLength + movie.byteLength);

    result.set(fileType);
    result.set(movie, fileType.byteLength);
    return result;
  }
};
4801 /**
4802 * mux.js
4803 *
4804 * Copyright (c) Brightcove
4805 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
4806 */
4807 // Convert an array of nal units into an array of frames with each frame being
4808 // composed of the nal units that make up that frame
4809 // Also keep track of cummulative data about the frame from the nal units such
4810 // as the frame duration, starting pts, etc.
4811
/**
 * Convert an array of nal units into an array of frames, splitting on
 * 'aud'-type (access unit delimiter) nal units. Running totals
 * (byteLength, nalCount, duration) are tracked as properties on the
 * returned array and on each frame array.
 *
 * @param {Array} nalUnits - parsed H.264 nal units with nalUnitType,
 *   data, pts and dts
 * @return {Array} array of frames with aggregate metadata attached
 */
var groupNalsIntoFrames = function groupNalsIntoFrames(nalUnits) {
  var frames = [];
  var currentFrame = [];
  var nalUnit;
  var idx; // TODO added for LHLS, make sure this is OK

  frames.byteLength = 0;
  frames.nalCount = 0;
  frames.duration = 0;
  currentFrame.byteLength = 0;

  for (idx = 0; idx < nalUnits.length; idx++) {
    nalUnit = nalUnits[idx];

    if (nalUnit.nalUnitType !== 'access_unit_delimiter_rbsp') {
      // Non-AUD nal units accumulate onto the frame in progress.
      // Specifically flag key frames for ease of use later.
      if (nalUnit.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
        currentFrame.keyFrame = true;
      }

      currentFrame.duration = nalUnit.dts - currentFrame.dts;
      currentFrame.byteLength += nalUnit.data.byteLength;
      currentFrame.push(nalUnit);
      continue;
    }

    // An AUD closes the previous frame. The very first nal unit is expected
    // to be an AUD, so only flush when currentFrame is non-empty.
    if (currentFrame.length) {
      currentFrame.duration = nalUnit.dts - currentFrame.dts; // TODO added for LHLS, make sure this is OK

      frames.byteLength += currentFrame.byteLength;
      frames.nalCount += currentFrame.length;
      frames.duration += currentFrame.duration;
      frames.push(currentFrame);
    }

    currentFrame = [nalUnit];
    currentFrame.byteLength = nalUnit.data.byteLength;
    currentFrame.pts = nalUnit.pts;
    currentFrame.dts = nalUnit.dts;
  }

  // The final frame has no trailing AUD to establish its duration; borrow
  // the previous frame's duration when we have nothing better to go on.
  if (frames.length && (!currentFrame.duration || currentFrame.duration <= 0)) {
    currentFrame.duration = frames[frames.length - 1].duration;
  }

  // Push the final frame. TODO added for LHLS, make sure this is OK.
  frames.byteLength += currentFrame.byteLength;
  frames.nalCount += currentFrame.length;
  frames.duration += currentFrame.duration;
  frames.push(currentFrame);
  return frames;
}; // Convert an array of frames into an array of Gop with each Gop being composed
4867 }; // Convert an array of frames into an array of Gop with each Gop being composed
4868 // of the frames that make up that Gop
4869 // Also keep track of cummulative data about the Gop from the frames such as the
4870 // Gop duration, starting pts, etc.
4871
4872
/**
 * Convert an array of frames into an array of GOPs, splitting on key
 * frames. Aggregate byteLength/nalCount/duration/pts/dts are tracked as
 * properties on the returned array and on each GOP array.
 *
 * @param {Array} frames - frames produced by groupNalsIntoFrames
 * @return {Array} array of GOPs with aggregate metadata attached
 */
var groupFramesIntoGops = function groupFramesIntoGops(frames) {
  var gops = [];
  var currentGop = [];
  var frame;
  var idx;

  // Pre-seed the running totals kept on the GOP itself.
  currentGop.byteLength = 0;
  currentGop.nalCount = 0;
  currentGop.duration = 0;
  currentGop.pts = frames[0].pts;
  currentGop.dts = frames[0].dts;

  // And the metadata about all the GOPs combined.
  gops.byteLength = 0;
  gops.nalCount = 0;
  gops.duration = 0;
  gops.pts = frames[0].pts;
  gops.dts = frames[0].dts;

  for (idx = 0; idx < frames.length; idx++) {
    frame = frames[idx];

    if (!frame.keyFrame) {
      // Non-keyframes extend the GOP in progress.
      currentGop.duration += frame.duration;
      currentGop.nalCount += frame.length;
      currentGop.byteLength += frame.byteLength;
      currentGop.push(frame);
      continue;
    }

    // A keyframe starts a new GOP. The very first frame is expected to be
    // a keyframe, so only flush when currentGop is non-empty.
    if (currentGop.length) {
      gops.push(currentGop);
      gops.byteLength += currentGop.byteLength;
      gops.nalCount += currentGop.nalCount;
      gops.duration += currentGop.duration;
    }

    currentGop = [frame];
    currentGop.nalCount = frame.length;
    currentGop.byteLength = frame.byteLength;
    currentGop.pts = frame.pts;
    currentGop.dts = frame.dts;
    currentGop.duration = frame.duration;
  }

  // Give a zero/negative-duration final GOP the previous GOP's duration.
  if (gops.length && currentGop.duration <= 0) {
    currentGop.duration = gops[gops.length - 1].duration;
  }

  gops.byteLength += currentGop.byteLength;
  gops.nalCount += currentGop.nalCount;
  gops.duration += currentGop.duration; // push the final Gop

  gops.push(currentGop);
  return gops;
};
4930 /*
4931 * Search for the first keyframe in the GOPs and throw away all frames
4932 * until that keyframe. Then extend the duration of the pulled keyframe
4933 * and pull the PTS and DTS of the keyframe so that it covers the time
4934 * range of the frames that were disposed.
4935 *
4936 * @param {Array} gops video GOPs
4937 * @returns {Array} modified video GOPs
4938 */
4939
4940
/*
 * Search for the first keyframe in the GOPs and throw away all frames
 * until that keyframe. Then extend the duration of the pulled keyframe
 * and pull the PTS and DTS of the keyframe so that it covers the time
 * range of the frames that were disposed.
 *
 * @param {Array} gops video GOPs
 * @returns {Array} modified video GOPs (mutated in place)
 */
var extendFirstKeyFrame = function extendFirstKeyFrame(gops) {
  // Nothing to do when the first GOP already opens on a keyframe, or when
  // there is no later GOP to promote.
  if (gops.length > 1 && !gops[0][0].keyFrame) {
    var droppedGop = gops.shift();

    gops.byteLength -= droppedGop.byteLength;
    gops.nalCount -= droppedGop.nalCount;
    // Stretch the new leading frame backwards over the period covered by
    // the frames we just removed.
    gops[0][0].dts = droppedGop.dts;
    gops[0][0].pts = droppedGop.pts;
    gops[0][0].duration += droppedGop.duration;
  }

  return gops;
};
4959 /**
4960 * Default sample object
4961 * see ISO/IEC 14496-12:2012, section 8.6.4.3
4962 */
4963
4964
/**
 * Build a default sample object.
 * see ISO/IEC 14496-12:2012, section 8.6.4.3
 *
 * @return {Object} a fresh sample with zeroed size and default flags
 *   (non-sync, depends on another sample)
 */
var createDefaultSample = function createDefaultSample() {
  var sample = {
    size: 0,
    flags: {
      isLeading: 0,
      dependsOn: 1,
      isDependedOn: 0,
      hasRedundancy: 0,
      degradationPriority: 0,
      isNonSyncSample: 1
    }
  };

  return sample;
};
4978 /*
4979 * Collates information from a video frame into an object for eventual
4980 * entry into an MP4 sample table.
4981 *
4982 * @param {Object} frame the video frame
4983 * @param {Number} dataOffset the byte offset to position the sample
4984 * @return {Object} object containing sample table info for a frame
4985 */
4986
4987
/*
 * Collates information from a video frame into an object for eventual
 * entry into an MP4 sample table.
 *
 * @param {Object} frame the video frame
 * @param {Number} dataOffset the byte offset to position the sample
 * @return {Object} object containing sample table info for a frame
 */
var sampleForFrame = function sampleForFrame(frame, dataOffset) {
  var sample = createDefaultSample();

  sample.dataOffset = dataOffset;
  sample.compositionTimeOffset = frame.pts - frame.dts;
  sample.duration = frame.duration;
  // 4-byte nal unit length prefix per nal, plus the nal payload bytes
  sample.size = 4 * frame.length + frame.byteLength;

  if (frame.keyFrame) {
    // key frames are sync samples that depend on nothing else
    sample.flags.dependsOn = 2;
    sample.flags.isNonSyncSample = 0;
  }

  return sample;
}; // generate the track's sample table from an array of gops
5004
5005
/**
 * Generate the track's sample table from an array of GOPs.
 *
 * @param {Array} gops - GOPs of frames
 * @param {Number} [baseDataOffset] - starting byte offset (defaults to 0)
 * @return {Array} sample table entries, one per frame
 */
var generateSampleTable$1 = function generateSampleTable(gops, baseDataOffset) {
  var samples = [];
  var dataOffset = baseDataOffset || 0;

  gops.forEach(function (gop) {
    gop.forEach(function (frame) {
      var sample = sampleForFrame(frame, dataOffset);

      dataOffset += sample.size;
      samples.push(sample);
    });
  });
  return samples;
}; // generate the track's raw mdat data from an array of gops
5028
5029
/**
 * Concatenate the nal unit payloads from every GOP into a single buffer,
 * prefixing each nal with its 4-byte big-endian length (mdat payload).
 *
 * @param {Array} gops - GOPs with `byteLength` and `nalCount` totals attached
 * @return {Uint8Array} length-prefixed nal data
 */
var concatenateNalData = function concatenateNalData(gops) {
  var nalsByteLength = gops.byteLength;
  var numberOfNals = gops.nalCount;
  // 4 extra bytes per nal for the length prefix
  var data = new Uint8Array(nalsByteLength + 4 * numberOfNals);
  var view = new DataView(data.buffer);
  var dataOffset = 0;

  gops.forEach(function (gop) {
    gop.forEach(function (frame) {
      frame.forEach(function (nalUnit) {
        view.setUint32(dataOffset, nalUnit.data.byteLength);
        dataOffset += 4;
        data.set(nalUnit.data, dataOffset);
        dataOffset += nalUnit.data.byteLength;
      });
    });
  });
  return data;
}; // generate the track's sample table from a frame
5062
5063
/**
 * Single-frame variant of generateSampleTable.
 *
 * @param {Object} frame - the video frame
 * @param {Number} [baseDataOffset] - starting byte offset (defaults to 0)
 * @return {Array} a one-element sample table
 */
var generateSampleTableForFrame = function generateSampleTableForFrame(frame, baseDataOffset) {
  return [sampleForFrame(frame, baseDataOffset || 0)];
}; // generate the track's raw mdat data from a frame
5072
5073
/**
 * Single-frame variant of concatenateNalData: concatenate one frame's nal
 * units, each prefixed with its 4-byte big-endian length.
 *
 * @param {Object} frame - frame (array of nal units) with `byteLength` attached
 * @return {Uint8Array} length-prefixed nal data
 */
var concatenateNalDataForFrame = function concatenateNalDataForFrame(frame) {
  var totalByteLength = frame.byteLength + 4 * frame.length;
  var data = new Uint8Array(totalByteLength);
  var view = new DataView(data.buffer);
  var dataOffset = 0;

  frame.forEach(function (nalUnit) {
    view.setUint32(dataOffset, nalUnit.data.byteLength);
    dataOffset += 4;
    data.set(nalUnit.data, dataOffset);
    dataOffset += nalUnit.data.byteLength;
  });
  return data;
};
5094
// Exported H.264 frame/GOP helpers defined above.
var frameUtils = {
  groupNalsIntoFrames: groupNalsIntoFrames,
  groupFramesIntoGops: groupFramesIntoGops,
  extendFirstKeyFrame: extendFirstKeyFrame,
  generateSampleTable: generateSampleTable$1,
  concatenateNalData: concatenateNalData,
  generateSampleTableForFrame: generateSampleTableForFrame,
  concatenateNalDataForFrame: concatenateNalDataForFrame
};
5104 /**
5105 * mux.js
5106 *
5107 * Copyright (c) Brightcove
5108 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5109 */
5110
// Byte prefixes shared by the pre-generated silent AAC frame table below.
var highPrefix = [33, 16, 5, 32, 164, 27];
var lowPrefix = [33, 65, 108, 84, 1, 2, 4, 8, 168, 2, 4, 8, 17, 191, 252];

// Build an array of `count` zeros.
var zeroFill = function zeroFill(count) {
  var filled = [];
  var remaining;

  for (remaining = count; remaining--;) {
    filled.push(0);
  }

  return filled;
};
5123
// Flatten each entry of `metaTable` (an object of arrays-of-arrays) into a
// Uint8Array, keyed the same way.
var makeTable = function makeTable(metaTable) {
  var table = {};

  Object.keys(metaTable).forEach(function (key) {
    var flattened = [];

    metaTable[key].forEach(function (part) {
      flattened = flattened.concat(part);
    });
    table[key] = new Uint8Array(flattened);
  });
  return table;
};
5132
// Lazily-built table of silent AAC frames, keyed by sample rate.
var silence;

var silence_1 = function silence_1() {
  if (silence) {
    return silence;
  }

  // Frames-of-silence to use for filling in missing AAC frames
  var coneOfSilence = {
    96000: [highPrefix, [227, 64], zeroFill(154), [56]],
    88200: [highPrefix, [231], zeroFill(170), [56]],
    64000: [highPrefix, [248, 192], zeroFill(240), [56]],
    48000: [highPrefix, [255, 192], zeroFill(268), [55, 148, 128], zeroFill(54), [112]],
    44100: [highPrefix, [255, 192], zeroFill(268), [55, 163, 128], zeroFill(84), [112]],
    32000: [highPrefix, [255, 192], zeroFill(268), [55, 234], zeroFill(226), [112]],
    24000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 112], zeroFill(126), [224]],
    16000: [highPrefix, [255, 192], zeroFill(268), [55, 255, 128], zeroFill(268), [111, 255], zeroFill(269), [223, 108], zeroFill(195), [1, 192]],
    12000: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 253, 128], zeroFill(259), [56]],
    11025: [lowPrefix, zeroFill(268), [3, 127, 248], zeroFill(268), [6, 255, 240], zeroFill(268), [13, 255, 224], zeroFill(268), [27, 255, 192], zeroFill(268), [55, 175, 128], zeroFill(108), [112]],
    8000: [lowPrefix, zeroFill(268), [3, 121, 16], zeroFill(47), [7]]
  };

  silence = makeTable(coneOfSilence);
  return silence;
};
5156 /**
5157 * mux.js
5158 *
5159 * Copyright (c) Brightcove
5160 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5161 */
5162
5163
// Clock utilities: conversions between seconds, the 90kHz MPEG-TS video
// clock, and the sample-rate based audio clock.
var ONE_SECOND_IN_TS$4 = 90000; // 90kHz clock

// seconds -> 90kHz ticks
var secondsToVideoTs = function secondsToVideoTs(seconds) {
  return seconds * ONE_SECOND_IN_TS$4;
};

// seconds -> audio samples at the given sample rate
var secondsToAudioTs = function secondsToAudioTs(seconds, sampleRate) {
  return seconds * sampleRate;
};

// 90kHz ticks -> seconds
var videoTsToSeconds = function videoTsToSeconds(timestamp) {
  return timestamp / ONE_SECOND_IN_TS$4;
};

// audio samples -> seconds
var audioTsToSeconds = function audioTsToSeconds(timestamp, sampleRate) {
  return timestamp / sampleRate;
};

// audio samples -> 90kHz ticks
var audioTsToVideoTs = function audioTsToVideoTs(timestamp, sampleRate) {
  return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
};

// 90kHz ticks -> audio samples
var videoTsToAudioTs = function videoTsToAudioTs(timestamp, sampleRate) {
  return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
};

/**
 * Adjust ID3 tag or caption timing information by the timeline pts values
 * (if keepOriginalTimestamps is false) and convert to seconds
 */
var metadataTsToSeconds = function metadataTsToSeconds(timestamp, timelineStartPts, keepOriginalTimestamps) {
  return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
};

var clock = {
  ONE_SECOND_IN_TS: ONE_SECOND_IN_TS$4,
  secondsToVideoTs: secondsToVideoTs,
  secondsToAudioTs: secondsToAudioTs,
  videoTsToSeconds: videoTsToSeconds,
  audioTsToSeconds: audioTsToSeconds,
  audioTsToVideoTs: audioTsToVideoTs,
  videoTsToAudioTs: videoTsToAudioTs,
  metadataTsToSeconds: metadataTsToSeconds
};
5217 /**
5218 * mux.js
5219 *
5220 * Copyright (c) Brightcove
5221 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5222 */
5223
5224 /**
5225 * Sum the `byteLength` properties of the data in each AAC frame
5226 */
5227
/**
 * Sum the `data.byteLength` properties of each AAC frame in `array`.
 *
 * @param {Array} array - frames with a `data` buffer
 * @return {Number} total byte length
 */
var sumFrameByteLengths = function sumFrameByteLengths(array) {
  var total = 0;
  var j;

  for (j = 0; j < array.length; j++) {
    total += array[j].data.byteLength;
  }

  return total;
}; // Possibly pad (prefix) the audio track with silence if appending this track
5240 // would lead to the introduction of a gap in the audio buffer
5241
5242
/**
 * Possibly pad (prefix) the audio track with silent AAC frames so that
 * appending this segment does not introduce a gap in the audio buffer.
 *
 * Mutates `frames` (silent frames spliced onto the front) and decrements
 * `track.baseMediaDecodeTime` by the inserted duration (in audio-clock
 * ticks).
 *
 * @param {Object} track - audio track; reads `baseMediaDecodeTime` and
 *   `samplerate`
 * @param {Array} frames - parsed AAC frames with data/dts/pts
 * @param {Number} audioAppendStartTs - where the audio append starts
 *   (90kHz ticks; falsy disables gap calculation)
 * @param {Number} videoBaseMediaDecodeTime - video track's
 *   baseMediaDecodeTime (90kHz ticks; falsy disables gap calculation)
 * @return {Number|undefined} duration of inserted silence in 90kHz ticks,
 *   or undefined when nothing was inserted
 */
var prefixWithSilence = function prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime) {
  var baseMediaDecodeTimeTs,
      frameDuration = 0,
      audioGapDuration = 0,
      audioFillFrameCount = 0,
      audioFillDuration = 0,
      silentFrame,
      i,
      firstFrame;

  // nothing to pad in front of
  if (!frames.length) {
    return;
  }

  baseMediaDecodeTimeTs = clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate); // determine frame clock duration based on sample rate, round up to avoid overfills

  // each AAC frame holds 1024 samples
  frameDuration = Math.ceil(clock.ONE_SECOND_IN_TS / (track.samplerate / 1024));

  if (audioAppendStartTs && videoBaseMediaDecodeTime) {
    // insert the shortest possible amount (audio gap or audio to video gap)
    audioGapDuration = baseMediaDecodeTimeTs - Math.max(audioAppendStartTs, videoBaseMediaDecodeTime); // number of full frames in the audio gap

    audioFillFrameCount = Math.floor(audioGapDuration / frameDuration);
    audioFillDuration = audioFillFrameCount * frameDuration;
  } // don't attempt to fill gaps smaller than a single frame or larger
  // than a half second


  if (audioFillFrameCount < 1 || audioFillDuration > clock.ONE_SECOND_IN_TS / 2) {
    return;
  }

  silentFrame = silence_1()[track.samplerate];

  if (!silentFrame) {
    // we don't have a silent frame pregenerated for the sample rate, so use a frame
    // from the content instead
    silentFrame = frames[0].data;
  }

  // splice one silent frame at a time onto the front, each one frame
  // duration earlier than the current first frame
  for (i = 0; i < audioFillFrameCount; i++) {
    firstFrame = frames[0];
    frames.splice(0, 0, {
      data: silentFrame,
      dts: firstFrame.dts - frameDuration,
      pts: firstFrame.pts - frameDuration
    });
  }

  // rebase the track start to account for the silence we just inserted
  track.baseMediaDecodeTime -= Math.floor(clock.videoTsToAudioTs(audioFillDuration, track.samplerate));
  return audioFillDuration;
}; // If the audio segment extends before the earliest allowed dts
5295 // value, remove AAC frames until starts at or after the earliest
5296 // allowed DTS so that we don't end up with a negative baseMedia-
5297 // DecodeTime for the audio track
5298
5299
/**
 * If the audio segment extends before the earliest allowed dts value,
 * remove AAC frames so the segment starts at or after it; this prevents a
 * negative baseMediaDecodeTime for the audio track. Updates
 * `track.minSegmentDts`/`minSegmentPts` when frames are dropped.
 *
 * @param {Array} adtsFrames - parsed AAC frames with a dts
 * @param {Object} track - audio track metadata
 * @param {Number} earliestAllowedDts - cutoff dts
 * @return {Array} the (possibly filtered) frame list
 */
var trimAdtsFramesByEarliestDts = function trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts) {
  // nothing starts too early; keep the frame list untouched
  if (track.minSegmentDts >= earliestAllowedDts) {
    return adtsFrames;
  }

  // the earliest dts will change once frames are dropped, so recompute it
  // while filtering
  track.minSegmentDts = Infinity;
  return adtsFrames.filter(function (currentFrame) {
    if (currentFrame.dts < earliestAllowedDts) {
      // too early; discard
      return false;
    }

    track.minSegmentDts = Math.min(track.minSegmentDts, currentFrame.dts);
    track.minSegmentPts = track.minSegmentDts;
    return true;
  });
}; // generate the track's raw mdat data from an array of frames
5319
5320
/**
 * Generate the audio track's sample table from an array of AAC frames.
 *
 * @param {Array} frames - frames with a `data` buffer
 * @return {Array} one {size, duration} entry per frame
 */
var generateSampleTable = function generateSampleTable(frames) {
  return frames.map(function (currentFrame) {
    return {
      size: currentFrame.data.byteLength,
      duration: 1024 // For AAC audio, all samples contain 1024 samples
    };
  });
}; // generate the track's sample table from an array of frames
5337
5338
/**
 * Concatenate the data buffers of all frames into a single Uint8Array
 * (the audio mdat payload).
 *
 * @param {Array} frames - frames with a `data` buffer
 * @return {Uint8Array} concatenated frame data
 */
var concatenateFrameData = function concatenateFrameData(frames) {
  var data = new Uint8Array(sumFrameByteLengths(frames));
  var dataOffset = 0;

  frames.forEach(function (currentFrame) {
    data.set(currentFrame.data, dataOffset);
    dataOffset += currentFrame.data.byteLength;
  });
  return data;
};
5353
// Exported AAC frame helpers defined above.
var audioFrameUtils = {
  prefixWithSilence: prefixWithSilence,
  trimAdtsFramesByEarliestDts: trimAdtsFramesByEarliestDts,
  generateSampleTable: generateSampleTable,
  concatenateFrameData: concatenateFrameData
};
5360 /**
5361 * mux.js
5362 *
5363 * Copyright (c) Brightcove
5364 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5365 */
5366
5367 var ONE_SECOND_IN_TS$3 = clock.ONE_SECOND_IN_TS;
5368 /**
5369 * Store information about the start and end of the track and the
5370 * duration for each frame/sample we process in order to calculate
5371 * the baseMediaDecodeTime
5372 */
5373
/**
 * Store information about the start and end of the track and the
 * duration for each frame/sample we process in order to calculate
 * the baseMediaDecodeTime. Updates `timelineStartInfo.pts/dts` (first
 * value seen) and the running min/max segment pts/dts on `track`.
 *
 * @param {Object} track - track metadata (mutated)
 * @param {Object} data - a frame/sample with optional numeric pts and dts
 */
var collectDtsInfo = function collectDtsInfo(track, data) {
  var pts = data.pts;
  var dts = data.dts;

  if (typeof pts === 'number') {
    if (track.timelineStartInfo.pts === undefined) {
      track.timelineStartInfo.pts = pts;
    }

    track.minSegmentPts = track.minSegmentPts === undefined ? pts : Math.min(track.minSegmentPts, pts);
    track.maxSegmentPts = track.maxSegmentPts === undefined ? pts : Math.max(track.maxSegmentPts, pts);
  }

  if (typeof dts === 'number') {
    if (track.timelineStartInfo.dts === undefined) {
      track.timelineStartInfo.dts = dts;
    }

    track.minSegmentDts = track.minSegmentDts === undefined ? dts : Math.min(track.minSegmentDts, dts);
    track.maxSegmentDts = track.maxSegmentDts === undefined ? dts : Math.max(track.maxSegmentDts, dts);
  }
};
5411 /**
5412 * Clear values used to calculate the baseMediaDecodeTime between
5413 * tracks
5414 */
5415
5416
/**
 * Clear values used to calculate the baseMediaDecodeTime between
 * tracks.
 *
 * @param {Object} track - track metadata (mutated)
 */
var clearDtsInfo = function clearDtsInfo(track) {
  ['minSegmentDts', 'maxSegmentDts', 'minSegmentPts', 'maxSegmentPts'].forEach(function (prop) {
    delete track[prop];
  });
};
5423 /**
5424 * Calculate the track's baseMediaDecodeTime based on the earliest
5425 * DTS the transmuxer has ever seen and the minimum DTS for the
5426 * current track
5427 * @param track {object} track metadata configuration
5428 * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
5429 * in the source; false to adjust the first segment to start at 0.
5430 */
5431
5432
/**
 * Calculate the track's baseMediaDecodeTime based on the earliest
 * DTS the transmuxer has ever seen and the minimum DTS for the
 * current track.
 * @param track {object} track metadata configuration
 * @param keepOriginalTimestamps {boolean} If true, keep the timestamps
 *        in the source; false to adjust the first segment to start at 0.
 * @return {Number} the base media decode time, in the track's clock
 */
var calculateTrackBaseMediaDecodeTime = function calculateTrackBaseMediaDecodeTime(track, keepOriginalTimestamps) {
  var minSegmentDts = track.minSegmentDts;
  var baseMediaDecodeTime;
  var scale;

  // Optionally rebase so the very first segment starts at time zero.
  if (!keepOriginalTimestamps) {
    minSegmentDts -= track.timelineStartInfo.dts;
  }

  // Start from where the first segment should be placed, add this
  // segment's distance from the very first one, and never go negative.
  baseMediaDecodeTime = track.timelineStartInfo.baseMediaDecodeTime + minSegmentDts;
  baseMediaDecodeTime = Math.max(0, baseMediaDecodeTime);

  if (track.type === 'audio') {
    // Audio runs on a clock equal to the sampling rate, so rescale from
    // the 90kHz TS clock into the track's clock.
    scale = track.samplerate / ONE_SECOND_IN_TS$3;
    baseMediaDecodeTime *= scale;
    baseMediaDecodeTime = Math.floor(baseMediaDecodeTime);
  }

  return baseMediaDecodeTime;
};
5460
// Exported baseMediaDecodeTime bookkeeping helpers defined above.
var trackDecodeInfo = {
  clearDtsInfo: clearDtsInfo,
  calculateTrackBaseMediaDecodeTime: calculateTrackBaseMediaDecodeTime,
  collectDtsInfo: collectDtsInfo
};
5466 /**
5467 * mux.js
5468 *
5469 * Copyright (c) Brightcove
5470 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
5471 *
5472 * Reads in-band caption information from a video elementary
5473 * stream. Captions must follow the CEA-708 standard for injection
5474 * into an MPEG-2 transport streams.
5475 * @see https://en.wikipedia.org/wiki/CEA-708
5476 * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
5477 */
5478 // payload type field to indicate how they are to be
5479 // interpreted. CEAS-708 caption content is always transmitted with
5480 // payload type 0x04.
5481
// SEI payload type for ITU-T T.35 user data (how CEA-708 captions are
// carried), and the rbsp trailing bits marker that terminates the sei_rbsp.
var USER_DATA_REGISTERED_ITU_T_T35 = 4,
    RBSP_TRAILING_BITS = 128;
/**
 * Parse a supplemental enhancement information (SEI) NAL unit.
 * Stops parsing once a message of type ITU T T35 has been found.
 *
 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
 * @return {object} the parsed SEI payload
 * @see Rec. ITU-T H.264, 7.3.2.3.1
 */

var parseSei = function parseSei(bytes) {
  var i = 0;
  var result = {
    payloadType: -1,
    payloadSize: 0
  };
  var payloadType = 0;
  var payloadSize = 0;

  // walk the sei_rbsp, one sei_message at a time
  while (i < bytes.byteLength) {
    if (bytes[i] === RBSP_TRAILING_BITS) {
      // end of the sei_rbsp
      break;
    }

    // payload type: each leading 0xFF byte adds 255
    while (bytes[i] === 0xFF) {
      payloadType += 255;
      i++;
    }

    payloadType += bytes[i++];

    // payload size uses the same variable-length encoding
    while (bytes[i] === 0xFF) {
      payloadSize += 255;
      i++;
    }

    payloadSize += bytes[i++];

    // save the first 608/708 caption message found and stop; there can
    // only ever be one caption message in a frame's sei
    if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
      var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);

      if (userIdentifier === 'GA94') {
        result.payloadType = payloadType;
        result.payloadSize = payloadSize;
        result.payload = bytes.subarray(i, i + payloadSize);
        break;
      }

      result.payload = void 0;
    }

    // skip the payload and parse the next message
    i += payloadSize;
    payloadType = 0;
    payloadSize = 0;
  }

  return result;
}; // see ANSI/SCTE 128-1 (2013), section 8.1
5545
5546
/**
 * Extract the ATSC user_data_type_structure from a parsed SEI payload,
 * or return null when the payload is not ATSC1 caption data.
 * see ANSI/SCTE 128-1 (2013), section 8.1
 *
 * @param {Object} sei - result of parseSei with a `payload` Uint8Array
 * @return {Uint8Array|null} caption user data, minus trailing marker bits
 */
var parseUserData = function parseUserData(sei) {
  var payload = sei.payload;

  // itu_t_t35_contry_code must be 181 (United States) for captions
  if (payload[0] !== 181) {
    return null;
  }

  // itu_t_t35_provider_code should be 49 (ATSC) for captions
  if ((payload[1] << 8 | payload[2]) !== 49) {
    return null;
  }

  // the user_identifier should be "GA94" to indicate ATSC1 data
  if (String.fromCharCode(payload[3], payload[4], payload[5], payload[6]) !== 'GA94') {
    return null;
  }

  // finally, user_data_type_code should be 0x03 for caption data
  if (payload[7] !== 0x03) {
    return null;
  }

  // return the user_data_type_structure, stripping the trailing marker bits
  return payload.subarray(8, payload.length - 1);
}; // see CEA-708-D, section 4.4
5573
5574
/**
 * Split ATSC caption user data into individual cc_data packets.
 * see CEA-708-D, section 4.4
 *
 * @param {Number} pts - presentation timestamp applied to each packet
 * @param {Uint8Array} userData - user_data_type_structure bytes
 * @return {Array} packets with {type, pts, ccData}; empty for filler data
 */
var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
  var results = [];
  var count;
  var i;

  // filler data carries no captions; bail immediately
  if (!(userData[0] & 0x40)) {
    return results;
  }

  // cc_count lives in the low 5 bits
  count = userData[0] & 0x1f;

  for (i = 0; i < count; i++) {
    var base = i * 3;
    var packet = {
      type: userData[base + 2] & 0x03,
      pts: pts
    };

    // capture cc_data_1/cc_data_2 only when cc_valid is 1
    if (userData[base + 2] & 0x04) {
      packet.ccData = userData[base + 3] << 8 | userData[base + 4];
      results.push(packet);
    }
  }

  return results;
};
5604
/**
 * Remove H.264 emulation prevention bytes (the 0x03 inserted after every
 * 0x00 0x00 pair) from `data`.
 *
 * @param {Uint8Array} data - escaped RBSP bytes
 * @return {Uint8Array} unescaped bytes; the original array is returned
 *   unchanged when no emulation prevention bytes are present
 */
var discardEmulationPreventionBytes$1 = function discardEmulationPreventionBytes(data) {
  var length = data.byteLength;
  var epbPositions = [];
  var scan = 1;

  // locate every 0x03 that follows a 0x00 0x00 pair
  while (scan < length - 2) {
    if (data[scan] === 0 && data[scan + 1] === 0 && data[scan + 2] === 0x03) {
      epbPositions.push(scan + 2);
      scan += 2;
    } else {
      scan++;
    }
  }

  // nothing to strip; hand back the original array untouched
  if (epbPositions.length === 0) {
    return data;
  }

  var newLength = length - epbPositions.length;
  var newData = new Uint8Array(newLength);
  var sourceIndex = 0;
  var i;

  for (i = 0; i < newLength; sourceIndex++, i++) {
    if (sourceIndex === epbPositions[0]) {
      // skip the emulation prevention byte and retire its position
      sourceIndex++;
      epbPositions.shift();
    }

    newData[i] = data[sourceIndex];
  }

  return newData;
}; // exports
5645
5646
// Grouped caption-packet parsing helpers (SEI -> user data -> CC packets).
var captionPacketParser = {
  parseSei: parseSei,
  parseUserData: parseUserData,
  parseCaptionPackets: parseCaptionPackets,
  discardEmulationPreventionBytes: discardEmulationPreventionBytes$1,
  USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
}; // Link To Transport
5654 // -----------------
5655
/**
 * Stream that extracts CEA-608 and (optionally) CEA-708 captions from SEI
 * NAL units pushed into it, re-emitting 'data', 'partialdone' and 'done'
 * events from its child caption streams.
 *
 * @param {Object} [options]
 * @param {boolean} [options.parse708captions=true] - whether to also parse
 *   CEA-708 captions
 * @param {Object} [options.captionServices] - forwarded to the Cea708Stream
 */
var CaptionStream$1 = function CaptionStream(options) {
  options = options || {};
  CaptionStream.prototype.init.call(this); // parse708captions flag, default to true

  this.parse708captions_ = typeof options.parse708captions === 'boolean' ? options.parse708captions : true;
  this.captionPackets_ = [];
  // one CEA-608 stream per (field, channel) combination
  this.ccStreams_ = [new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
  new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
  new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
  new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
  ];

  if (this.parse708captions_) {
    this.cc708Stream_ = new Cea708Stream({
      captionServices: options.captionServices
    }); // eslint-disable-line no-use-before-define
  }

  this.reset(); // forward data and done events from CCs to this CaptionStream

  this.ccStreams_.forEach(function (cc) {
    cc.on('data', this.trigger.bind(this, 'data'));
    cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
    cc.on('done', this.trigger.bind(this, 'done'));
  }, this);

  if (this.parse708captions_) {
    this.cc708Stream_.on('data', this.trigger.bind(this, 'data'));
    this.cc708Stream_.on('partialdone', this.trigger.bind(this, 'partialdone'));
    this.cc708Stream_.on('done', this.trigger.bind(this, 'done'));
  }
};

CaptionStream$1.prototype = new stream();
5690
/**
 * Push a parsed NAL unit event into the caption stream. Only 'sei_rbsp'
 * events carrying ATSC caption user data are processed; their CC packets
 * are buffered on `this.captionPackets_` for a later flush. Duplicate
 * segment data is skipped via the latestDts_/numSameDts_ bookkeeping.
 *
 * @param {Object} event - NAL unit with nalUnitType, escapedRBSP, pts, dts
 */
CaptionStream$1.prototype.push = function (event) {
  var sei, userData, newCaptionPackets; // only examine SEI NALs

  if (event.nalUnitType !== 'sei_rbsp') {
    return;
  } // parse the sei


  sei = captionPacketParser.parseSei(event.escapedRBSP); // no payload data, skip

  if (!sei.payload) {
    return;
  } // ignore everything but user_data_registered_itu_t_t35


  if (sei.payloadType !== captionPacketParser.USER_DATA_REGISTERED_ITU_T_T35) {
    return;
  } // parse out the user data payload


  userData = captionPacketParser.parseUserData(sei); // ignore unrecognized userData

  if (!userData) {
    return;
  } // Sometimes, the same segment # will be downloaded twice. To stop the
  // caption data from being processed twice, we track the latest dts we've
  // received and ignore everything with a dts before that. However, since
  // data for a specific dts can be split across packets on either side of
  // a segment boundary, we need to make sure we *don't* ignore the packets
  // from the *next* segment that have dts === this.latestDts_. By constantly
  // tracking the number of packets received with dts === this.latestDts_, we
  // know how many should be ignored once we start receiving duplicates.


  if (event.dts < this.latestDts_) {
    // We've started getting older data, so set the flag.
    this.ignoreNextEqualDts_ = true;
    return;
  } else if (event.dts === this.latestDts_ && this.ignoreNextEqualDts_) {
    this.numSameDts_--;

    if (!this.numSameDts_) {
      // We've received the last duplicate packet, time to start processing again
      this.ignoreNextEqualDts_ = false;
    }

    return;
  } // parse out CC data packets and save them for later


  newCaptionPackets = captionPacketParser.parseCaptionPackets(event.pts, userData);
  this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);

  // track how many consecutive events shared the newest dts so the
  // duplicate-segment logic above knows how many to skip
  if (this.latestDts_ !== event.dts) {
    this.numSameDts_ = 0;
  }

  this.numSameDts_++;
  this.latestDts_ = event.dts;
};
5751
// Fan a flush (full or partial) out to each of the four CEA-608 streams.
CaptionStream$1.prototype.flushCCStreams = function (flushType) {
  this.ccStreams_.forEach(function (cc) {
    if (flushType === 'flush') {
      cc.flush();
    } else {
      cc.partialFlush();
    }
  }, this);
};
5757
// Sort the buffered caption byte pairs by PTS, dispatch them to the
// appropriate 608/708 streams, then flush those streams.
CaptionStream$1.prototype.flushStream = function (flushType) {
  // Nothing buffered: still propagate the flush downstream
  if (!this.captionPackets_.length) {
    this.flushCCStreams(flushType);
    return;
  }

  // Tag each packet with its arrival order so the sort below is stable
  // even on engines whose Array#sort is not (e.g. older Chrome)
  this.captionPackets_.forEach(function (packet, idx) {
    packet.presortIndex = idx;
  });

  // Order byte pairs by PTS, falling back to arrival order on ties
  this.captionPackets_.sort(function (a, b) {
    return a.pts === b.pts ? a.presortIndex - b.presortIndex : a.pts - b.pts;
  });

  // Types 0/1 are CEA-608 field packets; everything else goes to 708
  this.captionPackets_.forEach(function (packet) {
    if (packet.type < 2) {
      this.dispatchCea608Packet(packet);
    } else {
      this.dispatchCea708Packet(packet);
    }
  }, this);

  this.captionPackets_.length = 0;
  this.flushCCStreams(flushType);
};
5790
// Fully flush all buffered caption data downstream.
CaptionStream$1.prototype.flush = function () {
  return this.flushStream('flush');
}; // Only called if handling partial data


// Partial flush: emits what is available without finalizing in-progress state.
CaptionStream$1.prototype.partialFlush = function () {
  return this.flushStream('partialFlush');
};
5799
// Return the caption stream to its initial state: clear the duplicate-segment
// tracking fields and reset every CEA-608 sub-stream.
CaptionStream$1.prototype.reset = function () {
  this.latestDts_ = null;
  this.ignoreNextEqualDts_ = false;
  this.numSameDts_ = 0;
  // No active data channel for either 608 field until a control code selects one
  this.activeCea608Channel_ = [null, null];
  this.ccStreams_.forEach(function (ccStream) {
    ccStream.reset();
  });
}; // From the CEA-608 spec:
5809
5810 /*
5811 * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
5812 * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
5813 * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
5814 * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
5815 * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
5816 * to switch to captioning or Text.
5817 */
5818 // With that in mind, we ignore any data between an XDS control code and a
5819 // subsequent closed-captioning control code.
5820
5821
// Route a CEA-608 byte pair to the stream for its field/channel, updating
// the active channel when the pair is a channel-select or Text/XDS code.
CaptionStream$1.prototype.dispatchCea608Packet = function (packet) {
  // NOTE: packet.type is the CEA608 field
  var field = packet.type;

  if (this.setsTextOrXDSActive(packet)) {
    // Text/XDS data: deactivate the channel so subsequent bytes are dropped
    this.activeCea608Channel_[field] = null;
  } else if (this.setsChannel1Active(packet)) {
    this.activeCea608Channel_[field] = 0;
  } else if (this.setsChannel2Active(packet)) {
    this.activeCea608Channel_[field] = 1;
  }

  var channel = this.activeCea608Channel_[field];

  if (channel === null) {
    // If we haven't received anything to set the active channel, or the
    // packets are Text/XDS data, discard the data; we don't want jumbled
    // captions
    return;
  }

  // Streams are laid out [field0ch0, field0ch1, field1ch0, field1ch1]
  this.ccStreams_[(field << 1) + channel].push(packet);
};
5841
// True when the byte pair is a control code selecting data channel 1
// (masked control-code bits equal 0x1000).
CaptionStream$1.prototype.setsChannel1Active = function (packet) {
  return (packet.ccData & 0x7800) === 0x1000;
};

// True when the byte pair is a control code selecting data channel 2
// (same masked bits equal 0x1800).
CaptionStream$1.prototype.setsChannel2Active = function (packet) {
  return (packet.ccData & 0x7800) === 0x1800;
};

// True when the byte pair switches into Text or XDS mode (see the CEA-608
// note above); dispatchCea608Packet then deactivates the channel so the
// interleaved non-caption data is discarded.
CaptionStream$1.prototype.setsTextOrXDSActive = function (packet) {
  return (packet.ccData & 0x7100) === 0x0100 || (packet.ccData & 0x78fe) === 0x102a || (packet.ccData & 0x78fe) === 0x182a;
};
5853
// Forward a CEA-708 packet to the 708 stream; 708 parsing is optional and
// is skipped entirely when disabled via the parse708captions option.
CaptionStream$1.prototype.dispatchCea708Packet = function (packet) {
  if (this.parse708captions_) {
    this.cc708Stream_.push(packet);
  }
}; // ----------------------
5859 // Session to Application
5860 // ----------------------
5861 // This hash maps special and extended character codes to their
5862 // proper Unicode equivalent. The first one-byte key is just a
5863 // non-standard character code. The two-byte keys that follow are
5864 // the extended CEA708 character codes, along with the preceding
5865 // 0x10 extended character byte to distinguish these codes from
5866 // non-extended character codes. Every CEA708 character code that
5867 // is not in this object maps directly to a standard unicode
5868 // character code.
5869 // The transparent space and non-breaking transparent space are
5870 // technically not fully supported since there is no code to
5871 // make them transparent, so they have normal non-transparent
5872 // stand-ins.
5873 // The special closed caption (CC) character isn't a standard
5874 // unicode character, so a fairly similar unicode character was
5875 // chosen in it's place.
5876
5877
var CHARACTER_TRANSLATION_708 = {
  0x7f: 0x266a,
  // ♪
  0x1020: 0x20,
  // Transparent Space
  0x1021: 0xa0,
  // Non-breaking Transparent Space
  0x1025: 0x2026,
  // …
  0x102a: 0x0160,
  // Š
  0x102c: 0x0152,
  // Œ
  0x1030: 0x2588,
  // █
  0x1031: 0x2018,
  // ‘
  0x1032: 0x2019,
  // ’
  0x1033: 0x201c,
  // “
  0x1034: 0x201d,
  // ”
  0x1035: 0x2022,
  // •
  0x1039: 0x2122,
  // ™
  0x103a: 0x0161,
  // š
  0x103c: 0x0153,
  // œ
  0x103d: 0x2120,
  // ℠
  0x103f: 0x0178,
  // Ÿ
  0x1076: 0x215b,
  // ⅛
  0x1077: 0x215c,
  // ⅜
  0x1078: 0x215d,
  // ⅝
  0x1079: 0x215e,
  // ⅞
  0x107a: 0x23d0,
  // ⏐
  0x107b: 0x23a4,
  // ⎤
  0x107c: 0x23a3,
  // ⎣
  0x107d: 0x23af,
  // ⎯
  0x107e: 0x23a6,
  // ⎦
  0x107f: 0x23a1,
  // ⎡
  0x10a0: 0x3138 // ㄸ (CC char)

};
5936
// Translate a (possibly extended, 0x10-prefixed) CEA-708 character code to
// a one-character string, or '' for an invalid extended code.
var get708CharFromCode = function get708CharFromCode(code) {
  // Codes without a translation entry map directly to their own value
  var translated = CHARACTER_TRANSLATION_708[code] || code;

  if (code & 0x1000 && code === translated) {
    // Extended code with no translation entry: invalid, render nothing
    return '';
  }

  return String.fromCharCode(translated);
};
5947
// True when the byte falls in either printable CEA-708 text range:
// G0 (0x20-0x7f) or G1 (0xa0-0xff).
var within708TextBlock = function within708TextBlock(b) {
  return (b >= 0x20 && b <= 0x7f) || (b >= 0xa0 && b <= 0xff);
};
5951
// A single CEA-708 caption window: a small text buffer with row bookkeeping.
var Cea708Window = function Cea708Window(windowNum) {
  this.windowNum = windowNum;
  this.reset();
};

// Restore the window to a blank, default state.
Cea708Window.prototype.reset = function () {
  this.clearText();
  this.pendingNewLine = false;
  // Attribute containers populated later by window/pen commands
  this.winAttr = {};
  this.penAttr = {};
  this.penLoc = {};
  this.penColor = {};
  // These default values are arbitrary; defineWindow will usually override them
  var defaults = {
    visible: 0,
    rowLock: 0,
    columnLock: 0,
    priority: 0,
    relativePositioning: 0,
    anchorVertical: 0,
    anchorHorizontal: 0,
    anchorPoint: 0,
    rowCount: 1,
    columnCount: 41,
    windowStyle: 0,
    penStyle: 0
  };

  for (var key in defaults) {
    this[key] = defaults[key];
  }

  this.virtualRowCount = this.rowCount + 1;
};

// All buffered rows joined as a single newline-separated string.
Cea708Window.prototype.getText = function () {
  return this.rows.join('\n');
};

// Empty the text buffer back to a single blank row.
Cea708Window.prototype.clearText = function () {
  this.rows = [''];
  this.rowIdx = 0;
};

// Start a new row, invoking the overflow hook first when the buffer is full,
// and dropping the oldest rows to stay within virtualRowCount.
Cea708Window.prototype.newLine = function (pts) {
  var willOverflow = this.rows.length >= this.virtualRowCount;

  if (willOverflow && typeof this.beforeRowOverflow === 'function') {
    this.beforeRowOverflow(pts);
  }

  if (this.rows.length > 0) {
    this.rows.push('');
    this.rowIdx++;
  }

  // Show all virtual rows since there's no visible scrolling
  while (this.rows.length > this.virtualRowCount) {
    this.rows.shift();
    this.rowIdx--;
  }
};

// True when the window holds no text at all.
Cea708Window.prototype.isEmpty = function () {
  if (this.rows.length === 0) {
    return true;
  }

  if (this.rows.length === 1) {
    return this.rows[0] === '';
  }

  return false;
};

// Append text to the current row.
Cea708Window.prototype.addText = function (text) {
  this.rows[this.rowIdx] += text;
};

// Remove the last character of the current row, if any.
Cea708Window.prototype.backspace = function () {
  if (this.isEmpty()) {
    return;
  }

  var row = this.rows[this.rowIdx];
  this.rows[this.rowIdx] = row.substr(0, row.length - 1);
};
6027
// One CEA-708 caption service: owns eight windows and (optionally) a
// TextDecoder for a configured character encoding.
var Cea708Service = function Cea708Service(serviceNum, encoding, stream) {
  this.serviceNum = serviceNum;
  this.text = '';
  this.currentWindow = new Cea708Window(-1);
  this.windows = [];
  this.stream = stream;

  // Try to setup a TextDecoder if an `encoding` value was provided
  if (typeof encoding === 'string') {
    this.createTextDecoder(encoding);
  }
};
/**
 * Initialize service windows
 * Must be run before service use
 *
 * @param {Integer} pts PTS value
 * @param {Function} beforeRowOverflow Function to execute before row overflow of a window
 */


Cea708Service.prototype.init = function (pts, beforeRowOverflow) {
  this.startPts = pts;
  var hasOverflowHook = typeof beforeRowOverflow === 'function';

  for (var winId = 0; winId < 8; winId++) {
    this.windows[winId] = new Cea708Window(winId);

    if (hasOverflowHook) {
      this.windows[winId].beforeRowOverflow = beforeRowOverflow;
    }
  }
};
/**
 * Select the window that subsequent commands will affect
 *
 * @param {Integer} windowNum Window number
 */


Cea708Service.prototype.setCurrentWindow = function (windowNum) {
  this.currentWindow = this.windows[windowNum];
};
/**
 * Create a TextDecoder for this service when the platform supports it,
 * logging a warning through the owning stream otherwise
 */


Cea708Service.prototype.createTextDecoder = function (encoding) {
  if (typeof TextDecoder === 'undefined') {
    this.stream.trigger('log', {
      level: 'warn',
      message: 'The `encoding` option is unsupported without TextDecoder support'
    });
    return;
  }

  try {
    this.textDecoder_ = new TextDecoder(encoding);
  } catch (error) {
    this.stream.trigger('log', {
      level: 'warn',
      message: 'TextDecoder could not be created with ' + encoding + ' encoding. ' + error
    });
  }
};
6091
// Stream that assembles CEA-708 byte pairs into packets, parses the service
// blocks inside them, and emits caption text events.
var Cea708Stream = function Cea708Stream(options) {
  options = options || {};
  Cea708Stream.prototype.init.call(this);
  var self = this;
  var captionServices = options.captionServices || {};
  var captionServiceEncodings = {};

  // Pull per-service text encodings out of the captionServices option block
  Object.keys(captionServices).forEach(function (serviceName) {
    var serviceProps = captionServices[serviceName];

    if (/^SERVICE/.test(serviceName)) {
      captionServiceEncodings[serviceName] = serviceProps.encoding;
    }
  });
  this.serviceEncodings = captionServiceEncodings;
  this.current708Packet = null;
  this.services = {};

  this.push = function (packet) {
    if (packet.type === 3) {
      // Type 3 marks a 708 packet start: finalize the previous packet first
      self.new708Packet();
      self.add708Bytes(packet);
      return;
    }

    if (self.current708Packet === null) {
      // This should only happen at the start of a file if there's no packet start.
      self.new708Packet();
    }

    self.add708Bytes(packet);
  };
};

Cea708Stream.prototype = new stream();
6128 /**
6129 * Push current 708 packet, create new 708 packet.
6130 */
6131
Cea708Stream.prototype.new708Packet = function () {
  // Finalize and dispatch any in-flight packet before starting a new one
  if (this.current708Packet !== null) {
    this.push708Packet();
  }

  // data: raw packet bytes; ptsVals: one PTS per byte *pair* (see getPts)
  this.current708Packet = {
    data: [],
    ptsVals: []
  };
};
6142 /**
6143 * Add pts and both bytes from packet into current 708 packet.
6144 */
6145
6146
Cea708Stream.prototype.add708Bytes = function (packet) {
  // Split the 16-bit ccData into its two bytes and record the pair's PTS
  var data = packet.ccData;
  var byte0 = data >>> 8;
  var byte1 = data & 0xff; // I would just keep a list of packets instead of bytes, but it isn't clear in the spec
  // that service blocks will always line up with byte pairs.

  this.current708Packet.ptsVals.push(packet.pts);
  this.current708Packet.data.push(byte0);
  this.current708Packet.data.push(byte1);
};
6157 /**
6158 * Parse completed 708 packet into service blocks and push each service block.
6159 */
6160
6161
Cea708Stream.prototype.push708Packet = function () {
  var packet708 = this.current708Packet;
  var packetData = packet708.data;
  var serviceNum = null;
  var blockSize = null;
  var i = 0;
  // First byte is the packet header: sequence number + size code
  var b = packetData[i++];
  packet708.seq = b >> 6;
  packet708.sizeCode = b & 0x3f; // 0b00111111;

  // Walk the service blocks. Each iteration consumes the block header via
  // the inner i++, and the `i += blockSize - 1` plus the loop's i++ advance
  // past the block body. NOTE(review): when blockSize === 0 the combined
  // increments advance i by 2, apparently skipping one byte -- presumably
  // only padding/null blocks hit this path, but confirm against the spec.
  for (; i < packetData.length; i++) {
    b = packetData[i++];
    serviceNum = b >> 5;
    blockSize = b & 0x1f; // 0b00011111

    if (serviceNum === 7 && blockSize > 0) {
      // Extended service num
      b = packetData[i++];
      serviceNum = b;
    }

    this.pushServiceBlock(serviceNum, i, blockSize);

    if (blockSize > 0) {
      i += blockSize - 1;
    }
  }
};
6190 /**
6191 * Parse service block, execute commands, read text.
6192 *
6193 * Note: While many of these commands serve important purposes,
6194 * many others just parse out the parameters or attributes, but
6195 * nothing is done with them because this is not a full and complete
6196 * implementation of the entire 708 spec.
6197 *
6198 * @param {Integer} serviceNum Service number
6199 * @param {Integer} start Start index of the 708 packet data
6200 * @param {Integer} size Block size
6201 */
6202
6203
Cea708Stream.prototype.pushServiceBlock = function (serviceNum, start, size) {
  var b;
  var i = start;
  var packetData = this.current708Packet.data;
  // Lazily create the service the first time data arrives for it
  var service = this.services[serviceNum];

  if (!service) {
    service = this.initService(serviceNum, i);
  }

  // Dispatch each byte; handlers return the index of the last byte they
  // consumed, so multi-byte commands advance i
  for (; i < start + size && i < packetData.length; i++) {
    b = packetData[i];

    if (within708TextBlock(b)) {
      // Printable character
      i = this.handleText(i, service);
    } else if (b === 0x18) {
      // Two-byte character introducer
      i = this.multiByteCharacter(i, service);
    } else if (b === 0x10) {
      // Extended command/character introducer
      i = this.extendedCommands(i, service);
    } else if (0x80 <= b && b <= 0x87) {
      // CW0-CW7: set current window
      i = this.setCurrentWindow(i, service);
    } else if (0x98 <= b && b <= 0x9f) {
      // DF0-DF7: define window
      i = this.defineWindow(i, service);
    } else if (b === 0x88) {
      // CLW: clear windows
      i = this.clearWindows(i, service);
    } else if (b === 0x8c) {
      // DLW: delete windows
      i = this.deleteWindows(i, service);
    } else if (b === 0x89) {
      // DSW: display windows
      i = this.displayWindows(i, service);
    } else if (b === 0x8a) {
      // HDW: hide windows
      i = this.hideWindows(i, service);
    } else if (b === 0x8b) {
      // TGW: toggle windows
      i = this.toggleWindows(i, service);
    } else if (b === 0x97) {
      // SWA: set window attributes
      i = this.setWindowAttributes(i, service);
    } else if (b === 0x90) {
      // SPA: set pen attributes
      i = this.setPenAttributes(i, service);
    } else if (b === 0x91) {
      // SPC: set pen color
      i = this.setPenColor(i, service);
    } else if (b === 0x92) {
      // SPL: set pen location
      i = this.setPenLocation(i, service);
    } else if (b === 0x8f) {
      // RST: reset the service
      service = this.reset(i, service);
    } else if (b === 0x08) {
      // BS: Backspace
      service.currentWindow.backspace();
    } else if (b === 0x0c) {
      // FF: Form feed
      service.currentWindow.clearText();
    } else if (b === 0x0d) {
      // CR: Carriage return
      service.currentWindow.pendingNewLine = true;
    } else if (b === 0x0e) {
      // HCR: Horizontal carriage return
      service.currentWindow.clearText();
    } else if (b === 0x8d) {
      // DLY: Delay, nothing to do
      i++;
    } else ; // Unrecognized command byte: ignore it
  }
};
6265 /**
6266 * Execute an extended command
6267 *
6268 * @param {Integer} i Current index in the 708 packet
6269 * @param {Service} service The service object to be affected
6270 * @return {Integer} New index after parsing
6271 */
6272
6273
// Handle the byte following a 0x10 introducer: when it is printable, render
// it as an extended character; otherwise leave it unconsumed.
Cea708Stream.prototype.extendedCommands = function (i, service) {
  var extByte = this.current708Packet.data[++i];

  if (within708TextBlock(extByte)) {
    i = this.handleText(i, service, {
      isExtended: true
    });
  }

  return i;
};
6286 /**
6287 * Get PTS value of a given byte index
6288 *
6289 * @param {Integer} byteIndex Index of the byte
6290 * @return {Integer} PTS
6291 */
6292
6293
Cea708Stream.prototype.getPts = function (byteIndex) {
  // There's 1 pts value per 2 bytes (add708Bytes pushes one PTS per byte
  // pair), so halve the byte index to find the matching PTS
  return this.current708Packet.ptsVals[Math.floor(byteIndex / 2)];
};
6298 /**
6299 * Initializes a service
6300 *
6301 * @param {Integer} serviceNum Service number
6302 * @return {Service} Initialized service object
6303 */
6304
6305
Cea708Stream.prototype.initService = function (serviceNum, i) {
  var serviceName = 'SERVICE' + serviceNum;
  var self = this;
  var encoding;

  // Use the text encoding configured for this service, if one was supplied
  // via the captionServices option
  if (serviceName in this.serviceEncodings) {
    encoding = this.serviceEncodings[serviceName];
  }

  this.services[serviceNum] = new Cea708Service(serviceNum, encoding, self);
  // Flush displayed windows whenever one of this service's windows is about
  // to overflow its row buffer
  this.services[serviceNum].init(this.getPts(i), function (pts) {
    self.flushDisplayed(pts, self.services[serviceNum]);
  });
  return this.services[serviceNum];
};
6322 /**
6323 * Execute text writing to current window
6324 *
6325 * @param {Integer} i Current index in the 708 packet
6326 * @param {Service} service The service object to be affected
6327 * @return {Integer} New index after parsing
6328 */
6329
6330
Cea708Stream.prototype.handleText = function (i, service, options) {
  var isExtended = options && options.isExtended;
  var isMultiByte = options && options.isMultiByte;
  var packetData = this.current708Packet.data;
  // 0x1000 marks extended character codes in the 708 translation table
  var extended = isExtended ? 0x1000 : 0x0000;
  var currentByte = packetData[i];
  var nextByte = packetData[i + 1];
  var win = service.currentWindow;
  var char;
  var charCodeArray; // Use the TextDecoder if one was created for this service

  if (service.textDecoder_ && !isExtended) {
    if (isMultiByte) {
      charCodeArray = [currentByte, nextByte];
      // Consume the second byte of the pair
      i++;
    } else {
      charCodeArray = [currentByte];
    }

    char = service.textDecoder_.decode(new Uint8Array(charCodeArray));
  } else {
    // No decoder (or extended code): translate through the 708 character map
    char = get708CharFromCode(extended | currentByte);
  }

  // Apply a deferred carriage return just before the next character lands
  if (win.pendingNewLine && !win.isEmpty()) {
    win.newLine(this.getPts(i));
  }

  win.pendingNewLine = false;
  win.addText(char);
  return i;
};
6363 /**
6364 * Handle decoding of multibyte character
6365 *
6366 * @param {Integer} i Current index in the 708 packet
6367 * @param {Service} service The service object to be affected
6368 * @return {Integer} New index after parsing
6369 */
6370
6371
// Handle the two bytes following a 0x18 introducer as one multi-byte
// character, but only when both bytes are printable.
Cea708Stream.prototype.multiByteCharacter = function (i, service) {
  var packetData = this.current708Packet.data;
  var byteOne = packetData[i + 1];
  var byteTwo = packetData[i + 2];

  if (within708TextBlock(byteOne) && within708TextBlock(byteTwo)) {
    i = this.handleText(++i, service, {
      isMultiByte: true
    });
  }

  return i;
};
6385 /**
6386 * Parse and execute the CW# command.
6387 *
6388 * Set the current window.
6389 *
6390 * @param {Integer} i Current index in the 708 packet
6391 * @param {Service} service The service object to be affected
6392 * @return {Integer} New index after parsing
6393 */
6394
6395
// CW#: select the current window; the window number is the low 3 bits of
// the command byte itself.
Cea708Stream.prototype.setCurrentWindow = function (i, service) {
  var windowNum = this.current708Packet.data[i] & 0x07;
  service.setCurrentWindow(windowNum);
  return i;
};
6403 /**
6404 * Parse and execute the DF# command.
6405 *
6406 * Define a window and set it as the current window.
6407 *
6408 * @param {Integer} i Current index in the 708 packet
6409 * @param {Service} service The service object to be affected
6410 * @return {Integer} New index after parsing
6411 */
6412
6413
// DF#: the window number is the low 3 bits of the command byte; the six
// parameter bytes that follow are consumed in order, one per ++i.
Cea708Stream.prototype.defineWindow = function (i, service) {
  var packetData = this.current708Packet.data;
  var b = packetData[i];
  var windowNum = b & 0x07;
  service.setCurrentWindow(windowNum);
  var win = service.currentWindow;
  b = packetData[++i];
  win.visible = (b & 0x20) >> 5; // v

  win.rowLock = (b & 0x10) >> 4; // rl

  win.columnLock = (b & 0x08) >> 3; // cl

  win.priority = b & 0x07; // p

  b = packetData[++i];
  win.relativePositioning = (b & 0x80) >> 7; // rp

  win.anchorVertical = b & 0x7f; // av

  b = packetData[++i];
  win.anchorHorizontal = b; // ah

  b = packetData[++i];
  win.anchorPoint = (b & 0xf0) >> 4; // ap

  win.rowCount = b & 0x0f; // rc

  b = packetData[++i];
  win.columnCount = b & 0x3f; // cc

  b = packetData[++i];
  win.windowStyle = (b & 0x38) >> 3; // ws

  win.penStyle = b & 0x07; // ps
  // The spec says there are (rowCount+1) "virtual rows"

  win.virtualRowCount = win.rowCount + 1;
  return i;
};
6454 /**
6455 * Parse and execute the SWA command.
6456 *
6457 * Set attributes of the current window.
6458 *
6459 * @param {Integer} i Current index in the 708 packet
6460 * @param {Service} service The service object to be affected
6461 * @return {Integer} New index after parsing
6462 */
6463
6464
Cea708Stream.prototype.setWindowAttributes = function (i, service) {
  var packetData = this.current708Packet.data;
  var winAttr = service.currentWindow.winAttr;

  // Byte 1: fill opacity and fill color
  var b = packetData[++i];
  winAttr.fillOpacity = (b & 0xc0) >> 6; // fo
  winAttr.fillRed = (b & 0x30) >> 4; // fr
  winAttr.fillGreen = (b & 0x0c) >> 2; // fg
  winAttr.fillBlue = b & 0x03; // fb

  // Byte 2: border type (low bits) and border color
  b = packetData[++i];
  winAttr.borderType = (b & 0xc0) >> 6; // bt
  winAttr.borderRed = (b & 0x30) >> 4; // br
  winAttr.borderGreen = (b & 0x0c) >> 2; // bg
  winAttr.borderBlue = b & 0x03; // bb

  // Byte 3: border type high bit, word wrap, directions, justification
  b = packetData[++i];
  winAttr.borderType += (b & 0x80) >> 5; // bt
  winAttr.wordWrap = (b & 0x40) >> 6; // ww
  winAttr.printDirection = (b & 0x30) >> 4; // pd
  winAttr.scrollDirection = (b & 0x0c) >> 2; // sd
  winAttr.justify = b & 0x03; // j

  // Byte 4: effect speed/direction and display effect
  b = packetData[++i];
  winAttr.effectSpeed = (b & 0xf0) >> 4; // es
  winAttr.effectDirection = (b & 0x0c) >> 2; // ed
  winAttr.displayEffect = b & 0x03; // de

  return i;
};
6507 /**
6508 * Gather text from all displayed windows and push a caption to output.
6509 *
6510 * @param {Integer} i Current index in the 708 packet
6511 * @param {Service} service The service object to be affected
6512 */
6513
6514
6515 Cea708Stream.prototype.flushDisplayed = function (pts, service) {
6516 var displayedText = []; // TODO: Positioning not supported, displaying multiple windows will not necessarily
6517 // display text in the correct order, but sample files so far have not shown any issue.
6518
6519 for (var winId = 0; winId < 8; winId++) {
6520 if (service.windows[winId].visible && !service.windows[winId].isEmpty()) {
6521 displayedText.push(service.windows[winId].getText());
6522 }
6523 }
6524
6525 service.endPts = pts;
6526 service.text = displayedText.join('\n\n');
6527 this.pushCaption(service);
6528 service.startPts = pts;
6529 };
6530 /**
6531 * Push a caption to output if the caption contains text.
6532 *
6533 * @param {Service} service The service object to be affected
6534 */
6535
6536
// Emit the service's accumulated text as a data event; empty captions are
// never emitted.
Cea708Stream.prototype.pushCaption = function (service) {
  if (service.text === '') {
    return;
  }

  this.trigger('data', {
    startPts: service.startPts,
    endPts: service.endPts,
    text: service.text,
    stream: 'cc708_' + service.serviceNum
  });
  service.text = '';
  service.startPts = service.endPts;
};
6549 /**
6550 * Parse and execute the DSW command.
6551 *
6552 * Set visible property of windows based on the parsed bitmask.
6553 *
6554 * @param {Integer} i Current index in the 708 packet
6555 * @param {Service} service The service object to be affected
6556 * @return {Integer} New index after parsing
6557 */
6558
6559
// DSW: flush current captions, then mark each window whose bit is set in
// the following bitmask byte as visible.
Cea708Stream.prototype.displayWindows = function (i, service) {
  var winBitmask = this.current708Packet.data[++i];
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (winBitmask & 1 << winId) {
      service.windows[winId].visible = 1;
    }
  }

  return i;
};
6574 /**
6575 * Parse and execute the HDW command.
6576 *
6577 * Set visible property of windows based on the parsed bitmask.
6578 *
6579 * @param {Integer} i Current index in the 708 packet
6580 * @param {Service} service The service object to be affected
6581 * @return {Integer} New index after parsing
6582 */
6583
6584
// HDW: flush current captions, then hide each window whose bit is set in
// the following bitmask byte.
Cea708Stream.prototype.hideWindows = function (i, service) {
  var winBitmask = this.current708Packet.data[++i];
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (winBitmask & 1 << winId) {
      service.windows[winId].visible = 0;
    }
  }

  return i;
};
6599 /**
6600 * Parse and execute the TGW command.
6601 *
6602 * Set visible property of windows based on the parsed bitmask.
6603 *
6604 * @param {Integer} i Current index in the 708 packet
6605 * @param {Service} service The service object to be affected
6606 * @return {Integer} New index after parsing
6607 */
6608
6609
// TGW: flush current captions, then flip the visibility of each window
// whose bit is set in the following bitmask byte.
Cea708Stream.prototype.toggleWindows = function (i, service) {
  var winBitmask = this.current708Packet.data[++i];
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (winBitmask & 1 << winId) {
      service.windows[winId].visible ^= 1;
    }
  }

  return i;
};
6624 /**
6625 * Parse and execute the CLW command.
6626 *
6627 * Clear text of windows based on the parsed bitmask.
6628 *
6629 * @param {Integer} i Current index in the 708 packet
6630 * @param {Service} service The service object to be affected
6631 * @return {Integer} New index after parsing
6632 */
6633
6634
// CLW: flush current captions, then clear the text of each window whose
// bit is set in the following bitmask byte.
Cea708Stream.prototype.clearWindows = function (i, service) {
  var winBitmask = this.current708Packet.data[++i];
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (winBitmask & 1 << winId) {
      service.windows[winId].clearText();
    }
  }

  return i;
};
6649 /**
6650 * Parse and execute the DLW command.
6651 *
6652 * Re-initialize windows based on the parsed bitmask.
6653 *
6654 * @param {Integer} i Current index in the 708 packet
6655 * @param {Service} service The service object to be affected
6656 * @return {Integer} New index after parsing
6657 */
6658
6659
// DLW: flush current captions, then reset each window whose bit is set in
// the following bitmask byte back to its default state.
Cea708Stream.prototype.deleteWindows = function (i, service) {
  var winBitmask = this.current708Packet.data[++i];
  this.flushDisplayed(this.getPts(i), service);

  for (var winId = 0; winId < 8; winId++) {
    if (winBitmask & 1 << winId) {
      service.windows[winId].reset();
    }
  }

  return i;
};
6674 /**
6675 * Parse and execute the SPA command.
6676 *
6677 * Set pen attributes of the current window.
6678 *
6679 * @param {Integer} i Current index in the 708 packet
6680 * @param {Service} service The service object to be affected
6681 * @return {Integer} New index after parsing
6682 */
6683
6684
Cea708Stream.prototype.setPenAttributes = function (i, service) {
  var packetData = this.current708Packet.data;
  var penAttr = service.currentWindow.penAttr;

  // Byte 1: text tag, offset, pen size
  var b = packetData[++i];
  penAttr.textTag = (b & 0xf0) >> 4; // tt
  penAttr.offset = (b & 0x0c) >> 2; // o
  penAttr.penSize = b & 0x03; // s

  // Byte 2: italics, underline, edge type, font style
  b = packetData[++i];
  penAttr.italics = (b & 0x80) >> 7; // i
  penAttr.underline = (b & 0x40) >> 6; // u
  penAttr.edgeType = (b & 0x38) >> 3; // et
  penAttr.fontStyle = b & 0x07; // fs

  return i;
};
6707 /**
6708 * Parse and execute the SPC command.
6709 *
6710 * Set pen color of the current window.
6711 *
6712 * @param {Integer} i Current index in the 708 packet
6713 * @param {Service} service The service object to be affected
6714 * @return {Integer} New index after parsing
6715 */
6716
6717
Cea708Stream.prototype.setPenColor = function (i, service) {
  var packetData = this.current708Packet.data;
  var penColor = service.currentWindow.penColor;

  // Byte 1: foreground opacity and color
  var b = packetData[++i];
  penColor.fgOpacity = (b & 0xc0) >> 6; // fo
  penColor.fgRed = (b & 0x30) >> 4; // fr
  penColor.fgGreen = (b & 0x0c) >> 2; // fg
  penColor.fgBlue = b & 0x03; // fb

  // Byte 2: background opacity and color
  b = packetData[++i];
  penColor.bgOpacity = (b & 0xc0) >> 6; // bo
  penColor.bgRed = (b & 0x30) >> 4; // br
  penColor.bgGreen = (b & 0x0c) >> 2; // bg
  penColor.bgBlue = b & 0x03; // bb

  // Byte 3: edge color
  b = packetData[++i];
  penColor.edgeRed = (b & 0x30) >> 4; // er
  penColor.edgeGreen = (b & 0x0c) >> 2; // eg
  penColor.edgeBlue = b & 0x03; // eb

  return i;
};
6749 /**
6750 * Parse and execute the SPL command.
6751 *
6752 * Set pen location of the current window.
6753 *
6754 * @param {Integer} i Current index in the 708 packet
6755 * @param {Service} service The service object to be affected
6756 * @return {Integer} New index after parsing
6757 */
6758
6759
Cea708Stream.prototype.setPenLocation = function (i, service) {
  var packetData = this.current708Packet.data;
  var penLoc = service.currentWindow.penLoc;

  // Positioning isn't really supported at the moment, so this essentially
  // just inserts a linebreak
  service.currentWindow.pendingNewLine = true;

  // Byte 1: row
  var b = packetData[++i];
  penLoc.row = b & 0x0f; // r

  // Byte 2: column
  b = packetData[++i];
  penLoc.column = b & 0x3f; // c

  return i;
};
6774 /**
6775 * Execute the RST command.
6776 *
6777 * Reset service to a clean slate. Re-initialize.
6778 *
6779 * @param {Integer} i Current index in the 708 packet
6780 * @param {Service} service The service object to be affected
6781 * @return {Service} Re-initialized service
6782 */
6783
6784
Cea708Stream.prototype.reset = function (i, service) {
  // Flush anything currently on screen, then rebuild the service state
  // from scratch.
  this.flushDisplayed(this.getPts(i), service);
  return this.initService(service.serviceNum, i);
};

// This hash maps non-ASCII, special, and extended character codes to their
// proper Unicode equivalent. The first keys that are only a single byte
// are the non-standard ASCII characters, which simply map the CEA608 byte
// to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
// character codes, but have their MSB bitmasked with 0x03 so that a lookup
// can be performed regardless of the field and data channel on which the
// character code was received.
6797
var CHARACTER_TRANSLATION = {
  0x2a: 0xe1, // á
  0x5c: 0xe9, // é
  0x5e: 0xed, // í
  0x5f: 0xf3, // ó
  0x60: 0xfa, // ú
  0x7b: 0xe7, // ç
  0x7c: 0xf7, // ÷
  0x7d: 0xd1, // Ñ
  0x7e: 0xf1, // ñ
  0x7f: 0x2588, // █
  0x0130: 0xae, // ®
  0x0131: 0xb0, // °
  0x0132: 0xbd, // ½
  0x0133: 0xbf, // ¿
  0x0134: 0x2122, // ™
  0x0135: 0xa2, // ¢
  0x0136: 0xa3, // £
  0x0137: 0x266a, // ♪
  0x0138: 0xe0, // à
  0x0139: 0xa0, // non-breaking space
  0x013a: 0xe8, // è
  0x013b: 0xe2, // â
  0x013c: 0xea, // ê
  0x013d: 0xee, // î
  0x013e: 0xf4, // ô
  0x013f: 0xfb, // û
  0x0220: 0xc1, // Á
  0x0221: 0xc9, // É
  0x0222: 0xd3, // Ó
  0x0223: 0xda, // Ú
  0x0224: 0xdc, // Ü
  0x0225: 0xfc, // ü
  0x0226: 0x2018, // ‘
  0x0227: 0xa1, // ¡
  0x0228: 0x2a, // *
  0x0229: 0x27, // '
  0x022a: 0x2014, // —
  0x022b: 0xa9, // ©
  0x022c: 0x2120, // ℠
  0x022d: 0x2022, // •
  0x022e: 0x201c, // “
  0x022f: 0x201d, // ”
  0x0230: 0xc0, // À
  0x0231: 0xc2, // Â
  0x0232: 0xc7, // Ç
  0x0233: 0xc8, // È
  0x0234: 0xca, // Ê
  0x0235: 0xcb, // Ë
  0x0236: 0xeb, // ë
  0x0237: 0xce, // Î
  0x0238: 0xcf, // Ï
  0x0239: 0xef, // ï
  0x023a: 0xd4, // Ô
  0x023b: 0xd9, // Ù
  0x023c: 0xf9, // ù
  0x023d: 0xdb, // Û
  0x023e: 0xab, // «
  0x023f: 0xbb, // »
  0x0320: 0xc3, // Ã
  0x0321: 0xe3, // ã
  0x0322: 0xcd, // Í
  0x0323: 0xcc, // Ì
  0x0324: 0xec, // ì
  0x0325: 0xd2, // Ò
  0x0326: 0xf2, // ò
  0x0327: 0xd5, // Õ
  0x0328: 0xf5, // õ
  0x0329: 0x7b, // {
  0x032a: 0x7d, // }
  0x032b: 0x5c, // \
  0x032c: 0x5e, // ^
  0x032d: 0x5f, // _
  0x032e: 0x7c, // |
  0x032f: 0x7e, // ~
  0x0330: 0xc4, // Ä
  0x0331: 0xe4, // ä
  0x0332: 0xd6, // Ö
  0x0333: 0xf6, // ö
  0x0334: 0xdf, // ß
  0x0335: 0xa5, // ¥
  0x0336: 0xa4, // ¤
  0x0337: 0x2502, // │
  0x0338: 0xc5, // Å
  0x0339: 0xe5, // å
  0x033a: 0xd8, // Ø
  0x033b: 0xf8, // ø
  0x033c: 0x250c, // ┌
  0x033d: 0x2510, // ┐
  0x033e: 0x2514, // └
  0x033f: 0x2518 // ┘
};

// Translate a CEA-608 character code (after any field/data-channel
// bitmasking performed by the caller) into a one-character string.
// A null code yields the empty string; untranslated codes fall through
// to their own value.
var getCharFromCode = function getCharFromCode(code) {
  if (code === null) {
    return '';
  }

  return String.fromCharCode(CHARACTER_TRANSLATION[code] || code);
};

// the index of the last row in a CEA-608 display buffer
6989
6990
// Index of the last row in a CEA-608 display buffer (rows 0-14).
var BOTTOM_ROW = 14;

// This array is used for mapping PACs -> row #, since there's no way of
// getting it through bit logic: the index of a row code in ROWS is its
// row number.
var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620, 0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420];

// CEA-608 captions are rendered onto a 34x15 matrix of character cells.
// Build one empty string per row; the "bottom" row is the last element.
var createDisplayBuffer = function createDisplayBuffer() {
  var rows = [];

  for (var r = 0; r <= BOTTOM_ROW; r++) {
    rows.push('');
  }

  return rows;
};
7007
/**
 * A stream of CEA-608 closed captions for a single field/data-channel
 * pair (named CC1-CC4). `push` accepts two-byte caption packets (with
 * parity bits still set) and emits cue objects via 'data' events as
 * captions are flushed to the display.
 *
 * @param {Integer} field Field of line 21 this stream decodes (0 or 1)
 * @param {Integer} dataChannel Data channel within the field (0 or 1)
 */
var Cea608Stream = function Cea608Stream(field, dataChannel) {
  Cea608Stream.prototype.init.call(this);
  this.field_ = field || 0;
  this.dataChannel_ = dataChannel || 0;
  // CC1-CC4, derived from the field/data-channel pair
  this.name_ = 'CC' + ((this.field_ << 1 | this.dataChannel_) + 1);
  this.setConstants();
  this.reset();

  this.push = function (packet) {
    var data, swap, char0, char1, text;

    // remove the parity bits
    data = packet.ccData & 0x7f7f;

    // ignore duplicate control codes; the spec demands they're sent twice
    if (data === this.lastControlCode_) {
      this.lastControlCode_ = null;
      return;
    }

    // Store control codes
    if ((data & 0xf000) === 0x1000) {
      this.lastControlCode_ = data;
    } else if (data !== this.PADDING_) {
      this.lastControlCode_ = null;
    }

    char0 = data >>> 8;
    char1 = data & 0xff;

    if (data === this.PADDING_) {
      return;
    } else if (data === this.RESUME_CAPTION_LOADING_) {
      this.mode_ = 'popOn';
    } else if (data === this.END_OF_CAPTION_) {
      // If an EOC is received while in paint-on mode, the displayed caption
      // text should be swapped to non-displayed memory as if it was a pop-on
      // caption. Because of that, we should explicitly switch back to pop-on
      // mode
      this.mode_ = 'popOn';
      this.clearFormatting(packet.pts);

      // if a caption was being displayed, it's gone now
      this.flushDisplayed(packet.pts);

      // flip memory
      swap = this.displayed_;
      this.displayed_ = this.nonDisplayed_;
      this.nonDisplayed_ = swap;

      // start measuring the time to display the caption
      this.startPts_ = packet.pts;
    } else if (data === this.ROLL_UP_2_ROWS_) {
      this.rollUpRows_ = 2;
      this.setRollUp(packet.pts);
    } else if (data === this.ROLL_UP_3_ROWS_) {
      this.rollUpRows_ = 3;
      this.setRollUp(packet.pts);
    } else if (data === this.ROLL_UP_4_ROWS_) {
      this.rollUpRows_ = 4;
      this.setRollUp(packet.pts);
    } else if (data === this.CARRIAGE_RETURN_) {
      this.clearFormatting(packet.pts);
      this.flushDisplayed(packet.pts);
      this.shiftRowsUp_();
      this.startPts_ = packet.pts;
    } else if (data === this.BACKSPACE_) {
      // Backspace deletes from the memory the current mode writes to.
      if (this.mode_ === 'popOn') {
        this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
      } else {
        this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
      }
    } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
      this.flushDisplayed(packet.pts);
      this.displayed_ = createDisplayBuffer();
    } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
      this.nonDisplayed_ = createDisplayBuffer();
    } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
      if (this.mode_ !== 'paintOn') {
        // NOTE: This should be removed when proper caption positioning is
        // implemented
        this.flushDisplayed(packet.pts);
        this.displayed_ = createDisplayBuffer();
      }

      this.mode_ = 'paintOn';
      this.startPts_ = packet.pts;

    // Append special characters to caption text
    } else if (this.isSpecialCharacter(char0, char1)) {
      // Bitmask char0 so that we can apply character transformations
      // regardless of field and data channel.
      // Then byte-shift to the left and OR with char1 so we can pass the
      // entire character code to `getCharFromCode`.
      char0 = (char0 & 0x03) << 8;
      text = getCharFromCode(char0 | char1);
      this[this.mode_](packet.pts, text);
      this.column_++;

    // Append extended characters to caption text
    } else if (this.isExtCharacter(char0, char1)) {
      // Extended characters always follow their "non-extended" equivalents.
      // IE if a "è" is desired, you'll always receive "eè"; non-compliant
      // decoders are supposed to drop the "è", while compliant decoders
      // backspace the "e" and insert "è".

      // Delete the previous character
      if (this.mode_ === 'popOn') {
        this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
      } else {
        this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
      }

      // Bitmask char0 so that we can apply character transformations
      // regardless of field and data channel.
      // Then byte-shift to the left and OR with char1 so we can pass the
      // entire character code to `getCharFromCode`.
      char0 = (char0 & 0x03) << 8;
      text = getCharFromCode(char0 | char1);
      this[this.mode_](packet.pts, text);
      this.column_++;

    // Process mid-row codes
    } else if (this.isMidRowCode(char0, char1)) {
      // Attributes are not additive, so clear all formatting
      this.clearFormatting(packet.pts);

      // According to the standard, mid-row codes
      // should be replaced with spaces, so add one now
      this[this.mode_](packet.pts, ' ');
      this.column_++;

      if ((char1 & 0xe) === 0xe) {
        this.addFormatting(packet.pts, ['i']);
      }

      if ((char1 & 0x1) === 0x1) {
        this.addFormatting(packet.pts, ['u']);
      }

    // Detect offset control codes and adjust cursor
    } else if (this.isOffsetControlCode(char0, char1)) {
      // Cursor position is set by indent PAC (see below) in 4-column
      // increments, with an additional offset code of 1-3 to reach any
      // of the 32 columns specified by CEA-608. So all we need to do
      // here is increment the column cursor by the given offset.
      this.column_ += char1 & 0x03;

    // Detect PACs (Preamble Address Codes)
    } else if (this.isPAC(char0, char1)) {
      // There's no logic for PAC -> row mapping, so we have to just
      // find the row code in an array and use its index :(
      var row = ROWS.indexOf(data & 0x1f20);

      // Configure the caption window if we're in roll-up mode
      if (this.mode_ === 'rollUp') {
        // This implies that the base row is incorrectly set.
        // As per the recommendation in CEA-608(Base Row Implementation), defer to the number
        // of roll-up rows set.
        if (row - this.rollUpRows_ + 1 < 0) {
          row = this.rollUpRows_ - 1;
        }

        this.setRollUp(packet.pts, row);
      }

      if (row !== this.row_) {
        // formatting is only persistent for current row
        this.clearFormatting(packet.pts);
        this.row_ = row;
      }

      // All PACs can apply underline, so detect and apply
      // (All odd-numbered second bytes set underline)
      if (char1 & 0x1 && this.formatting_.indexOf('u') === -1) {
        this.addFormatting(packet.pts, ['u']);
      }

      if ((data & 0x10) === 0x10) {
        // We've got an indent level code. Each successive even number
        // increments the column cursor by 4, so we can get the desired
        // column position by bit-shifting to the right (to get n/2)
        // and multiplying by 4.
        this.column_ = ((data & 0xe) >> 1) * 4;
      }

      if (this.isColorPAC(char1)) {
        // it's a color code, though we only support white, which
        // can be either normal or italicized. white italics can be
        // either 0x4e or 0x6e depending on the row, so we just
        // bitwise-and with 0xe to see if italics should be turned on
        if ((char1 & 0xe) === 0xe) {
          this.addFormatting(packet.pts, ['i']);
        }
      }

    // We have a normal character in char0, and possibly one in char1
    } else if (this.isNormalChar(char0)) {
      if (char1 === 0x00) {
        char1 = null;
      }

      text = getCharFromCode(char0);
      text += getCharFromCode(char1);
      this[this.mode_](packet.pts, text);
      this.column_ += text.length;
    } // finish data processing
  };
};
7200
Cea608Stream.prototype = new stream();

// Trigger a cue point that captures the current state of the
// display buffer
Cea608Stream.prototype.flushDisplayed = function (pts) {
  // Trim each row; caption parsing errors should not throw exceptions and
  // break playback, so a malformed row is logged and replaced with ''.
  var rows = this.displayed_.map(function (row, index) {
    try {
      return row.trim();
    } catch (e) {
      this.trigger('log', {
        level: 'warn',
        message: 'Skipping a malformed 608 caption at index ' + index + '.'
      });
      return '';
    }
  }, this);

  // Combine all text rows into one cue, then strip blank rows from the
  // start and end (but not the middle).
  var content = rows.join('\n').replace(/^\n+|\n+$/g, '');

  if (content.length) {
    this.trigger('data', {
      startPts: this.startPts_,
      endPts: pts,
      text: content,
      stream: this.name_
    });
  }
};
7232 /**
7233 * Zero out the data, used for startup and on seek
7234 */
7235
7236
Cea608Stream.prototype.reset = function () {
  this.mode_ = 'popOn';
  this.startPts_ = 0;
  this.lastControlCode_ = null;

  // Wipe both caption memories.
  this.displayed_ = createDisplayBuffer();
  this.nonDisplayed_ = createDisplayBuffer();

  // When in roll-up mode, the index of the last row that will actually
  // display captions. If a caption is shifted to a row with a lower index
  // than this, it is cleared from the display buffer.
  this.topRow_ = 0;

  // Track row and column for proper line-breaking and spacing.
  this.row_ = BOTTOM_ROW;
  this.column_ = 0;
  this.rollUpRows_ = 2;

  // Currently-applied formatting tags (e.g. 'i', 'u').
  this.formatting_ = [];
};
7255 /**
7256 * Sets up control code and related constants for this instance
7257 */
7258
7259
Cea608Stream.prototype.setConstants = function () {
  // The following attributes have these uses:
  //   EXT_    : char0 for mid-row codes, and the base for extended
  //             chars (EXT_+0, EXT_+1, and EXT_+2 are char0s for
  //             extended codes)
  //   CONTROL_: char0 for control codes, byte-shifted to the left so
  //             that this.CONTROL_ | CODE yields a full two-byte command
  //   OFFSET_ : char0 for tab offset codes
  //
  // Control codes, and _only_ control codes, differ between field 1 and
  // field 2: field 2 control codes are their field 1 value plus 1, hence
  // the "| this.field_" on the control value.
  if (this.dataChannel_ === 0) {
    this.BASE_ = 0x10;
    this.EXT_ = 0x11;
    this.CONTROL_ = (0x14 | this.field_) << 8;
    this.OFFSET_ = 0x17;
  } else if (this.dataChannel_ === 1) {
    this.BASE_ = 0x18;
    this.EXT_ = 0x19;
    this.CONTROL_ = (0x1c | this.field_) << 8;
    this.OFFSET_ = 0x1f;
  }

  // LSByte command codes recognized by Cea608Stream. This list is not
  // exhaustive; for a more comprehensive listing and semantics see
  // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
  var control = this.CONTROL_;

  // Padding
  this.PADDING_ = 0x0000;

  // Pop-on mode
  this.RESUME_CAPTION_LOADING_ = control | 0x20;
  this.END_OF_CAPTION_ = control | 0x2f;

  // Roll-up mode
  this.ROLL_UP_2_ROWS_ = control | 0x25;
  this.ROLL_UP_3_ROWS_ = control | 0x26;
  this.ROLL_UP_4_ROWS_ = control | 0x27;
  this.CARRIAGE_RETURN_ = control | 0x2d;

  // Paint-on mode
  this.RESUME_DIRECT_CAPTIONING_ = control | 0x29;

  // Erasure
  this.BACKSPACE_ = control | 0x21;
  this.ERASE_DISPLAYED_MEMORY_ = control | 0x2c;
  this.ERASE_NON_DISPLAYED_MEMORY_ = control | 0x2e;
};
7305 /**
7306 * Detects if the 2-byte packet data is a special character
7307 *
7308 * Special characters have a second byte in the range 0x30 to 0x3f,
7309 * with the first byte being 0x11 (for data channel 1) or 0x19 (for
7310 * data channel 2).
7311 *
7312 * @param {Integer} char0 The first byte
7313 * @param {Integer} char1 The second byte
7314 * @return {Boolean} Whether the 2 bytes are an special character
7315 */
7316
7317
Cea608Stream.prototype.isSpecialCharacter = function (char0, char1) {
  // char0 must be the EXT_ byte for this data channel; char1 in 0x30-0x3f.
  return char0 === this.EXT_ && 0x30 <= char1 && char1 <= 0x3f;
};
7321 /**
7322 * Detects if the 2-byte packet data is an extended character
7323 *
7324 * Extended characters have a second byte in the range 0x20 to 0x3f,
7325 * with the first byte being 0x12 or 0x13 (for data channel 1) or
7326 * 0x1a or 0x1b (for data channel 2).
7327 *
7328 * @param {Integer} char0 The first byte
7329 * @param {Integer} char1 The second byte
7330 * @return {Boolean} Whether the 2 bytes are an extended character
7331 */
7332
7333
Cea608Stream.prototype.isExtCharacter = function (char0, char1) {
  // char0 is one of the two bytes following EXT_; char1 in 0x20-0x3f.
  var extChar0 = char0 === this.EXT_ + 1 || char0 === this.EXT_ + 2;
  return extChar0 && 0x20 <= char1 && char1 <= 0x3f;
};
7337 /**
7338 * Detects if the 2-byte packet is a mid-row code
7339 *
7340 * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
7341 * the first byte being 0x11 (for data channel 1) or 0x19 (for data
7342 * channel 2).
7343 *
7344 * @param {Integer} char0 The first byte
7345 * @param {Integer} char1 The second byte
7346 * @return {Boolean} Whether the 2 bytes are a mid-row code
7347 */
7348
7349
Cea608Stream.prototype.isMidRowCode = function (char0, char1) {
  // char0 must be the EXT_ byte for this data channel; char1 in 0x20-0x2f.
  return char0 === this.EXT_ && 0x20 <= char1 && char1 <= 0x2f;
};
7353 /**
7354 * Detects if the 2-byte packet is an offset control code
7355 *
7356 * Offset control codes have a second byte in the range 0x21 to 0x23,
7357 * with the first byte being 0x17 (for data channel 1) or 0x1f (for
7358 * data channel 2).
7359 *
7360 * @param {Integer} char0 The first byte
7361 * @param {Integer} char1 The second byte
7362 * @return {Boolean} Whether the 2 bytes are an offset control code
7363 */
7364
7365
Cea608Stream.prototype.isOffsetControlCode = function (char0, char1) {
  // char0 must be the OFFSET_ byte for this data channel; char1 in 0x21-0x23.
  return char0 === this.OFFSET_ && 0x21 <= char1 && char1 <= 0x23;
};
7369 /**
7370 * Detects if the 2-byte packet is a Preamble Address Code
7371 *
7372 * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
7373 * or 0x18 to 0x1f (for data channel 2), with the second byte in the
7374 * range 0x40 to 0x7f.
7375 *
7376 * @param {Integer} char0 The first byte
7377 * @param {Integer} char1 The second byte
7378 * @return {Boolean} Whether the 2 bytes are a PAC
7379 */
7380
7381
Cea608Stream.prototype.isPAC = function (char0, char1) {
  // char0 within the 8-byte window starting at BASE_; char1 in 0x40-0x7f.
  var inBase = char0 >= this.BASE_ && char0 < this.BASE_ + 8;
  return inBase && 0x40 <= char1 && char1 <= 0x7f;
};
7385 /**
7386 * Detects if a packet's second byte is in the range of a PAC color code
7387 *
7388 * PAC color codes have the second byte be in the range 0x40 to 0x4f, or
 * 0x60 to 0x7f.
7390 *
7391 * @param {Integer} char1 The second byte
7392 * @return {Boolean} Whether the byte is a color PAC
7393 */
7394
7395
Cea608Stream.prototype.isColorPAC = function (char1) {
  // Color codes occupy 0x40-0x4f and 0x60-0x7f of the second byte.
  return 0x40 <= char1 && char1 <= 0x4f || 0x60 <= char1 && char1 <= 0x7f;
};
7399 /**
7400 * Detects if a single byte is in the range of a normal character
7401 *
7402 * Normal text bytes are in the range 0x20 to 0x7f.
7403 *
7404 * @param {Integer} char The byte
7405 * @return {Boolean} Whether the byte is a normal character
7406 */
7407
7408
Cea608Stream.prototype.isNormalChar = function (char) {
  // Normal text bytes occupy 0x20-0x7f.
  return 0x20 <= char && char <= 0x7f;
};
7412 /**
7413 * Configures roll-up
7414 *
7415 * @param {Integer} pts Current PTS
7416 * @param {Integer} newBaseRow Used by PACs to slide the current window to
7417 * a new position
7418 */
7419
7420
Cea608Stream.prototype.setRollUp = function (pts, newBaseRow) {
  // Entering roll-up from another mode: reset the base row to the bottom
  // row and (per spec) wipe both memories after flushing what's on screen.
  if (this.mode_ !== 'rollUp') {
    this.row_ = BOTTOM_ROW;
    this.mode_ = 'rollUp';
    this.flushDisplayed(pts);
    this.nonDisplayed_ = createDisplayBuffer();
    this.displayed_ = createDisplayBuffer();
  }

  if (newBaseRow !== undefined && newBaseRow !== this.row_) {
    // A PAC moved the window: relocate the currently-displayed captions
    // (up or down) to the new base row.
    for (var offset = 0; offset < this.rollUpRows_; offset++) {
      this.displayed_[newBaseRow - offset] = this.displayed_[this.row_ - offset];
      this.displayed_[this.row_ - offset] = '';
    }
  }

  if (newBaseRow === undefined) {
    newBaseRow = this.row_;
  }

  this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
};

// Adds the opening HTML tag for the passed character to the caption text,
// and keeps track of it for later closing
7447
7448
Cea608Stream.prototype.addFormatting = function (pts, format) {
  // Remember what we've opened so clearFormatting can close it later.
  this.formatting_ = this.formatting_.concat(format);

  var openTags = '';
  for (var j = 0; j < format.length; j++) {
    openTags += '<' + format[j] + '>';
  }

  this[this.mode_](pts, openTags);
};

// Adds HTML closing tags for current formatting to caption text and
// clears remembered formatting
7457
7458
Cea608Stream.prototype.clearFormatting = function (pts) {
  if (!this.formatting_.length) {
    return;
  }

  // Close tags in reverse order of opening, then forget them.
  var opened = this.formatting_.reverse();
  var closeTags = '';
  for (var j = 0; j < opened.length; j++) {
    closeTags += '</' + opened[j] + '>';
  }

  this.formatting_ = [];
  this[this.mode_](pts, closeTags);
};

// Mode Implementations
7470
7471
Cea608Stream.prototype.popOn = function (pts, text) {
  // Pop-on text buffers into non-displayed memory on the current row.
  this.nonDisplayed_[this.row_] += text;
};
7478
Cea608Stream.prototype.rollUp = function (pts, text) {
  // Roll-up text renders straight into the displayed base row.
  this.displayed_[this.row_] += text;
};
7484
Cea608Stream.prototype.shiftRowsUp_ = function () {
  var row;

  // Clear rows that can never display captions: everything above the
  // roll-up window and everything below the base row.
  for (row = 0; row < this.topRow_; row++) {
    this.displayed_[row] = '';
  }

  for (row = this.row_ + 1; row <= BOTTOM_ROW; row++) {
    this.displayed_[row] = '';
  }

  // Scroll the window contents up one row...
  for (row = this.topRow_; row < this.row_; row++) {
    this.displayed_[row] = this.displayed_[row + 1];
  }

  // ...and blank the base row.
  this.displayed_[this.row_] = '';
};
7504
Cea608Stream.prototype.paintOn = function (pts, text) {
  // Paint-on text renders directly to the display, like roll-up.
  this.displayed_[this.row_] += text;
};

// exports
7510
7511
// Public exports of the caption-parsing pipeline.
var captionStream = {
  CaptionStream: CaptionStream$1,
  Cea608Stream: Cea608Stream,
  Cea708Stream: Cea708Stream
};
7517 /**
7518 * mux.js
7519 *
7520 * Copyright (c) Brightcove
7521 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
7522 */
7523
// MP2T elementary stream type ids.
var streamTypes = {
  H264_STREAM_TYPE: 0x1B,
  ADTS_STREAM_TYPE: 0x0F,
  METADATA_STREAM_TYPE: 0x15
};

// MPEG-2 TS timestamps are 33 bits and wrap at 2^33 (MAX_TS). A jump of
// more than RO_THRESH (2^32, ~13.25 hours at 90kHz) relative to the
// reference is treated as a rollover.
var MAX_TS = 8589934592;
var RO_THRESH = 4294967296;
var TYPE_SHARED = 'shared';

/**
 * Adjust `value` by whole 2^33 rollovers until it lies within RO_THRESH
 * of `reference`.
 *
 * @param {Number} value timestamp to unwrap
 * @param {Number} reference anchor timestamp
 * @return {Number} the unwrapped timestamp
 */
var handleRollover$1 = function handleRollover(value, reference) {
  // If the current timestamp value is greater than our reference timestamp
  // and we detect a rollover, the wrap happened in the opposite direction
  // (e.g. seeking backwards over a rollover point), so unwrap by
  // subtracting rather than adding.
  var step = value > reference ? -MAX_TS : MAX_TS;

  // Note: A seek forwards or back that is greater than RO_THRESH will
  // cause an incorrect adjustment.
  while (Math.abs(reference - value) > RO_THRESH) {
    value += step;
  }

  return value;
};
7555
/**
 * Rewrites the PTS/DTS of passing packets so they stay monotonic across
 * 33-bit timestamp rollovers, anchored to the first DTS seen (or the last
 * DTS before a flush).
 *
 * @param {String} [type] packet type this stream accepts; defaults to
 *        TYPE_SHARED, which accepts every packet
 */
var TimestampRolloverStream$1 = function TimestampRolloverStream(type) {
  var lastDTS, referenceDTS;
  TimestampRolloverStream.prototype.init.call(this);

  // The "shared" type is used in cases where a stream will contain muxed
  // video and audio. We could use `undefined` here, but having a string
  // makes debugging a little clearer.
  this.type_ = type || TYPE_SHARED;

  this.push = function (data) {
    // Any "shared" rollover streams will accept _all_ data. Otherwise,
    // streams will only accept data that matches their type.
    if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
      return;
    }

    // Anchor the rollover adjustment to the first DTS seen.
    if (referenceDTS === undefined) {
      referenceDTS = data.dts;
    }

    data.dts = handleRollover$1(data.dts, referenceDTS);
    data.pts = handleRollover$1(data.pts, referenceDTS);
    lastDTS = data.dts;
    this.trigger('data', data);
  };

  // Carry the last adjusted DTS forward as the reference across a flush.
  this.flush = function () {
    referenceDTS = lastDTS;
    this.trigger('done');
  };

  this.endTimeline = function () {
    this.flush();
    this.trigger('endedtimeline');
  };

  // Forget the reference on a discontinuity; the next packet re-anchors it.
  this.discontinuity = function () {
    referenceDTS = void 0;
    lastDTS = void 0;
  };

  this.reset = function () {
    this.discontinuity();
    this.trigger('reset');
  };
};
7601
TimestampRolloverStream$1.prototype = new stream();

// Exports: the rollover stream plus the raw rollover-adjustment helper.
var timestampRolloverStream = {
  TimestampRolloverStream: TimestampRolloverStream$1,
  handleRollover: handleRollover$1
};
7607
// Return the percent-encoded representation of the byte range
// [start, end), e.g. [0x41] -> '%41'; used to decode byte ranges via
// decodeURIComponent below.
var percentEncode$1 = function percentEncode(bytes, start, end) {
  var i;
  var result = '';

  for (i = start; i < end; i++) {
    result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
  }

  return result;
};

// return the string representation of the specified byte range,
// interpreted as UTF-8.
var parseUtf8 = function parseUtf8(bytes, start, end) {
  return decodeURIComponent(percentEncode$1(bytes, start, end));
};

// return the string representation of the specified byte range,
// interpreted as ISO-8859-1, where each byte maps directly to the code
// point of the same value. (Previously implemented with the deprecated
// Annex-B global `unescape`, which decodes '%XX' to the identical
// char code; the direct mapping below is equivalent.)
var parseIso88591$1 = function parseIso88591(bytes, start, end) {
  var i;
  var result = '';

  for (i = start; i < end; i++) {
    result += String.fromCharCode(bytes[i]);
  }

  return result;
};

// Decode a 28-bit ID3 "syncsafe" integer: four bytes, 7 significant bits
// each, most significant byte first.
var parseSyncSafeInteger$1 = function parseSyncSafeInteger(data) {
  return data[0] << 21 | data[1] << 14 | data[2] << 7 | data[3];
};

// Frame-specific ID3 parsers, keyed by frame id. Each mutates the passed
// tag/frame object in place.
var tagParsers = {
  // User-defined text frame: <encoding byte><description>\0<value>
  TXXX: function TXXX(tag) {
    var i;

    if (tag.data[0] !== 3) {
      // ignore frames with unrecognized character encodings (3 = UTF-8)
      return;
    }

    for (i = 1; i < tag.data.length; i++) {
      if (tag.data[i] === 0) {
        // parse the text fields
        tag.description = parseUtf8(tag.data, 1, i);

        // do not include the null terminator in the tag value
        tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
        break;
      }
    }

    tag.data = tag.value;
  },
  // User-defined URL frame: <encoding byte><description>\0<url>
  WXXX: function WXXX(tag) {
    var i;

    if (tag.data[0] !== 3) {
      // ignore frames with unrecognized character encodings (3 = UTF-8)
      return;
    }

    for (i = 1; i < tag.data.length; i++) {
      if (tag.data[i] === 0) {
        // parse the description and URL fields
        tag.description = parseUtf8(tag.data, 1, i);
        tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
        break;
      }
    }
  },
  // Private frame: <owner (ISO-8859-1)>\0<binary private data>
  PRIV: function PRIV(tag) {
    var i;

    for (i = 0; i < tag.data.length; i++) {
      if (tag.data[i] === 0) {
        // parse the owner identifier
        tag.owner = parseIso88591$1(tag.data, 0, i);
        break;
      }
    }

    tag.privateData = tag.data.subarray(i + 1);
    tag.data = tag.privateData;
  }
};

// Assigned below; declared here alongside the helpers it uses.
var _MetadataStream;
7685
/**
 * Collects 'timed-metadata' PES packets, reassembles complete ID3 tags
 * from them, parses the tags' frames, and emits parsed tags via 'data'
 * events (plus 'timestamp' events for Apple's transportStreamTimestamp
 * PRIV frame).
 *
 * @param {Object} [options]
 * @param {Uint8Array} [options.descriptor] bytes of the program-level
 *        descriptor field in MP2T; appended (hex-encoded) to the text
 *        track's dispatch type
 */
_MetadataStream = function MetadataStream(options) {
  var settings = {
    // the bytes of the program-level descriptor field in MP2T
    // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
    // program element descriptors"
    descriptor: options && options.descriptor
  },
      // the total size in bytes of the ID3 tag being parsed
      tagSize = 0,
      // tag data that is not complete enough to be parsed
      buffer = [],
      // the total number of bytes currently in the buffer
      bufferSize = 0,
      i;

  _MetadataStream.prototype.init.call(this);

  // calculate the text track in-band metadata track dispatch type
  // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
  this.dispatchType = streamTypes.METADATA_STREAM_TYPE.toString(16);

  if (settings.descriptor) {
    for (i = 0; i < settings.descriptor.length; i++) {
      this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
    }
  }

  this.push = function (chunk) {
    var tag, frameStart, frameSize, frame, i, frameHeader;

    if (chunk.type !== 'timed-metadata') {
      return;
    }

    // if data_alignment_indicator is set in the PES header,
    // we must have the start of a new ID3 tag. Assume anything
    // remaining in the buffer was malformed and throw it out
    if (chunk.dataAlignmentIndicator) {
      bufferSize = 0;
      buffer.length = 0;
    }

    // ignore events that don't look like ID3 data (no "ID3" magic bytes)
    if (buffer.length === 0 && (chunk.data.length < 10 || chunk.data[0] !== 'I'.charCodeAt(0) || chunk.data[1] !== 'D'.charCodeAt(0) || chunk.data[2] !== '3'.charCodeAt(0))) {
      this.trigger('log', {
        level: 'warn',
        message: 'Skipping unrecognized metadata packet'
      });
      return;
    }

    // add this chunk to the data we've collected so far
    buffer.push(chunk);
    bufferSize += chunk.data.byteLength;

    // grab the size of the entire frame from the ID3 header
    if (buffer.length === 1) {
      // the frame size is transmitted as a 28-bit integer in the
      // last four bytes of the ID3 header.
      // The most significant bit of each byte is dropped and the
      // results concatenated to recover the actual value.
      tagSize = parseSyncSafeInteger$1(chunk.data.subarray(6, 10));

      // ID3 reports the tag size excluding the header but it's more
      // convenient for our comparisons to include it
      tagSize += 10;
    }

    // if the entire frame has not arrived, wait for more data
    if (bufferSize < tagSize) {
      return;
    }

    // collect the entire frame so it can be parsed
    tag = {
      data: new Uint8Array(tagSize),
      frames: [],
      pts: buffer[0].pts,
      dts: buffer[0].dts
    };

    // concatenate buffered chunks into tag.data, consuming the buffer
    for (i = 0; i < tagSize;) {
      tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
      i += buffer[0].data.byteLength;
      bufferSize -= buffer[0].data.byteLength;
      buffer.shift();
    }

    // find the start of the first frame and the end of the tag
    frameStart = 10;

    if (tag.data[5] & 0x40) {
      // advance the frame start past the extended header
      frameStart += 4; // header size field

      frameStart += parseSyncSafeInteger$1(tag.data.subarray(10, 14));

      // clip any padding off the end
      tagSize -= parseSyncSafeInteger$1(tag.data.subarray(16, 20));
    }

    // parse one or more ID3 frames
    // http://id3.org/id3v2.3.0#ID3v2_frame_overview
    do {
      // determine the number of bytes in this frame
      frameSize = parseSyncSafeInteger$1(tag.data.subarray(frameStart + 4, frameStart + 8));

      if (frameSize < 1) {
        this.trigger('log', {
          level: 'warn',
          message: 'Malformed ID3 frame encountered. Skipping metadata parsing.'
        });
        return;
      }

      // four-character frame id, e.g. 'TXXX', 'PRIV'
      frameHeader = String.fromCharCode(tag.data[frameStart], tag.data[frameStart + 1], tag.data[frameStart + 2], tag.data[frameStart + 3]);
      frame = {
        id: frameHeader,
        data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
      };
      frame.key = frame.id;

      if (tagParsers[frame.id]) {
        tagParsers[frame.id](frame);

        // handle the special PRIV frame used to indicate the start
        // time for raw AAC data
        if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
          var d = frame.data,
              size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
          size *= 4;
          size += d[7] & 0x03;
          frame.timeStamp = size;

          // in raw AAC, all subsequent data will be timestamped based
          // on the value of this frame
          // we couldn't have known the appropriate pts and dts before
          // parsing this ID3 tag so set those values now
          if (tag.pts === undefined && tag.dts === undefined) {
            tag.pts = frame.timeStamp;
            tag.dts = frame.timeStamp;
          }

          this.trigger('timestamp', frame);
        }
      }

      tag.frames.push(frame);

      frameStart += 10; // advance past the frame header

      frameStart += frameSize; // advance past the frame body
    } while (frameStart < tagSize);

    this.trigger('data', tag);
  };
};
7837
// Wire MetadataStream into the Stream event-emitter hierarchy and export it.
_MetadataStream.prototype = new stream();
var metadataStream = _MetadataStream;
var TimestampRolloverStream = timestampRolloverStream.TimestampRolloverStream; // object types

var _TransportPacketStream, _TransportParseStream, _ElementaryStream; // constants


// Every MPEG-2 transport stream packet is exactly 188 bytes and begins
// with the sync byte 0x47 ('G').
var MP2T_PACKET_LENGTH$1 = 188,
    // bytes
SYNC_BYTE$1 = 0x47;
/**
 * Splits an incoming stream of binary data into fixed-size MPEG-2
 * Transport Stream packets (188 bytes, delimited by the 0x47 sync byte).
 */

_TransportPacketStream = function TransportPacketStream() {
  // bytes held over from a previous push that did not form a whole packet
  var partialPacket = new Uint8Array(MP2T_PACKET_LENGTH$1);
  var partialBytes = 0;

  _TransportPacketStream.prototype.init.call(this);

  /**
   * Split a stream of data into M2TS packets
   **/
  this.push = function (bytes) {
    var combined;
    var start = 0;
    var end = MP2T_PACKET_LENGTH$1;

    // Prepend any bytes remaining from the previous segment to the bytes
    // that were just pushed in.
    if (partialBytes) {
      combined = new Uint8Array(bytes.byteLength + partialBytes);
      combined.set(partialPacket.subarray(0, partialBytes));
      combined.set(bytes, partialBytes);
      partialBytes = 0;
    } else {
      combined = bytes;
    }

    // While enough data remains for a whole packet: emit a packet when a
    // pair of aligned sync bytes is found, otherwise slide forward one
    // byte at a time until alignment is re-established.
    while (end < combined.byteLength) {
      if (combined[start] === SYNC_BYTE$1 && combined[end] === SYNC_BYTE$1) {
        this.trigger('data', combined.subarray(start, end));
        start += MP2T_PACKET_LENGTH$1;
        end += MP2T_PACKET_LENGTH$1;
      } else {
        start++;
        end++;
      }
    }

    // Keep any trailing bytes that could not be a whole packet; they may
    // be the start of a packet that continues in the next segment.
    if (start < combined.byteLength) {
      partialPacket.set(combined.subarray(start), 0);
      partialBytes = combined.byteLength - start;
    }
  };

  /**
   * Emit the buffered packet if it is complete, then signal 'done'.
   * An incomplete buffer is retained because it may be important for
   * decoding the next segment.
   **/
  this.flush = function () {
    if (partialBytes === MP2T_PACKET_LENGTH$1 && partialPacket[0] === SYNC_BYTE$1) {
      this.trigger('data', partialPacket);
      partialBytes = 0;
    }

    this.trigger('done');
  };

  this.endTimeline = function () {
    this.flush();
    this.trigger('endedtimeline');
  };

  this.reset = function () {
    partialBytes = 0;
    this.trigger('reset');
  };
};

_TransportPacketStream.prototype = new stream();
/**
 * Accepts an MP2T TransportPacketStream and emits data events with parsed
 * forms of the individual transport stream packets.
 */

_TransportParseStream = function TransportParseStream() {
  var parsePsi, parsePat, parsePmt, self;

  _TransportParseStream.prototype.init.call(this);

  self = this;
  // PES packets received before the first PMT are queued here and replayed
  // once stream types are known.
  this.packetsWaitingForPmt = [];
  this.programMapTable = undefined;

  // Dispatch a Program Specific Information payload to the PAT or PMT parser.
  parsePsi = function parsePsi(payload, psi) {
    var offset = 0; // PSI packets may be split into multiple sections and those
    // sections may be split into multiple packets. If a PSI
    // section starts in this packet, the payload_unit_start_indicator
    // will be true and the first byte of the payload will indicate
    // the offset from the current position to the start of the
    // section.

    if (psi.payloadUnitStartIndicator) {
      offset += payload[offset] + 1;
    }

    if (psi.type === 'pat') {
      parsePat(payload.subarray(offset), psi);
    } else {
      parsePmt(payload.subarray(offset), psi);
    }
  };

  // Parse a Program Association Table; records the PID of the first PMT.
  parsePat = function parsePat(payload, pat) {
    pat.section_number = payload[7]; // eslint-disable-line camelcase

    pat.last_section_number = payload[8]; // eslint-disable-line camelcase
    // skip the PSI header and parse the first PMT entry

    self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
    pat.pmtPid = self.pmtPid;
  };
  /**
   * Parse out the relevant fields of a Program Map Table (PMT).
   * @param payload {Uint8Array} the PMT-specific portion of an MP2T
   * packet. The first byte in this array should be the table_id
   * field.
   * @param pmt {object} the object that should be decorated with
   * fields parsed from the PMT.
   */


  parsePmt = function parsePmt(payload, pmt) {
    var sectionLength, tableEnd, programInfoLength, offset; // PMTs can be sent ahead of the time when they should actually
    // take effect. We don't believe this should ever be the case
    // for HLS but we'll ignore "forward" PMT declarations if we see
    // them. Future PMT declarations have the current_next_indicator
    // set to zero.

    if (!(payload[5] & 0x01)) {
      return;
    } // overwrite any existing program map table


    self.programMapTable = {
      video: null,
      audio: null,
      'timed-metadata': {}
    }; // the mapping table ends at the end of the current section

    sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
    tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
    // long the program info descriptors are

    programInfoLength = (payload[10] & 0x0f) << 8 | payload[11]; // advance the offset to the first entry in the mapping table

    offset = 12 + programInfoLength;

    while (offset < tableEnd) {
      var streamType = payload[offset];
      var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2]; // only map a single elementary_pid for audio and video stream types
      // TODO: should this be done for metadata too? for now maintain behavior of
      // multiple metadata streams

      if (streamType === streamTypes.H264_STREAM_TYPE && self.programMapTable.video === null) {
        self.programMapTable.video = pid;
      } else if (streamType === streamTypes.ADTS_STREAM_TYPE && self.programMapTable.audio === null) {
        self.programMapTable.audio = pid;
      } else if (streamType === streamTypes.METADATA_STREAM_TYPE) {
        // map pid to stream type for metadata streams
        self.programMapTable['timed-metadata'][pid] = streamType;
      } // move to the next table entry
      // skip past the elementary stream descriptors, if present


      offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
    } // record the map on the packet as well


    pmt.programMapTable = self.programMapTable;
  };
  /**
   * Deliver a new MP2T packet to the next stream in the pipeline.
   * Emits 'data' events with a result object carrying pid, type
   * ('pat' | 'pmt' | 'pes') and, for PES packets, streamType and data.
   */


  this.push = function (packet) {
    var result = {},
        offset = 4;
    result.payloadUnitStartIndicator = !!(packet[1] & 0x40); // pid is a 13-bit field starting at the last bit of packet[1]

    result.pid = packet[1] & 0x1f;
    result.pid <<= 8;
    result.pid |= packet[2]; // if an adaption field is present, its length is specified by the
    // fifth byte of the TS packet header. The adaptation field is
    // used to add stuffing to PES packets that don't fill a complete
    // TS packet, and to specify some forms of timing and control data
    // that we do not currently use.

    if ((packet[3] & 0x30) >>> 4 > 0x01) {
      offset += packet[offset] + 1;
    } // parse the rest of the packet based on the type


    if (result.pid === 0) {
      result.type = 'pat';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result);
    } else if (result.pid === this.pmtPid) {
      result.type = 'pmt';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result); // if there are any packets waiting for a PMT to be found, process them now

      while (this.packetsWaitingForPmt.length) {
        this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
      }
    } else if (this.programMapTable === undefined) {
      // When we have not seen a PMT yet, defer further processing of
      // PES packets until one has been parsed
      this.packetsWaitingForPmt.push([packet, offset, result]);
    } else {
      this.processPes_(packet, offset, result);
    }
  };

  // Annotate a PES packet with its stream type (looked up from the PMT)
  // and emit it downstream.
  this.processPes_ = function (packet, offset, result) {
    // set the appropriate stream type
    if (result.pid === this.programMapTable.video) {
      result.streamType = streamTypes.H264_STREAM_TYPE;
    } else if (result.pid === this.programMapTable.audio) {
      result.streamType = streamTypes.ADTS_STREAM_TYPE;
    } else {
      // if not video or audio, it is timed-metadata or unknown
      // if unknown, streamType will be undefined
      result.streamType = this.programMapTable['timed-metadata'][result.pid];
    }

    result.type = 'pes';
    result.data = packet.subarray(offset);
    this.trigger('data', result);
  };
};

_TransportParseStream.prototype = new stream();
_TransportParseStream.STREAM_TYPES = {
  h264: 0x1b,
  adts: 0x0f
};
/**
 * Reconsistutes program elementary stream (PES) packets from parsed
 * transport stream packets. That is, if you pipe an
 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
 * events will be events which capture the bytes for individual PES
 * packets plus relevant metadata that has been extracted from the
 * container.
 */

_ElementaryStream = function ElementaryStream() {
  var self = this,
      // whether a PMT was observed during the current segment; consulted by
      // flush() to decide if a synthetic metadata event must be emitted
      segmentHadPmt = false,
      // PES packet fragments
      video = {
        data: [],
        size: 0
      },
      audio = {
        data: [],
        size: 0
      },
      timedMetadata = {
        data: [],
        size: 0
      },
      programMapTable,
      // Parse PES header fields (packet length, data alignment, PTS/DTS)
      // out of an assembled payload and attach them to `pes`.
      parsePes = function parsePes(payload, pes) {
    var ptsDtsFlags;
    var startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2]; // default to an empty array

    pes.data = new Uint8Array(); // In certain live streams, the start of a TS fragment has ts packets
    // that are frame data that is continuing from the previous fragment. This
    // is to check that the pes data is the start of a new pes payload

    if (startPrefix !== 1) {
      return;
    } // get the packet length, this will be 0 for video


    pes.packetLength = 6 + (payload[4] << 8 | payload[5]); // find out if this packets starts a new keyframe

    pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0; // PES packets may be annotated with a PTS value, or a PTS value
    // and a DTS value. Determine what combination of values is
    // available to work with.

    ptsDtsFlags = payload[7]; // PTS and DTS are normally stored as a 33-bit number. Javascript
    // performs all bitwise operations on 32-bit integers but javascript
    // supports a much greater range (52-bits) of integer using standard
    // mathematical operations.
    // We construct a 31-bit value using bitwise operators over the 31
    // most significant bits and then multiply by 4 (equal to a left-shift
    // of 2) before we add the final 2 least significant bits of the
    // timestamp (equal to an OR.)

    if (ptsDtsFlags & 0xC0) {
      // the PTS and DTS are not written out directly. For information
      // on how they are encoded, see
      // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
      pes.pts = (payload[9] & 0x0E) << 27 | (payload[10] & 0xFF) << 20 | (payload[11] & 0xFE) << 12 | (payload[12] & 0xFF) << 5 | (payload[13] & 0xFE) >>> 3;
      pes.pts *= 4; // Left shift by 2

      pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs

      pes.dts = pes.pts;

      if (ptsDtsFlags & 0x40) {
        pes.dts = (payload[14] & 0x0E) << 27 | (payload[15] & 0xFF) << 20 | (payload[16] & 0xFE) << 12 | (payload[17] & 0xFF) << 5 | (payload[18] & 0xFE) >>> 3;
        pes.dts *= 4; // Left shift by 2

        pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
      }
    } // the data section starts immediately after the PES header.
    // pes_header_data_length specifies the number of header bytes
    // that follow the last byte of the field.


    pes.data = payload.subarray(9 + payload[8]);
  },

  /**
   * Pass completely parsed PES packets to the next stream in the pipeline
   **/
  flushStream = function flushStream(stream, type, forceFlush) {
    var packetData = new Uint8Array(stream.size),
        event = {
          type: type
        },
        i = 0,
        offset = 0,
        packetFlushable = false,
        fragment; // do nothing if there is not enough buffered data for a complete
    // PES header

    if (!stream.data.length || stream.size < 9) {
      return;
    }

    event.trackId = stream.data[0].pid; // reassemble the packet

    for (i = 0; i < stream.data.length; i++) {
      fragment = stream.data[i];
      packetData.set(fragment.data, offset);
      offset += fragment.data.byteLength;
    } // parse assembled packet's PES header


    parsePes(packetData, event); // non-video PES packets MUST have a non-zero PES_packet_length
    // check that there is enough stream data to fill the packet

    packetFlushable = type === 'video' || event.packetLength <= stream.size; // flush pending packets if the conditions are right

    if (forceFlush || packetFlushable) {
      stream.size = 0;
      stream.data.length = 0;
    } // only emit packets that are complete. this is to avoid assembling
    // incomplete PES packets due to poor segmentation


    if (packetFlushable) {
      self.trigger('data', event);
    }
  };

  _ElementaryStream.prototype.init.call(this);
  /**
   * Identifies M2TS packet types and parses PES packets using metadata
   * parsed from the PMT
   **/


  this.push = function (data) {
    // dispatch on data.type ('pat' | 'pes' | 'pmt') via an inline handler map
    ({
      pat: function pat() {// we have to wait for the PMT to arrive as well before we
        // have any meaningful metadata
      },
      pes: function pes() {
        var stream, streamType;

        switch (data.streamType) {
          case streamTypes.H264_STREAM_TYPE:
            stream = video;
            streamType = 'video';
            break;

          case streamTypes.ADTS_STREAM_TYPE:
            stream = audio;
            streamType = 'audio';
            break;

          case streamTypes.METADATA_STREAM_TYPE:
            stream = timedMetadata;
            streamType = 'timed-metadata';
            break;

          default:
            // ignore unknown stream types
            return;
        } // if a new packet is starting, we can flush the completed
        // packet


        if (data.payloadUnitStartIndicator) {
          flushStream(stream, streamType, true);
        } // buffer this fragment until we are sure we've received the
        // complete payload


        stream.data.push(data);
        stream.size += data.data.byteLength;
      },
      pmt: function pmt() {
        var event = {
          type: 'metadata',
          tracks: []
        };
        programMapTable = data.programMapTable; // translate audio and video streams to tracks

        if (programMapTable.video !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.video,
            codec: 'avc',
            type: 'video'
          });
        }

        if (programMapTable.audio !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.audio,
            codec: 'adts',
            type: 'audio'
          });
        }

        segmentHadPmt = true;
        self.trigger('data', event);
      }
    })[data.type]();
  };

  // NOTE(review): reset clears only the video and audio buffers; buffered
  // timed-metadata fragments survive a reset — presumably intentional
  // upstream, but worth confirming.
  this.reset = function () {
    video.size = 0;
    video.data.length = 0;
    audio.size = 0;
    audio.data.length = 0;
    this.trigger('reset');
  };
  /**
   * Flush any remaining input. Video PES packets may be of variable
   * length. Normally, the start of a new video packet can trigger the
   * finalization of the previous packet. That is not possible if no
   * more video is forthcoming, however. In that case, some other
   * mechanism (like the end of the file) has to be employed. When it is
   * clear that no additional data is forthcoming, calling this method
   * will flush the buffered packets.
   */


  this.flushStreams_ = function () {
    // !!THIS ORDER IS IMPORTANT!!
    // video first then audio
    flushStream(video, 'video');
    flushStream(audio, 'audio');
    flushStream(timedMetadata, 'timed-metadata');
  };

  this.flush = function () {
    // if on flush we haven't had a pmt emitted
    // and we have a pmt to emit. emit the pmt
    // so that we trigger a trackinfo downstream.
    if (!segmentHadPmt && programMapTable) {
      var pmt = {
        type: 'metadata',
        tracks: []
      }; // translate audio and video streams to tracks

      if (programMapTable.video !== null) {
        pmt.tracks.push({
          timelineStartInfo: {
            baseMediaDecodeTime: 0
          },
          id: +programMapTable.video,
          codec: 'avc',
          type: 'video'
        });
      }

      if (programMapTable.audio !== null) {
        pmt.tracks.push({
          timelineStartInfo: {
            baseMediaDecodeTime: 0
          },
          id: +programMapTable.audio,
          codec: 'adts',
          type: 'audio'
        });
      }

      self.trigger('data', pmt);
    }

    segmentHadPmt = false;
    this.flushStreams_();
    this.trigger('done');
  };
};

_ElementaryStream.prototype = new stream();
// Public namespace object for the MPEG-2 transport stream parsing pipeline.
var m2ts = {
  PAT_PID: 0x0000,
  MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH$1,
  TransportPacketStream: _TransportPacketStream,
  TransportParseStream: _TransportParseStream,
  ElementaryStream: _ElementaryStream,
  TimestampRolloverStream: TimestampRolloverStream,
  CaptionStream: captionStream.CaptionStream,
  Cea608Stream: captionStream.Cea608Stream,
  Cea708Stream: captionStream.Cea708Stream,
  MetadataStream: metadataStream
};

// re-export every own property of streamTypes (the stream type constants)
// on the m2ts namespace as well
for (var type in streamTypes) {
  if (streamTypes.hasOwnProperty(type)) {
    m2ts[type] = streamTypes[type];
  }
}

var m2ts_1 = m2ts;
var ONE_SECOND_IN_TS$2 = clock.ONE_SECOND_IN_TS;

var _AdtsStream;

// Sampling frequencies in Hz, indexed by the 4-bit
// sampling_frequency_index field of the ADTS header.
var ADTS_SAMPLING_FREQUENCIES$1 = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
/*
 * Accepts a ElementaryStream and emits data events with parsed
 * AAC Audio Frames of the individual packets. Input audio in ADTS
 * format is unpacked and re-emitted as AAC frames.
 *
 * @see http://wiki.multimedia.cx/index.php?title=ADTS
 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
 */

_AdtsStream = function AdtsStream(handlePartialSegments) {
  var buffer,
      frameNum = 0;

  _AdtsStream.prototype.init.call(this);

  // Emit a warn-level log event for a byte range skipped because it fell
  // outside an ADTS syncword.
  this.skipWarn_ = function (start, end) {
    this.trigger('log', {
      level: 'warn',
      message: "adts skiping bytes " + start + " to " + end + " in frame " + frameNum + " outside syncword"
    });
  };

  // Accept a PES packet from an ElementaryStream and emit one 'data'
  // event per complete ADTS frame contained in it.
  this.push = function (packet) {
    var i = 0,
        frameLength,
        protectionSkipBytes,
        oldBuffer,
        sampleCount,
        adtsFrameDuration;

    if (!handlePartialSegments) {
      frameNum = 0;
    }

    if (packet.type !== 'audio') {
      // ignore non-audio data
      return;
    } // Prepend any data in the buffer to the input data so that we can parse
    // aac frames the cross a PES packet boundary


    if (buffer && buffer.length) {
      oldBuffer = buffer;
      buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
      buffer.set(oldBuffer);
      buffer.set(packet.data, oldBuffer.byteLength);
    } else {
      buffer = packet.data;
    } // unpack any ADTS frames which have been fully received
    // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS


    var skip; // We use i + 7 here because we want to be able to parse the entire header.
    // If we don't have enough bytes to do that, then we definitely won't have a full frame.

    while (i + 7 < buffer.length) {
      // Look for the start of an ADTS header..
      if (buffer[i] !== 0xFF || (buffer[i + 1] & 0xF6) !== 0xF0) {
        if (typeof skip !== 'number') {
          skip = i;
        } // If a valid header was not found, jump one forward and attempt to
        // find a valid ADTS header starting at the next byte


        i++;
        continue;
      }

      if (typeof skip === 'number') {
        this.skipWarn_(skip, i);
        skip = null;
      } // The protection skip bit tells us if we have 2 bytes of CRC data at the
      // end of the ADTS header


      protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2; // Frame length is a 13 bit integer starting 16 bits from the
      // end of the sync sequence
      // NOTE: frame length includes the size of the header

      frameLength = (buffer[i + 3] & 0x03) << 11 | buffer[i + 4] << 3 | (buffer[i + 5] & 0xe0) >> 5;
      sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
      adtsFrameDuration = sampleCount * ONE_SECOND_IN_TS$2 / ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2]; // If we don't have enough data to actually finish this ADTS frame,
      // then we have to wait for more data

      if (buffer.byteLength - i < frameLength) {
        break;
      } // Otherwise, deliver the complete AAC frame


      this.trigger('data', {
        pts: packet.pts + frameNum * adtsFrameDuration,
        dts: packet.dts + frameNum * adtsFrameDuration,
        sampleCount: sampleCount,
        audioobjecttype: (buffer[i + 2] >>> 6 & 0x03) + 1,
        channelcount: (buffer[i + 2] & 1) << 2 | (buffer[i + 3] & 0xc0) >>> 6,
        samplerate: ADTS_SAMPLING_FREQUENCIES$1[(buffer[i + 2] & 0x3c) >>> 2],
        samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
        // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
        samplesize: 16,
        // data is the frame without it's header
        data: buffer.subarray(i + 7 + protectionSkipBytes, i + frameLength)
      });
      frameNum++;
      i += frameLength;
    }

    // warn about any trailing bytes that never matched a syncword
    if (typeof skip === 'number') {
      this.skipWarn_(skip, i);
      skip = null;
    } // remove processed bytes from the buffer.


    buffer = buffer.subarray(i);
  };

  this.flush = function () {
    frameNum = 0;
    this.trigger('done');
  };

  this.reset = function () {
    buffer = void 0;
    this.trigger('reset');
  };

  this.endTimeline = function () {
    buffer = void 0;
    this.trigger('endedtimeline');
  };
};

_AdtsStream.prototype = new stream();
var adts = _AdtsStream;
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 */

var ExpGolomb;
/**
 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
 * scheme used by h264.
 */

ExpGolomb = function ExpGolomb(workingData) {
  // bytes of workingData that have not yet been loaded into the word
  var bytesLeft = workingData.byteLength;
  // the 32-bit word currently being examined
  var word = 0; // :uint
  // bits of `word` that have not yet been consumed
  var bitsInWord = 0; // :uint

  // ():uint — total size of the unread source data, in bits
  this.length = function () {
    return 8 * bytesLeft;
  };

  // ():uint — number of bits that can still be read
  this.bitsAvailable = function () {
    return 8 * bytesLeft + bitsInWord;
  };

  // ():void — refill the working word with the next (up to) 4 source bytes
  this.loadWord = function () {
    var position = workingData.byteLength - bytesLeft;
    var wordBytes = new Uint8Array(4);
    var availableBytes = Math.min(4, bytesLeft);

    if (availableBytes === 0) {
      throw new Error('no bytes available');
    }

    wordBytes.set(workingData.subarray(position, position + availableBytes));
    word = new DataView(wordBytes.buffer).getUint32(0);

    // track the amount of workingData that has been processed
    bitsInWord = availableBytes * 8;
    bytesLeft -= availableBytes;
  };

  // (count:int):void — discard `count` bits
  this.skipBits = function (count) {
    var skipBytes; // :int

    if (bitsInWord > count) {
      word <<= count;
      bitsInWord -= count;
    } else {
      // consume the rest of the word, skip whole bytes, then load a fresh
      // word and discard the remainder from it
      count -= bitsInWord;
      skipBytes = Math.floor(count / 8);
      count -= skipBytes * 8;
      bytesLeft -= skipBytes;
      this.loadWord();
      word <<= count;
      bitsInWord -= count;
    }
  };

  // (size:int):uint — read `size` bits as an unsigned integer
  this.readBits = function (size) {
    var bits = Math.min(bitsInWord, size); // :uint
    var valu = word >>> 32 - bits; // :uint
    // if size > 31, handle error

    bitsInWord -= bits;

    if (bitsInWord > 0) {
      word <<= bits;
    } else if (bytesLeft > 0) {
      this.loadWord();
    }

    // recurse for any bits that straddled the word boundary
    bits = size - bits;

    if (bits > 0) {
      return valu << bits | this.readBits(bits);
    }

    return valu;
  };

  // ():uint — count and consume leading zero bits
  this.skipLeadingZeros = function () {
    var leadingZeroCount; // :uint

    for (leadingZeroCount = 0; leadingZeroCount < bitsInWord; ++leadingZeroCount) {
      if ((word & 0x80000000 >>> leadingZeroCount) !== 0) {
        // the first set bit was found; align the word to it
        word <<= leadingZeroCount;
        bitsInWord -= leadingZeroCount;
        return leadingZeroCount;
      }
    }

    // the whole word was zeros; continue counting in the next word
    this.loadWord();
    return leadingZeroCount + this.skipLeadingZeros();
  };

  // ():void — skip an unsigned Exp-Golomb value
  this.skipUnsignedExpGolomb = function () {
    this.skipBits(1 + this.skipLeadingZeros());
  };

  // ():void — skip a signed Exp-Golomb value (same wire format)
  this.skipExpGolomb = function () {
    this.skipBits(1 + this.skipLeadingZeros());
  };

  // ():uint — decode ue(v)
  this.readUnsignedExpGolomb = function () {
    var clz = this.skipLeadingZeros(); // :uint

    return this.readBits(clz + 1) - 1;
  };

  // ():int — decode se(v)
  this.readExpGolomb = function () {
    var valu = this.readUnsignedExpGolomb(); // :int

    if (0x01 & valu) {
      // the number is odd if the low order bit is set
      return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
    }

    return -1 * (valu >>> 1); // divide by two then make it negative
  };

  // ():Boolean — read a single bit as a flag
  this.readBoolean = function () {
    return this.readBits(1) === 1;
  };

  // ():int — read one unsigned byte
  this.readUnsignedByte = function () {
    return this.readBits(8);
  };

  this.loadWord();
};

var expGolomb = ExpGolomb;
8687
// Forward declarations for the H.264 parsing streams defined below.
var _H264Stream, _NalByteStream;

var PROFILES_WITH_OPTIONAL_SPS_DATA;
8691 /**
8692 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
8693 */
8694
8695 _NalByteStream = function NalByteStream() {
8696 var syncPoint = 0,
8697 i,
8698 buffer;
8699
8700 _NalByteStream.prototype.init.call(this);
8701 /*
8702 * Scans a byte stream and triggers a data event with the NAL units found.
8703 * @param {Object} data Event received from H264Stream
8704 * @param {Uint8Array} data.data The h264 byte stream to be scanned
8705 *
8706 * @see H264Stream.push
8707 */
8708
8709
8710 this.push = function (data) {
8711 var swapBuffer;
8712
8713 if (!buffer) {
8714 buffer = data.data;
8715 } else {
8716 swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
8717 swapBuffer.set(buffer);
8718 swapBuffer.set(data.data, buffer.byteLength);
8719 buffer = swapBuffer;
8720 }
8721
8722 var len = buffer.byteLength; // Rec. ITU-T H.264, Annex B
8723 // scan for NAL unit boundaries
8724 // a match looks like this:
8725 // 0 0 1 .. NAL .. 0 0 1
8726 // ^ sync point ^ i
8727 // or this:
8728 // 0 0 1 .. NAL .. 0 0 0
8729 // ^ sync point ^ i
8730 // advance the sync point to a NAL start, if necessary
8731
8732 for (; syncPoint < len - 3; syncPoint++) {
8733 if (buffer[syncPoint + 2] === 1) {
8734 // the sync point is properly aligned
8735 i = syncPoint + 5;
8736 break;
8737 }
8738 }
8739
8740 while (i < len) {
8741 // look at the current byte to determine if we've hit the end of
8742 // a NAL unit boundary
8743 switch (buffer[i]) {
8744 case 0:
8745 // skip past non-sync sequences
8746 if (buffer[i - 1] !== 0) {
8747 i += 2;
8748 break;
8749 } else if (buffer[i - 2] !== 0) {
8750 i++;
8751 break;
8752 } // deliver the NAL unit if it isn't empty
8753
8754
8755 if (syncPoint + 3 !== i - 2) {
8756 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
8757 } // drop trailing zeroes
8758
8759
8760 do {
8761 i++;
8762 } while (buffer[i] !== 1 && i < len);
8763
8764 syncPoint = i - 2;
8765 i += 3;
8766 break;
8767
8768 case 1:
8769 // skip past non-sync sequences
8770 if (buffer[i - 1] !== 0 || buffer[i - 2] !== 0) {
8771 i += 3;
8772 break;
8773 } // deliver the NAL unit
8774
8775
8776 this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
8777 syncPoint = i - 2;
8778 i += 3;
8779 break;
8780
8781 default:
8782 // the current byte isn't a one or zero, so it cannot be part
8783 // of a sync sequence
8784 i += 3;
8785 break;
8786 }
8787 } // filter out the NAL units that were delivered
8788
8789
8790 buffer = buffer.subarray(syncPoint);
8791 i -= syncPoint;
8792 syncPoint = 0;
8793 };
8794
8795 this.reset = function () {
8796 buffer = null;
8797 syncPoint = 0;
8798 this.trigger('reset');
8799 };
8800
8801 this.flush = function () {
8802 // deliver the last buffered NAL unit
8803 if (buffer && buffer.byteLength > 3) {
8804 this.trigger('data', buffer.subarray(syncPoint + 3));
8805 } // reset the stream state
8806
8807
8808 buffer = null;
8809 syncPoint = 0;
8810 this.trigger('done');
8811 };
8812
8813 this.endTimeline = function () {
8814 this.flush();
8815 this.trigger('endedtimeline');
8816 };
8817 };
8818
// NalByteStream is an event emitter; inherit on/off/trigger from stream
_NalByteStream.prototype = new stream(); // values of profile_idc that indicate additional fields are included in the SPS
// see Recommendation ITU-T H.264 (4/2013),
// 7.3.2.1.1 Sequence parameter set data syntax

// Lookup of profile_idc values whose SPS carries the optional chroma/bit-depth/
// scaling-list block; readSequenceParameterSet consults this to keep its
// bit reader aligned.
PROFILES_WITH_OPTIONAL_SPS_DATA = {
  100: true,
  110: true,
  122: true,
  244: true,
  44: true,
  83: true,
  86: true,
  118: true,
  128: true,
  // TODO: the three profiles below don't
  // appear to have sps data in the specification anymore?
  138: true,
  139: true,
  134: true
};
8839 /**
8840 * Accepts input from a ElementaryStream and produces H.264 NAL unit data
8841 * events.
8842 */
8843
8844 _H264Stream = function H264Stream() {
8845 var nalByteStream = new _NalByteStream(),
8846 self,
8847 trackId,
8848 currentPts,
8849 currentDts,
8850 discardEmulationPreventionBytes,
8851 readSequenceParameterSet,
8852 skipScalingList;
8853
8854 _H264Stream.prototype.init.call(this);
8855
8856 self = this;
8857 /*
8858 * Pushes a packet from a stream onto the NalByteStream
8859 *
8860 * @param {Object} packet - A packet received from a stream
8861 * @param {Uint8Array} packet.data - The raw bytes of the packet
8862 * @param {Number} packet.dts - Decode timestamp of the packet
8863 * @param {Number} packet.pts - Presentation timestamp of the packet
8864 * @param {Number} packet.trackId - The id of the h264 track this packet came from
8865 * @param {('video'|'audio')} packet.type - The type of packet
8866 *
8867 */
8868
  this.push = function (packet) {
    // H264Stream only consumes video packets; everything else is dropped
    if (packet.type !== 'video') {
      return;
    }

    // remember the packet's identity and timing so the NAL units later
    // emitted by nalByteStream can be annotated with them (see the
    // 'data' handler below)
    trackId = packet.trackId;
    currentPts = packet.pts;
    currentDts = packet.dts;
    nalByteStream.push(packet);
  };
8879 /*
8880 * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
8881 * for the NALUs to the next stream component.
8882 * Also, preprocess caption and sequence parameter NALUs.
8883 *
8884 * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
8885 * @see NalByteStream.push
8886 */
8887
8888
  nalByteStream.on('data', function (data) {
    // annotate the raw NAL unit with the track/timing info captured by
    // this.push; the low 5 bits of the first byte are nal_unit_type
    var event = {
      trackId: trackId,
      pts: currentPts,
      dts: currentDts,
      data: data,
      nalUnitTypeCode: data[0] & 0x1f
    };

    // name the NAL types the transmuxer cares about; other type codes pass
    // through with only nalUnitTypeCode set
    switch (event.nalUnitTypeCode) {
      case 0x05:
        event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
        break;

      case 0x06:
        // SEI: strip emulation prevention bytes so downstream caption
        // parsing sees the raw RBSP
        event.nalUnitType = 'sei_rbsp';
        event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
        break;

      case 0x07:
        // SPS: also parse out profile/level/dimensions for the mp4 track
        event.nalUnitType = 'seq_parameter_set_rbsp';
        event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
        event.config = readSequenceParameterSet(event.escapedRBSP);
        break;

      case 0x08:
        event.nalUnitType = 'pic_parameter_set_rbsp';
        break;

      case 0x09:
        event.nalUnitType = 'access_unit_delimiter_rbsp';
        break;
    } // This triggers data on the H264Stream


    self.trigger('data', event);
  });
  // re-emit the inner NalByteStream's lifecycle events from this stream
  nalByteStream.on('done', function () {
    self.trigger('done');
  });
  nalByteStream.on('partialdone', function () {
    self.trigger('partialdone');
  });
  nalByteStream.on('reset', function () {
    self.trigger('reset');
  });
  nalByteStream.on('endedtimeline', function () {
    self.trigger('endedtimeline');
  });

  // flush/reset simply delegate to the byte stream; the resulting events
  // are forwarded by the handlers above
  this.flush = function () {
    nalByteStream.flush();
  };

  this.partialFlush = function () {
    nalByteStream.partialFlush();
  };

  this.reset = function () {
    nalByteStream.reset();
  };

  this.endTimeline = function () {
    nalByteStream.endTimeline();
  };
8954 /**
8955 * Advance the ExpGolomb decoder past a scaling list. The scaling
8956 * list is optionally transmitted as part of a sequence parameter
8957 * set and is not relevant to transmuxing.
8958 * @param count {number} the number of entries in this scaling list
8959 * @param expGolombDecoder {object} an ExpGolomb pointed to the
8960 * start of a scaling list
8961 * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
8962 */
8963
8964
8965 skipScalingList = function skipScalingList(count, expGolombDecoder) {
8966 var lastScale = 8,
8967 nextScale = 8,
8968 j,
8969 deltaScale;
8970
8971 for (j = 0; j < count; j++) {
8972 if (nextScale !== 0) {
8973 deltaScale = expGolombDecoder.readExpGolomb();
8974 nextScale = (lastScale + deltaScale + 256) % 256;
8975 }
8976
8977 lastScale = nextScale === 0 ? lastScale : nextScale;
8978 }
8979 };
8980 /**
8981 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
8982 * Sequence Payload"
8983 * @param data {Uint8Array} the bytes of a RBSP from a NAL
8984 * unit
8985 * @return {Uint8Array} the RBSP without any Emulation
8986 * Prevention Bytes
8987 */
8988
8989
8990 discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
8991 var length = data.byteLength,
8992 emulationPreventionBytesPositions = [],
8993 i = 1,
8994 newLength,
8995 newData; // Find all `Emulation Prevention Bytes`
8996
8997 while (i < length - 2) {
8998 if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
8999 emulationPreventionBytesPositions.push(i + 2);
9000 i += 2;
9001 } else {
9002 i++;
9003 }
9004 } // If no Emulation Prevention Bytes were found just return the original
9005 // array
9006
9007
9008 if (emulationPreventionBytesPositions.length === 0) {
9009 return data;
9010 } // Create a new array to hold the NAL unit data
9011
9012
9013 newLength = length - emulationPreventionBytesPositions.length;
9014 newData = new Uint8Array(newLength);
9015 var sourceIndex = 0;
9016
9017 for (i = 0; i < newLength; sourceIndex++, i++) {
9018 if (sourceIndex === emulationPreventionBytesPositions[0]) {
9019 // Skip this byte
9020 sourceIndex++; // Remove this position index
9021
9022 emulationPreventionBytesPositions.shift();
9023 }
9024
9025 newData[i] = data[sourceIndex];
9026 }
9027
9028 return newData;
9029 };
9030 /**
9031 * Read a sequence parameter set and return some interesting video
9032 * properties. A sequence parameter set is the H264 metadata that
9033 * describes the properties of upcoming video frames.
9034 * @param data {Uint8Array} the bytes of a sequence parameter set
9035 * @return {object} an object with configuration parsed from the
9036 * sequence parameter set, including the dimensions of the
9037 * associated video frames.
9038 */
9039
9040
  // Walk an SPS RBSP (emulation prevention bytes already removed) with an
  // Exp-Golomb reader. Most fields are skipped purely to stay bit-aligned;
  // only profile/level/compatibility, the picture dimensions, and the sample
  // aspect ratio are returned.
  readSequenceParameterSet = function readSequenceParameterSet(data) {
    var frameCropLeftOffset = 0,
        frameCropRightOffset = 0,
        frameCropTopOffset = 0,
        frameCropBottomOffset = 0,
        expGolombDecoder,
        profileIdc,
        levelIdc,
        profileCompatibility,
        chromaFormatIdc,
        picOrderCntType,
        numRefFramesInPicOrderCntCycle,
        picWidthInMbsMinus1,
        picHeightInMapUnitsMinus1,
        frameMbsOnlyFlag,
        scalingListCount,
        sarRatio = [1, 1],
        aspectRatioIdc,
        i;
    expGolombDecoder = new expGolomb(data);
    profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc

    profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag

    levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)

    expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id
    // some profiles have more optional data we don't need

    if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
      chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();

      if (chromaFormatIdc === 3) {
        expGolombDecoder.skipBits(1); // separate_colour_plane_flag
      }

      expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8

      expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8

      expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag

      if (expGolombDecoder.readBoolean()) {
        // seq_scaling_matrix_present_flag
        // scaling lists carry nothing we need but must be consumed to keep
        // the bit reader aligned
        scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;

        for (i = 0; i < scalingListCount; i++) {
          if (expGolombDecoder.readBoolean()) {
            // seq_scaling_list_present_flag[ i ]
            if (i < 6) {
              skipScalingList(16, expGolombDecoder);
            } else {
              skipScalingList(64, expGolombDecoder);
            }
          }
        }
      }
    }

    expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4

    picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();

    if (picOrderCntType === 0) {
      expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
    } else if (picOrderCntType === 1) {
      expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag

      expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic

      expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field

      numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();

      for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
        expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
      }
    }

    expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames

    expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag

    picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
    picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
    frameMbsOnlyFlag = expGolombDecoder.readBits(1);

    if (frameMbsOnlyFlag === 0) {
      expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
    }

    expGolombDecoder.skipBits(1); // direct_8x8_inference_flag

    if (expGolombDecoder.readBoolean()) {
      // frame_cropping_flag
      frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
      frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
      frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
      frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
    }

    if (expGolombDecoder.readBoolean()) {
      // vui_parameters_present_flag
      if (expGolombDecoder.readBoolean()) {
        // aspect_ratio_info_present_flag
        // predefined sample-aspect-ratio table keyed by aspect_ratio_idc;
        // 255 (Extended_SAR) carries an explicit pair of 16-bit values
        aspectRatioIdc = expGolombDecoder.readUnsignedByte();

        switch (aspectRatioIdc) {
          case 1:
            sarRatio = [1, 1];
            break;

          case 2:
            sarRatio = [12, 11];
            break;

          case 3:
            sarRatio = [10, 11];
            break;

          case 4:
            sarRatio = [16, 11];
            break;

          case 5:
            sarRatio = [40, 33];
            break;

          case 6:
            sarRatio = [24, 11];
            break;

          case 7:
            sarRatio = [20, 11];
            break;

          case 8:
            sarRatio = [32, 11];
            break;

          case 9:
            sarRatio = [80, 33];
            break;

          case 10:
            sarRatio = [18, 11];
            break;

          case 11:
            sarRatio = [15, 11];
            break;

          case 12:
            sarRatio = [64, 33];
            break;

          case 13:
            sarRatio = [160, 99];
            break;

          case 14:
            sarRatio = [4, 3];
            break;

          case 15:
            sarRatio = [3, 2];
            break;

          case 16:
            sarRatio = [2, 1];
            break;

          case 255:
            {
              sarRatio = [expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte(), expGolombDecoder.readUnsignedByte() << 8 | expGolombDecoder.readUnsignedByte()];
              break;
            }
        }

        if (sarRatio) {
          // NOTE(review): this quotient is computed and immediately
          // discarded — looks like the residue of a removed `sarScale`
          // variable left behind by the build; confirm against upstream
          // mux.js before deleting
          sarRatio[0] / sarRatio[1];
        }
      }
    }

    // width/height are derived from macroblock counts minus cropping;
    // (2 - frameMbsOnlyFlag) doubles the height for interlaced content
    return {
      profileIdc: profileIdc,
      levelIdc: levelIdc,
      profileCompatibility: profileCompatibility,
      width: (picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2,
      height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - frameCropTopOffset * 2 - frameCropBottomOffset * 2,
      // sar is sample aspect ratio
      sarRatio: sarRatio
    };
  };
9236 };
9237
// H264Stream is an event emitter; inherit on/off/trigger from stream
_H264Stream.prototype = new stream();
// public surface of the h264 codec module
var h264 = {
  H264Stream: _H264Stream,
  NalByteStream: _NalByteStream
};
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 *
 * Utilities to detect basic properties and metadata about Aac data.
 */

// sample rates indexed by the ADTS header's sampling_frequency_index field
// (see parseSampleRate below)
var ADTS_SAMPLING_FREQUENCIES = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
9253
// Return the total byte length of the ID3 tag starting at byteIndex: the
// 28-bit syncsafe payload size stored in header bytes 6-9, plus the 10-byte
// header and, when the footer flag (bit 4 of byte 5) is set, a 10-byte footer.
var parseId3TagSize = function parseId3TagSize(header, byteIndex) {
  var flags = header[byteIndex + 5];
  var footerPresent = (flags & 16) >> 4;
  var returnSize = header[byteIndex + 6] << 21 | header[byteIndex + 7] << 14 | header[byteIndex + 8] << 7 | header[byteIndex + 9];

  // clamp a negative size to 0
  if (returnSize < 0) {
    returnSize = 0;
  }

  return footerPresent ? returnSize + 20 : returnSize + 10;
};
9267
// Advance `offset` past any run of consecutive ID3 tags at that position and
// return the offset of the first non-ID3 byte. If there is no complete ID3
// header ("ID3" magic plus 10 bytes) at `offset`, it is returned unchanged.
var getId3Offset = function getId3Offset(data, offset) {
  while (data.length - offset >= 10 &&
         data[offset] === 'I'.charCodeAt(0) &&
         data[offset + 1] === 'D'.charCodeAt(0) &&
         data[offset + 2] === '3'.charCodeAt(0)) {
    offset += parseId3TagSize(data, offset);
  }

  return offset;
};
9276
9277
// Heuristic check for raw AAC: skip any leading ID3 tags, then look for an
// ADTS sync word whose layer bits are zero (layer bits set would indicate
// MP3 rather than AAC).
var isLikelyAacData$1 = function isLikelyAacData(data) {
  var offset = getId3Offset(data, 0);

  if (data.length < offset + 2) {
    return false;
  }

  if ((data[offset] & 0xFF) !== 0xFF || (data[offset + 1] & 0xF0) !== 0xF0) {
    return false;
  }

  // verify that the 2 layer bits are 0, aka this is not mp3 data but aac data
  return (data[offset + 1] & 0x16) === 0x10;
};
9284
// Decode a 4-byte ID3 "syncsafe" integer: each byte contributes 7 bits,
// big-endian first.
var parseSyncSafeInteger = function parseSyncSafeInteger(data) {
  var value = 0;

  for (var i = 0; i < 4; i++) {
    value = (value << 7) | data[i];
  }

  return value;
};
9288 // @see http://en.wikipedia.org/wiki/Percent-encoding
9289
9290
// Percent-encode the bytes in [start, end): each byte becomes '%' followed
// by its zero-padded two-digit lowercase hex value.
// @see http://en.wikipedia.org/wiki/Percent-encoding
var percentEncode = function percentEncode(bytes, start, end) {
  var pieces = [];

  for (var i = start; i < end; i++) {
    var hex = ('00' + bytes[i].toString(16)).slice(-2);
    pieces.push('%' + hex);
  }

  return pieces.join('');
};
9301 // interpreted as ISO-8859-1.
9302
9303
// Decode the bytes in [start, end) as an ISO-8859-1 string by percent-encoding
// them and letting the legacy global `unescape` map each %XX back to its
// code point.
var parseIso88591 = function parseIso88591(bytes, start, end) {
  return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
};
9307
9308 var parseAdtsSize = function parseAdtsSize(header, byteIndex) {
9309 var lowThree = (header[byteIndex + 5] & 0xE0) >> 5,
9310 middle = header[byteIndex + 4] << 3,
9311 highTwo = header[byteIndex + 3] & 0x3 << 11;
9312 return highTwo | middle | lowThree;
9313 };
9314
// Classify the packet starting at byteIndex as 'timed-metadata' (ID3 magic),
// 'audio' (ADTS sync word 0xFFFx), or null when neither matches.
var parseType$2 = function parseType(header, byteIndex) {
  if (header[byteIndex] === 'I'.charCodeAt(0) && header[byteIndex + 1] === 'D'.charCodeAt(0) && header[byteIndex + 2] === '3'.charCodeAt(0)) {
    return 'timed-metadata';
    // BUG FIX: `header[byteIndex] & 0xff === 0xff` parsed as
    // `header[byteIndex] & 1` (=== binds tighter than &), so any byte with its
    // low bit set could be misclassified as an ADTS sync byte. Parenthesize to
    // match the correct form used by AacStream.push elsewhere in this file.
  } else if ((header[byteIndex] & 0xff) === 0xff && (header[byteIndex + 1] & 0xf0) === 0xf0) {
    return 'audio';
  }

  return null;
};
9324
// Scan for the first ADTS header in the packet and return its sample rate
// from ADTS_SAMPLING_FREQUENCIES (indexed by sampling_frequency_index, bits
// 2-5 of the third header byte), or null if no header is found.
var parseSampleRate = function parseSampleRate(packet) {
  for (var i = 0; i + 5 < packet.length; i++) {
    if (packet[i] !== 0xFF || (packet[i + 1] & 0xF6) !== 0xF0) {
      // not a valid header here; resync starting at the next byte
      continue;
    }

    return ADTS_SAMPLING_FREQUENCIES[(packet[i + 2] & 0x3c) >>> 2];
  }

  return null;
};
9341
// Extract the HLS timestamp from an ID3 tag: walk the tag's frames looking
// for a PRIV frame owned by 'com.apple.streaming.transportStreamTimestamp'
// and decode its 33-bit MPEG-TS timestamp. Returns null when absent.
var parseAacTimestamp = function parseAacTimestamp(packet) {
  var frameStart, frameSize, frame, frameHeader; // find the start of the first frame and the end of the tag

  // frames begin after the 10-byte ID3 header
  frameStart = 10;

  if (packet[5] & 0x40) {
    // advance the frame start past the extended header
    frameStart += 4; // header size field

    frameStart += parseSyncSafeInteger(packet.subarray(10, 14));
  } // parse one or more ID3 frames
  // http://id3.org/id3v2.3.0#ID3v2_frame_overview


  do {
    // determine the number of bytes in this frame
    frameSize = parseSyncSafeInteger(packet.subarray(frameStart + 4, frameStart + 8));

    if (frameSize < 1) {
      return null;
    }

    frameHeader = String.fromCharCode(packet[frameStart], packet[frameStart + 1], packet[frameStart + 2], packet[frameStart + 3]);

    if (frameHeader === 'PRIV') {
      frame = packet.subarray(frameStart + 10, frameStart + frameSize + 10);

      // the PRIV owner string is NUL-terminated; find the terminator
      for (var i = 0; i < frame.byteLength; i++) {
        if (frame[i] === 0) {
          var owner = parseIso88591(frame, 0, i);

          if (owner === 'com.apple.streaming.transportStreamTimestamp') {
            var d = frame.subarray(i + 1);
            // reassemble the 33-bit PTS: 31 high bits shifted, then the
            // remaining 2 bits appended arithmetically (JS bitwise ops are
            // limited to 32 bits)
            var size = (d[3] & 0x01) << 30 | d[4] << 22 | d[5] << 14 | d[6] << 6 | d[7] >>> 2;
            size *= 4;
            size += d[7] & 0x03;
            return size;
          }

          break;
        }
      }
    }

    frameStart += 10; // advance past the frame header

    frameStart += frameSize; // advance past the frame body
  } while (frameStart < packet.byteLength);

  return null;
};
9393
// grab bag of AAC/ID3 probing helpers consumed by AacStream and the probe
// code elsewhere in this bundle
var utils = {
  isLikelyAacData: isLikelyAacData$1,
  parseId3TagSize: parseId3TagSize,
  parseAdtsSize: parseAdtsSize,
  parseType: parseType$2,
  parseSampleRate: parseSampleRate,
  parseAacTimestamp: parseAacTimestamp
};
9402
var _AacStream;
/**
 * Splits an incoming stream of binary data into ADTS and ID3 Frames.
 */


_AacStream = function AacStream() {
  var everything = new Uint8Array(),
      timeStamp = 0;

  _AacStream.prototype.init.call(this);

  // timestamp applied to every ADTS packet emitted by push()
  this.setTimestamp = function (timestamp) {
    timeStamp = timestamp;
  };

  /**
   * Append raw bytes to the internal buffer and emit a 'data' event for every
   * complete ID3 tag ('timed-metadata') and ADTS frame ('audio') found.
   * Incomplete trailing bytes are retained for the next push.
   */
  this.push = function (bytes) {
    var frameSize = 0,
        byteIndex = 0,
        bytesLeft,
        chunk,
        packet,
        tempLength; // If there are bytes remaining from the last segment, prepend them to the
    // bytes that were pushed in

    if (everything.length) {
      tempLength = everything.length;
      // BUG FIX: the old code reassigned `everything` to a fresh zero-filled
      // array *before* copying, so `everything.set(everything.subarray(0,
      // tempLength))` copied zeros over zeros and the buffered leftover bytes
      // were silently lost. Keep a reference to the old buffer and copy from
      // that instead.
      var leftover = everything;
      everything = new Uint8Array(bytes.byteLength + tempLength);
      everything.set(leftover.subarray(0, tempLength));
      everything.set(bytes, tempLength);
    } else {
      everything = bytes;
    }

    while (everything.length - byteIndex >= 3) {
      if (everything[byteIndex] === 'I'.charCodeAt(0) && everything[byteIndex + 1] === 'D'.charCodeAt(0) && everything[byteIndex + 2] === '3'.charCodeAt(0)) {
        // Exit early because we don't have enough to parse
        // the ID3 tag header
        if (everything.length - byteIndex < 10) {
          break;
        } // check framesize


        frameSize = utils.parseId3TagSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
        // to emit a full packet
        // Add to byteIndex to support multiple ID3 tags in sequence

        if (byteIndex + frameSize > everything.length) {
          break;
        }

        chunk = {
          type: 'timed-metadata',
          data: everything.subarray(byteIndex, byteIndex + frameSize)
        };
        this.trigger('data', chunk);
        byteIndex += frameSize;
        continue;
      } else if ((everything[byteIndex] & 0xff) === 0xff && (everything[byteIndex + 1] & 0xf0) === 0xf0) {
        // Exit early because we don't have enough to parse
        // the ADTS frame header
        if (everything.length - byteIndex < 7) {
          break;
        }

        frameSize = utils.parseAdtsSize(everything, byteIndex); // Exit early if we don't have enough in the buffer
        // to emit a full packet

        if (byteIndex + frameSize > everything.length) {
          break;
        }

        packet = {
          type: 'audio',
          data: everything.subarray(byteIndex, byteIndex + frameSize),
          pts: timeStamp,
          dts: timeStamp
        };
        this.trigger('data', packet);
        byteIndex += frameSize;
        continue;
      }

      // no recognizable frame at this position; resync one byte forward
      byteIndex++;
    }

    // keep any unconsumed tail for the next push
    bytesLeft = everything.length - byteIndex;

    if (bytesLeft > 0) {
      everything = everything.subarray(byteIndex);
    } else {
      everything = new Uint8Array();
    }
  };

  this.reset = function () {
    everything = new Uint8Array();
    this.trigger('reset');
  };

  this.endTimeline = function () {
    everything = new Uint8Array();
    this.trigger('endedtimeline');
  };
};
9508
// AacStream is an event emitter; inherit on/off/trigger from stream
_AacStream.prototype = new stream();
var aac = _AacStream; // constants

// ADTS-derived fields copied onto the audio track object by AudioSegmentStream
var AUDIO_PROPERTIES = ['audioobjecttype', 'channelcount', 'samplerate', 'samplingfrequencyindex', 'samplesize'];
var audioProperties = AUDIO_PROPERTIES;
// SPS-derived fields copied onto the video track object by VideoSegmentStream
var VIDEO_PROPERTIES = ['width', 'height', 'profileIdc', 'levelIdc', 'profileCompatibility', 'sarRatio'];
var videoProperties = VIDEO_PROPERTIES;
var H264Stream = h264.H264Stream;
var isLikelyAacData = utils.isLikelyAacData;
var ONE_SECOND_IN_TS$1 = clock.ONE_SECOND_IN_TS; // object types

// forward declarations for the segment/transmuxer constructors defined below
var _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream;
9521
// Re-emit a pipeline stream's 'log' event, tagged with the originating
// stream's key. `this` is the transmuxer — the function is attached via
// retriggerForStream.bind(transmuxer, key) in addPipelineLogRetriggers.
var retriggerForStream = function retriggerForStream(key, event) {
  event.stream = key;
  this.trigger('log', event);
};
9526
// Wire every stream in `pipeline` so its 'log' events are re-emitted from
// `transmuxer` with the stream's key attached (via retriggerForStream).
var addPipelineLogRetriggers = function addPipelineLogRetriggers(transmuxer, pipeline) {
  Object.keys(pipeline).forEach(function (key) {
    // skip 'headOfPipeline' (a duplicate reference to another entry) and any
    // value that is not a stream (no .on method)
    if (key === 'headOfPipeline' || !pipeline[key].on) {
      return;
    }

    pipeline[key].on('log', retriggerForStream.bind(transmuxer, key));
  });
};
9541 /**
9542 * Compare two arrays (even typed) for same-ness
9543 */
9544
9545
// Compare two arrays (plain or typed) element-by-element with strict
// equality; true only when lengths match and every element matches.
var arrayEquals = function arrayEquals(a, b) {
  // differing lengths can never be equal
  if (a.length !== b.length) {
    return false;
  }

  // Array.prototype.every works on typed arrays too via .call
  return Array.prototype.every.call(a, function (value, index) {
    return value === b[index];
  });
};
9562
9563 var generateSegmentTimingInfo = function generateSegmentTimingInfo(baseMediaDecodeTime, startDts, startPts, endDts, endPts, prependedContentDuration) {
9564 var ptsOffsetFromDts = startPts - startDts,
9565 decodeDuration = endDts - startDts,
9566 presentationDuration = endPts - startPts; // The PTS and DTS values are based on the actual stream times from the segment,
9567 // however, the player time values will reflect a start from the baseMediaDecodeTime.
9568 // In order to provide relevant values for the player times, base timing info on the
9569 // baseMediaDecodeTime and the DTS and PTS durations of the segment.
9570
9571 return {
9572 start: {
9573 dts: baseMediaDecodeTime,
9574 pts: baseMediaDecodeTime + ptsOffsetFromDts
9575 },
9576 end: {
9577 dts: baseMediaDecodeTime + decodeDuration,
9578 pts: baseMediaDecodeTime + presentationDuration
9579 },
9580 prependedContentDuration: prependedContentDuration,
9581 baseMediaDecodeTime: baseMediaDecodeTime
9582 };
9583 };
9584 /**
9585 * Constructs a single-track, ISO BMFF media segment from AAC data
9586 * events. The output of this stream can be fed to a SourceBuffer
9587 * configured with a suitable initialization segment.
9588 * @param track {object} track metadata configuration
9589 * @param options {object} transmuxer options object
9590 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
9591 * in the source; false to adjust the first segment to start at 0.
9592 */
9593
9594
_AudioSegmentStream = function AudioSegmentStream(track, options) {
  var adtsFrames = [],
      sequenceNumber,
      earliestAllowedDts = 0,
      audioAppendStartTs = 0,
      videoBaseMediaDecodeTime = Infinity;
  options = options || {};
  sequenceNumber = options.firstSequenceNumber || 0;

  _AudioSegmentStream.prototype.init.call(this);

  // Buffer one ADTS frame and mirror its codec properties (audioProperties)
  // onto the track object.
  this.push = function (data) {
    trackDecodeInfo.collectDtsInfo(track, data);

    if (track) {
      audioProperties.forEach(function (prop) {
        track[prop] = data[prop];
      });
    } // buffer audio data until end() is called


    adtsFrames.push(data);
  };

  // frames with a DTS earlier than this are trimmed at flush time
  this.setEarliestDts = function (earliestDts) {
    earliestAllowedDts = earliestDts;
  };

  this.setVideoBaseMediaDecodeTime = function (baseMediaDecodeTime) {
    videoBaseMediaDecodeTime = baseMediaDecodeTime;
  };

  this.setAudioAppendStart = function (timestamp) {
    audioAppendStartTs = timestamp;
  };

  // Assemble the buffered frames into a moof+mdat pair, emit
  // 'segmentTimingInfo'/'timingInfo'/'data', then 'done'.
  this.flush = function () {
    var frames, moof, mdat, boxes, frameDuration, segmentDuration, videoClockCyclesOfSilencePrefixed; // return early if no audio data has been observed

    if (adtsFrames.length === 0) {
      this.trigger('done', 'AudioSegmentStream');
      return;
    }

    frames = audioFrameUtils.trimAdtsFramesByEarliestDts(adtsFrames, track, earliestAllowedDts);
    track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps); // amount of audio filled but the value is in video clock rather than audio clock

    videoClockCyclesOfSilencePrefixed = audioFrameUtils.prefixWithSilence(track, frames, audioAppendStartTs, videoBaseMediaDecodeTime); // we have to build the index from byte locations to
    // samples (that is, adts frames) in the audio data

    track.samples = audioFrameUtils.generateSampleTable(frames); // concatenate the audio data to construct the mdat

    mdat = mp4Generator.mdat(audioFrameUtils.concatenateFrameData(frames));
    adtsFrames = [];
    moof = mp4Generator.moof(sequenceNumber, [track]);
    boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // bump the sequence number for next time

    sequenceNumber++;
    boxes.set(moof);
    boxes.set(mdat, moof.byteLength);
    trackDecodeInfo.clearDtsInfo(track);
    // clock ticks per AAC frame (1024 samples per frame at track.samplerate)
    frameDuration = Math.ceil(ONE_SECOND_IN_TS$1 * 1024 / track.samplerate); // TODO this check was added to maintain backwards compatibility (particularly with
    // tests) on adding the timingInfo event. However, it seems unlikely that there's a
    // valid use-case where an init segment/data should be triggered without associated
    // frames. Leaving for now, but should be looked into.

    if (frames.length) {
      segmentDuration = frames.length * frameDuration;
      this.trigger('segmentTimingInfo', generateSegmentTimingInfo( // The audio track's baseMediaDecodeTime is in audio clock cycles, but the
      // frame info is in video clock cycles. Convert to match expectation of
      // listeners (that all timestamps will be based on video clock cycles).
      clock.audioTsToVideoTs(track.baseMediaDecodeTime, track.samplerate), // frame times are already in video clock, as is segment duration
      frames[0].dts, frames[0].pts, frames[0].dts + segmentDuration, frames[0].pts + segmentDuration, videoClockCyclesOfSilencePrefixed || 0));
      this.trigger('timingInfo', {
        start: frames[0].pts,
        end: frames[0].pts + segmentDuration
      });
    }

    this.trigger('data', {
      track: track,
      boxes: boxes
    });
    this.trigger('done', 'AudioSegmentStream');
  };

  // drop all buffered frames and per-track timing state
  this.reset = function () {
    trackDecodeInfo.clearDtsInfo(track);
    adtsFrames = [];
    this.trigger('reset');
  };
};
9687
// AudioSegmentStream inherits its event-emitter behavior from stream
_AudioSegmentStream.prototype = new stream();
9689 /**
9690 * Constructs a single-track, ISO BMFF media segment from H264 data
9691 * events. The output of this stream can be fed to a SourceBuffer
9692 * configured with a suitable initialization segment.
9693 * @param track {object} track metadata configuration
9694 * @param options {object} transmuxer options object
9695 * @param options.alignGopsAtEnd {boolean} If true, start from the end of the
9696 * gopsToAlignWith list when attempting to align gop pts
9697 * @param options.keepOriginalTimestamps {boolean} If true, keep the timestamps
9698 * in the source; false to adjust the first segment to start at 0.
9699 */
9700
9701 _VideoSegmentStream = function VideoSegmentStream(track, options) {
9702 var sequenceNumber,
9703 nalUnits = [],
9704 gopsToAlignWith = [],
9705 config,
9706 pps;
9707 options = options || {};
9708 sequenceNumber = options.firstSequenceNumber || 0;
9709
9710 _VideoSegmentStream.prototype.init.call(this);
9711
9712 delete track.minPTS;
9713 this.gopCache_ = [];
9714 /**
9715 * Constructs a ISO BMFF segment given H264 nalUnits
9716 * @param {Object} nalUnit A data event representing a nalUnit
9717 * @param {String} nalUnit.nalUnitType
9718 * @param {Object} nalUnit.config Properties for a mp4 track
9719 * @param {Uint8Array} nalUnit.data The nalUnit bytes
9720 * @see lib/codecs/h264.js
9721 **/
9722
9723 this.push = function (nalUnit) {
9724 trackDecodeInfo.collectDtsInfo(track, nalUnit); // record the track config
9725
9726 if (nalUnit.nalUnitType === 'seq_parameter_set_rbsp' && !config) {
9727 config = nalUnit.config;
9728 track.sps = [nalUnit.data];
9729 videoProperties.forEach(function (prop) {
9730 track[prop] = config[prop];
9731 }, this);
9732 }
9733
9734 if (nalUnit.nalUnitType === 'pic_parameter_set_rbsp' && !pps) {
9735 pps = nalUnit.data;
9736 track.pps = [nalUnit.data];
9737 } // buffer video until flush() is called
9738
9739
9740 nalUnits.push(nalUnit);
9741 };
9742 /**
9743 * Pass constructed ISO BMFF track and boxes on to the
9744 * next stream in the pipeline
9745 **/
9746
9747
9748 this.flush = function () {
9749 var frames,
9750 gopForFusion,
9751 gops,
9752 moof,
9753 mdat,
9754 boxes,
9755 prependedContentDuration = 0,
9756 firstGop,
9757 lastGop; // Throw away nalUnits at the start of the byte stream until
9758 // we find the first AUD
9759
9760 while (nalUnits.length) {
9761 if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
9762 break;
9763 }
9764
9765 nalUnits.shift();
9766 } // Return early if no video data has been observed
9767
9768
9769 if (nalUnits.length === 0) {
9770 this.resetStream_();
9771 this.trigger('done', 'VideoSegmentStream');
9772 return;
9773 } // Organize the raw nal-units into arrays that represent
9774 // higher-level constructs such as frames and gops
9775 // (group-of-pictures)
9776
9777
9778 frames = frameUtils.groupNalsIntoFrames(nalUnits);
9779 gops = frameUtils.groupFramesIntoGops(frames); // If the first frame of this fragment is not a keyframe we have
9780 // a problem since MSE (on Chrome) requires a leading keyframe.
9781 //
9782 // We have two approaches to repairing this situation:
9783 // 1) GOP-FUSION:
9784 // This is where we keep track of the GOPS (group-of-pictures)
9785 // from previous fragments and attempt to find one that we can
9786 // prepend to the current fragment in order to create a valid
9787 // fragment.
9788 // 2) KEYFRAME-PULLING:
9789 // Here we search for the first keyframe in the fragment and
9790 // throw away all the frames between the start of the fragment
9791 // and that keyframe. We then extend the duration and pull the
9792 // PTS of the keyframe forward so that it covers the time range
9793 // of the frames that were disposed of.
9794 //
// #1 is far preferable over #2 which can cause "stuttering" but
9796 // requires more things to be just right.
9797
9798 if (!gops[0][0].keyFrame) {
9799 // Search for a gop for fusion from our gopCache
9800 gopForFusion = this.getGopForFusion_(nalUnits[0], track);
9801
9802 if (gopForFusion) {
9803 // in order to provide more accurate timing information about the segment, save
9804 // the number of seconds prepended to the original segment due to GOP fusion
9805 prependedContentDuration = gopForFusion.duration;
9806 gops.unshift(gopForFusion); // Adjust Gops' metadata to account for the inclusion of the
9807 // new gop at the beginning
9808
9809 gops.byteLength += gopForFusion.byteLength;
9810 gops.nalCount += gopForFusion.nalCount;
9811 gops.pts = gopForFusion.pts;
9812 gops.dts = gopForFusion.dts;
9813 gops.duration += gopForFusion.duration;
9814 } else {
9815 // If we didn't find a candidate gop fall back to keyframe-pulling
9816 gops = frameUtils.extendFirstKeyFrame(gops);
9817 }
9818 } // Trim gops to align with gopsToAlignWith
9819
9820
9821 if (gopsToAlignWith.length) {
9822 var alignedGops;
9823
9824 if (options.alignGopsAtEnd) {
9825 alignedGops = this.alignGopsAtEnd_(gops);
9826 } else {
9827 alignedGops = this.alignGopsAtStart_(gops);
9828 }
9829
9830 if (!alignedGops) {
9831 // save all the nals in the last GOP into the gop cache
9832 this.gopCache_.unshift({
9833 gop: gops.pop(),
9834 pps: track.pps,
9835 sps: track.sps
9836 }); // Keep a maximum of 6 GOPs in the cache
9837
9838 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
9839
9840 nalUnits = []; // return early no gops can be aligned with desired gopsToAlignWith
9841
9842 this.resetStream_();
9843 this.trigger('done', 'VideoSegmentStream');
9844 return;
9845 } // Some gops were trimmed. clear dts info so minSegmentDts and pts are correct
9846 // when recalculated before sending off to CoalesceStream
9847
9848
9849 trackDecodeInfo.clearDtsInfo(track);
9850 gops = alignedGops;
9851 }
9852
9853 trackDecodeInfo.collectDtsInfo(track, gops); // First, we have to build the index from byte locations to
9854 // samples (that is, frames) in the video data
9855
9856 track.samples = frameUtils.generateSampleTable(gops); // Concatenate the video data and construct the mdat
9857
9858 mdat = mp4Generator.mdat(frameUtils.concatenateNalData(gops));
9859 track.baseMediaDecodeTime = trackDecodeInfo.calculateTrackBaseMediaDecodeTime(track, options.keepOriginalTimestamps);
9860 this.trigger('processedGopsInfo', gops.map(function (gop) {
9861 return {
9862 pts: gop.pts,
9863 dts: gop.dts,
9864 byteLength: gop.byteLength
9865 };
9866 }));
9867 firstGop = gops[0];
9868 lastGop = gops[gops.length - 1];
9869 this.trigger('segmentTimingInfo', generateSegmentTimingInfo(track.baseMediaDecodeTime, firstGop.dts, firstGop.pts, lastGop.dts + lastGop.duration, lastGop.pts + lastGop.duration, prependedContentDuration));
9870 this.trigger('timingInfo', {
9871 start: gops[0].pts,
9872 end: gops[gops.length - 1].pts + gops[gops.length - 1].duration
9873 }); // save all the nals in the last GOP into the gop cache
9874
9875 this.gopCache_.unshift({
9876 gop: gops.pop(),
9877 pps: track.pps,
9878 sps: track.sps
9879 }); // Keep a maximum of 6 GOPs in the cache
9880
9881 this.gopCache_.length = Math.min(6, this.gopCache_.length); // Clear nalUnits
9882
9883 nalUnits = [];
9884 this.trigger('baseMediaDecodeTime', track.baseMediaDecodeTime);
9885 this.trigger('timelineStartInfo', track.timelineStartInfo);
9886 moof = mp4Generator.moof(sequenceNumber, [track]); // it would be great to allocate this array up front instead of
9887 // throwing away hundreds of media segment fragments
9888
9889 boxes = new Uint8Array(moof.byteLength + mdat.byteLength); // Bump the sequence number for next time
9890
9891 sequenceNumber++;
9892 boxes.set(moof);
9893 boxes.set(mdat, moof.byteLength);
9894 this.trigger('data', {
9895 track: track,
9896 boxes: boxes
9897 });
9898 this.resetStream_(); // Continue with the flush process now
9899
9900 this.trigger('done', 'VideoSegmentStream');
9901 };
9902
// Fully reset the stream: clear per-segment timing state, buffered nal
// units, the GOP-fusion cache, and any alignment targets, then notify
// listeners via a 'reset' event.
this.reset = function () {
  this.resetStream_();
  nalUnits = [];
  this.gopCache_.length = 0;
  gopsToAlignWith.length = 0;
  this.trigger('reset');
};
9910
// Clear the per-segment decode-timing accumulators and drop the cached
// sequence/picture parameter sets.
this.resetStream_ = function () {
  trackDecodeInfo.clearDtsInfo(track); // reset config and pps because they may differ across segments
  // for instance, when we are rendition switching

  config = undefined;
  pps = undefined;
};
9918 // return it or return null if no good candidate was found
9919
9920
// Search the gop cache for the best candidate GOP to prepend to a fragment
// that does not begin with a keyframe ("gop fusion"). Returns the cached
// gop, or null when no acceptable candidate exists.
this.getGopForFusion_ = function (nalUnit) {
  // Half a second, expressed in 90kHz clock ticks.
  var halfSecond = 45000;
  // Roughly three frames at 30fps, in 90kHz clock ticks.
  var allowableOverlap = 10000;
  var bestDistance = Infinity;
  var bestGopObj;
  var i;

  // Scan the cache for the GOP that ends nearest to this nal unit's start.
  for (i = 0; i < this.gopCache_.length; i++) {
    var candidateObj = this.gopCache_[i];
    var candidateGop = candidateObj.gop;

    // Only GOPs encoded with identical parameter sets are usable.
    if (!(track.pps && arrayEquals(track.pps[0], candidateObj.pps[0])) ||
        !(track.sps && arrayEquals(track.sps[0], candidateObj.sps[0]))) {
      continue;
    }

    // Skip GOPs that would force a negative baseMediaDecodeTime.
    if (candidateGop.dts < track.timelineStartInfo.dts) {
      continue;
    }

    // Distance from the end of the cached GOP to the start of the nal unit.
    var distance = nalUnit.dts - candidateGop.dts - candidateGop.duration;

    // A candidate must start before the nal unit (allowing a small overlap)
    // and end within half a second of it; keep the closest one seen so far.
    if (distance >= -allowableOverlap && distance <= halfSecond && distance < bestDistance) {
      bestGopObj = candidateObj;
      bestDistance = distance;
    }
  }

  return bestGopObj ? bestGopObj.gop : null;
};
9966 // of gopsToAlignWith starting from the START of the list
9967
9968
// Trim gops from the front of the list until one is found whose pts matches
// an entry in gopsToAlignWith (scanning the alignment list from its START).
// Returns the original list when nothing needs trimming, null when every
// gop would be trimmed, or a new trimmed list with recomputed aggregates.
this.alignGopsAtStart_ = function (gops) {
  var remainingBytes = gops.byteLength;
  var remainingNals = gops.nalCount;
  var remainingDuration = gops.duration;
  var alignIndex = 0;
  var gopIndex = 0;

  while (alignIndex < gopsToAlignWith.length && gopIndex < gops.length) {
    var align = gopsToAlignWith[alignIndex];
    var gop = gops[gopIndex];

    if (align.pts === gop.pts) {
      // Exact pts match found; stop trimming here.
      break;
    }

    if (gop.pts > align.pts) {
      // The current gop starts after the current alignment point, so move
      // to the next alignment candidate.
      alignIndex++;
    } else {
      // The current gop starts before the alignment point; drop it and
      // subtract its contribution from the aggregate metadata.
      gopIndex++;
      remainingBytes -= gop.byteLength;
      remainingNals -= gop.nalCount;
      remainingDuration -= gop.duration;
    }
  }

  if (gopIndex === 0) {
    // no gops to trim
    return gops;
  }

  if (gopIndex === gops.length) {
    // all gops trimmed, skip appending all gops
    return null;
  }

  var alignedGops = gops.slice(gopIndex);
  alignedGops.byteLength = remainingBytes;
  alignedGops.duration = remainingDuration;
  alignedGops.nalCount = remainingNals;
  alignedGops.pts = alignedGops[0].pts;
  alignedGops.dts = alignedGops[0].dts;
  return alignedGops;
};
10017 // of gopsToAlignWith starting from the END of the list
10018
10019
// Trim the gop list so appending starts at a gop whose pts matches an entry
// in gopsToAlignWith, scanning the alignment list from its END. Returns the
// original list, a trimmed copy with recomputed aggregates, or null when
// nothing can be aligned.
this.alignGopsAtEnd_ = function (gops) {
  var alignIndex, gopIndex, align, gop, alignEndIndex, matchFound;
  alignIndex = gopsToAlignWith.length - 1;
  gopIndex = gops.length - 1;
  alignEndIndex = null;
  matchFound = false;

  // Walk both lists backwards looking for an exact pts match.
  while (alignIndex >= 0 && gopIndex >= 0) {
    align = gopsToAlignWith[alignIndex];
    gop = gops[gopIndex];

    if (align.pts === gop.pts) {
      matchFound = true;
      break;
    }

    if (align.pts > gop.pts) {
      alignIndex--;
      continue;
    }

    if (alignIndex === gopsToAlignWith.length - 1) {
      // gop.pts is greater than the last alignment candidate. If no match is found
      // by the end of this loop, we still want to append gops that come after this
      // point
      alignEndIndex = gopIndex;
    }

    gopIndex--;
  }

  // No pts match and no gop beyond the final alignment point: nothing usable.
  if (!matchFound && alignEndIndex === null) {
    return null;
  }

  var trimIndex;

  if (matchFound) {
    trimIndex = gopIndex;
  } else {
    trimIndex = alignEndIndex;
  }

  if (trimIndex === 0) {
    // no gops to trim
    return gops;
  }

  var alignedGops = gops.slice(trimIndex);
  // Recompute the aggregate metadata for the trimmed list.
  var metadata = alignedGops.reduce(function (total, gop) {
    total.byteLength += gop.byteLength;
    total.duration += gop.duration;
    total.nalCount += gop.nalCount;
    return total;
  }, {
    byteLength: 0,
    duration: 0,
    nalCount: 0
  });
  alignedGops.byteLength = metadata.byteLength;
  alignedGops.duration = metadata.duration;
  alignedGops.nalCount = metadata.nalCount;
  alignedGops.pts = alignedGops[0].pts;
  alignedGops.dts = alignedGops[0].dts;
  return alignedGops;
};
10085
// Replace the list of gop timing targets consumed by alignGopsAtStart_/AtEnd_.
this.alignGopsWith = function (newGopsToAlignWith) {
  gopsToAlignWith = newGopsToAlignWith;
};
10089 };
10090
_VideoSegmentStream.prototype = new stream(); // inherit the Stream event facilities (on/off/trigger/pipe)
/**
 * A Stream that can combine multiple streams (i.e. audio & video)
 * into a single output segment for MSE. Also supports audio-only
 * and video-only streams.
 * @param {object} options - transmuxer options object
 * @param {boolean} options.keepOriginalTimestamps - if true, keep the timestamps
 *        in the source; false to adjust the first segment to start at media timeline start.
 */
10100
_CoalesceStream = function CoalesceStream(options, metadataStream) {
  // Number of tracks expected per output segment; when greater than 1,
  // multiple tracks are combined into a single segment.
  this.numberOfTracks = 0;
  this.metadataStream = metadataStream;
  options = options || {};
  // remux defaults to true unless explicitly configured.
  this.remuxTracks = typeof options.remux !== 'undefined' ? !!options.remux : true;
  this.keepOriginalTimestamps = typeof options.keepOriginalTimestamps === 'boolean' ? options.keepOriginalTimestamps : false;
  this.pendingTracks = [];
  this.videoTrack = null;
  this.pendingBoxes = [];
  this.pendingCaptions = [];
  this.pendingMetadata = [];
  this.pendingBytes = 0;
  this.emittedTracks = 0;

  _CoalesceStream.prototype.init.call(this); // Take output from multiple

  // Buffer incoming output from the upstream segment, caption, and
  // metadata streams until flush() assembles a complete segment.
  this.push = function (output) {
    // Captions are held until the associated video segment finishes.
    if (output.text) {
      return this.pendingCaptions.push(output);
    }

    // ID3 tags are held until the final flush.
    if (output.frames) {
      return this.pendingMetadata.push(output);
    }

    // Otherwise this is a finished track fragment: record the track and the
    // number of bytes its boxes contribute to the final segment.
    this.pendingTracks.push(output.track);
    this.pendingBytes += output.boxes.byteLength;

    // Audio boxes are unshifted and video boxes pushed so the video mdat
    // never precedes the audio mdat; as of Chrome 75, switching init
    // segments with video-before-audio played only audio for the duration
    // of the transmux. (TODO: is there a Chrome issue tracking this?)
    if (output.track.type === 'video') {
      this.videoTrack = output.track;
      this.pendingBoxes.push(output.boxes);
    }

    if (output.track.type === 'audio') {
      this.audioTrack = output.track;
      this.pendingBoxes.unshift(output.boxes);
    }
  };
};
10166
_CoalesceStream.prototype = new stream(); // inherit the Stream event facilities (on/off/trigger/pipe)
10168
/**
 * Assemble all pending track boxes, captions, and ID3 metadata into a
 * single 'data' event (init segment plus concatenated moof+mdat bytes),
 * then emit 'caption'/'id3Frame' events and, once every track has been
 * emitted, a final 'done' event.
 * @param {string} flushSource - name of the upstream stream that triggered
 *        this flush (e.g. 'VideoSegmentStream', 'AudioSegmentStream')
 */
_CoalesceStream.prototype.flush = function (flushSource) {
  var offset = 0,
      event = {
        captions: [],
        captionStreams: {},
        metadata: [],
        info: {}
      },
      caption,
      id3,
      initSegment,
      timelineStartPts = 0,
      i;

  if (this.pendingTracks.length < this.numberOfTracks) {
    if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') {
      // Return because we haven't received a flush from a data-generating
      // portion of the segment (meaning that we have only received meta-data
      // or captions.)
      return;
    } else if (this.remuxTracks) {
      // Return until we have enough tracks from the pipeline to remux (if we
      // are remuxing audio and video into a single MP4)
      return;
    } else if (this.pendingTracks.length === 0) {
      // In the case where we receive a flush without any data having been
      // received we consider it an emitted track for the purposes of coalescing
      // `done` events.
      // We do this for the case where there is an audio and video track in the
      // segment but no audio data. (seen in several playlists with alternate
      // audio tracks and no audio present in the main TS segments.)
      this.emittedTracks++;

      if (this.emittedTracks >= this.numberOfTracks) {
        this.trigger('done');
        this.emittedTracks = 0;
      }

      return;
    }
  }

  // Video timing takes precedence when both tracks are present.
  // NOTE(review): only videoTrack is reset to null after a flush below;
  // this.audioTrack is never cleared, so a previously-seen audio track may
  // be consulted here on later flushes — presumably intentional upstream
  // behavior, but confirm against mux.js.
  if (this.videoTrack) {
    timelineStartPts = this.videoTrack.timelineStartInfo.pts;
    videoProperties.forEach(function (prop) {
      event.info[prop] = this.videoTrack[prop];
    }, this);
  } else if (this.audioTrack) {
    timelineStartPts = this.audioTrack.timelineStartInfo.pts;
    audioProperties.forEach(function (prop) {
      event.info[prop] = this.audioTrack[prop];
    }, this);
  }

  if (this.videoTrack || this.audioTrack) {
    if (this.pendingTracks.length === 1) {
      event.type = this.pendingTracks[0].type;
    } else {
      event.type = 'combined';
    }

    this.emittedTracks += this.pendingTracks.length;
    initSegment = mp4Generator.initSegment(this.pendingTracks); // Create a new typed array to hold the init segment

    event.initSegment = new Uint8Array(initSegment.byteLength); // Create an init segment containing a moov
    // and track definitions

    event.initSegment.set(initSegment); // Create a new typed array to hold the moof+mdats

    event.data = new Uint8Array(this.pendingBytes); // Append each moof+mdat (one per track) together

    for (i = 0; i < this.pendingBoxes.length; i++) {
      event.data.set(this.pendingBoxes[i], offset);
      offset += this.pendingBoxes[i].byteLength;
    } // Translate caption PTS times into second offsets to match the
    // video timeline for the segment, and add track info

    for (i = 0; i < this.pendingCaptions.length; i++) {
      caption = this.pendingCaptions[i];
      caption.startTime = clock.metadataTsToSeconds(caption.startPts, timelineStartPts, this.keepOriginalTimestamps);
      caption.endTime = clock.metadataTsToSeconds(caption.endPts, timelineStartPts, this.keepOriginalTimestamps);
      event.captionStreams[caption.stream] = true;
      event.captions.push(caption);
    } // Translate ID3 frame PTS times into second offsets to match the
    // video timeline for the segment

    for (i = 0; i < this.pendingMetadata.length; i++) {
      id3 = this.pendingMetadata[i];
      id3.cueTime = clock.metadataTsToSeconds(id3.pts, timelineStartPts, this.keepOriginalTimestamps);
      event.metadata.push(id3);
    } // We add this to every single emitted segment even though we only need
    // it for the first

    event.metadata.dispatchType = this.metadataStream.dispatchType; // Reset stream state

    this.pendingTracks.length = 0;
    this.videoTrack = null;
    this.pendingBoxes.length = 0;
    this.pendingCaptions.length = 0;
    this.pendingBytes = 0;
    this.pendingMetadata.length = 0; // Emit the built segment
    // We include captions and ID3 tags for backwards compatibility,
    // ideally we should send only video and audio in the data event

    this.trigger('data', event); // Emit each caption to the outside world
    // Ideally, this would happen immediately on parsing captions,
    // but we need to ensure that video data is sent back first
    // so that caption timing can be adjusted to match video timing

    for (i = 0; i < event.captions.length; i++) {
      caption = event.captions[i];
      this.trigger('caption', caption);
    } // Emit each id3 tag to the outside world
    // Ideally, this would happen immediately on parsing the tag,
    // but we need to ensure that video data is sent back first
    // so that ID3 frame timing can be adjusted to match video timing

    for (i = 0; i < event.metadata.length; i++) {
      id3 = event.metadata[i];
      this.trigger('id3Frame', id3);
    }
  } // Only emit `done` if all tracks have been flushed and emitted

  if (this.emittedTracks >= this.numberOfTracks) {
    this.trigger('done');
    this.emittedTracks = 0;
  }
};
10302
/**
 * Toggle whether audio and video are remuxed into a single segment.
 * @param {boolean} val - true to combine tracks, false to emit them separately
 */
_CoalesceStream.prototype.setRemux = function (val) {
  this.remuxTracks = val;
};
10306 /**
10307 * A Stream that expects MP2T binary data as input and produces
10308 * corresponding media segments, suitable for use with Media Source
10309 * Extension (MSE) implementations that support the ISO BMFF byte
10310 * stream format, like Chrome.
10311 */
10312
10313
// Lazily builds one of two parsing pipelines (raw AAC/ADTS or MPEG2-TS)
// based on the first bytes pushed after each flush, and re-emits the
// pipeline's events from the transmuxer itself.
_Transmuxer = function Transmuxer(options) {
  var self = this,
      hasFlushed = true,
      // metadata objects for the first video/audio track seen by the pipeline
      videoTrack,
      audioTrack;

  _Transmuxer.prototype.init.call(this);

  options = options || {};
  this.baseMediaDecodeTime = options.baseMediaDecodeTime || 0;
  this.transmuxPipeline_ = {};

  // Build and install the pipeline used for raw AAC (ADTS) input.
  this.setupAacPipeline = function () {
    var pipeline = {};
    this.transmuxPipeline_ = pipeline;
    pipeline.type = 'aac';
    pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline

    pipeline.aacStream = new aac();
    pipeline.audioTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('audio');
    pipeline.timedMetadataTimestampRolloverStream = new m2ts_1.TimestampRolloverStream('timed-metadata');
    pipeline.adtsStream = new adts();
    pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
    pipeline.headOfPipeline = pipeline.aacStream;
    pipeline.aacStream.pipe(pipeline.audioTimestampRolloverStream).pipe(pipeline.adtsStream);
    pipeline.aacStream.pipe(pipeline.timedMetadataTimestampRolloverStream).pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream);
    pipeline.metadataStream.on('timestamp', function (frame) {
      pipeline.aacStream.setTimestamp(frame.timeStamp);
    });
    pipeline.aacStream.on('data', function (data) {
      // Only the first audio/timed-metadata event sets up the segment stream.
      if (data.type !== 'timed-metadata' && data.type !== 'audio' || pipeline.audioSegmentStream) {
        return;
      }

      audioTrack = audioTrack || {
        timelineStartInfo: {
          baseMediaDecodeTime: self.baseMediaDecodeTime
        },
        codec: 'adts',
        type: 'audio'
      }; // hook up the audio segment stream to the first track with aac data

      pipeline.coalesceStream.numberOfTracks++;
      pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
      pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
      pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo')); // Set up the final part of the audio pipeline

      pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream); // emit pmt info

      self.trigger('trackinfo', {
        hasAudio: !!audioTrack,
        hasVideo: !!videoTrack
      });
    }); // Re-emit any data coming from the coalesce stream to the outside world

    pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data')); // Let the consumer know we have finished flushing the entire pipeline

    pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
    addPipelineLogRetriggers(this, pipeline);
  };

  // Build and install the pipeline used for MPEG2-TS input.
  this.setupTsPipeline = function () {
    var pipeline = {};
    this.transmuxPipeline_ = pipeline;
    pipeline.type = 'ts';
    pipeline.metadataStream = new m2ts_1.MetadataStream(); // set up the parsing pipeline

    pipeline.packetStream = new m2ts_1.TransportPacketStream();
    pipeline.parseStream = new m2ts_1.TransportParseStream();
    pipeline.elementaryStream = new m2ts_1.ElementaryStream();
    pipeline.timestampRolloverStream = new m2ts_1.TimestampRolloverStream();
    pipeline.adtsStream = new adts();
    pipeline.h264Stream = new H264Stream();
    pipeline.captionStream = new m2ts_1.CaptionStream(options);
    pipeline.coalesceStream = new _CoalesceStream(options, pipeline.metadataStream);
    pipeline.headOfPipeline = pipeline.packetStream; // disassemble MPEG2-TS packets into elementary streams

    pipeline.packetStream.pipe(pipeline.parseStream).pipe(pipeline.elementaryStream).pipe(pipeline.timestampRolloverStream); // !!THIS ORDER IS IMPORTANT!!
    // demux the streams

    pipeline.timestampRolloverStream.pipe(pipeline.h264Stream);
    pipeline.timestampRolloverStream.pipe(pipeline.adtsStream);
    pipeline.timestampRolloverStream.pipe(pipeline.metadataStream).pipe(pipeline.coalesceStream); // Hook up CEA-608/708 caption stream

    pipeline.h264Stream.pipe(pipeline.captionStream).pipe(pipeline.coalesceStream);
    pipeline.elementaryStream.on('data', function (data) {
      var i;

      if (data.type === 'metadata') {
        i = data.tracks.length; // scan the tracks listed in the metadata

        while (i--) {
          if (!videoTrack && data.tracks[i].type === 'video') {
            videoTrack = data.tracks[i];
            videoTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
          } else if (!audioTrack && data.tracks[i].type === 'audio') {
            audioTrack = data.tracks[i];
            audioTrack.timelineStartInfo.baseMediaDecodeTime = self.baseMediaDecodeTime;
          }
        } // hook up the video segment stream to the first track with h264 data

        if (videoTrack && !pipeline.videoSegmentStream) {
          pipeline.coalesceStream.numberOfTracks++;
          pipeline.videoSegmentStream = new _VideoSegmentStream(videoTrack, options);
          pipeline.videoSegmentStream.on('log', self.getLogTrigger_('videoSegmentStream'));
          pipeline.videoSegmentStream.on('timelineStartInfo', function (timelineStartInfo) {
            // When video emits timelineStartInfo data after a flush, we forward that
            // info to the AudioSegmentStream, if it exists, because video timeline
            // data takes precedence. Do not do this if keepOriginalTimestamps is set,
            // because this is a particularly subtle form of timestamp alteration.
            if (audioTrack && !options.keepOriginalTimestamps) {
              audioTrack.timelineStartInfo = timelineStartInfo; // On the first segment we trim AAC frames that exist before the
              // very earliest DTS we have seen in video because Chrome will
              // interpret any video track with a baseMediaDecodeTime that is
              // non-zero as a gap.

              pipeline.audioSegmentStream.setEarliestDts(timelineStartInfo.dts - self.baseMediaDecodeTime);
            }
          });
          pipeline.videoSegmentStream.on('processedGopsInfo', self.trigger.bind(self, 'gopInfo'));
          pipeline.videoSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'videoSegmentTimingInfo'));
          pipeline.videoSegmentStream.on('baseMediaDecodeTime', function (baseMediaDecodeTime) {
            if (audioTrack) {
              pipeline.audioSegmentStream.setVideoBaseMediaDecodeTime(baseMediaDecodeTime);
            }
          });
          pipeline.videoSegmentStream.on('timingInfo', self.trigger.bind(self, 'videoTimingInfo')); // Set up the final part of the video pipeline

          pipeline.h264Stream.pipe(pipeline.videoSegmentStream).pipe(pipeline.coalesceStream);
        }

        if (audioTrack && !pipeline.audioSegmentStream) {
          // hook up the audio segment stream to the first track with aac data
          pipeline.coalesceStream.numberOfTracks++;
          pipeline.audioSegmentStream = new _AudioSegmentStream(audioTrack, options);
          pipeline.audioSegmentStream.on('log', self.getLogTrigger_('audioSegmentStream'));
          pipeline.audioSegmentStream.on('timingInfo', self.trigger.bind(self, 'audioTimingInfo'));
          pipeline.audioSegmentStream.on('segmentTimingInfo', self.trigger.bind(self, 'audioSegmentTimingInfo')); // Set up the final part of the audio pipeline

          pipeline.adtsStream.pipe(pipeline.audioSegmentStream).pipe(pipeline.coalesceStream);
        } // emit pmt info

        self.trigger('trackinfo', {
          hasAudio: !!audioTrack,
          hasVideo: !!videoTrack
        });
      }
    }); // Re-emit any data coming from the coalesce stream to the outside world

    pipeline.coalesceStream.on('data', this.trigger.bind(this, 'data'));
    pipeline.coalesceStream.on('id3Frame', function (id3Frame) {
      id3Frame.dispatchType = pipeline.metadataStream.dispatchType;
      self.trigger('id3Frame', id3Frame);
    });
    pipeline.coalesceStream.on('caption', this.trigger.bind(this, 'caption')); // Let the consumer know we have finished flushing the entire pipeline

    pipeline.coalesceStream.on('done', this.trigger.bind(this, 'done'));
    addPipelineLogRetriggers(this, pipeline);
  }; // hook up the segment streams once track metadata is delivered

  /**
   * Set the offset applied when computing each track's baseMediaDecodeTime
   * and clear per-track timing state so the next segment is timed relative
   * to the new offset. The offset itself is left unchanged when
   * options.keepOriginalTimestamps is set.
   * @param {number} baseMediaDecodeTime - offset in 90kHz clock units
   */
  this.setBaseMediaDecodeTime = function (baseMediaDecodeTime) {
    var pipeline = this.transmuxPipeline_;

    if (!options.keepOriginalTimestamps) {
      this.baseMediaDecodeTime = baseMediaDecodeTime;
    }

    if (audioTrack) {
      audioTrack.timelineStartInfo.dts = undefined;
      audioTrack.timelineStartInfo.pts = undefined;
      trackDecodeInfo.clearDtsInfo(audioTrack);

      if (pipeline.audioTimestampRolloverStream) {
        pipeline.audioTimestampRolloverStream.discontinuity();
      }
    }

    if (videoTrack) {
      if (pipeline.videoSegmentStream) {
        pipeline.videoSegmentStream.gopCache_ = [];
      }

      videoTrack.timelineStartInfo.dts = undefined;
      videoTrack.timelineStartInfo.pts = undefined;
      trackDecodeInfo.clearDtsInfo(videoTrack);
      pipeline.captionStream.reset();
    }

    if (pipeline.timestampRolloverStream) {
      pipeline.timestampRolloverStream.discontinuity();
    }
  };

  // Forward the audio append-start timestamp to the audio segment stream,
  // if an audio track has been discovered.
  this.setAudioAppendStart = function (timestamp) {
    if (audioTrack) {
      this.transmuxPipeline_.audioSegmentStream.setAudioAppendStart(timestamp);
    }
  };

  // Toggle remuxing on the active pipeline (and for future pipelines via
  // the shared options object).
  this.setRemux = function (val) {
    var pipeline = this.transmuxPipeline_;
    options.remux = val;

    if (pipeline && pipeline.coalesceStream) {
      pipeline.coalesceStream.setRemux(val);
    }
  };

  // Forward gop alignment targets to the video segment stream, if any.
  this.alignGopsWith = function (gopsToAlignWith) {
    if (videoTrack && this.transmuxPipeline_.videoSegmentStream) {
      this.transmuxPipeline_.videoSegmentStream.alignGopsWith(gopsToAlignWith);
    }
  };

  // Returns a handler that tags a stream's 'log' events with the stream's
  // name before re-emitting them from the transmuxer.
  this.getLogTrigger_ = function (key) {
    var self = this;
    return function (event) {
      event.stream = key;
      self.trigger('log', event);
    };
  }; // feed incoming data to the front of the parsing pipeline

  this.push = function (data) {
    if (hasFlushed) {
      // After a flush the container type may have changed, so sniff the
      // bytes and (re)build the matching pipeline if necessary.
      var isAac = isLikelyAacData(data);

      if (isAac && this.transmuxPipeline_.type !== 'aac') {
        this.setupAacPipeline();
      } else if (!isAac && this.transmuxPipeline_.type !== 'ts') {
        this.setupTsPipeline();
      }

      hasFlushed = false;
    }

    this.transmuxPipeline_.headOfPipeline.push(data);
  }; // flush any buffered data

  this.flush = function () {
    hasFlushed = true; // Start at the top of the pipeline and flush all pending work

    this.transmuxPipeline_.headOfPipeline.flush();
  };

  // Signal the end of the current timeline to the pipeline.
  this.endTimeline = function () {
    this.transmuxPipeline_.headOfPipeline.endTimeline();
  };

  // Reset the pipeline, if one has been constructed.
  this.reset = function () {
    if (this.transmuxPipeline_.headOfPipeline) {
      this.transmuxPipeline_.headOfPipeline.reset();
    }
  }; // Caption data has to be reset when seeking outside buffered range

  this.resetCaptions = function () {
    if (this.transmuxPipeline_.captionStream) {
      this.transmuxPipeline_.captionStream.reset();
    }
  };
};
10580
_Transmuxer.prototype = new stream(); // inherit the Stream event facilities (on/off/trigger/pipe)
// Public surface of this mux.js transmuxer module as bundled here.
var transmuxer = {
  Transmuxer: _Transmuxer,
  VideoSegmentStream: _VideoSegmentStream,
  AudioSegmentStream: _AudioSegmentStream,
  AUDIO_PROPERTIES: audioProperties,
  VIDEO_PROPERTIES: videoProperties,
  // exported for testing
  generateSegmentTimingInfo: generateSegmentTimingInfo
};
10591 /**
10592 * mux.js
10593 *
10594 * Copyright (c) Brightcove
10595 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
10596 */
10597
// Force a number into the unsigned 32-bit integer range (ToUint32).
var toUnsigned$3 = function toUnsigned(value) {
  var unsigned = value >>> 0;
  return unsigned;
};
10601
// Render a byte value as a two-character, zero-padded lowercase hex string.
// Values above 0xff are truncated to their lowest two hex digits by the
// slice(-2), matching the original behavior.
var toHexString$1 = function toHexString(value) {
  var hex = value.toString(16);
  return ('00' + hex).slice(-2);
};
10605
// Grouped export of the unsigned-coercion and hex-formatting helpers above.
var bin = {
  toUnsigned: toUnsigned$3,
  toHexString: toHexString$1
};
10610
// Decode the four ASCII bytes of an ISO BMFF box type into a string,
// e.g. [0x6d, 0x6f, 0x6f, 0x76] -> 'moov'.
var parseType$1 = function parseType(buffer) {
  return String.fromCharCode(buffer[0], buffer[1], buffer[2], buffer[3]);
};
10619
var parseType_1 = parseType$1; // CommonJS-style alias produced by the bundler
var toUnsigned$2 = bin.toUnsigned; // local alias for the ToUint32 helper
10622
// Walk the ISO BMFF box tree in `data`, collecting the payload of every
// box whose type sequence matches `path` (e.g. ['moof', 'traf', 'tfhd']).
// Returns an array of Uint8Array payload views, or null for an empty path.
var findBox = function findBox(data, path) {
  // An empty path has nothing to match; short-circuit the search.
  if (!path.length) {
    return null;
  }

  var results = [];
  var offset = 0;

  while (offset < data.byteLength) {
    var size = toUnsigned$2(data[offset] << 24 | data[offset + 1] << 16 | data[offset + 2] << 8 | data[offset + 3]);
    var type = parseType_1(data.subarray(offset + 4, offset + 8));
    // A size <= 1 means the box extends to the end of the buffer.
    var end = size > 1 ? offset + size : data.byteLength;

    if (type === path[0]) {
      if (path.length === 1) {
        // End of the path: this is a box we were looking for.
        results.push(data.subarray(offset + 8, end));
      } else {
        // Recurse into this box for the remainder of the path.
        var inner = findBox(data.subarray(offset + 8, end), path.slice(1));

        if (inner.length) {
          results = results.concat(inner);
        }
      }
    }

    offset = end;
  }

  // Finished scanning all of data.
  return results;
};
10662
var findBox_1 = findBox; // CommonJS-style alias produced by the bundler
var toUnsigned$1 = bin.toUnsigned; // local alias for the ToUint32 helper
10665
// Parse a track fragment decode time (tfdt) box payload: version byte,
// three flag bytes, then a 32-bit (version 0) or 64-bit (version 1)
// baseMediaDecodeTime. The 64-bit value is assembled in a double, as in
// the original, so values beyond 2^53 lose precision.
var tfdt = function tfdt(data) {
  var readWord = function (offset) {
    return toUnsigned$1(data[offset] << 24 | data[offset + 1] << 16 | data[offset + 2] << 8 | data[offset + 3]);
  };

  var result = {
    version: data[0],
    flags: new Uint8Array(data.subarray(1, 4)),
    baseMediaDecodeTime: readWord(4)
  };

  if (result.version === 1) {
    // Version 1 carries a 64-bit time: the first word is the high 32 bits.
    result.baseMediaDecodeTime = result.baseMediaDecodeTime * Math.pow(2, 32) + readWord(8);
  }

  return result;
};
10680
var parseTfdt = tfdt; // CommonJS-style alias produced by the bundler
10682
// Unpack a 4-byte ISO BMFF sample-flags field (as used in trun/tfhd) into
// its named bit-fields.
var parseSampleFlags = function parseSampleFlags(flags) {
  var byte0 = flags[0];
  var byte1 = flags[1];
  return {
    isLeading: (byte0 & 0x0c) >>> 2,
    dependsOn: byte0 & 0x03,
    isDependedOn: (byte1 & 0xc0) >>> 6,
    hasRedundancy: (byte1 & 0x30) >>> 4,
    paddingValue: (byte1 & 0x0e) >>> 1,
    isNonSyncSample: byte1 & 0x01,
    // 16-bit big-endian priority from the last two bytes
    degradationPriority: flags[2] << 8 | flags[3]
  };
};
10694
10695 var parseSampleFlags_1 = parseSampleFlags;
10696
/**
 * Parse a Track Fragment Run (trun) box payload into its per-sample
 * records. Which optional per-sample fields are present is controlled by
 * the 24-bit tr_flags value (stored here as three bytes, flags[0] most
 * significant).
 *
 * @param {Uint8Array} data - the trun box payload (after the box header)
 * @return {Object} `version`, `flags`, optional `dataOffset`, and a
 * `samples` array whose entries carry whichever of duration/size/flags/
 * compositionTimeOffset the tr_flags declared
 *
 * @see ISO-BMFF-12/2015, Section 8.8.8
 **/
var trun = function trun(data) {
  var result = {
    version: data[0],
    flags: new Uint8Array(data.subarray(1, 4)),
    samples: []
  },
      view = new DataView(data.buffer, data.byteOffset, data.byteLength),
      // Flag interpretation
      dataOffsetPresent = result.flags[2] & 0x01,
      // compare with 2nd byte of 0x1
      firstSampleFlagsPresent = result.flags[2] & 0x04,
      // compare with 2nd byte of 0x4
      sampleDurationPresent = result.flags[1] & 0x01,
      // compare with 2nd byte of 0x100
      sampleSizePresent = result.flags[1] & 0x02,
      // compare with 2nd byte of 0x200
      sampleFlagsPresent = result.flags[1] & 0x04,
      // compare with 2nd byte of 0x400
      sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08,
      // compare with 2nd byte of 0x800
      sampleCount = view.getUint32(4),
      offset = 8,
      sample;

  if (dataOffsetPresent) {
    // 32 bit signed integer
    result.dataOffset = view.getInt32(offset);
    offset += 4;
  } // Overrides the flags for the first sample only. The order of
  // optional values will be: duration, size, compositionTimeOffset


  if (firstSampleFlagsPresent && sampleCount) {
    sample = {
      flags: parseSampleFlags_1(data.subarray(offset, offset + 4))
    };
    offset += 4;

    if (sampleDurationPresent) {
      sample.duration = view.getUint32(offset);
      offset += 4;
    }

    if (sampleSizePresent) {
      sample.size = view.getUint32(offset);
      offset += 4;
    }

    if (sampleCompositionTimeOffsetPresent) {
      if (result.version === 1) {
        // version 1 stores the composition offset as a signed value
        sample.compositionTimeOffset = view.getInt32(offset);
      } else {
        sample.compositionTimeOffset = view.getUint32(offset);
      }

      offset += 4;
    }

    result.samples.push(sample);
    sampleCount--;
  }

  // remaining samples: same optional-field layout, plus per-sample flags
  // when sampleFlagsPresent is set
  while (sampleCount--) {
    sample = {};

    if (sampleDurationPresent) {
      sample.duration = view.getUint32(offset);
      offset += 4;
    }

    if (sampleSizePresent) {
      sample.size = view.getUint32(offset);
      offset += 4;
    }

    if (sampleFlagsPresent) {
      sample.flags = parseSampleFlags_1(data.subarray(offset, offset + 4));
      offset += 4;
    }

    if (sampleCompositionTimeOffsetPresent) {
      if (result.version === 1) {
        sample.compositionTimeOffset = view.getInt32(offset);
      } else {
        sample.compositionTimeOffset = view.getUint32(offset);
      }

      offset += 4;
    }

    result.samples.push(sample);
  }

  return result;
};

var parseTrun = trun;
10794
/**
 * Parse a Track Fragment Header (tfhd) box payload (the bytes after the
 * 8-byte box header).
 *
 * @param {Uint8Array} data - the tfhd box payload
 * @return {Object} always contains `version`, `flags` and `trackId`;
 * the optional fields (baseDataOffset, sampleDescriptionIndex,
 * defaultSampleDuration, defaultSampleSize, defaultSampleFlags,
 * durationIsEmpty, baseDataOffsetIsMoof) appear only when the
 * corresponding tf_flags bit is set
 *
 * @see ISO-BMFF-12/2015, Section 8.8.7
 **/
var tfhd = function tfhd(data) {
  var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
      result = {
        version: data[0],
        flags: new Uint8Array(data.subarray(1, 4)),
        trackId: view.getUint32(4)
      },
      // tf_flags is 24 bits stored big-endian in flags[0..2];
      // flags[2] holds the least significant byte
      baseDataOffsetPresent = result.flags[2] & 0x01,
      sampleDescriptionIndexPresent = result.flags[2] & 0x02,
      defaultSampleDurationPresent = result.flags[2] & 0x08,
      defaultSampleSizePresent = result.flags[2] & 0x10,
      defaultSampleFlagsPresent = result.flags[2] & 0x20,
      // duration-is-empty (0x010000) and default-base-is-moof (0x020000)
      // live in the most significant byte, flags[0]. BUGFIX: the previous
      // code masked flags[0] (a single byte) with the full 24-bit
      // constants 0x010000/0x020000, which is always zero, so these flags
      // were never detected; mask with the byte-local bits instead.
      durationIsEmpty = result.flags[0] & 0x01,
      defaultBaseIsMoof = result.flags[0] & 0x02,
      i;
  i = 8;

  if (baseDataOffsetPresent) {
    // base_data_offset is a 64-bit field; skip the high 32 bits and read
    // only the low word
    i += 4; // truncate top 4 bytes
    // FIXME: should we read the full 64 bits?

    result.baseDataOffset = view.getUint32(12);
    i += 4;
  }

  if (sampleDescriptionIndexPresent) {
    result.sampleDescriptionIndex = view.getUint32(i);
    i += 4;
  }

  if (defaultSampleDurationPresent) {
    result.defaultSampleDuration = view.getUint32(i);
    i += 4;
  }

  if (defaultSampleSizePresent) {
    result.defaultSampleSize = view.getUint32(i);
    i += 4;
  }

  if (defaultSampleFlagsPresent) {
    result.defaultSampleFlags = view.getUint32(i);
  }

  if (durationIsEmpty) {
    result.durationIsEmpty = true;
  }

  if (!baseDataOffsetPresent && defaultBaseIsMoof) {
    result.baseDataOffsetIsMoof = true;
  }

  return result;
};

var parseTfhd = tfhd;
// helpers from the m2ts caption modules, reused here for fmp4 caption parsing
var discardEmulationPreventionBytes = captionPacketParser.discardEmulationPreventionBytes;
var CaptionStream = captionStream.CaptionStream;
/**
 * Locate the sample that contains a given byte offset into the mdat,
 * using the cumulative sizes of the samples. Assumes that `parseSamples`
 * has been called first.
 *
 * @param {Number} offset - The offset into the mdat
 * @param {Object[]} samples - An array of samples, parsed using `parseSamples`
 * @return {?Object} The matching sample, or null if no match was found.
 *
 * @see ISO-BMFF-12/2015, Section 8.8.8
 **/

var mapToSample = function mapToSample(offset, samples) {
  var remaining = offset;
  var index;

  for (index = 0; index < samples.length; index++) {
    var candidate = samples[index];

    // each sample occupies `size` contiguous bytes; stop once the
    // remaining offset falls inside the current sample
    if (remaining < candidate.size) {
      return candidate;
    }

    remaining -= candidate.size;
  }

  return null;
};
/**
 * Finds SEI nal units contained in a Media Data Box.
 * Assumes that `parseSamples` has been called first.
 *
 * @param {Uint8Array} avcStream - The bytes of the mdat
 * @param {Object[]} samples - The samples parsed out by `parseSamples`
 * @param {Number} trackId - The trackId of this video track
 * @return {Object[]} seiNals - the parsed SEI NALUs found.
 *   The contents of the seiNal should match what is expected by
 *   CaptionStream.push (nalUnitType, size, data, escapedRBSP, pts, dts)
 *
 * @see ISO-BMFF-12/2015, Section 8.1.1
 * @see Rec. ITU-T H.264, 7.3.2.3.1
 **/


var findSeiNals = function findSeiNals(avcStream, samples, trackId) {
  var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
      result = {
        logs: [],
        seiNals: []
      },
      seiNal,
      i,
      length,
      lastMatchedSample;

  // the mdat is a sequence of length-prefixed NAL units: a 32-bit length
  // followed by that many bytes of payload
  for (i = 0; i + 4 < avcStream.length; i += length) {
    length = avcView.getUint32(i);
    i += 4; // Bail if this doesn't appear to be an H264 stream

    if (length <= 0) {
      continue;
    }

    // low 5 bits of the first payload byte are the NAL unit type;
    // 0x06 is SEI
    switch (avcStream[i] & 0x1F) {
      case 0x06:
        var data = avcStream.subarray(i + 1, i + 1 + length);
        // map this mdat offset back to a sample to recover timing info
        var matchingSample = mapToSample(i, samples);
        seiNal = {
          nalUnitType: 'sei_rbsp',
          size: length,
          data: data,
          escapedRBSP: discardEmulationPreventionBytes(data),
          trackId: trackId
        };

        if (matchingSample) {
          seiNal.pts = matchingSample.pts;
          seiNal.dts = matchingSample.dts;
          lastMatchedSample = matchingSample;
        } else if (lastMatchedSample) {
          // If a matching sample cannot be found, use the last
          // sample's values as they should be as close as possible
          seiNal.pts = lastMatchedSample.pts;
          seiNal.dts = lastMatchedSample.dts;
        } else {
          result.logs.push({
            level: 'warn',
            message: 'We\'ve encountered a nal unit without data at ' + i + ' for trackId ' + trackId + '. See mux.js#223.'
          });
          break;
        }

        result.seiNals.push(seiNal);
        break;
    }
  }

  return result;
};
/**
 * Parses sample information out of Track Run Boxes and calculates
 * the absolute presentation and decode timestamps of each sample.
 *
 * @param {Array<Uint8Array>} truns - The Trun Run boxes to be parsed
 * @param {Number} baseMediaDecodeTime - base media decode time from tfdt
 *   @see ISO-BMFF-12/2015, Section 8.8.12
 * @param {Object} tfhd - The parsed Track Fragment Header
 *   @see inspect.parseTfhd
 * @return {Object[]} the parsed samples
 *
 * @see ISO-BMFF-12/2015, Section 8.8.8
 **/


var parseSamples = function parseSamples(truns, baseMediaDecodeTime, tfhd) {
  var defaultSampleDuration = tfhd.defaultSampleDuration || 0;
  var defaultSampleSize = tfhd.defaultSampleSize || 0;
  var trackId = tfhd.trackId;
  var currentDts = baseMediaDecodeTime;
  var allSamples = [];

  truns.forEach(function (trun) {
    // Note: We currently do not parse the sample table as well
    // as the trun. It's possible some sources will require this.
    // moov > trak > mdia > minf > stbl
    var samples = parseTrun(trun).samples;

    samples.forEach(function (sample) {
      // fill in tfhd defaults for any fields the trun omitted
      if (sample.duration === undefined) {
        sample.duration = defaultSampleDuration;
      }

      if (sample.size === undefined) {
        sample.size = defaultSampleSize;
      }

      if (sample.compositionTimeOffset === undefined) {
        sample.compositionTimeOffset = 0;
      }

      sample.trackId = trackId;
      // decode times are cumulative; pts is dts shifted by the
      // composition offset
      sample.dts = currentDts;
      sample.pts = currentDts + sample.compositionTimeOffset;
      currentDts += sample.duration;
    });

    allSamples = allSamples.concat(samples);
  });

  return allSamples;
};
/**
 * Parses out caption nals from an FMP4 segment's video tracks.
 *
 * @param {Uint8Array} segment - The bytes of a single segment
 * @param {Number} videoTrackId - The trackId of a video track in the segment
 * @return {Object.<Number, Object[]>} A mapping of video trackId to
 *   a list of seiNals found in that track
 **/


var parseCaptionNals = function parseCaptionNals(segment, videoTrackId) {
  // To get the samples
  var trafs = findBox_1(segment, ['moof', 'traf']); // To get SEI NAL units

  var mdats = findBox_1(segment, ['mdat']);
  var captionNals = {};
  var mdatTrafPairs = []; // Pair up each traf with a mdat as moofs and mdats are in pairs

  mdats.forEach(function (mdat, index) {
    // NOTE(review): assumes the i-th mdat corresponds to the i-th traf;
    // holds for typical single-track fragments but is not guaranteed by
    // the spec
    var matchingTraf = trafs[index];
    mdatTrafPairs.push({
      mdat: mdat,
      traf: matchingTraf
    });
  });
  mdatTrafPairs.forEach(function (pair) {
    var mdat = pair.mdat;
    var traf = pair.traf;
    var tfhd = findBox_1(traf, ['tfhd']); // Exactly 1 tfhd per traf

    var headerInfo = parseTfhd(tfhd[0]);
    var trackId = headerInfo.trackId;
    var tfdt = findBox_1(traf, ['tfdt']); // Either 0 or 1 tfdt per traf

    var baseMediaDecodeTime = tfdt.length > 0 ? parseTfdt(tfdt[0]).baseMediaDecodeTime : 0;
    var truns = findBox_1(traf, ['trun']);
    var samples;
    var result; // Only parse video data for the chosen video track

    if (videoTrackId === trackId && truns.length > 0) {
      samples = parseSamples(truns, baseMediaDecodeTime, headerInfo);
      result = findSeiNals(mdat, samples, trackId);

      if (!captionNals[trackId]) {
        captionNals[trackId] = {
          seiNals: [],
          logs: []
        };
      }

      // accumulate across fragments so multi-moof segments keep all NALs
      captionNals[trackId].seiNals = captionNals[trackId].seiNals.concat(result.seiNals);
      captionNals[trackId].logs = captionNals[trackId].logs.concat(result.logs);
    }
  });
  return captionNals;
};
/**
 * Parses out inband captions from an MP4 container and returns
 * caption objects that can be used by WebVTT and the TextTrack API.
 * @see https://developer.mozilla.org/en-US/docs/Web/API/VTTCue
 * @see https://developer.mozilla.org/en-US/docs/Web/API/TextTrack
 * Assumes that `probe.getVideoTrackIds` and `probe.timescale` have been called first
 *
 * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
 * @param {Number} trackId - The id of the video track to parse
 * @param {Number} timescale - The timescale for the video track from the init segment
 *
 * @return {?Object[]} parsedCaptions - A list of captions or null if no video tracks
 * @return {Number} parsedCaptions[].startTime - The time to show the caption in seconds
 * @return {Number} parsedCaptions[].endTime - The time to stop showing the caption in seconds
 * @return {String} parsedCaptions[].text - The visible content of the caption
 **/


var parseEmbeddedCaptions = function parseEmbeddedCaptions(segment, trackId, timescale) {
  // the ISO-BMFF spec says that trackId can't be zero, but there's some
  // broken content out there, so only bail on an explicit null
  if (trackId === null) {
    return null;
  }

  var captionNals = parseCaptionNals(segment, trackId);
  var trackNals = captionNals[trackId] || {};

  return {
    seiNals: trackNals.seiNals,
    logs: trackNals.logs,
    timescale: timescale
  };
};
/**
 * Converts SEI NALUs into captions that can be used by video.js.
 * Stateful: holds a CaptionStream, a cache of segments seen before the
 * init segment, and all captions parsed so far.
 **/


var CaptionParser = function CaptionParser() {
  var isInitialized = false;
  var captionStream; // Stores segments seen before trackId and timescale are set

  var segmentCache; // Stores video track ID of the track being parsed

  var trackId; // Stores the timescale of the track being parsed

  var timescale; // Stores captions parsed so far

  var parsedCaptions; // Stores whether we are receiving partial data or not

  var parsingPartial;
  /**
   * A method to indicate whether a CaptionParser has been initialized
   * @returns {Boolean}
   **/

  this.isInitialized = function () {
    return isInitialized;
  };
  /**
   * Initializes the underlying CaptionStream, SEI NAL parsing
   * and management, and caption collection
   **/


  this.init = function (options) {
    captionStream = new CaptionStream();
    isInitialized = true;
    parsingPartial = options ? options.isPartial : false; // Collect dispatched captions

    captionStream.on('data', function (event) {
      // Convert to seconds in the source's timescale
      event.startTime = event.startPts / timescale;
      event.endTime = event.endPts / timescale;
      parsedCaptions.captions.push(event);
      parsedCaptions.captionStreams[event.stream] = true;
    });
    captionStream.on('log', function (log) {
      parsedCaptions.logs.push(log);
    });
  };
  /**
   * Determines if a new video track will be selected
   * or if the timescale changed
   * @return {Boolean}
   **/


  this.isNewInit = function (videoTrackIds, timescales) {
    // an empty track list / timescale map is not a "new init"
    if (videoTrackIds && videoTrackIds.length === 0 || timescales && typeof timescales === 'object' && Object.keys(timescales).length === 0) {
      return false;
    }

    return trackId !== videoTrackIds[0] || timescale !== timescales[trackId];
  };
  /**
   * Parses out SEI captions and interacts with underlying
   * CaptionStream to return dispatched captions
   *
   * @param {Uint8Array} segment - The fmp4 segment containing embedded captions
   * @param {Number[]} videoTrackIds - A list of video tracks found in the init segment
   * @param {Object.<Number, Number>} timescales - The timescales found in the init segment
   * @see parseEmbeddedCaptions
   * @see m2ts/caption-stream.js
   **/


  this.parse = function (segment, videoTrackIds, timescales) {
    var parsedData;

    if (!this.isInitialized()) {
      return null; // This is not likely to be a video segment
    } else if (!videoTrackIds || !timescales) {
      return null;
    } else if (this.isNewInit(videoTrackIds, timescales)) {
      // Use the first video track only as there is no
      // mechanism to switch to other video tracks
      trackId = videoTrackIds[0];
      timescale = timescales[trackId]; // If an init segment has not been seen yet, hold onto segment
      // data until we have one.
      // the ISO-BMFF spec says that trackId can't be zero, but there's some broken content out there
    } else if (trackId === null || !timescale) {
      segmentCache.push(segment);
      return null;
    } // Now that a timescale and trackId is set, parse cached segments


    while (segmentCache.length > 0) {
      var cachedSegment = segmentCache.shift();
      this.parse(cachedSegment, videoTrackIds, timescales);
    }

    parsedData = parseEmbeddedCaptions(segment, trackId, timescale);

    if (parsedData && parsedData.logs) {
      parsedCaptions.logs = parsedCaptions.logs.concat(parsedData.logs);
    }

    if (parsedData === null || !parsedData.seiNals) {
      // nothing parseable; still surface any accumulated log messages
      if (parsedCaptions.logs.length) {
        return {
          logs: parsedCaptions.logs,
          captions: [],
          captionStreams: []
        };
      }

      return null;
    }

    this.pushNals(parsedData.seiNals); // Force the parsed captions to be dispatched

    this.flushStream();
    return parsedCaptions;
  };
  /**
   * Pushes SEI NALUs onto CaptionStream
   * @param {Object[]} nals - A list of SEI nals parsed using `parseCaptionNals`
   * Assumes that `parseCaptionNals` has been called first
   * @see m2ts/caption-stream.js
   **/


  this.pushNals = function (nals) {
    if (!this.isInitialized() || !nals || nals.length === 0) {
      return null;
    }

    nals.forEach(function (nal) {
      captionStream.push(nal);
    });
  };
  /**
   * Flushes underlying CaptionStream to dispatch processed, displayable captions
   * @see m2ts/caption-stream.js
   **/


  this.flushStream = function () {
    if (!this.isInitialized()) {
      return null;
    }

    if (!parsingPartial) {
      captionStream.flush();
    } else {
      captionStream.partialFlush();
    }
  };
  /**
   * Reset caption buckets for new data
   **/


  this.clearParsedCaptions = function () {
    parsedCaptions.captions = [];
    parsedCaptions.captionStreams = {};
    parsedCaptions.logs = [];
  };
  /**
   * Resets underlying CaptionStream
   * @see m2ts/caption-stream.js
   **/


  this.resetCaptionStream = function () {
    if (!this.isInitialized()) {
      return null;
    }

    captionStream.reset();
  };
  /**
   * Convenience method to clear all captions flushed from the
   * CaptionStream and still being parsed
   * @see m2ts/caption-stream.js
   **/


  this.clearAllCaptions = function () {
    this.clearParsedCaptions();
    this.resetCaptionStream();
  };
  /**
   * Reset caption parser
   **/


  this.reset = function () {
    segmentCache = [];
    trackId = null;
    timescale = null;

    if (!parsedCaptions) {
      parsedCaptions = {
        captions: [],
        // CC1, CC2, CC3, CC4
        captionStreams: {},
        logs: []
      };
    } else {
      this.clearParsedCaptions();
    }

    this.resetCaptionStream();
  };

  this.reset();
};

var captionParser = CaptionParser;
var toUnsigned = bin.toUnsigned;
var toHexString = bin.toHexString;
// mp4 probe entry points; each is assigned a function below and exported
// together via probe$2
var timescale, startTime, compositionStartTime, getVideoTrackIds, getTracks, getTimescaleFromMediaHeader;
/**
 * Parses an MP4 initialization segment and extracts the timescale
 * values for any declared tracks. Timescale values indicate the
 * number of clock ticks per second to assume for time-based values
 * elsewhere in the MP4.
 *
 * To determine the start time of an MP4, you need two pieces of
 * information: the timescale unit and the earliest base media decode
 * time. Multiple timescales can be specified within an MP4 but the
 * base media decode time is always expressed in the timescale from
 * the media header box for the track:
 * ```
 * moov > trak > mdia > mdhd.timescale
 * ```
 * @param init {Uint8Array} the bytes of the init segment
 * @return {object} a hash of track ids to timescale values or null if
 * the init segment is malformed.
 */

timescale = function timescale(init) {
  var result = {},
      traks = findBox_1(init, ['moov', 'trak']); // mdhd timescale

  return traks.reduce(function (result, trak) {
    var tkhd, version, index, id, mdhd;
    tkhd = findBox_1(trak, ['tkhd'])[0];

    if (!tkhd) {
      // NOTE(review): returning null from the reducer means any later
      // iteration would throw when assigning result[id] — verify callers
      // treat a missing tkhd/mdhd as fully malformed input
      return null;
    }

    // tkhd is a FullBox: byte 0 is the version; the 32-bit track id sits
    // at offset 12 (version 0) or 20 (version 1, 64-bit times)
    version = tkhd[0];
    index = version === 0 ? 12 : 20;
    id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]);
    mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];

    if (!mdhd) {
      return null;
    }

    version = mdhd[0];
    index = version === 0 ? 12 : 20;
    result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
    return result;
  }, result);
};
/**
 * Determine the base media decode start time, in seconds, for an MP4
 * fragment. If multiple fragments are specified, the earliest time is
 * returned.
 *
 * The base media decode time can be parsed from track fragment
 * metadata:
 * ```
 * moof > traf > tfdt.baseMediaDecodeTime
 * ```
 * It requires the timescale value from the mdhd to interpret.
 *
 * @param timescale {object} a hash of track ids to timescale values.
 * @return {number} the earliest base media decode start time for the
 * fragment, in seconds
 */


startTime = function startTime(timescale, fragment) {
  var trafs, baseTimes, result; // we need info from two children of each track fragment box

  trafs = findBox_1(fragment, ['moof', 'traf']); // determine the start times for each track

  baseTimes = [].concat.apply([], trafs.map(function (traf) {
    return findBox_1(traf, ['tfhd']).map(function (tfhd) {
      var id, scale, baseTime; // get the track id from the tfhd

      id = toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7]); // assume a 90kHz clock if no timescale was specified

      scale = timescale[id] || 90e3; // get the base media decode time from the tfdt

      baseTime = findBox_1(traf, ['tfdt']).map(function (tfdt) {
        var version, result;
        version = tfdt[0];
        result = toUnsigned(tfdt[4] << 24 | tfdt[5] << 16 | tfdt[6] << 8 | tfdt[7]);

        if (version === 1) {
          // 64-bit decode time: word read above is the high half
          result *= Math.pow(2, 32);
          result += toUnsigned(tfdt[8] << 24 | tfdt[9] << 16 | tfdt[10] << 8 | tfdt[11]);
        }

        return result;
      })[0];
      // a traf without a tfdt contributes Infinity so it never wins the min
      baseTime = typeof baseTime === 'number' && !isNaN(baseTime) ? baseTime : Infinity; // convert base time to seconds

      return baseTime / scale;
    });
  })); // return the minimum

  result = Math.min.apply(null, baseTimes);
  return isFinite(result) ? result : 0;
};
/**
 * Determine the composition start, in seconds, for an MP4
 * fragment.
 *
 * The composition start time of a fragment can be calculated using the base
 * media decode time, composition time offset, and timescale, as follows:
 *
 * compositionStartTime = (baseMediaDecodeTime + compositionTimeOffset) / timescale
 *
 * All of the aforementioned information is contained within a media fragment's
 * `traf` box, except for timescale info, which comes from the initialization
 * segment, so a track id (also contained within a `traf`) is also necessary to
 * associate it with a timescale
 *
 *
 * @param timescales {object} - a hash of track ids to timescale values.
 * @param fragment {Unit8Array} - the bytes of a media segment
 * @return {number} the composition start time for the fragment, in seconds
 **/


compositionStartTime = function compositionStartTime(timescales, fragment) {
  var trafBoxes = findBox_1(fragment, ['moof', 'traf']);
  var baseMediaDecodeTime = 0;
  var compositionTimeOffset = 0;
  var trackId;

  if (trafBoxes && trafBoxes.length) {
    // The spec states that track run samples contained within a `traf` box are contiguous, but
    // it does not explicitly state whether the `traf` boxes themselves are contiguous.
    // We will assume that they are, so we only need the first to calculate start time.
    var tfhd = findBox_1(trafBoxes[0], ['tfhd'])[0];
    var trun = findBox_1(trafBoxes[0], ['trun'])[0];
    var tfdt = findBox_1(trafBoxes[0], ['tfdt'])[0];

    if (tfhd) {
      var parsedTfhd = parseTfhd(tfhd);
      trackId = parsedTfhd.trackId;
    }

    if (tfdt) {
      var parsedTfdt = parseTfdt(tfdt);
      baseMediaDecodeTime = parsedTfdt.baseMediaDecodeTime;
    }

    if (trun) {
      var parsedTrun = parseTrun(trun);

      // only the first sample's offset matters for the start time
      if (parsedTrun.samples && parsedTrun.samples.length) {
        compositionTimeOffset = parsedTrun.samples[0].compositionTimeOffset || 0;
      }
    }
  } // Get timescale for this specific track. Assume a 90kHz clock if no timescale was
  // specified.


  var timescale = timescales[trackId] || 90e3; // return the composition start time, in seconds

  return (baseMediaDecodeTime + compositionTimeOffset) / timescale;
};
/**
 * Find the trackIds of the video tracks in this source.
 * Found by parsing the Handler Reference and Track Header Boxes:
 * moov > trak > mdia > hdlr
 * moov > trak > tkhd
 *
 * @param {Uint8Array} init - The bytes of the init segment for this source
 * @return {Number[]} A list of trackIds
 *
 * @see ISO-BMFF-12/2015, Section 8.4.3
 **/


getVideoTrackIds = function getVideoTrackIds(init) {
  var traks = findBox_1(init, ['moov', 'trak']);
  var videoTrackIds = [];
  traks.forEach(function (trak) {
    var hdlrs = findBox_1(trak, ['mdia', 'hdlr']);
    var tkhds = findBox_1(trak, ['tkhd']);
    hdlrs.forEach(function (hdlr, index) {
      // handler_type is the 4 characters at offset 8 of the hdlr payload
      var handlerType = parseType_1(hdlr.subarray(8, 12));
      var tkhd = tkhds[index];
      var view;
      var version;
      var trackId;

      if (handlerType === 'vide') {
        view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
        version = view.getUint8(0);
        // track_ID offset depends on the tkhd version (32- vs 64-bit times)
        trackId = version === 0 ? view.getUint32(12) : view.getUint32(20);
        videoTrackIds.push(trackId);
      }
    });
  });
  return videoTrackIds;
};
11504
/**
 * Read the timescale out of a Media Header (mdhd) box payload.
 *
 * @param {Uint8Array} mdhd - the mdhd box payload
 * @return {Number} the track's timescale in ticks per second
 **/
getTimescaleFromMediaHeader = function getTimescaleFromMediaHeader(mdhd) {
  // mdhd is a FullBox, meaning it will have its own version as the first
  // byte; the 32-bit timescale sits at offset 12 (version 0, 32-bit
  // times) or 20 (version 1, 64-bit times)
  var offset = mdhd[0] === 0 ? 12 : 20;

  return toUnsigned(
    mdhd[offset] << 24 |
    mdhd[offset + 1] << 16 |
    mdhd[offset + 2] << 8 |
    mdhd[offset + 3]
  );
};
/**
 * Get all the video, audio, and hint tracks from a non fragmented
 * mp4 segment, including each track's id, type, codec string, and
 * timescale where those boxes are present.
 */


getTracks = function getTracks(init) {
  var traks = findBox_1(init, ['moov', 'trak']);
  var tracks = [];
  traks.forEach(function (trak) {
    var track = {};
    var tkhd = findBox_1(trak, ['tkhd'])[0];
    var view, tkhdVersion; // id

    if (tkhd) {
      view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
      tkhdVersion = view.getUint8(0);
      // track_ID offset depends on the tkhd version (32- vs 64-bit times)
      track.id = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
    }

    var hdlr = findBox_1(trak, ['mdia', 'hdlr'])[0]; // type

    if (hdlr) {
      var type = parseType_1(hdlr.subarray(8, 12));

      if (type === 'vide') {
        track.type = 'video';
      } else if (type === 'soun') {
        track.type = 'audio';
      } else {
        track.type = type;
      }
    } // codec


    var stsd = findBox_1(trak, ['mdia', 'minf', 'stbl', 'stsd'])[0];

    if (stsd) {
      var sampleDescriptions = stsd.subarray(8); // gives the codec type string

      track.codec = parseType_1(sampleDescriptions.subarray(4, 8));
      var codecBox = findBox_1(sampleDescriptions, [track.codec])[0];
      var codecConfig, codecConfigType;

      if (codecBox) {
        // https://tools.ietf.org/html/rfc6381#section-3.3
        if (/^[asm]vc[1-9]$/i.test(track.codec)) {
          // we don't need anything but the "config" parameter of the
          // avc1 codecBox
          codecConfig = codecBox.subarray(78);
          codecConfigType = parseType_1(codecConfig.subarray(4, 8));

          if (codecConfigType === 'avcC' && codecConfig.length > 11) {
            track.codec += '.'; // left padded with zeroes for single digit hex
            // profile idc

            track.codec += toHexString(codecConfig[9]); // the byte containing the constraint_set flags

            track.codec += toHexString(codecConfig[10]); // level idc

            track.codec += toHexString(codecConfig[11]);
          } else {
            // TODO: show a warning that we couldn't parse the codec
            // and are using the default
            track.codec = 'avc1.4d400d';
          }
        } else if (/^mp4[a,v]$/i.test(track.codec)) {
          // we do not need anything but the streamDescriptor of the mp4a codecBox
          codecConfig = codecBox.subarray(28);
          codecConfigType = parseType_1(codecConfig.subarray(4, 8));

          if (codecConfigType === 'esds' && codecConfig.length > 20 && codecConfig[19] !== 0) {
            // object type indication, e.g. mp4a.40
            track.codec += '.' + toHexString(codecConfig[19]); // this value is only a single digit

            // audio object type, e.g. mp4a.40.2
            track.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '');
          } else {
            // TODO: show a warning that we couldn't parse the codec
            // and are using the default
            track.codec = 'mp4a.40.2';
          }
        } else {
          // flac, opus, etc
          track.codec = track.codec.toLowerCase();
        }
      }
    }

    var mdhd = findBox_1(trak, ['mdia', 'mdhd'])[0];

    if (mdhd) {
      track.timescale = getTimescaleFromMediaHeader(mdhd);
    }

    tracks.push(track);
  });
  return tracks;
};
11608
// public probe API for MP4/fMP4 inspection
var probe$2 = {
  // export mp4 inspector's findBox and parseType for backwards compatibility
  findBox: findBox_1,
  parseType: parseType_1,
  timescale: timescale,
  startTime: startTime,
  compositionStartTime: compositionStartTime,
  videoTrackIds: getVideoTrackIds,
  tracks: getTracks,
  getTimescaleFromMediaHeader: getTimescaleFromMediaHeader
};
11620
/**
 * Extract the 13-bit packet identifier from an MPEG-TS packet header.
 *
 * @param {Uint8Array} packet - a transport stream packet
 * @return {Number} the PID
 **/
var parsePid = function parsePid(packet) {
  // the PID is the low 5 bits of byte 1 followed by all 8 bits of byte 2
  return ((packet[1] & 0x1f) << 8) | packet[2];
};
11627
/**
 * Read the payload_unit_start_indicator bit from an MPEG-TS packet header.
 *
 * @param {Uint8Array} packet - a transport stream packet
 * @return {Boolean} true when a new payload unit starts in this packet
 **/
var parsePayloadUnitStartIndicator = function parsePayloadUnitStartIndicator(packet) {
  // PUSI is bit 0x40 of the second header byte
  return (packet[1] & 0x40) !== 0;
};
11631
/**
 * Compute how many bytes of adaptation field precede the payload of an
 * MPEG-TS packet.
 *
 * @param {Uint8Array} packet - a transport stream packet
 * @return {Number} the number of bytes to skip past the 4-byte TS header
 **/
var parseAdaptionField = function parseAdaptionField(packet) {
  // adaptation_field_control (bits 5-4 of byte 3): values 2 and 3 mean an
  // adaptation field precedes the payload. Its length is given by the
  // fifth byte of the TS packet header, and the length byte itself must
  // also be skipped. The adaptation field is used to add stuffing to PES
  // packets that don't fill a complete TS packet, and to specify some
  // forms of timing and control data that we do not currently use.
  var hasAdaptationField = ((packet[3] & 0x30) >>> 4) > 0x01;

  return hasAdaptationField ? packet[4] + 1 : 0;
};
11645
/**
 * Classify an MPEG-TS packet by its PID.
 *
 * @param {Uint8Array} packet - a transport stream packet
 * @param {Number} pmtPid - the PID of the program map table, if known
 * @return {?String} 'pat', 'pmt', 'pes', or null when the packet can't be
 * classified yet (no PMT PID known)
 **/
var parseType = function parseType(packet, pmtPid) {
  // the PID is the low 5 bits of byte 1 followed by byte 2
  var pid = ((packet[1] & 0x1f) << 8) | packet[2];

  if (pid === 0) {
    return 'pat';
  }

  if (pid === pmtPid) {
    return 'pmt';
  }

  // any other PID is assumed to carry PES data once the PMT is known
  if (pmtPid) {
    return 'pes';
  }

  return null;
};
11659
/**
 * Extract the PMT PID advertised by a program association table packet.
 *
 * @param {Uint8Array} packet - one 188-byte TS packet carrying a PAT section
 * @return {number} the 13-bit PID of the program map table
 */
var parsePat = function parsePat(packet) {
  var offset = 4;

  // skip the adaptation field when adaptation_field_control (bits 4-5 of
  // byte 3) says one is present; byte 4 is its length
  if (((packet[3] & 0x30) >>> 4) > 0x01) {
    offset += packet[4] + 1;
  }

  // when the payload_unit_start_indicator is set, the first payload byte is a
  // pointer_field; skip it and the bytes it points over to reach the section
  if (packet[1] & 0x40) {
    offset += packet[offset] + 1;
  }

  // the first program's PMT PID is 13 bits spread over section bytes 10-11
  return ((packet[offset + 10] & 0x1f) << 8) | packet[offset + 11];
};
11670
/**
 * Parse a program map table packet into a mapping of elementary PID -> stream_type.
 *
 * @param {Uint8Array} packet - one 188-byte TS packet carrying a PMT section
 * @return {Object|undefined} the program map table, or undefined when the
 *         section is a "forward" declaration that is not yet in effect
 */
var parsePmt = function parsePmt(packet) {
  var programMapTable = {};
  var pusi = parsePayloadUnitStartIndicator(packet);
  var payloadOffset = 4 + parseAdaptionField(packet);

  if (pusi) {
    // skip the pointer_field to reach the start of the section
    payloadOffset += packet[payloadOffset] + 1;
  } // PMTs can be sent ahead of the time when they should actually
  // take effect. We don't believe this should ever be the case
  // for HLS but we'll ignore "forward" PMT declarations if we see
  // them. Future PMT declarations have the current_next_indicator
  // set to zero.


  if (!(packet[payloadOffset + 5] & 0x01)) {
    return;
  }

  var sectionLength, tableEnd, programInfoLength; // the mapping table ends at the end of the current section

  sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
  tableEnd = 3 + sectionLength - 4; // to determine where the table is, we have to figure out how
  // long the program info descriptors are

  programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11]; // advance the offset to the first entry in the mapping table

  var offset = 12 + programInfoLength;

  while (offset < tableEnd) {
    var i = payloadOffset + offset; // add an entry that maps the elementary_pid to the stream_type

    programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i]; // move to the next table entry
    // skip past the elementary stream descriptors, if present

    offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
  }

  return programMapTable;
};
11710
/**
 * Determine which elementary stream a PES packet belongs to by looking up its
 * PID in the program map table.
 *
 * @param {Uint8Array} packet - one 188-byte TS packet
 * @param {Object} programMapTable - map of PID -> stream_type built from the PMT
 * @return {?string} 'video', 'audio', 'timed-metadata', or null for other types
 */
var parsePesType = function parsePesType(packet, programMapTable) {
  var streamType = programMapTable[parsePid(packet)];

  if (streamType === streamTypes.H264_STREAM_TYPE) {
    return 'video';
  }

  if (streamType === streamTypes.ADTS_STREAM_TYPE) {
    return 'audio';
  }

  if (streamType === streamTypes.METADATA_STREAM_TYPE) {
    return 'timed-metadata';
  }

  return null;
};
11729
/**
 * Pull the PTS/DTS timestamps out of a TS packet that starts a PES packet.
 *
 * @param {Uint8Array} packet - one 188-byte TS packet
 * @return {?Object} { pts, dts } in 90khz clock ticks, or null when no timing
 *         info is available (no PUSI, stuffing-only payload, or flags unset)
 */
var parsePesTime = function parsePesTime(packet) {
  var pusi = parsePayloadUnitStartIndicator(packet);

  // timestamps only appear at the start of a PES packet
  if (!pusi) {
    return null;
  }

  var offset = 4 + parseAdaptionField(packet);

  if (offset >= packet.byteLength) {
    // From the H 222.0 MPEG-TS spec
    // "For transport stream packets carrying PES packets, stuffing is needed when there
    //  is insufficient PES packet data to completely fill the transport stream packet
    //  payload bytes. Stuffing is accomplished by defining an adaptation field longer than
    //  the sum of the lengths of the data elements in it, so that the payload bytes
    //  remaining after the adaptation field exactly accommodates the available PES packet
    //  data."
    //
    // If the offset is >= the length of the packet, then the packet contains no data
    // and instead is just adaption field stuffing bytes
    return null;
  }

  var pes = null;
  var ptsDtsFlags; // PES packets may be annotated with a PTS value, or a PTS value
  // and a DTS value. Determine what combination of values is
  // available to work with.

  ptsDtsFlags = packet[offset + 7]; // PTS and DTS are normally stored as a 33-bit number. Javascript
  // performs all bitwise operations on 32-bit integers but javascript
  // supports a much greater range (52-bits) of integer using standard
  // mathematical operations.
  // We construct a 31-bit value using bitwise operators over the 31
  // most significant bits and then multiply by 4 (equal to a left-shift
  // of 2) before we add the final 2 least significant bits of the
  // timestamp (equal to an OR.)

  if (ptsDtsFlags & 0xC0) {
    pes = {}; // the PTS and DTS are not written out directly. For information
    // on how they are encoded, see
    // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html

    pes.pts = (packet[offset + 9] & 0x0E) << 27 | (packet[offset + 10] & 0xFF) << 20 | (packet[offset + 11] & 0xFE) << 12 | (packet[offset + 12] & 0xFF) << 5 | (packet[offset + 13] & 0xFE) >>> 3;
    pes.pts *= 4; // Left shift by 2

    pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs

    // when no explicit DTS is encoded, it is defined to equal the PTS
    pes.dts = pes.pts;

    if (ptsDtsFlags & 0x40) {
      pes.dts = (packet[offset + 14] & 0x0E) << 27 | (packet[offset + 15] & 0xFF) << 20 | (packet[offset + 16] & 0xFE) << 12 | (packet[offset + 17] & 0xFF) << 5 | (packet[offset + 18] & 0xFE) >>> 3;
      pes.dts *= 4; // Left shift by 2

      pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
    }
  }

  return pes;
};
11789
/**
 * Translate an h264 nal_unit_type value into the spec name this inspector
 * cares about.
 *
 * @param {number} type - the 5-bit nal_unit_type field
 * @return {?string} the NAL unit's name, or null for types we do not inspect
 */
var parseNalUnitType = function parseNalUnitType(type) {
  // only the handful of NAL types relevant to keyframe detection are named
  var names = {
    0x05: 'slice_layer_without_partitioning_rbsp_idr',
    0x06: 'sei_rbsp',
    0x07: 'seq_parameter_set_rbsp',
    0x08: 'pic_parameter_set_rbsp',
    0x09: 'access_unit_delimiter_rbsp'
  };

  return names.hasOwnProperty(type) ? names[type] : null;
};
11811
/**
 * Scan a video packet's payload for NAL start codes and report whether an IDR
 * slice (key frame) NAL unit is present.
 *
 * @param {Uint8Array} packet - video packet data (TS header + payload)
 * @return {boolean} true when an IDR slice NAL unit was found
 */
var videoPacketContainsKeyFrame = function videoPacketContainsKeyFrame(packet) {
  var offset = 4 + parseAdaptionField(packet);
  var frameBuffer = packet.subarray(offset);
  var frameI = 0;
  var frameSyncPoint = 0;
  var foundKeyFrame = false;
  var nalType; // advance the sync point to a NAL start, if necessary

  for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
    if (frameBuffer[frameSyncPoint + 2] === 1) {
      // the sync point is properly aligned
      frameI = frameSyncPoint + 5;
      break;
    }
  }

  while (frameI < frameBuffer.byteLength) {
    // look at the current byte to determine if we've hit the end of
    // a NAL unit boundary
    switch (frameBuffer[frameI]) {
      case 0:
        // skip past non-sync sequences
        if (frameBuffer[frameI - 1] !== 0) {
          frameI += 2;
          break;
        } else if (frameBuffer[frameI - 2] !== 0) {
          frameI++;
          break;
        }

        // the NAL that just ended began at frameSyncPoint + 3; check its type
        if (frameSyncPoint + 3 !== frameI - 2) {
          nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);

          if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
            foundKeyFrame = true;
          }
        } // drop trailing zeroes


        do {
          frameI++;
        } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);

        frameSyncPoint = frameI - 2;
        frameI += 3;
        break;

      case 1:
        // skip past non-sync sequences
        if (frameBuffer[frameI - 1] !== 0 || frameBuffer[frameI - 2] !== 0) {
          frameI += 3;
          break;
        }

        // a start code terminated the previous NAL; check its type
        nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);

        if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
          foundKeyFrame = true;
        }

        frameSyncPoint = frameI - 2;
        frameI += 3;
        break;

      default:
        // the current byte isn't a one or zero, so it cannot be part
        // of a sync sequence
        frameI += 3;
        break;
    }
  }

  frameBuffer = frameBuffer.subarray(frameSyncPoint);
  frameI -= frameSyncPoint;
  frameSyncPoint = 0; // parse the final nal

  if (frameBuffer && frameBuffer.byteLength > 3) {
    nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);

    if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
      foundKeyFrame = true;
    }
  }

  return foundKeyFrame;
};
11898
// mpeg2-ts probe helpers grouped for use by the segment inspector below
var probe$1 = {
  parseType: parseType,
  parsePat: parsePat,
  parsePmt: parsePmt,
  parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
  parsePesType: parsePesType,
  parsePesTime: parsePesTime,
  videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
};
var handleRollover = timestampRolloverStream.handleRollover;
// container-specific probe namespaces: probe.ts for mpeg2-ts, probe.aac for raw aac
var probe = {};
probe.ts = probe$1;
probe.aac = utils;
// 90khz clock ticks per second
var ONE_SECOND_IN_TS = clock.ONE_SECOND_IN_TS;
var MP2T_PACKET_LENGTH = 188,
    // bytes
    SYNC_BYTE = 0x47;
/**
 * walks through segment data looking for pat and pmt packets to parse out
 * program map table information
 *
 * @param {Uint8Array} bytes - the transport stream segment data
 * @param {Object} pmt - accumulator; pmt.pid and pmt.table are filled in as
 *        PAT and PMT packets are found
 */

var parsePsi_ = function parsePsi_(bytes, pmt) {
  var startIndex = 0,
      endIndex = MP2T_PACKET_LENGTH,
      packet,
      type;

  while (endIndex < bytes.byteLength) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pat':
          // the PAT tells us which PID carries the PMT
          pmt.pid = probe.ts.parsePat(packet);
          break;

        case 'pmt':
          // merge this PMT's entries into anything collected so far
          var table = probe.ts.parsePmt(packet);
          pmt.table = pmt.table || {};
          Object.keys(table).forEach(function (key) {
            pmt.table[key] = table[key];
          });
          break;
      }

      startIndex += MP2T_PACKET_LENGTH;
      endIndex += MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex++;
    endIndex++;
  }
};
/**
 * walks through the segment data from the start and end to get timing information
 * for the first and last audio pes packets
 *
 * @param {Uint8Array} bytes - the transport stream segment data
 * @param {Object} pmt - parsed program map table info ({ pid, table })
 * @param {Object} result - accumulator; found timestamps are pushed onto result.audio
 */


var parseAudioPes_ = function parseAudioPes_(bytes, pmt, result) {
  var startIndex = 0,
      endIndex = MP2T_PACKET_LENGTH,
      packet,
      type,
      pesType,
      pusi,
      parsed;
  var endLoop = false; // Start walking from start of segment to get first audio packet

  while (endIndex <= bytes.byteLength) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);

          // only packets that start a PES payload can carry a usable timestamp
          if (pesType === 'audio' && pusi) {
            parsed = probe.ts.parsePesTime(packet);

            if (parsed) {
              parsed.type = 'audio';
              result.audio.push(parsed);
              endLoop = true;
            }
          }

          break;
      }

      if (endLoop) {
        break;
      }

      startIndex += MP2T_PACKET_LENGTH;
      endIndex += MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex++;
    endIndex++;
  } // Start walking from end of segment to get last audio packet


  endIndex = bytes.byteLength;
  startIndex = endIndex - MP2T_PACKET_LENGTH;
  endLoop = false;

  while (startIndex >= 0) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);

          if (pesType === 'audio' && pusi) {
            parsed = probe.ts.parsePesTime(packet);

            if (parsed) {
              parsed.type = 'audio';
              result.audio.push(parsed);
              endLoop = true;
            }
          }

          break;
      }

      if (endLoop) {
        break;
      }

      startIndex -= MP2T_PACKET_LENGTH;
      endIndex -= MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex--;
    endIndex--;
  }
};
/**
 * walks through the segment data from the start and end to get timing information
 * for the first and last video pes packets as well as timing information for the first
 * key frame.
 *
 * @param {Uint8Array} bytes - the transport stream segment data
 * @param {Object} pmt - parsed program map table info ({ pid, table })
 * @param {Object} result - accumulator; timestamps are pushed onto result.video and
 *        the first key frame's timing is stored on result.firstKeyFrame
 */


var parseVideoPes_ = function parseVideoPes_(bytes, pmt, result) {
  var startIndex = 0,
      endIndex = MP2T_PACKET_LENGTH,
      packet,
      type,
      pesType,
      pusi,
      parsed,
      frame,
      i,
      pes;
  var endLoop = false;
  var currentFrame = {
    data: [],
    size: 0
  }; // Start walking from start of segment to get first video packet

  while (endIndex < bytes.byteLength) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);

          if (pesType === 'video') {
            if (pusi && !endLoop) {
              parsed = probe.ts.parsePesTime(packet);

              if (parsed) {
                parsed.type = 'video';
                result.video.push(parsed);
                endLoop = true;
              }
            }

            if (!result.firstKeyFrame) {
              // a PUSI marks the start of a new frame, so the packets collected
              // so far form a complete frame that can be scanned for an IDR NAL
              if (pusi) {
                if (currentFrame.size !== 0) {
                  frame = new Uint8Array(currentFrame.size);
                  i = 0;

                  while (currentFrame.data.length) {
                    pes = currentFrame.data.shift();
                    frame.set(pes, i);
                    i += pes.byteLength;
                  }

                  if (probe.ts.videoPacketContainsKeyFrame(frame)) {
                    var firstKeyFrame = probe.ts.parsePesTime(frame); // PTS/DTS may not be available. Simply *not* setting
                    // the keyframe seems to work fine with HLS playback
                    // and definitely preferable to a crash with TypeError...

                    if (firstKeyFrame) {
                      result.firstKeyFrame = firstKeyFrame;
                      result.firstKeyFrame.type = 'video';
                    } else {
                      // eslint-disable-next-line
                      console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
                    }
                  }

                  currentFrame.size = 0;
                }
              }

              currentFrame.data.push(packet);
              currentFrame.size += packet.byteLength;
            }
          }

          break;
      }

      if (endLoop && result.firstKeyFrame) {
        break;
      }

      startIndex += MP2T_PACKET_LENGTH;
      endIndex += MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex++;
    endIndex++;
  } // Start walking from end of segment to get last video packet


  endIndex = bytes.byteLength;
  startIndex = endIndex - MP2T_PACKET_LENGTH;
  endLoop = false;

  while (startIndex >= 0) {
    // Look for a pair of start and end sync bytes in the data..
    if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
      // We found a packet
      packet = bytes.subarray(startIndex, endIndex);
      type = probe.ts.parseType(packet, pmt.pid);

      switch (type) {
        case 'pes':
          pesType = probe.ts.parsePesType(packet, pmt.table);
          pusi = probe.ts.parsePayloadUnitStartIndicator(packet);

          if (pesType === 'video' && pusi) {
            parsed = probe.ts.parsePesTime(packet);

            if (parsed) {
              parsed.type = 'video';
              result.video.push(parsed);
              endLoop = true;
            }
          }

          break;
      }

      if (endLoop) {
        break;
      }

      startIndex -= MP2T_PACKET_LENGTH;
      endIndex -= MP2T_PACKET_LENGTH;
      continue;
    } // If we get here, we have somehow become de-synchronized and we need to step
    // forward one byte at a time until we find a pair of sync bytes that denote
    // a packet


    startIndex--;
    endIndex--;
  }
};
/**
 * Adjusts the timestamp information for the segment to account for
 * rollover and convert to seconds based on pes packet timescale (90khz clock)
 *
 * @param {Object} segmentInfo - timing info from the pes parsers; mutated in place
 * @param {number} [baseTimestamp] - reference timestamp (90khz clock) used for
 *        rollover handling; when absent or NaN, each track's first dts is used
 */


var adjustTimestamp_ = function adjustTimestamp_(segmentInfo, baseTimestamp) {
  if (segmentInfo.audio && segmentInfo.audio.length) {
    var audioBaseTimestamp = baseTimestamp;

    if (typeof audioBaseTimestamp === 'undefined' || isNaN(audioBaseTimestamp)) {
      audioBaseTimestamp = segmentInfo.audio[0].dts;
    }

    segmentInfo.audio.forEach(function (info) {
      info.dts = handleRollover(info.dts, audioBaseTimestamp);
      info.pts = handleRollover(info.pts, audioBaseTimestamp); // time in seconds

      info.dtsTime = info.dts / ONE_SECOND_IN_TS;
      info.ptsTime = info.pts / ONE_SECOND_IN_TS;
    });
  }

  if (segmentInfo.video && segmentInfo.video.length) {
    var videoBaseTimestamp = baseTimestamp;

    if (typeof videoBaseTimestamp === 'undefined' || isNaN(videoBaseTimestamp)) {
      videoBaseTimestamp = segmentInfo.video[0].dts;
    }

    segmentInfo.video.forEach(function (info) {
      info.dts = handleRollover(info.dts, videoBaseTimestamp);
      info.pts = handleRollover(info.pts, videoBaseTimestamp); // time in seconds

      info.dtsTime = info.dts / ONE_SECOND_IN_TS;
      info.ptsTime = info.pts / ONE_SECOND_IN_TS;
    });

    // the first key frame's timing is adjusted with the video base timestamp
    if (segmentInfo.firstKeyFrame) {
      var frame = segmentInfo.firstKeyFrame;
      frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
      frame.pts = handleRollover(frame.pts, videoBaseTimestamp); // time in seconds

      frame.dtsTime = frame.dts / ONE_SECOND_IN_TS;
      frame.ptsTime = frame.pts / ONE_SECOND_IN_TS;
    }
  }
};
/**
 * inspects the aac data stream for start and end time information
 *
 * @param {Uint8Array} bytes - raw aac segment data (ADTS frames, possibly with ID3 tags)
 * @return {?Object} { audio: [start, end] } timing info in 90khz ticks, or null
 *         when the sample rate or first timestamp could not be determined
 */


var inspectAac_ = function inspectAac_(bytes) {
  var endLoop = false,
      audioCount = 0,
      sampleRate = null,
      timestamp = null,
      frameSize = 0,
      byteIndex = 0,
      packet;

  while (bytes.length - byteIndex >= 3) {
    var type = probe.aac.parseType(bytes, byteIndex);

    switch (type) {
      case 'timed-metadata':
        // Exit early because we don't have enough to parse
        // the ID3 tag header
        if (bytes.length - byteIndex < 10) {
          endLoop = true;
          break;
        }

        frameSize = probe.aac.parseId3TagSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
        // to emit a full packet

        if (frameSize > bytes.length) {
          endLoop = true;
          break;
        }

        // the first ID3 tag provides the stream's starting timestamp
        if (timestamp === null) {
          packet = bytes.subarray(byteIndex, byteIndex + frameSize);
          timestamp = probe.aac.parseAacTimestamp(packet);
        }

        byteIndex += frameSize;
        break;

      case 'audio':
        // Exit early because we don't have enough to parse
        // the ADTS frame header
        if (bytes.length - byteIndex < 7) {
          endLoop = true;
          break;
        }

        frameSize = probe.aac.parseAdtsSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
        // to emit a full packet

        if (frameSize > bytes.length) {
          endLoop = true;
          break;
        }

        // the first ADTS frame provides the stream's sample rate
        if (sampleRate === null) {
          packet = bytes.subarray(byteIndex, byteIndex + frameSize);
          sampleRate = probe.aac.parseSampleRate(packet);
        }

        audioCount++;
        byteIndex += frameSize;
        break;

      default:
        // not at a frame boundary; resync one byte at a time
        byteIndex++;
        break;
    }

    if (endLoop) {
      return null;
    }
  }

  if (sampleRate === null || timestamp === null) {
    return null;
  }

  // end time = start + (frames * 1024 samples per AAC frame) on the 90khz clock
  var audioTimescale = ONE_SECOND_IN_TS / sampleRate;
  var result = {
    audio: [{
      type: 'audio',
      dts: timestamp,
      pts: timestamp
    }, {
      type: 'audio',
      dts: timestamp + audioCount * 1024 * audioTimescale,
      pts: timestamp + audioCount * 1024 * audioTimescale
    }]
  };
  return result;
};
/**
 * inspects the transport stream segment data for start and end time information
 * of the audio and video tracks (when present) as well as the first key frame's
 * start time.
 *
 * @param {Uint8Array} bytes - the mpeg2-ts segment data
 * @return {Object} timing info keyed by track type ('audio' / 'video'), plus
 *         firstKeyFrame when one was found by the video parser
 */


var inspectTs_ = function inspectTs_(bytes) {
  var pmt = {
    pid: null,
    table: null
  };
  var result = {};
  parsePsi_(bytes, pmt);

  for (var pid in pmt.table) {
    if (pmt.table.hasOwnProperty(pid)) {
      var type = pmt.table[pid];

      switch (type) {
        case streamTypes.H264_STREAM_TYPE:
          result.video = [];
          parseVideoPes_(bytes, pmt, result);

          // drop the key entirely when no timing info was recovered
          if (result.video.length === 0) {
            delete result.video;
          }

          break;

        case streamTypes.ADTS_STREAM_TYPE:
          result.audio = [];
          parseAudioPes_(bytes, pmt, result);

          if (result.audio.length === 0) {
            delete result.audio;
          }

          break;
      }
    }
  }

  return result;
};
/**
 * Inspects segment byte data and returns an object with start and end timing information
 *
 * @param {Uint8Array} bytes The segment byte data
 * @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
 * timestamps for rollover. This value must be in 90khz clock.
 * @return {Object} Object containing start and end frame timing info of segment.
 */


var inspect = function inspect(bytes, baseTimestamp) {
  // aac segments are raw ADTS/ID3 data; anything else is treated as mpeg2-ts
  var result = probe.aac.isLikelyAacData(bytes) ? inspectAac_(bytes) : inspectTs_(bytes);

  // bail when no track timing information could be recovered at all
  if (!result || (!result.audio && !result.video)) {
    return null;
  }

  adjustTimestamp_(result, baseTimestamp);
  return result;
};
12425
// public segment-inspector interface; parseAudioPes_ is also exposed directly
var tsInspector = {
  inspect: inspect,
  parseAudioPes_: parseAudioPes_
};
12430 /* global self */
12431
/**
 * Re-emits transmuxer events by converting them into messages to the
 * world outside the worker. Timing values arrive on the 90khz video clock
 * and are converted to seconds before being posted.
 *
 * @param {Object} self - the worker's global scope, used for postMessage
 * @param {Object} transmuxer the transmuxer to wire events on
 * @private
 */

var wireTransmuxerEvents = function wireTransmuxerEvents(self, transmuxer) {
  transmuxer.on('data', function (segment) {
    // transfer ownership of the underlying ArrayBuffer
    // instead of doing a copy to save memory
    // ArrayBuffers are transferable but generic TypedArrays are not
    // @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
    var initArray = segment.initSegment;
    segment.initSegment = {
      data: initArray.buffer,
      byteOffset: initArray.byteOffset,
      byteLength: initArray.byteLength
    };
    var typedArray = segment.data;
    segment.data = typedArray.buffer;
    self.postMessage({
      action: 'data',
      segment: segment,
      byteOffset: typedArray.byteOffset,
      byteLength: typedArray.byteLength
    }, [segment.data]);
  });
  transmuxer.on('done', function (data) {
    self.postMessage({
      action: 'done'
    });
  });
  transmuxer.on('gopInfo', function (gopInfo) {
    self.postMessage({
      action: 'gopInfo',
      gopInfo: gopInfo
    });
  });
  transmuxer.on('videoSegmentTimingInfo', function (timingInfo) {
    // convert the 90khz-clock timing info to seconds before posting
    var videoSegmentTimingInfo = {
      start: {
        decode: clock.videoTsToSeconds(timingInfo.start.dts),
        presentation: clock.videoTsToSeconds(timingInfo.start.pts)
      },
      end: {
        decode: clock.videoTsToSeconds(timingInfo.end.dts),
        presentation: clock.videoTsToSeconds(timingInfo.end.pts)
      },
      baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
    };

    if (timingInfo.prependedContentDuration) {
      videoSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
    }

    self.postMessage({
      action: 'videoSegmentTimingInfo',
      videoSegmentTimingInfo: videoSegmentTimingInfo
    });
  });
  transmuxer.on('audioSegmentTimingInfo', function (timingInfo) {
    // Note that all times for [audio/video]SegmentTimingInfo events are in video clock
    var audioSegmentTimingInfo = {
      start: {
        decode: clock.videoTsToSeconds(timingInfo.start.dts),
        presentation: clock.videoTsToSeconds(timingInfo.start.pts)
      },
      end: {
        decode: clock.videoTsToSeconds(timingInfo.end.dts),
        presentation: clock.videoTsToSeconds(timingInfo.end.pts)
      },
      baseMediaDecodeTime: clock.videoTsToSeconds(timingInfo.baseMediaDecodeTime)
    };

    if (timingInfo.prependedContentDuration) {
      audioSegmentTimingInfo.prependedContentDuration = clock.videoTsToSeconds(timingInfo.prependedContentDuration);
    }

    self.postMessage({
      action: 'audioSegmentTimingInfo',
      audioSegmentTimingInfo: audioSegmentTimingInfo
    });
  });
  transmuxer.on('id3Frame', function (id3Frame) {
    self.postMessage({
      action: 'id3Frame',
      id3Frame: id3Frame
    });
  });
  transmuxer.on('caption', function (caption) {
    self.postMessage({
      action: 'caption',
      caption: caption
    });
  });
  transmuxer.on('trackinfo', function (trackInfo) {
    self.postMessage({
      action: 'trackinfo',
      trackInfo: trackInfo
    });
  });
  transmuxer.on('audioTimingInfo', function (audioTimingInfo) {
    // convert to video TS since we prioritize video time over audio
    self.postMessage({
      action: 'audioTimingInfo',
      audioTimingInfo: {
        start: clock.videoTsToSeconds(audioTimingInfo.start),
        end: clock.videoTsToSeconds(audioTimingInfo.end)
      }
    });
  });
  transmuxer.on('videoTimingInfo', function (videoTimingInfo) {
    self.postMessage({
      action: 'videoTimingInfo',
      videoTimingInfo: {
        start: clock.videoTsToSeconds(videoTimingInfo.start),
        end: clock.videoTsToSeconds(videoTimingInfo.end)
      }
    });
  });
  transmuxer.on('log', function (log) {
    self.postMessage({
      action: 'log',
      log: log
    });
  });
};
/**
 * All incoming messages route through this hash. If no function exists
 * to handle an incoming message, then we ignore the message.
 *
 * @class MessageHandlers
 * @param {Object} self the worker's global scope, used for postMessage
 * @param {Object} options the options to initialize with
 */


var MessageHandlers = /*#__PURE__*/function () {
  function MessageHandlers(self, options) {
    this.options = options || {};
    // keep a reference to the worker scope so handlers can post results back
    this.self = self;
    this.init();
  }
  /**
   * initialize our web worker and wire all the events.
   */


  var _proto = MessageHandlers.prototype;
12582
  // (re)create the transmuxer, disposing any previous instance first, and wire
  // its events so results are posted back out of the worker
  _proto.init = function init() {
    if (this.transmuxer) {
      this.transmuxer.dispose();
    }

    this.transmuxer = new transmuxer.Transmuxer(this.options);
    wireTransmuxerEvents(this.self, this.transmuxer);
  };
12591
  // parse captions out of an mp4 segment and post them back, returning
  // ownership of the segment's ArrayBuffer to the caller via the transfer list
  _proto.pushMp4Captions = function pushMp4Captions(data) {
    // the caption parser is created lazily on first use
    if (!this.captionParser) {
      this.captionParser = new captionParser();
      this.captionParser.init();
    }

    var segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
    var parsed = this.captionParser.parse(segment, data.trackIds, data.timescales);
    this.self.postMessage({
      action: 'mp4Captions',
      captions: parsed && parsed.captions || [],
      logs: parsed && parsed.logs || [],
      data: segment.buffer
    }, [segment.buffer]);
  };
12607
  // probe an mp4 segment for its start time using the provided track
  // timescales; the data buffer is transferred back alongside the result
  _proto.probeMp4StartTime = function probeMp4StartTime(_ref) {
    var timescales = _ref.timescales,
        data = _ref.data;
    var startTime = probe$2.startTime(timescales, data);
    this.self.postMessage({
      action: 'probeMp4StartTime',
      startTime: startTime,
      data: data
    }, [data.buffer]);
  };
12618
  // probe an mp4 segment for the tracks it contains; the data buffer is
  // transferred back alongside the result
  _proto.probeMp4Tracks = function probeMp4Tracks(_ref2) {
    var data = _ref2.data;
    var tracks = probe$2.tracks(data);
    this.self.postMessage({
      action: 'probeMp4Tracks',
      tracks: tracks,
      data: data
    }, [data.buffer]);
  }
  /**
   * Probe an mpeg2-ts segment to determine the start time of the segment in it's
   * internal "media time," as well as whether it contains video and/or audio.
   *
   * @private
   * @param {Uint8Array} bytes - segment bytes
   * @param {number} baseStartTime
   *        Relative reference timestamp used when adjusting frame timestamps for rollover.
   *        This value should be in seconds, as it's converted to a 90khz clock within the
   *        function body.
   * @return {Object} The start time of the current segment in "media time" as well as
   *         whether it contains video and/or audio
   */
  ;
12642
  _proto.probeTs = function probeTs(_ref3) {
    var data = _ref3.data,
        baseStartTime = _ref3.baseStartTime;
    // baseStartTime arrives in seconds; the ts inspector works on the 90khz clock
    var tsStartTime = typeof baseStartTime === 'number' && !isNaN(baseStartTime) ? baseStartTime * clock.ONE_SECOND_IN_TS : void 0;
    var timeInfo = tsInspector.inspect(data, tsStartTime);
    var result = null;

    if (timeInfo) {
      result = {
        // each type's time info comes back as an array of 2 times, start and end
        hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
        hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
      };

      if (result.hasVideo) {
        result.videoStart = timeInfo.video[0].ptsTime;
      }

      if (result.hasAudio) {
        result.audioStart = timeInfo.audio[0].ptsTime;
      }
    }

    this.self.postMessage({
      action: 'probeTs',
      result: result,
      data: data
    }, [data.buffer]);
  };
12672
  // reset all caption state in the caption parser, if one has been created
  _proto.clearAllMp4Captions = function clearAllMp4Captions() {
    if (this.captionParser) {
      this.captionParser.clearAllCaptions();
    }
  };
12678
  // drop only the captions that have already been parsed, if a parser exists
  _proto.clearParsedMp4Captions = function clearParsedMp4Captions() {
    if (this.captionParser) {
      this.captionParser.clearParsedCaptions();
    }
  }
  /**
   * Adds data (a ts segment) to the start of the transmuxer pipeline for
   * processing.
   *
   * @param {ArrayBuffer} data data to push into the muxer
   */
  ;
12691
_proto.push = function push(data) {
  // The bytes arrive as a raw ArrayBuffer (sent as a Transferable); rebuild
  // the original typed-array view before handing it to the transmuxer.
  var view = new Uint8Array(data.data, data.byteOffset, data.byteLength);
  this.transmuxer.push(view);
}
/**
 * Recreate the transmuxer so that the next segment added via `push`
 * start with a fresh transmuxer.
 */
;
12702
// Discard any partially-processed data so the next `push` starts clean.
_proto.reset = function reset() {
  this.transmuxer.reset();
}
/**
 * Set the value that will be used as the `baseMediaDecodeTime` time for the
 * next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
 * set relative to the first based on the PTS values.
 *
 * @param {Object} data used to set the timestamp offset in the muxer
 */
;
12714
_proto.setTimestampOffset = function setTimestampOffset(data) {
  // default a missing offset to 0 seconds, then convert to the 90khz clock
  var offsetSeconds = data.timestampOffset || 0;
  var offsetTs = clock.secondsToVideoTs(offsetSeconds);
  this.transmuxer.setBaseMediaDecodeTime(Math.round(offsetTs));
};
12719
// Tell the muxer where (in seconds) appended audio should begin; rounded up
// on the 90khz clock.
_proto.setAudioAppendStart = function setAudioAppendStart(data) {
  var appendStartTs = clock.secondsToVideoTs(data.appendStart);
  this.transmuxer.setAudioAppendStart(Math.ceil(appendStartTs));
};
12723
// Toggle whether audio and video should be remuxed together by the muxer.
_proto.setRemux = function setRemux(data) {
  this.transmuxer.setRemux(data.remux);
}
/**
 * Forces the pipeline to finish processing the last segment and emit it's
 * results.
 *
 * @param {Object} data event data, not really used
 */
;
12734
_proto.flush = function flush(data) {
  this.transmuxer.flush();

  // the transmuxed `done` action is fired after both the audio and video
  // pipelines are flushed.
  // NOTE(review): posts via the worker-global `self` while probeTs posts via
  // `this.self` — identical when this.self is the worker global; confirm.
  var doneMessage = {
    action: 'done',
    type: 'transmuxed'
  };
  self.postMessage(doneMessage);
};
12743
_proto.endTimeline = function endTimeline() {
  this.transmuxer.endTimeline();

  // the transmuxed `endedtimeline` action is fired after both the audio and
  // video pipelines end their timelines
  var endedMessage = {
    action: 'endedtimeline',
    type: 'transmuxed'
  };
  self.postMessage(endedMessage);
};
12753
_proto.alignGopsWith = function alignGopsWith(data) {
  // pass a copy so later mutations by the caller can't affect the muxer
  var gops = data.gopsToAlignWith.slice();
  this.transmuxer.alignGopsWith(gops);
};
12757
12758 return MessageHandlers;
12759 }();
12760 /**
12761 * Our web worker interface so that things can talk to mux.js
12762 * that will be running in a web worker. the scope is passed to this by
12763 * webworkify.
12764 *
12765 * @param {Object} self the scope for the web worker
12766 */
12767
12768
self.onmessage = function (event) {
  // kept as a `function` (not arrow) — `this` is the worker global scope
  var message = event.data;

  // 'init' (re)creates the handler set with the provided options
  if (message.action === 'init' && message.options) {
    this.messageHandlers = new MessageHandlers(self, message.options);
    return;
  }

  // lazily create handlers if another message arrives before 'init'
  if (!this.messageHandlers) {
    this.messageHandlers = new MessageHandlers(self);
  }

  if (!message || !message.action || message.action === 'init') {
    return;
  }

  var handler = this.messageHandlers[message.action];

  if (handler) {
    handler.call(this.messageHandlers, message);
  }
};
12785}));
// NOTE(review): `factory` comes from the rollup worker-factory shim —
// presumably it wraps the stringified worker code above in a Worker-like
// interface; confirm against the shim near the top of the bundle.
var TransmuxWorker = factory(workerCode$1);
12787/* rollup-plugin-worker-factory end for worker!/Users/bcasey/Projects/videojs-http-streaming/src/transmuxer-worker.js */
12788
/**
 * Handle a `data` event from the transmux worker: buffer the side-channel
 * info (captions/metadata) on `transmuxedData` and hand the muxed bytes to
 * `callback` as typed-array views.
 *
 * @param {Object} event - worker message event carrying `data.segment`
 * @param {Object} transmuxedData - accumulator whose `buffer` array collects
 *        captions/captionStreams/metadata per data event
 * @param {Function} callback - receives {type, data, initSegment[, videoFrameDtsTime][, videoFramePtsTime]}
 */
var handleData_ = function handleData_(event, transmuxedData, callback) {
  var segment = event.data.segment;

  transmuxedData.buffer.push({
    captions: segment.captions,
    captionStreams: segment.captionStreams,
    metadata: segment.metadata
  });

  // prefer the boxed payload when present, otherwise fall back to raw data
  var boxes = segment.boxes || {
    data: segment.data
  };

  var result = {
    type: segment.type,
    // cast ArrayBuffer to TypedArray
    data: new Uint8Array(boxes.data, boxes.data.byteOffset, boxes.data.byteLength),
    initSegment: new Uint8Array(segment.initSegment.data, segment.initSegment.byteOffset, segment.initSegment.byteLength)
  };

  if (typeof segment.videoFrameDtsTime !== 'undefined') {
    result.videoFrameDtsTime = segment.videoFrameDtsTime;
  }

  if (typeof segment.videoFramePtsTime !== 'undefined') {
    result.videoFramePtsTime = segment.videoFramePtsTime;
  }

  callback(result);
};
/**
 * Handle the terminal `done` event for a transmux operation.
 *
 * Data was only ever delivered from `data` events; keep `done` consistent
 * with that by emptying the buffer before handing the summary over.
 *
 * @param {Object} _ref - {transmuxedData, callback}
 */
var handleDone_ = function handleDone_(_ref) {
  var transmuxedData = _ref.transmuxedData;
  var callback = _ref.callback;

  transmuxedData.buffer = [];
  callback(transmuxedData);
};
// Record the group-of-pictures info reported by the worker on the
// accumulating transmux result.
var handleGopInfo_ = function handleGopInfo_(event, transmuxedData) {
  var gopInfo = event.data.gopInfo;
  transmuxedData.gopInfo = gopInfo;
};
/**
 * Drive one transmux operation: install a message handler on the worker,
 * forward configuration (audio append start, GOP alignment, remux flag),
 * transfer the segment bytes in, and flush. Worker events are dispatched to
 * the caller-supplied `on*` callbacks; completion is signalled through
 * `onDone` once the final `transmuxed`-typed event arrives.
 *
 * @param {Object} options - the queued transmux job (see `transmux`)
 */
var processTransmux = function processTransmux(options) {
  var transmuxer = options.transmuxer;
  var bytes = options.bytes;
  var audioAppendStart = options.audioAppendStart;
  var gopsToAlignWith = options.gopsToAlignWith;
  var remux = options.remux;
  var onData = options.onData;
  var onTrackInfo = options.onTrackInfo;
  var onAudioTimingInfo = options.onAudioTimingInfo;
  var onVideoTimingInfo = options.onVideoTimingInfo;
  var onVideoSegmentTimingInfo = options.onVideoSegmentTimingInfo;
  var onAudioSegmentTimingInfo = options.onAudioSegmentTimingInfo;
  var onId3 = options.onId3;
  var onCaptions = options.onCaptions;
  var onDone = options.onDone;
  var onEndedTimeline = options.onEndedTimeline;
  var onTransmuxerLog = options.onTransmuxerLog;
  var isEndOfTimeline = options.isEndOfTimeline;
  var transmuxedData = {
    buffer: []
  };
  // when this segment ends a timeline, completion also requires the worker's
  // `endedtimeline` acknowledgement (see below)
  var waitForEndedTimelineEvent = isEndOfTimeline;

  var handleMessage = function handleMessage(event) {
    if (transmuxer.currentTransmux !== options) {
      // disposed
      return;
    }

    switch (event.data.action) {
      case 'data':
        handleData_(event, transmuxedData, onData);
        break;

      case 'trackinfo':
        onTrackInfo(event.data.trackInfo);
        break;

      case 'gopInfo':
        handleGopInfo_(event, transmuxedData);
        break;

      case 'audioTimingInfo':
        onAudioTimingInfo(event.data.audioTimingInfo);
        break;

      case 'videoTimingInfo':
        onVideoTimingInfo(event.data.videoTimingInfo);
        break;

      case 'videoSegmentTimingInfo':
        onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
        break;

      case 'audioSegmentTimingInfo':
        onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
        break;

      case 'id3Frame':
        onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
        break;

      case 'caption':
        onCaptions(event.data.caption);
        break;

      case 'endedtimeline':
        waitForEndedTimelineEvent = false;
        onEndedTimeline();
        break;

      case 'log':
        onTransmuxerLog(event.data.log);
        break;
    }

    // wait for the transmuxed event since we may have audio and video
    if (event.data.type !== 'transmuxed') {
      return;
    }

    // If the "endedtimeline" event has not yet fired, and this segment represents the end
    // of a timeline, that means there may still be data events before the segment
    // processing can be considerred complete. In that case, the final event should be
    // an "endedtimeline" event with the type "transmuxed."
    if (waitForEndedTimelineEvent) {
      return;
    }

    transmuxer.onmessage = null;
    handleDone_({
      transmuxedData: transmuxedData,
      callback: onDone
    });
    /* eslint-disable no-use-before-define */

    dequeue(transmuxer);
    /* eslint-enable */
  };

  transmuxer.onmessage = handleMessage;

  if (audioAppendStart) {
    transmuxer.postMessage({
      action: 'setAudioAppendStart',
      appendStart: audioAppendStart
    });
  }

  // allow empty arrays to be passed to clear out GOPs
  if (Array.isArray(gopsToAlignWith)) {
    transmuxer.postMessage({
      action: 'alignGopsWith',
      gopsToAlignWith: gopsToAlignWith
    });
  }

  if (typeof remux !== 'undefined') {
    transmuxer.postMessage({
      action: 'setRemux',
      remux: remux
    });
  }

  if (bytes.byteLength) {
    var buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
    var byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
    transmuxer.postMessage({
      action: 'push',
      // Send the typed-array of data as an ArrayBuffer so that
      // it can be sent as a "Transferable" and avoid the costly
      // memory copy
      data: buffer,
      // To recreate the original typed-array, we need information
      // about what portion of the ArrayBuffer it was a view into
      byteOffset: byteOffset,
      byteLength: bytes.byteLength
    }, [buffer]);
  }

  if (isEndOfTimeline) {
    transmuxer.postMessage({
      action: 'endTimeline'
    });
  }

  // even if we didn't push any bytes, we have to make sure we flush in case
  // we reached the end of the segment
  transmuxer.postMessage({
    action: 'flush'
  });
};
/**
 * Mark the current transmux job finished and start the next queued item, if
 * any. Queued items are either full transmux option objects or bound action
 * thunks (see `enqueueAction`).
 *
 * @param {Object} transmuxer - worker wrapper carrying the job queue
 */
var dequeue = function dequeue(transmuxer) {
  transmuxer.currentTransmux = null;

  if (!transmuxer.transmuxQueue.length) {
    return;
  }

  transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();

  if (typeof transmuxer.currentTransmux === 'function') {
    transmuxer.currentTransmux();
  } else {
    processTransmux(transmuxer.currentTransmux);
  }
};
// Post a bare action message to the worker and immediately advance the
// queue — simple actions don't wait on a worker reply.
var processAction = function processAction(transmuxer, action) {
  var message = {
    action: action
  };
  transmuxer.postMessage(message);
  dequeue(transmuxer);
};
// Run a simple worker action now if the transmuxer is idle, otherwise queue
// it behind the in-flight work.
var enqueueAction = function enqueueAction(action, transmuxer) {
  if (transmuxer.currentTransmux) {
    transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
    return;
  }

  transmuxer.currentTransmux = action;
  processAction(transmuxer, action);
};
// Queue a transmuxer `reset` (drops any partially-processed data).
var reset = function reset(transmuxer) {
  enqueueAction('reset', transmuxer);
};
// Queue an `endTimeline` action on the shared transmux queue.
var endTimeline = function endTimeline(transmuxer) {
  enqueueAction('endTimeline', transmuxer);
};
/**
 * Start a transmux job immediately if the worker is idle, otherwise queue
 * the options object to be processed once earlier work completes.
 *
 * @param {Object} options - job description consumed by `processTransmux`
 */
var transmux = function transmux(options) {
  var transmuxer = options.transmuxer;

  if (transmuxer.currentTransmux) {
    transmuxer.transmuxQueue.push(options);
    return;
  }

  transmuxer.currentTransmux = options;
  processTransmux(options);
};
/**
 * Create and initialize a transmux worker, attaching the job-queue state
 * (`currentTransmux`, `transmuxQueue`) used by `transmux`/`dequeue`.
 *
 * @param {Object} options - forwarded to the worker's `init` handler
 * @return {Object} the worker wrapper
 */
var createTransmuxer = function createTransmuxer(options) {
  var transmuxer = new TransmuxWorker();
  transmuxer.currentTransmux = null;
  transmuxer.transmuxQueue = [];
  var originalTerminate = transmuxer.terminate;

  // wrap terminate so queued work is dropped before the worker goes away
  transmuxer.terminate = function () {
    transmuxer.currentTransmux = null;
    transmuxer.transmuxQueue.length = 0;
    return originalTerminate.call(transmuxer);
  };

  transmuxer.postMessage({
    action: 'init',
    options: options
  });
  return transmuxer;
};
// Public surface for the worker-backed segment transmuxer helpers above.
var segmentTransmuxer = {
  reset: reset,
  endTimeline: endTimeline,
  transmux: transmux,
  createTransmuxer: createTransmuxer
};
13053
/**
 * Post a one-shot request to the transmux worker and invoke `callback` with
 * the event data of the first message whose action matches `endAction`
 * (defaulting to the request's own `action`). When the request carries
 * bytes, they are sent as a Transferable and re-wrapped as a Uint8Array on
 * the way back so the caller regains ownership.
 *
 * @param {Object} options - {transmuxer, action[, endAction][, data][, callback], ...}
 */
var workerCallback = function workerCallback(options) {
  var transmuxer = options.transmuxer;
  var endAction = options.endAction || options.action;
  var callback = options.callback;

  // strip values that can't (or shouldn't) cross the worker boundary
  var message = _extends__default["default"]({}, options, {
    endAction: null,
    transmuxer: null,
    callback: null
  });

  var listenForEndEvent = function listenForEndEvent(event) {
    if (event.data.action !== endAction) {
      return;
    }

    transmuxer.removeEventListener('message', listenForEndEvent);

    // transfer ownership of bytes back to us.
    if (event.data.data) {
      event.data.data = new Uint8Array(event.data.data, options.byteOffset || 0, options.byteLength || event.data.data.byteLength);

      if (options.data) {
        options.data = event.data.data;
      }
    }

    callback(event.data);
  };

  transmuxer.addEventListener('message', listenForEndEvent);

  if (!options.data) {
    transmuxer.postMessage(message);
    return;
  }

  var isArrayBuffer = options.data instanceof ArrayBuffer;
  message.byteOffset = isArrayBuffer ? 0 : options.data.byteOffset;
  message.byteLength = options.data.byteLength;
  var transferables = [isArrayBuffer ? options.data : options.data.buffer];
  transmuxer.postMessage(message, transferables);
};
13095
// Error codes attached to failed segment/key requests. TIMEOUT and ABORTED
// are negative; FAILURE (positive) covers generic errors, empty responses
// and invalid keys.
var REQUEST_ERRORS = {
  FAILURE: 2,
  TIMEOUT: -101,
  ABORTED: -102
};
13101/**
13102 * Abort all requests
13103 *
13104 * @param {Object} activeXhrs - an object that tracks all XHR requests
13105 */
13106
var abortAll = function abortAll(activeXhrs) {
  for (var i = 0; i < activeXhrs.length; i++) {
    activeXhrs[i].abort();
  }
};
13112/**
13113 * Gather important bandwidth stats once a request has completed
13114 *
13115 * @param {Object} request - the XHR request from which to gather stats
13116 */
13117
13118
var getRequestStats = function getRequestStats(request) {
  // missing counters are normalized to 0; bandwidth is passed through as-is
  return {
    bandwidth: request.bandwidth,
    bytesReceived: request.bytesReceived ? request.bytesReceived : 0,
    roundTripTime: request.roundTripTime ? request.roundTripTime : 0
  };
};
13126/**
13127 * If possible gather bandwidth stats as a request is in
13128 * progress
13129 *
13130 * @param {Event} progressEvent - an event object from an XHR's progress event
13131 */
13132
13133
var getProgressStats = function getProgressStats(progressEvent) {
  var request = progressEvent.target;
  // normalize NaN/0 round-trip times to 0 *before* the division so the
  // resulting bandwidth is Infinity rather than NaN (matching prior behavior)
  var roundTripTime = Date.now() - request.requestTime || 0;
  var bytesReceived = progressEvent.loaded;

  // This can result in Infinity if roundTripTime is 0 but that is ok
  // because we should only use bandwidth stats on progress to determine when
  // abort a request early due to insufficient bandwidth
  var bandwidth = Math.floor(bytesReceived / roundTripTime * 8 * 1000);
  return {
    bandwidth: bandwidth,
    bytesReceived: bytesReceived,
    roundTripTime: roundTripTime
  };
};
13149/**
13150 * Handle all error conditions in one place and return an object
13151 * with all the information
13152 *
13153 * @param {Error|null} error - if non-null signals an error occured with the XHR
13154 * @param {Object} request - the XHR request that possibly generated the error
13155 */
13156
13157
/**
 * Handle all error conditions in one place and return a single error object
 * (or null when the request succeeded with a non-empty response).
 *
 * @param {Error|null} error - if non-null signals an error occured with the XHR
 * @param {Object} request - the XHR request that possibly generated the error
 * @return {Object|null} {status, message, code, xhr} or null on success
 */
var handleErrors = function handleErrors(error, request) {
  var failWith = function failWith(message, code) {
    return {
      status: request.status,
      message: message + request.uri,
      code: code,
      xhr: request
    };
  };

  if (request.timedout) {
    return failWith('HLS request timed-out at URL: ', REQUEST_ERRORS.TIMEOUT);
  }

  if (request.aborted) {
    return failWith('HLS request aborted at URL: ', REQUEST_ERRORS.ABORTED);
  }

  if (error) {
    return failWith('HLS request errored at URL: ', REQUEST_ERRORS.FAILURE);
  }

  // an empty arraybuffer response is treated as a failure too
  if (request.responseType === 'arraybuffer' && request.response.byteLength === 0) {
    return failWith('Empty HLS response at URL: ', REQUEST_ERRORS.FAILURE);
  }

  return null;
};
13197/**
13198 * Handle responses for key data and convert the key data to the correct format
13199 * for the decryption step later
13200 *
13201 * @param {Object} segment - a simplified copy of the segmentInfo object
13202 * from SegmentLoader
13203 * @param {Array} objects - objects to add the key bytes to.
13204 * @param {Function} finishProcessingFn - a callback to execute to continue processing
13205 * this request
13206 */
13207
13208
/**
 * Build an XHR completion handler for AES-128 key requests: validates the
 * 16-byte key, repacks it as four 32-bit words for the decrypter, and stores
 * it on every entry in `objects` before continuing processing.
 *
 * @param {Object} segment - simplified segmentInfo copy from SegmentLoader
 * @param {Array} objects - objects to add the key bytes to
 * @param {Function} finishProcessingFn - continuation called with (error, segment)
 */
var handleKeyResponse = function handleKeyResponse(segment, objects, finishProcessingFn) {
  return function (error, request) {
    var response = request.response;
    var errorObj = handleErrors(error, request);

    if (errorObj) {
      return finishProcessingFn(errorObj, segment);
    }

    // an AES-128 key is always exactly 16 bytes
    if (response.byteLength !== 16) {
      return finishProcessingFn({
        status: request.status,
        message: 'Invalid HLS key at URL: ' + request.uri,
        code: REQUEST_ERRORS.FAILURE,
        xhr: request
      }, segment);
    }

    // repack the key as 32-bit words (read big-endian via DataView)
    var view = new DataView(response);
    var bytes = new Uint32Array([view.getUint32(0), view.getUint32(4), view.getUint32(8), view.getUint32(12)]);
    objects.forEach(function (object) {
      object.bytes = bytes;
    });
    return finishProcessingFn(null, segment);
  };
};
13237
/**
 * Detect the container of an init segment and, for mp4, probe its tracks via
 * the transmuxer worker, recording per-type track objects and per-track-id
 * timescales on `segment.map`.
 *
 * @param {Object} segment - simplified segment object whose `map.bytes`
 *        (init segment bytes) have already been fetched
 * @param {Function} _callback - called with an error object for unsupported
 *        containers, or `null` once track probing completes
 */
var parseInitSegment = function parseInitSegment(segment, _callback) {
  var type = containers.detectContainerForBytes(segment.map.bytes); // TODO: We should also handle ts init segments here, but we
  // only know how to parse mp4 init segments at the moment

  if (type !== 'mp4') {
    var uri = segment.map.resolvedUri || segment.map.uri;
    return _callback({
      internal: true,
      message: "Found unsupported " + (type || 'unknown') + " container for initialization segment at URL: " + uri,
      code: REQUEST_ERRORS.FAILURE
    });
  }

  workerCallback({
    action: 'probeMp4Tracks',
    data: segment.map.bytes,
    transmuxer: segment.transmuxer,
    callback: function callback(_ref) {
      var tracks = _ref.tracks,
          data = _ref.data;
      // transfer bytes back to us
      segment.map.bytes = data;
      tracks.forEach(function (track) {
        segment.map.tracks = segment.map.tracks || {}; // only support one track of each type for now

        if (segment.map.tracks[track.type]) {
          return;
        }

        segment.map.tracks[track.type] = track;

        // timescales are keyed by numeric track id for later PTS conversion
        if (typeof track.id === 'number' && track.timescale) {
          segment.map.timescales = segment.map.timescales || {};
          segment.map.timescales[track.id] = track.timescale;
        }
      });
      return _callback(null);
    }
  });
};
13278/**
13279 * Handle init-segment responses
13280 *
13281 * @param {Object} segment - a simplified copy of the segmentInfo object
13282 * from SegmentLoader
13283 * @param {Function} finishProcessingFn - a callback to execute to continue processing
13284 * this request
13285 */
13286
13287
/**
 * Build an XHR completion handler for init-segment requests. Encrypted init
 * segments are stashed for later decryption; plain ones are parsed for track
 * info immediately.
 *
 * @param {Object} _ref2 - {segment, finishProcessingFn}
 */
var handleInitSegmentResponse = function handleInitSegmentResponse(_ref2) {
  var segment = _ref2.segment;
  var finishProcessingFn = _ref2.finishProcessingFn;
  return function (error, request) {
    var errorObj = handleErrors(error, request);

    if (errorObj) {
      return finishProcessingFn(errorObj, segment);
    }

    var bytes = new Uint8Array(request.response);

    // init segment is encrypted; defer parsing until the key request is done
    // and the bytes can be decrypted
    if (segment.map.key) {
      segment.map.encryptedBytes = bytes;
      return finishProcessingFn(null, segment);
    }

    segment.map.bytes = bytes;
    parseInitSegment(segment, function (parseError) {
      if (parseError) {
        parseError.xhr = request;
        parseError.status = request.status;
        return finishProcessingFn(parseError, segment);
      }

      finishProcessingFn(null, segment);
    });
  };
};
13318/**
13319 * Response handler for segment-requests being sure to set the correct
13320 * property depending on whether the segment is encryped or not
13321 * Also records and keeps track of stats that are used for ABR purposes
13322 *
13323 * @param {Object} segment - a simplified copy of the segmentInfo object
13324 * from SegmentLoader
13325 * @param {Function} finishProcessingFn - a callback to execute to continue processing
13326 * this request
13327 */
13328
13329
/**
 * Build an XHR completion handler for media-segment requests: records
 * bandwidth stats and stores the payload on the correct property depending
 * on whether the segment is encrypted.
 *
 * @param {Object} _ref3 - {segment, finishProcessingFn, responseType}
 */
var handleSegmentResponse = function handleSegmentResponse(_ref3) {
  var segment = _ref3.segment;
  var finishProcessingFn = _ref3.finishProcessingFn;
  var responseType = _ref3.responseType;
  return function (error, request) {
    var errorObj = handleErrors(error, request);

    if (errorObj) {
      return finishProcessingFn(errorObj, segment);
    }

    // although responseText "should" exist, this guard serves to prevent an error being
    // thrown for two primary cases:
    // 1. the mime type override stops working, or is not implemented for a specific
    //    browser
    // 2. when using mock XHR libraries like sinon that do not allow the override behavior
    var useRawResponse = responseType === 'arraybuffer' || !request.responseText;
    var newBytes = useRawResponse ? request.response : stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));
    segment.stats = getRequestStats(request);

    if (segment.key) {
      segment.encryptedBytes = new Uint8Array(newBytes);
    } else {
      segment.bytes = new Uint8Array(newBytes);
    }

    return finishProcessingFn(null, segment);
  };
};
13358
/**
 * Probe a ts/aac segment for its start times, then run it through the
 * transmux worker, fanning each worker event out to the matching
 * caller-supplied notification callback. Start-time callbacks are nulled out
 * after first use so probe-derived start times take precedence over values
 * reported later by the transmuxer.
 *
 * @param {Object} _ref4 - segment, bytes and the on*/notification callbacks
 */
var transmuxAndNotify = function transmuxAndNotify(_ref4) {
  var segment = _ref4.segment,
      bytes = _ref4.bytes,
      trackInfoFn = _ref4.trackInfoFn,
      timingInfoFn = _ref4.timingInfoFn,
      videoSegmentTimingInfoFn = _ref4.videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn = _ref4.audioSegmentTimingInfoFn,
      id3Fn = _ref4.id3Fn,
      captionsFn = _ref4.captionsFn,
      isEndOfTimeline = _ref4.isEndOfTimeline,
      endedTimelineFn = _ref4.endedTimelineFn,
      dataFn = _ref4.dataFn,
      doneFn = _ref4.doneFn,
      onTransmuxerLog = _ref4.onTransmuxerLog;
  var fmp4Tracks = segment.map && segment.map.tracks || {};
  var isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video); // Keep references to each function so we can null them out after we're done with them.
  // One reason for this is that in the case of full segments, we want to trust start
  // times from the probe, rather than the transmuxer.

  var audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
  var audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
  var videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
  var videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end');

  // kicks off the actual transmux; invoked only after the probe completes
  var finish = function finish() {
    return transmux({
      bytes: bytes,
      transmuxer: segment.transmuxer,
      audioAppendStart: segment.audioAppendStart,
      gopsToAlignWith: segment.gopsToAlignWith,
      remux: isMuxed,
      onData: function onData(result) {
        // 'combined' (muxed) output is surfaced to callers as 'video'
        result.type = result.type === 'combined' ? 'video' : result.type;
        dataFn(segment, result);
      },
      onTrackInfo: function onTrackInfo(trackInfo) {
        if (trackInfoFn) {
          if (isMuxed) {
            trackInfo.isMuxed = true;
          }

          trackInfoFn(segment, trackInfo);
        }
      },
      onAudioTimingInfo: function onAudioTimingInfo(audioTimingInfo) {
        // we only want the first start value we encounter
        if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
          audioStartFn(audioTimingInfo.start);
          audioStartFn = null;
        } // we want to continually update the end time


        if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
          audioEndFn(audioTimingInfo.end);
        }
      },
      onVideoTimingInfo: function onVideoTimingInfo(videoTimingInfo) {
        // we only want the first start value we encounter
        if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
          videoStartFn(videoTimingInfo.start);
          videoStartFn = null;
        } // we want to continually update the end time


        if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
          videoEndFn(videoTimingInfo.end);
        }
      },
      onVideoSegmentTimingInfo: function onVideoSegmentTimingInfo(videoSegmentTimingInfo) {
        videoSegmentTimingInfoFn(videoSegmentTimingInfo);
      },
      onAudioSegmentTimingInfo: function onAudioSegmentTimingInfo(audioSegmentTimingInfo) {
        audioSegmentTimingInfoFn(audioSegmentTimingInfo);
      },
      onId3: function onId3(id3Frames, dispatchType) {
        id3Fn(segment, id3Frames, dispatchType);
      },
      onCaptions: function onCaptions(captions) {
        captionsFn(segment, [captions]);
      },
      isEndOfTimeline: isEndOfTimeline,
      onEndedTimeline: function onEndedTimeline() {
        endedTimelineFn();
      },
      onTransmuxerLog: onTransmuxerLog,
      onDone: function onDone(result) {
        if (!doneFn) {
          return;
        }

        result.type = result.type === 'combined' ? 'video' : result.type;
        doneFn(null, segment, result);
      }
    });
  }; // In the transmuxer, we don't yet have the ability to extract a "proper" start time.
  // Meaning cached frame data may corrupt our notion of where this segment
  // really starts. To get around this, probe for the info needed.


  workerCallback({
    action: 'probeTs',
    transmuxer: segment.transmuxer,
    data: bytes,
    baseStartTime: segment.baseStartTime,
    callback: function callback(data) {
      // the probe transfers the bytes back; re-capture them for `finish`
      segment.bytes = bytes = data.data;
      var probeResult = data.result;

      if (probeResult) {
        trackInfoFn(segment, {
          hasAudio: probeResult.hasAudio,
          hasVideo: probeResult.hasVideo,
          isMuxed: isMuxed
        });
        trackInfoFn = null;

        if (probeResult.hasAudio && !isMuxed) {
          audioStartFn(probeResult.audioStart);
        }

        if (probeResult.hasVideo) {
          videoStartFn(probeResult.videoStart);
        }

        // probe-derived start times win; drop the start callbacks so the
        // transmuxer's later values are ignored
        audioStartFn = null;
        videoStartFn = null;
      }

      finish();
    }
  });
};
13491
/**
 * Dispatch fetched (and, if needed, decrypted) segment bytes to the right
 * processing path: fmp4 segments are probed for start time and captions and
 * delivered whole; ts/aac segments go through `transmuxAndNotify`; anything
 * else (e.g. VTT, or unknown containers) is completed without processing.
 *
 * @param {Object} _ref5 - segment, bytes and the notification callbacks
 */
var handleSegmentBytes = function handleSegmentBytes(_ref5) {
  var segment = _ref5.segment,
      bytes = _ref5.bytes,
      trackInfoFn = _ref5.trackInfoFn,
      timingInfoFn = _ref5.timingInfoFn,
      videoSegmentTimingInfoFn = _ref5.videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn = _ref5.audioSegmentTimingInfoFn,
      id3Fn = _ref5.id3Fn,
      captionsFn = _ref5.captionsFn,
      isEndOfTimeline = _ref5.isEndOfTimeline,
      endedTimelineFn = _ref5.endedTimelineFn,
      dataFn = _ref5.dataFn,
      doneFn = _ref5.doneFn,
      onTransmuxerLog = _ref5.onTransmuxerLog;
  var bytesAsUint8Array = new Uint8Array(bytes); // TODO:
  // We should have a handler that fetches the number of bytes required
  // to check if something is fmp4. This will allow us to save bandwidth
  // because we can only blacklist a playlist and abort requests
  // by codec after trackinfo triggers.

  if (containers.isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
    segment.isFmp4 = true;
    var tracks = segment.map.tracks;
    var trackInfo = {
      isFmp4: true,
      hasVideo: !!tracks.video,
      hasAudio: !!tracks.audio
    }; // if we have a audio track, with a codec that is not set to
    // encrypted audio

    if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
      trackInfo.audioCodec = tracks.audio.codec;
    } // if we have a video track, with a codec that is not set to
    // encrypted video


    if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
      trackInfo.videoCodec = tracks.video.codec;
    }

    if (tracks.video && tracks.audio) {
      trackInfo.isMuxed = true;
    } // since we don't support appending fmp4 data on progress, we know we have the full
    // segment here


    trackInfoFn(segment, trackInfo); // The probe doesn't provide the segment end time, so only callback with the start
    // time. The end time can be roughly calculated by the receiver using the duration.
    //
    // Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
    // that is the true start of the segment (where the playback engine should begin
    // decoding).

    // delivers the segment (and any parsed captions) and signals completion
    var finishLoading = function finishLoading(captions) {
      // if the track still has audio at this point it is only possible
      // for it to be audio only. See `tracks.video && tracks.audio` if statement
      // above.
      // we make sure to use segment.bytes here as that
      dataFn(segment, {
        data: bytesAsUint8Array,
        type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
      });

      if (captions && captions.length) {
        captionsFn(segment, captions);
      }

      doneFn(null, segment, {});
    };

    workerCallback({
      action: 'probeMp4StartTime',
      timescales: segment.map.timescales,
      data: bytesAsUint8Array,
      transmuxer: segment.transmuxer,
      callback: function callback(_ref6) {
        var data = _ref6.data,
            startTime = _ref6.startTime;
        // transfer bytes back to us
        bytes = data.buffer;
        segment.bytes = bytesAsUint8Array = data;

        if (trackInfo.hasAudio && !trackInfo.isMuxed) {
          timingInfoFn(segment, 'audio', 'start', startTime);
        }

        if (trackInfo.hasVideo) {
          timingInfoFn(segment, 'video', 'start', startTime);
        } // Run through the CaptionParser in case there are captions.
        // Initialize CaptionParser if it hasn't been yet


        if (!tracks.video || !data.byteLength || !segment.transmuxer) {
          finishLoading();
          return;
        }

        workerCallback({
          action: 'pushMp4Captions',
          endAction: 'mp4Captions',
          transmuxer: segment.transmuxer,
          data: bytesAsUint8Array,
          timescales: segment.map.timescales,
          trackIds: [tracks.video.id],
          callback: function callback(message) {
            // transfer bytes back to us
            bytes = message.data.buffer;
            segment.bytes = bytesAsUint8Array = message.data;
            message.logs.forEach(function (log) {
              onTransmuxerLog(videojs__default["default"].mergeOptions(log, {
                stream: 'mp4CaptionParser'
              }));
            });
            finishLoading(message.captions);
          }
        });
      }
    });
    return;
  } // VTT or other segments that don't need processing


  if (!segment.transmuxer) {
    doneFn(null, segment, {});
    return;
  }

  if (typeof segment.container === 'undefined') {
    segment.container = containers.detectContainerForBytes(bytesAsUint8Array);
  }

  // containers other than ts/aac are reported as having no tracks and
  // completed without transmuxing
  if (segment.container !== 'ts' && segment.container !== 'aac') {
    trackInfoFn(segment, {
      hasAudio: false,
      hasVideo: false
    });
    doneFn(null, segment, {});
    return;
  } // ts or aac


  transmuxAndNotify({
    segment: segment,
    bytes: bytes,
    trackInfoFn: trackInfoFn,
    timingInfoFn: timingInfoFn,
    videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
    audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
    id3Fn: id3Fn,
    captionsFn: captionsFn,
    isEndOfTimeline: isEndOfTimeline,
    endedTimelineFn: endedTimelineFn,
    dataFn: dataFn,
    doneFn: doneFn,
    onTransmuxerLog: onTransmuxerLog
  });
};
13649
var decrypt = function decrypt(options, callback) {
  var id = options.id;
  var key = options.key;
  var encryptedBytes = options.encryptedBytes;
  var decryptionWorker = options.decryptionWorker;

  // One-shot listener: only handle the worker response whose source matches
  // our request id, then detach so unrelated decryption replies are ignored.
  var onWorkerMessage = function onWorkerMessage(event) {
    if (event.data.source !== id) {
      return;
    }

    decryptionWorker.removeEventListener('message', onWorkerMessage);
    var decrypted = event.data.decrypted;
    callback(new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength));
  };

  decryptionWorker.addEventListener('message', onWorkerMessage);

  // The key buffer is transferred to the worker below, so hand over a copy
  // rather than the caller's bytes. Some typed-array-likes lack slice().
  var keyBytes = key.bytes.slice ? key.bytes.slice() : new Uint32Array(Array.prototype.slice.call(key.bytes));

  // incrementally decrypt the bytes
  decryptionWorker.postMessage(createTransferableMessage({
    source: id,
    encrypted: encryptedBytes,
    key: keyBytes,
    iv: key.iv
  }), [encryptedBytes.buffer, keyBytes.buffer]);
};
13681/**
13682 * Decrypt the segment via the decryption web worker
13683 *
13684 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
13685 * routines
13686 * @param {Object} segment - a simplified copy of the segmentInfo object
13687 * from SegmentLoader
13688 * @param {Function} trackInfoFn - a callback that receives track info
13689 * @param {Function} timingInfoFn - a callback that receives timing info
13690 * @param {Function} videoSegmentTimingInfoFn
13691 * a callback that receives video timing info based on media times and
13692 * any adjustments made by the transmuxer
13693 * @param {Function} audioSegmentTimingInfoFn
13694 * a callback that receives audio timing info based on media times and
13695 * any adjustments made by the transmuxer
13696 * @param {boolean} isEndOfTimeline
13697 * true if this segment represents the last segment in a timeline
13698 * @param {Function} endedTimelineFn
13699 * a callback made when a timeline is ended, will only be called if
13700 * isEndOfTimeline is true
13701 * @param {Function} dataFn - a callback that is executed when segment bytes are available
13702 * and ready to use
13703 * @param {Function} doneFn - a callback that is executed after decryption has completed
13704 */
13705
13706
var decryptSegment = function decryptSegment(options) {
  var decryptionWorker = options.decryptionWorker;
  var segment = options.segment;

  // After the worker returns plaintext bytes, feed them through the normal
  // segment-processing pipeline exactly as if they had arrived unencrypted.
  var onDecrypted = function onDecrypted(decryptedBytes) {
    segment.bytes = decryptedBytes;
    handleSegmentBytes({
      segment: segment,
      bytes: segment.bytes,
      trackInfoFn: options.trackInfoFn,
      timingInfoFn: options.timingInfoFn,
      videoSegmentTimingInfoFn: options.videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn: options.audioSegmentTimingInfoFn,
      id3Fn: options.id3Fn,
      captionsFn: options.captionsFn,
      isEndOfTimeline: options.isEndOfTimeline,
      endedTimelineFn: options.endedTimelineFn,
      dataFn: options.dataFn,
      doneFn: options.doneFn,
      onTransmuxerLog: options.onTransmuxerLog
    });
  };

  decrypt({
    id: segment.requestId,
    key: segment.key,
    encryptedBytes: segment.encryptedBytes,
    decryptionWorker: decryptionWorker
  }, onDecrypted);
};
13745/**
13746 * This function waits for all XHRs to finish (with either success or failure)
 * before continuing processing via its callback. The function gathers errors
13748 * from each request into a single errors array so that the error status for
13749 * each request can be examined later.
13750 *
13751 * @param {Object} activeXhrs - an object that tracks all XHR requests
13752 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
13753 * routines
13754 * @param {Function} trackInfoFn - a callback that receives track info
13755 * @param {Function} timingInfoFn - a callback that receives timing info
13756 * @param {Function} videoSegmentTimingInfoFn
13757 * a callback that receives video timing info based on media times and
13758 * any adjustments made by the transmuxer
13759 * @param {Function} audioSegmentTimingInfoFn
13760 * a callback that receives audio timing info based on media times and
13761 * any adjustments made by the transmuxer
13762 * @param {Function} id3Fn - a callback that receives ID3 metadata
13763 * @param {Function} captionsFn - a callback that receives captions
13764 * @param {boolean} isEndOfTimeline
13765 * true if this segment represents the last segment in a timeline
13766 * @param {Function} endedTimelineFn
13767 * a callback made when a timeline is ended, will only be called if
13768 * isEndOfTimeline is true
13769 * @param {Function} dataFn - a callback that is executed when segment bytes are available
13770 * and ready to use
13771 * @param {Function} doneFn - a callback that is executed after all resources have been
13772 * downloaded and any decryption completed
13773 */
13774
13775
var waitForCompletion = function waitForCompletion(_ref9) {
  var activeXhrs = _ref9.activeXhrs,
      decryptionWorker = _ref9.decryptionWorker,
      trackInfoFn = _ref9.trackInfoFn,
      timingInfoFn = _ref9.timingInfoFn,
      videoSegmentTimingInfoFn = _ref9.videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn = _ref9.audioSegmentTimingInfoFn,
      id3Fn = _ref9.id3Fn,
      captionsFn = _ref9.captionsFn,
      isEndOfTimeline = _ref9.isEndOfTimeline,
      endedTimelineFn = _ref9.endedTimelineFn,
      dataFn = _ref9.dataFn,
      doneFn = _ref9.doneFn,
      onTransmuxerLog = _ref9.onTransmuxerLog;
  var completedRequests = 0;
  var hasReportedError = false;
  return function (error, segment) {
    // A previous request in this batch already errored and doneFn was
    // called; ignore any remaining responses.
    if (hasReportedError) {
      return;
    }

    if (error) {
      hasReportedError = true; // If there are errors, we have to abort any outstanding requests.
      // Even though the requests are aborted, some may never fire an aborted
      // event: e.g. if the network connection is lost and the second of two
      // requests was never sent, the abort algorithm will not trigger (see
      // https://xhr.spec.whatwg.org/#the-abort()-method). Nor can we rely on
      // ready state, since the request that triggered the connection error
      // may also show ready state 0 (unsent). Therefore, finish this group
      // of requests immediately after the first seen error.

      abortAll(activeXhrs);
      return doneFn(error, segment);
    }

    completedRequests += 1;

    if (completedRequests !== activeXhrs.length) {
      // still waiting on other requests in this batch
      return;
    } // Every request (key/init/media) has now completed successfully.


    var segmentFinish = function segmentFinish() {
      if (segment.encryptedBytes) {
        return decryptSegment({
          decryptionWorker: decryptionWorker,
          segment: segment,
          trackInfoFn: trackInfoFn,
          timingInfoFn: timingInfoFn,
          videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
          audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
          id3Fn: id3Fn,
          captionsFn: captionsFn,
          isEndOfTimeline: isEndOfTimeline,
          endedTimelineFn: endedTimelineFn,
          dataFn: dataFn,
          doneFn: doneFn,
          onTransmuxerLog: onTransmuxerLog
        });
      } // Otherwise, everything is ready, just continue processing directly.


      handleSegmentBytes({
        segment: segment,
        bytes: segment.bytes,
        trackInfoFn: trackInfoFn,
        timingInfoFn: timingInfoFn,
        videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
        audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
        id3Fn: id3Fn,
        captionsFn: captionsFn,
        isEndOfTimeline: isEndOfTimeline,
        endedTimelineFn: endedTimelineFn,
        dataFn: dataFn,
        doneFn: doneFn,
        onTransmuxerLog: onTransmuxerLog
      });
    }; // Keep track of when *all* of the requests have completed


    segment.endOfAllRequests = Date.now();

    if (segment.map && segment.map.encryptedBytes && !segment.map.bytes) {
      // The init segment arrived encrypted: decrypt and parse it before
      // processing the media segment itself.
      return decrypt({
        decryptionWorker: decryptionWorker,
        // add -init to the "id" to differentiate between segment
        // and init segment decryption, just in case they happen
        // at the same time at some point in the future.
        id: segment.requestId + '-init',
        encryptedBytes: segment.map.encryptedBytes,
        key: segment.map.key
      }, function (decryptedBytes) {
        segment.map.bytes = decryptedBytes;
        parseInitSegment(segment, function (parseError) {
          if (parseError) {
            abortAll(activeXhrs);
            return doneFn(parseError, segment);
          }

          segmentFinish();
        });
      });
    }

    segmentFinish();
  };
};
13884/**
13885 * Calls the abort callback if any request within the batch was aborted. Will only call
13886 * the callback once per batch of requests, even if multiple were aborted.
13887 *
13888 * @param {Object} loadendState - state to check to see if the abort function was called
13889 * @param {Function} abortFn - callback to call for abort
13890 */
13891
13892
var handleLoadEnd = function handleLoadEnd(config) {
  var loadendState = config.loadendState;
  var abortFn = config.abortFn;

  // The returned listener is attached to every request's `loadend`; the
  // shared loadendState guarantees abortFn runs at most once per batch.
  return function (event) {
    var wasAborted = event.target.aborted;

    if (!wasAborted || !abortFn || loadendState.calledAbortFn) {
      return;
    }

    abortFn();
    loadendState.calledAbortFn = true;
  };
};
13905/**
13906 * Simple progress event callback handler that gathers some stats before
13907 * executing a provided callback with the `segment` object
13908 *
13909 * @param {Object} segment - a simplified copy of the segmentInfo object
13910 * from SegmentLoader
13911 * @param {Function} progressFn - a callback that is executed each time a progress event
13912 * is received
13913 * @param {Function} trackInfoFn - a callback that receives track info
13914 * @param {Function} timingInfoFn - a callback that receives timing info
13915 * @param {Function} videoSegmentTimingInfoFn
13916 * a callback that receives video timing info based on media times and
13917 * any adjustments made by the transmuxer
13918 * @param {Function} audioSegmentTimingInfoFn
13919 * a callback that receives audio timing info based on media times and
13920 * any adjustments made by the transmuxer
13921 * @param {boolean} isEndOfTimeline
13922 * true if this segment represents the last segment in a timeline
13923 * @param {Function} endedTimelineFn
13924 * a callback made when a timeline is ended, will only be called if
13925 * isEndOfTimeline is true
13926 * @param {Function} dataFn - a callback that is executed when segment bytes are available
13927 * and ready to use
13928 * @param {Event} event - the progress event object from XMLHttpRequest
13929 */
13930
13931
var handleProgress = function handleProgress(_ref11) {
  // Only segment and progressFn are actually used; the transpiler previously
  // emitted no-op property reads (`_ref11.trackInfoFn;` etc.) for the unused
  // destructured options — those dead statements are removed here.
  var segment = _ref11.segment,
      progressFn = _ref11.progressFn;
  return function (event) {
    var request = event.target;

    // Ignore progress events from requests that have already been aborted.
    if (request.aborted) {
      return;
    }

    segment.stats = videojs__default["default"].mergeOptions(segment.stats, getProgressStats(event)); // record the time that we receive the first byte of data

    if (!segment.stats.firstBytesReceivedAt && segment.stats.bytesReceived) {
      segment.stats.firstBytesReceivedAt = Date.now();
    }

    return progressFn(event, segment);
  };
};
13960/**
13961 * Load all resources and does any processing necessary for a media-segment
13962 *
13963 * Features:
13964 * decrypts the media-segment if it has a key uri and an iv
13965 * aborts *all* requests if *any* one request fails
13966 *
13967 * The segment object, at minimum, has the following format:
13968 * {
13969 * resolvedUri: String,
13970 * [transmuxer]: Object,
13971 * [byterange]: {
13972 * offset: Number,
13973 * length: Number
13974 * },
13975 * [key]: {
13976 * resolvedUri: String
13977 * [byterange]: {
13978 * offset: Number,
13979 * length: Number
13980 * },
13981 * iv: {
13982 * bytes: Uint32Array
13983 * }
13984 * },
13985 * [map]: {
13986 * resolvedUri: String,
13987 * [byterange]: {
13988 * offset: Number,
13989 * length: Number
13990 * },
13991 * [bytes]: Uint8Array
13992 * }
13993 * }
13994 * ...where [name] denotes optional properties
13995 *
13996 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
13997 * @param {Object} xhrOptions - the base options to provide to all xhr requests
13998 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
13999 * decryption routines
14000 * @param {Object} segment - a simplified copy of the segmentInfo object
14001 * from SegmentLoader
14002 * @param {Function} abortFn - a callback called (only once) if any piece of a request was
14003 * aborted
14004 * @param {Function} progressFn - a callback that receives progress events from the main
14005 * segment's xhr request
14006 * @param {Function} trackInfoFn - a callback that receives track info
14007 * @param {Function} timingInfoFn - a callback that receives timing info
14008 * @param {Function} videoSegmentTimingInfoFn
14009 * a callback that receives video timing info based on media times and
14010 * any adjustments made by the transmuxer
14011 * @param {Function} audioSegmentTimingInfoFn
14012 * a callback that receives audio timing info based on media times and
14013 * any adjustments made by the transmuxer
14014 * @param {Function} id3Fn - a callback that receives ID3 metadata
14015 * @param {Function} captionsFn - a callback that receives captions
14016 * @param {boolean} isEndOfTimeline
14017 * true if this segment represents the last segment in a timeline
14018 * @param {Function} endedTimelineFn
14019 * a callback made when a timeline is ended, will only be called if
14020 * isEndOfTimeline is true
14021 * @param {Function} dataFn - a callback that receives data from the main segment's xhr
14022 * request, transmuxed if needed
14023 * @param {Function} doneFn - a callback that is executed only once all requests have
14024 * succeeded or failed
14025 * @return {Function} a function that, when invoked, immediately aborts all
14026 * outstanding requests
14027 */
14028
14029
var mediaSegmentRequest = function mediaSegmentRequest(_ref12) {
  var xhr = _ref12.xhr,
      xhrOptions = _ref12.xhrOptions,
      decryptionWorker = _ref12.decryptionWorker,
      segment = _ref12.segment,
      abortFn = _ref12.abortFn,
      progressFn = _ref12.progressFn,
      trackInfoFn = _ref12.trackInfoFn,
      timingInfoFn = _ref12.timingInfoFn,
      videoSegmentTimingInfoFn = _ref12.videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn = _ref12.audioSegmentTimingInfoFn,
      id3Fn = _ref12.id3Fn,
      captionsFn = _ref12.captionsFn,
      isEndOfTimeline = _ref12.isEndOfTimeline,
      endedTimelineFn = _ref12.endedTimelineFn,
      dataFn = _ref12.dataFn,
      doneFn = _ref12.doneFn,
      onTransmuxerLog = _ref12.onTransmuxerLog;
  var activeXhrs = []; // finishProcessingFn is shared by every request issued below: it is called
  // once per completed request and only proceeds to decryption/processing
  // after ALL requests in activeXhrs have finished (or fails fast on the
  // first error) — see waitForCompletion.

  var finishProcessingFn = waitForCompletion({
    activeXhrs: activeXhrs,
    decryptionWorker: decryptionWorker,
    trackInfoFn: trackInfoFn,
    timingInfoFn: timingInfoFn,
    videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
    audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
    id3Fn: id3Fn,
    captionsFn: captionsFn,
    isEndOfTimeline: isEndOfTimeline,
    endedTimelineFn: endedTimelineFn,
    dataFn: dataFn,
    doneFn: doneFn,
    onTransmuxerLog: onTransmuxerLog
  }); // optionally, request the decryption key

  if (segment.key && !segment.key.bytes) {
    var objects = [segment.key]; // if the init segment is encrypted with the same key (same resolvedUri),
    // share this single key response with it instead of requesting it twice

    if (segment.map && !segment.map.bytes && segment.map.key && segment.map.key.resolvedUri === segment.key.resolvedUri) {
      objects.push(segment.map.key);
    }

    var keyRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
      uri: segment.key.resolvedUri,
      responseType: 'arraybuffer'
    });
    var keyRequestCallback = handleKeyResponse(segment, objects, finishProcessingFn);
    var keyXhr = xhr(keyRequestOptions, keyRequestCallback);
    activeXhrs.push(keyXhr);
  } // optionally, request the associated media init segment


  if (segment.map && !segment.map.bytes) {
    // the init segment may use its own key, not covered by the shared key
    // request above; in that case it needs a separate key request
    var differentMapKey = segment.map.key && (!segment.key || segment.key.resolvedUri !== segment.map.key.resolvedUri);

    if (differentMapKey) {
      var mapKeyRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
        uri: segment.map.key.resolvedUri,
        responseType: 'arraybuffer'
      });
      var mapKeyRequestCallback = handleKeyResponse(segment, [segment.map.key], finishProcessingFn);
      var mapKeyXhr = xhr(mapKeyRequestOptions, mapKeyRequestCallback);
      activeXhrs.push(mapKeyXhr);
    }

    var initSegmentOptions = videojs__default["default"].mergeOptions(xhrOptions, {
      uri: segment.map.resolvedUri,
      responseType: 'arraybuffer',
      headers: segmentXhrHeaders(segment.map)
    });
    var initSegmentRequestCallback = handleInitSegmentResponse({
      segment: segment,
      finishProcessingFn: finishProcessingFn
    });
    var initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);
    activeXhrs.push(initSegmentXhr);
  } // the main media segment request; a part-level resolvedUri (low-latency
  // HLS partial segment) takes precedence over the full segment URI


  var segmentRequestOptions = videojs__default["default"].mergeOptions(xhrOptions, {
    uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,
    responseType: 'arraybuffer',
    headers: segmentXhrHeaders(segment)
  });
  var segmentRequestCallback = handleSegmentResponse({
    segment: segment,
    finishProcessingFn: finishProcessingFn,
    responseType: segmentRequestOptions.responseType
  });
  var segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);
  segmentXhr.addEventListener('progress', handleProgress({
    segment: segment,
    progressFn: progressFn,
    trackInfoFn: trackInfoFn,
    timingInfoFn: timingInfoFn,
    videoSegmentTimingInfoFn: videoSegmentTimingInfoFn,
    audioSegmentTimingInfoFn: audioSegmentTimingInfoFn,
    id3Fn: id3Fn,
    captionsFn: captionsFn,
    isEndOfTimeline: isEndOfTimeline,
    endedTimelineFn: endedTimelineFn,
    dataFn: dataFn
  }));
  activeXhrs.push(segmentXhr); // since all parts of the request must be considered, but should not make callbacks
  // multiple times, provide a shared state object

  var loadendState = {};
  activeXhrs.forEach(function (activeXhr) {
    activeXhr.addEventListener('loadend', handleLoadEnd({
      loadendState: loadendState,
      abortFn: abortFn
    }));
  });
  // Returned canceller: immediately aborts every outstanding request.
  return function () {
    return abortAll(activeXhrs);
  };
};
14146
14147/**
14148 * @file - codecs.js - Handles tasks regarding codec strings such as translating them to
14149 * codec strings, or translating codec strings into objects that can be examined.
14150 */
// Debug logger scoped to the codec utilities in this section (see logger()).
var logFn$1 = logger('CodecUtils');
14152/**
14153 * Returns a set of codec strings parsed from the playlist or the default
14154 * codec strings if no codecs were specified in the playlist
14155 *
14156 * @param {Playlist} media the current media playlist
14157 * @return {Object} an object with the video and audio codecs
14158 */
14159
var getCodecs = function getCodecs(media) {
  var attributes = media.attributes || {};

  if (!attributes.CODECS) {
    // no explicitly declared codecs; callers fall back to defaults/probing
    return;
  }

  return codecs_js.parseCodecs(attributes.CODECS);
};
14169
// "maat" = multiple audio alternative tracks: the master must expose an
// AUDIO media group that this rendition references via its AUDIO attribute.
// Note: intentionally returns the last truthy operand (the group lookup),
// not a boolean, matching the original short-circuit chain.
var isMaat = function isMaat(master, media) {
  var attributes = media.attributes || {};
  return master && master.mediaGroups && master.mediaGroups.AUDIO && attributes.AUDIO && master.mediaGroups.AUDIO[attributes.AUDIO];
};
var isMuxed = function isMuxed(master, media) {
  // Without alternative audio tracks the audio must be muxed into the video.
  if (!isMaat(master, media)) {
    return true;
  }

  var attributes = media.attributes || {};
  var audioGroup = master.mediaGroups.AUDIO[attributes.AUDIO];

  for (var groupId in audioGroup) {
    var groupItem = audioGroup[groupId];

    // A URI (HLS, which uses externally requested playlists) or inline
    // playlists (DASH, whose manifest already provided every detail needed
    // to generate the audio playlist) both mean demuxed audio.
    if (!groupItem.uri && !groupItem.playlists) {
      return true;
    }
  }

  return false;
};
var unwrapCodecList = function unwrapCodecList(codecList) {
  var codecs = {};

  // Group the translated codec strings by media type (audio/video).
  codecList.forEach(function (entry) {
    var mediaType = entry.mediaType;
    var bucket = codecs[mediaType] = codecs[mediaType] || [];
    bucket.push(codecs_js.translateLegacyCodec("" + entry.type + entry.details));
  });

  // Collapse each bucket to a single codec string; ambiguous (multi-codec)
  // attributes become null so real codecs are probed from segments instead.
  Object.keys(codecs).forEach(function (mediaType) {
    var list = codecs[mediaType];

    if (list.length > 1) {
      logFn$1("multiple " + mediaType + " codecs found as attributes: " + list.join(', ') + ". Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.");
      codecs[mediaType] = null;
      return;
    }

    codecs[mediaType] = list[0];
  });

  return codecs;
};
var codecCount = function codecCount(codecObj) {
  // Number of media types (audio, video) with a truthy codec entry: 0-2.
  return (codecObj.audio ? 1 : 0) + (codecObj.video ? 1 : 0);
};
14227/**
14228 * Calculates the codec strings for a working configuration of
14229 * SourceBuffers to play variant streams in a master playlist. If
14230 * there is no possible working configuration, an empty object will be
14231 * returned.
14232 *
14233 * @param master {Object} the m3u8 object for the master playlist
14234 * @param media {Object} the m3u8 object for the variant playlist
14235 * @return {Object} the codec strings.
14236 *
14237 * @private
14238 */
14239
var codecsForPlaylist = function codecsForPlaylist(master, media) {
  var mediaAttributes = media.attributes || {};
  var codecInfo = unwrapCodecList(getCodecs(media) || []);

  // HLS with multiple audio tracks must always get an audio codec; there is
  // no such thing as a video-only multiple-audio HLS stream. When the
  // rendition playlist omits the audio codec but the audio is demuxed
  // (common for DASH, where audio and video are specified separately),
  // borrow it from the audio media group's default playlist.
  if (isMaat(master, media) && !codecInfo.audio && !isMuxed(master, media)) {
    var defaultCodecs = unwrapCodecList(codecs_js.codecsFromDefault(master, mediaAttributes.AUDIO) || []);

    if (defaultCodecs.audio) {
      codecInfo.audio = defaultCodecs.audio;
    }
  }

  return codecInfo;
};
14260
// Debug logger scoped to the playlist-selection helpers below (see logger()).
var logFn = logger('PlaylistSelector');
14262
// Compact JSON description of a representation, used for debug logging.
// Returns undefined when there is no representation or backing playlist.
var representationToString = function representationToString(representation) {
  if (!representation || !representation.playlist) {
    return;
  }

  var playlist = representation.playlist;
  var attributes = playlist.attributes;
  return JSON.stringify({
    id: playlist.id,
    bandwidth: representation.bandwidth,
    width: representation.width,
    height: representation.height,
    codecs: attributes && attributes.CODECS || ''
  });
}; // Utilities
14277
14278/**
14279 * Returns the CSS value for the specified property on an element
14280 * using `getComputedStyle`. Firefox has a long-standing issue where
14281 * getComputedStyle() may return null when running in an iframe with
14282 * `display: none`.
14283 *
14284 * @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
14285 * @param {HTMLElement} el the htmlelement to work on
 * @param {string} property the property to get the style for
14287 */
14288
14289
var safeGetComputedStyle = function safeGetComputedStyle(el, property) {
  if (!el) {
    return '';
  }

  // getComputedStyle may return null in Firefox when run inside an iframe
  // with `display: none` (bugzilla #548397), so guard before indexing.
  var style = window__default["default"].getComputedStyle(el);
  return style ? style[property] : '';
};
14303/**
 * Reusable stable sort function
14305 *
14306 * @param {Playlists} array
14307 * @param {Function} sortFn Different comparators
14308 * @function stableSort
14309 */
14310
14311
var stableSort = function stableSort(array, sortFn) {
  // Snapshot the input order so ties can be broken by original position,
  // making the in-place sort stable regardless of engine behavior.
  var originalOrder = array.slice();
  array.sort(function (left, right) {
    var result = sortFn(left, right);

    if (result !== 0) {
      return result;
    }

    return originalOrder.indexOf(left) - originalOrder.indexOf(right);
  });
};
14324/**
14325 * A comparator function to sort two playlist object by bandwidth.
14326 *
14327 * @param {Object} left a media playlist object
14328 * @param {Object} right a media playlist object
14329 * @return {number} Greater than zero if the bandwidth attribute of
14330 * left is greater than the corresponding attribute of right. Less
14331 * than zero if the bandwidth of right is greater than left and
14332 * exactly zero if the two are equal.
14333 */
14334
14335
var comparePlaylistBandwidth = function comparePlaylistBandwidth(left, right) {
  // Playlists without a (truthy) BANDWIDTH attribute sort last by treating
  // their bandwidth as the maximum representable number.
  var leftBandwidth = left.attributes.BANDWIDTH || window__default["default"].Number.MAX_VALUE;
  var rightBandwidth = right.attributes.BANDWIDTH || window__default["default"].Number.MAX_VALUE;
  return leftBandwidth - rightBandwidth;
};
14353/**
14354 * A comparator function to sort two playlist object by resolution (width).
14355 *
14356 * @param {Object} left a media playlist object
14357 * @param {Object} right a media playlist object
14358 * @return {number} Greater than zero if the resolution.width attribute of
14359 * left is greater than the corresponding attribute of right. Less
14360 * than zero if the resolution.width of right is greater than left and
14361 * exactly zero if the two are equal.
14362 */
14363
var comparePlaylistResolution = function comparePlaylistResolution(left, right) {
  var leftAttrs = left.attributes;
  var rightAttrs = right.attributes;
  // Missing/zero widths sort last by falling back to the maximum number.
  var leftWidth = leftAttrs.RESOLUTION && leftAttrs.RESOLUTION.width || window__default["default"].Number.MAX_VALUE;
  var rightWidth = rightAttrs.RESOLUTION && rightAttrs.RESOLUTION.width || window__default["default"].Number.MAX_VALUE;

  // NOTE - Fall back to a bandwidth sort when multiple renditions share the
  // same media dimensions/resolution, so ordering stays deterministic.
  if (leftWidth === rightWidth && leftAttrs.BANDWIDTH && rightAttrs.BANDWIDTH) {
    return leftAttrs.BANDWIDTH - rightAttrs.BANDWIDTH;
  }

  return leftWidth - rightWidth;
};
14387/**
14388 * Chooses the appropriate media playlist based on bandwidth and player size
14389 *
14390 * @param {Object} master
14391 * Object representation of the master manifest
14392 * @param {number} playerBandwidth
14393 * Current calculated bandwidth of the player
14394 * @param {number} playerWidth
14395 * Current width of the player element (should account for the device pixel ratio)
14396 * @param {number} playerHeight
14397 * Current height of the player element (should account for the device pixel ratio)
14398 * @param {boolean} limitRenditionByPlayerDimensions
14399 * True if the player width and height should be used during the selection, false otherwise
14400 * @param {Object} masterPlaylistController
14401 * the current masterPlaylistController object
14402 * @return {Playlist} the highest bitrate playlist less than the
14403 * currently detected bandwidth, accounting for some amount of
14404 * bandwidth variance
14405 */
14406
14407var simpleSelector = function simpleSelector(master, playerBandwidth, playerWidth, playerHeight, limitRenditionByPlayerDimensions, masterPlaylistController) {
14408 // If we end up getting called before `master` is available, exit early
14409 if (!master) {
14410 return;
14411 }
14412
14413 var options = {
14414 bandwidth: playerBandwidth,
14415 width: playerWidth,
14416 height: playerHeight,
14417 limitRenditionByPlayerDimensions: limitRenditionByPlayerDimensions
14418 };
14419 var playlists = master.playlists; // if playlist is audio only, select between currently active audio group playlists.
14420
14421 if (Playlist.isAudioOnly(master)) {
14422 playlists = masterPlaylistController.getAudioTrackPlaylists_(); // add audioOnly to options so that we log audioOnly: true
14423 // at the buttom of this function for debugging.
14424
14425 options.audioOnly = true;
14426 } // convert the playlists to an intermediary representation to make comparisons easier
14427
14428
14429 var sortedPlaylistReps = playlists.map(function (playlist) {
14430 var bandwidth;
14431 var width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
14432 var height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
14433 bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
14434 bandwidth = bandwidth || window__default["default"].Number.MAX_VALUE;
14435 return {
14436 bandwidth: bandwidth,
14437 width: width,
14438 height: height,
14439 playlist: playlist
14440 };
14441 });
14442 stableSort(sortedPlaylistReps, function (left, right) {
14443 return left.bandwidth - right.bandwidth;
14444 }); // filter out any playlists that have been excluded due to
14445 // incompatible configurations
14446
14447 sortedPlaylistReps = sortedPlaylistReps.filter(function (rep) {
14448 return !Playlist.isIncompatible(rep.playlist);
14449 }); // filter out any playlists that have been disabled manually through the representations
14450 // api or blacklisted temporarily due to playback errors.
14451
14452 var enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
14453 return Playlist.isEnabled(rep.playlist);
14454 });
14455
14456 if (!enabledPlaylistReps.length) {
14457 // if there are no enabled playlists, then they have all been blacklisted or disabled
14458 // by the user through the representations api. In this case, ignore blacklisting and
14459 // fallback to what the user wants by using playlists the user has not disabled.
14460 enabledPlaylistReps = sortedPlaylistReps.filter(function (rep) {
14461 return !Playlist.isDisabled(rep.playlist);
14462 });
14463 } // filter out any variant that has greater effective bitrate
14464 // than the current estimated bandwidth
14465
14466
14467 var bandwidthPlaylistReps = enabledPlaylistReps.filter(function (rep) {
14468 return rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth;
14469 });
14470 var highestRemainingBandwidthRep = bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1]; // get all of the renditions with the same (highest) bandwidth
14471 // and then taking the very first element
14472
14473 var bandwidthBestRep = bandwidthPlaylistReps.filter(function (rep) {
14474 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
14475 })[0]; // if we're not going to limit renditions by player size, make an early decision.
14476
14477 if (limitRenditionByPlayerDimensions === false) {
14478 var _chosenRep = bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
14479
14480 if (_chosenRep && _chosenRep.playlist) {
14481 var type = 'sortedPlaylistReps';
14482
14483 if (bandwidthBestRep) {
14484 type = 'bandwidthBestRep';
14485 }
14486
14487 if (enabledPlaylistReps[0]) {
14488 type = 'enabledPlaylistReps';
14489 }
14490
14491 logFn("choosing " + representationToString(_chosenRep) + " using " + type + " with options", options);
14492 return _chosenRep.playlist;
14493 }
14494
14495 logFn('could not choose a playlist with options', options);
14496 return null;
14497 } // filter out playlists without resolution information
14498
14499
14500 var haveResolution = bandwidthPlaylistReps.filter(function (rep) {
14501 return rep.width && rep.height;
14502 }); // sort variants by resolution
14503
14504 stableSort(haveResolution, function (left, right) {
14505 return left.width - right.width;
14506 }); // if we have the exact resolution as the player use it
14507
14508 var resolutionBestRepList = haveResolution.filter(function (rep) {
14509 return rep.width === playerWidth && rep.height === playerHeight;
14510 });
14511 highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1]; // ensure that we pick the highest bandwidth variant that have exact resolution
14512
14513 var resolutionBestRep = resolutionBestRepList.filter(function (rep) {
14514 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
14515 })[0];
14516 var resolutionPlusOneList;
14517 var resolutionPlusOneSmallest;
14518 var resolutionPlusOneRep; // find the smallest variant that is larger than the player
14519 // if there is no match of exact resolution
14520
14521 if (!resolutionBestRep) {
14522 resolutionPlusOneList = haveResolution.filter(function (rep) {
14523 return rep.width > playerWidth || rep.height > playerHeight;
14524 }); // find all the variants have the same smallest resolution
14525
14526 resolutionPlusOneSmallest = resolutionPlusOneList.filter(function (rep) {
14527 return rep.width === resolutionPlusOneList[0].width && rep.height === resolutionPlusOneList[0].height;
14528 }); // ensure that we also pick the highest bandwidth variant that
14529 // is just-larger-than the video player
14530
14531 highestRemainingBandwidthRep = resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
14532 resolutionPlusOneRep = resolutionPlusOneSmallest.filter(function (rep) {
14533 return rep.bandwidth === highestRemainingBandwidthRep.bandwidth;
14534 })[0];
14535 }
14536
14537 var leastPixelDiffRep; // If this selector proves to be better than others,
14538 // resolutionPlusOneRep and resolutionBestRep and all
14539 // the code involving them should be removed.
14540
14541 if (masterPlaylistController.experimentalLeastPixelDiffSelector) {
14542 // find the variant that is closest to the player's pixel size
14543 var leastPixelDiffList = haveResolution.map(function (rep) {
14544 rep.pixelDiff = Math.abs(rep.width - playerWidth) + Math.abs(rep.height - playerHeight);
14545 return rep;
14546 }); // get the highest bandwidth, closest resolution playlist
14547
14548 stableSort(leastPixelDiffList, function (left, right) {
14549 // sort by highest bandwidth if pixelDiff is the same
14550 if (left.pixelDiff === right.pixelDiff) {
14551 return right.bandwidth - left.bandwidth;
14552 }
14553
14554 return left.pixelDiff - right.pixelDiff;
14555 });
14556 leastPixelDiffRep = leastPixelDiffList[0];
14557 } // fallback chain of variants
14558
14559
14560 var chosenRep = leastPixelDiffRep || resolutionPlusOneRep || resolutionBestRep || bandwidthBestRep || enabledPlaylistReps[0] || sortedPlaylistReps[0];
14561
14562 if (chosenRep && chosenRep.playlist) {
14563 var _type = 'sortedPlaylistReps';
14564
14565 if (leastPixelDiffRep) {
14566 _type = 'leastPixelDiffRep';
14567 } else if (resolutionPlusOneRep) {
14568 _type = 'resolutionPlusOneRep';
14569 } else if (resolutionBestRep) {
14570 _type = 'resolutionBestRep';
14571 } else if (bandwidthBestRep) {
14572 _type = 'bandwidthBestRep';
14573 } else if (enabledPlaylistReps[0]) {
14574 _type = 'enabledPlaylistReps';
14575 }
14576
14577 logFn("choosing " + representationToString(chosenRep) + " using " + _type + " with options", options);
14578 return chosenRep.playlist;
14579 }
14580
14581 logFn('could not choose a playlist with options', options);
14582 return null;
14583};
14584
/**
 * Chooses the appropriate media playlist based on the most recent
 * bandwidth estimate and the player size.
 *
 * Expects to be called within the context of an instance of VhsHandler
 *
 * @return {Playlist} the highest bitrate playlist less than the
 * currently detected bandwidth, accounting for some amount of
 * bandwidth variance
 */

var lastBandwidthSelector = function lastBandwidthSelector() {
  // Optionally scale the player's CSS dimensions by the device pixel ratio.
  var pixelRatio = this.useDevicePixelRatio ? window__default["default"].devicePixelRatio || 1 : 1;
  var playerEl = this.tech_.el();
  var playerWidth = parseInt(safeGetComputedStyle(playerEl, 'width'), 10) * pixelRatio;
  var playerHeight = parseInt(safeGetComputedStyle(playerEl, 'height'), 10) * pixelRatio;
  return simpleSelector(this.playlists.master, this.systemBandwidth, playerWidth, playerHeight, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
};
/**
 * Chooses the appropriate media playlist based on an
 * exponential-weighted moving average of the bandwidth after
 * filtering for player size.
 *
 * Expects to be called within the context of an instance of VhsHandler
 *
 * @param {number} decay - a number between 0 and 1. Higher values of
 * this parameter will cause previous bandwidth estimates to lose
 * significance more quickly.
 * @return {Function} a function which can be invoked to create a new
 * playlist selector function.
 * @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
 */

var movingAverageBandwidthSelector = function movingAverageBandwidthSelector(decay) {
  // State shared across every invocation of the returned selector.
  var average = -1;
  var lastSystemBandwidth = -1;

  if (decay < 0 || decay > 1) {
    throw new Error('Moving average bandwidth decay must be between 0 and 1.');
  }

  return function () {
    var pixelRatio = this.useDevicePixelRatio ? window__default["default"].devicePixelRatio || 1 : 1;

    // Seed the average with the first bandwidth measurement we see.
    if (average < 0) {
      average = this.systemBandwidth;
      lastSystemBandwidth = this.systemBandwidth;
    }

    // Fold a measurement into the moving average only when it is new and
    // non-zero. This keeps the average from decaying every 250ms while
    // systemBandwidth is constant, and keeps a transient 0 (e.g. from a
    // cancelled chunk) from dragging the average down.
    var isFreshMeasurement = this.systemBandwidth > 0 && this.systemBandwidth !== lastSystemBandwidth;

    if (isFreshMeasurement) {
      average = decay * this.systemBandwidth + (1 - decay) * average;
      lastSystemBandwidth = this.systemBandwidth;
    }

    return simpleSelector(this.playlists.master, average, parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio, parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio, this.limitRenditionByPlayerDimensions, this.masterPlaylistController_);
  };
};
/**
 * Chooses the appropriate media playlist based on the potential to rebuffer
 *
 * @param {Object} settings
 *        Object of information required to use this selector
 * @param {Object} settings.master
 *        Object representation of the master manifest
 * @param {number} settings.currentTime
 *        The current time of the player
 * @param {number} settings.bandwidth
 *        Current measured bandwidth
 * @param {number} settings.duration
 *        Duration of the media
 * @param {number} settings.segmentDuration
 *        Segment duration to be used in round trip time calculations
 * @param {number} settings.timeUntilRebuffer
 *        Time left in seconds until the player has to rebuffer
 * @param {number} settings.currentTimeline
 *        The current timeline segments are being loaded from
 * @param {SyncController} settings.syncController
 *        SyncController for determining if we have a sync point for a given playlist
 * @return {Object|null}
 *         {Object} return.playlist
 *         The highest bandwidth playlist with the least amount of rebuffering
 *         {Number} return.rebufferingImpact
 *         The amount of time in seconds switching to this playlist will rebuffer. A
 *         negative value means that switching will cause zero rebuffering.
 */

var minRebufferMaxBandwidthSelector = function minRebufferMaxBandwidthSelector(settings) {
  var master = settings.master;
  var currentTime = settings.currentTime;
  var bandwidth = settings.bandwidth;
  var duration = settings.duration;
  var segmentDuration = settings.segmentDuration;
  var timeUntilRebuffer = settings.timeUntilRebuffer;
  var currentTimeline = settings.currentTimeline;
  var syncController = settings.syncController;

  // Drop playlists excluded for incompatible configurations.
  var compatiblePlaylists = master.playlists.filter(function (playlist) {
    return !Playlist.isIncompatible(playlist);
  });

  // Drop playlists disabled through the representations api or blacklisted
  // temporarily due to playback errors.
  var candidates = compatiblePlaylists.filter(Playlist.isEnabled);

  if (!candidates.length) {
    // Everything is blacklisted or disabled; ignore blacklisting and fall back
    // to whatever the user has not explicitly disabled.
    candidates = compatiblePlaylists.filter(function (playlist) {
      return !Playlist.isDisabled(playlist);
    });
  }

  var withBandwidth = candidates.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));

  var estimates = withBandwidth.map(function (playlist) {
    var syncPoint = syncController.getSyncPoint(playlist, duration, currentTimeline, currentTime);
    // Without a sync point, switching requires an extra sync request first,
    // doubling the expected request time.
    var numRequests = syncPoint ? 1 : 2;
    var requestTimeEstimate = Playlist.estimateSegmentRequestTime(segmentDuration, bandwidth, playlist);
    return {
      playlist: playlist,
      rebufferingImpact: requestTimeEstimate * numRequests - timeUntilRebuffer
    };
  });

  var noRebufferEstimates = estimates.filter(function (estimate) {
    return estimate.rebufferingImpact <= 0;
  });

  // Prefer the highest-bandwidth playlist that causes no rebuffering.
  stableSort(noRebufferEstimates, function (a, b) {
    return comparePlaylistBandwidth(b.playlist, a.playlist);
  });

  if (noRebufferEstimates.length) {
    return noRebufferEstimates[0];
  }

  // Otherwise choose the option with the least rebuffering impact.
  stableSort(estimates, function (a, b) {
    return a.rebufferingImpact - b.rebufferingImpact;
  });
  return estimates[0] || null;
};
/**
 * Chooses the appropriate media playlist, which in this case is the lowest bitrate
 * one with video. If no renditions with video exist, return the lowest audio rendition.
 *
 * Expects to be called within the context of an instance of VhsHandler
 *
 * @return {Object|null}
 *         {Object} return.playlist
 *         The lowest bitrate playlist that contains a video codec. If no such rendition
 *         exists pick the lowest audio rendition.
 */

var lowestBitrateCompatibleVariantSelector = function lowestBitrateCompatibleVariantSelector() {
  var _this = this;

  // Only consider playlists that have not been excluded for incompatible
  // configurations or playback errors.
  var candidates = this.playlists.master.playlists.filter(Playlist.isEnabled);

  // Ascending bitrate order.
  stableSort(candidates, function (a, b) {
    return comparePlaylistBandwidth(a, b);
  });

  // Treat playlists that declare no video codec as having no video. That is
  // not necessarily true, but generally is; a manifest with no valid video
  // codecs anywhere ends up entirely filtered out here.
  var withVideo = candidates.filter(function (playlist) {
    return !!codecsForPlaylist(_this.playlists.master, playlist).video;
  });

  return withVideo[0] || null;
};
14762
/**
 * Combine all segments into a single Uint8Array
 *
 * @param {Object} segmentObj - carries `bytes` (total byte count) and
 *        `segments` (the individual typed arrays to concatenate)
 * @return {Uint8Array|undefined} concatenated bytes, or undefined when
 *         `segmentObj.bytes` is falsy
 * @private
 */
var concatSegments = function concatSegments(segmentObj) {
  var combined;

  if (segmentObj.bytes) {
    // Allocate once at the total size, then copy each segment in sequence.
    combined = new Uint8Array(segmentObj.bytes);
    var writeOffset = 0;
    segmentObj.segments.forEach(function (segmentBytes) {
      combined.set(segmentBytes, writeOffset);
      writeOffset += segmentBytes.byteLength;
    });
  }

  return combined;
};
14785
14786/**
14787 * @file text-tracks.js
14788 */
/**
 * Create captions text tracks on video.js if they do not exist
 *
 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
 * @param {Object} tech the video.js tech
 * @param {Object} captionStream the caption stream to create
 * @private
 */

var createCaptionsTrackIfNotExists = function createCaptionsTrackIfNotExists(inbandTextTracks, tech, captionStream) {
  // Already tracked; nothing to create.
  if (inbandTextTracks[captionStream]) {
    return;
  }

  tech.trigger({
    type: 'usage',
    name: 'vhs-608'
  });
  tech.trigger({
    type: 'usage',
    name: 'hls-608'
  });

  // mux.js currently labels 708 services as SERVICEn rather than cc708_n.
  var instreamId = captionStream;

  if (/^cc708_/.test(captionStream)) {
    instreamId = 'SERVICE' + captionStream.split('_')[1];
  }

  var existingTrack = tech.textTracks().getTrackById(instreamId);

  if (existingTrack) {
    // Reuse an existing track with a CC# id because this was very likely
    // created by videojs-contrib-hls from information in the m3u8 for us to use.
    inbandTextTracks[captionStream] = existingTrack;
    return;
  }

  // Caption services that aren't specified in the manifest end up here.
  // Manifest level caption services are handled in media-groups.js under
  // CLOSED-CAPTIONS.
  var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
  var label = captionStream;
  var language = captionStream;
  var def = false;
  var captionService = captionServices[instreamId];

  if (captionService) {
    label = captionService.label;
    language = captionService.language;
    def = captionService.default;
  }

  // Fall back to a track with the default `CC#` label and without a language.
  inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
    kind: 'captions',
    id: instreamId,
    // TODO: investigate why this doesn't seem to turn the caption on by default
    default: def,
    label: label,
    language: language
  }, false).track;
};
/**
 * Add caption text track data to a source handler given an array of captions
 *
 * @param {Object}
 * @param {Object} inbandTextTracks the inband text tracks
 * @param {number} timestampOffset the timestamp offset of the source buffer
 * @param {Array} captionArray an array of caption data
 * @private
 */

var addCaptionData = function addCaptionData(_ref) {
  var inbandTextTracks = _ref.inbandTextTracks;
  var captionArray = _ref.captionArray;
  var timestampOffset = _ref.timestampOffset;

  if (!captionArray) {
    return;
  }

  // Prefer WebKitDataCue where available, otherwise the standard VTTCue.
  var Cue = window__default["default"].WebKitDataCue || window__default["default"].VTTCue;

  captionArray.forEach(function (caption) {
    // Each caption is routed to the track for its stream, shifted by the
    // source buffer's timestamp offset.
    var track = inbandTextTracks[caption.stream];
    track.addCue(new Cue(caption.startTime + timestampOffset, caption.endTime + timestampOffset, caption.text));
  });
};
/**
 * Define properties on a cue for backwards compatability,
 * but warn the user that the way that they are using it
 * is depricated and will be removed at a later date.
 *
 * @param {Cue} cue the cue to add the properties on
 * @private
 */

var deprecateOldCue = function deprecateOldCue(cue) {
  // Build a getter descriptor that logs a deprecation warning and then
  // forwards to the replacement location on cue.value.
  var deprecatedGetter = function deprecatedGetter(message, read) {
    return {
      get: function get() {
        videojs__default["default"].log.warn(message);
        return read();
      }
    };
  };

  Object.defineProperties(cue.frame, {
    id: deprecatedGetter('cue.frame.id is deprecated. Use cue.value.key instead.', function () {
      return cue.value.key;
    }),
    value: deprecatedGetter('cue.frame.value is deprecated. Use cue.value.data instead.', function () {
      return cue.value.data;
    }),
    privateData: deprecatedGetter('cue.frame.privateData is deprecated. Use cue.value.data instead.', function () {
      return cue.value.data;
    })
  });
};
/**
 * Add metadata text track data to a source handler given an array of metadata
 *
 * @param {Object}
 * @param {Object} inbandTextTracks the inband text tracks
 * @param {Array} metadataArray an array of meta data
 * @param {number} timestampOffset the timestamp offset of the source buffer
 * @param {number} videoDuration the duration of the video
 * @private
 */


var addMetadata = function addMetadata(_ref2) {
  var inbandTextTracks = _ref2.inbandTextTracks,
      metadataArray = _ref2.metadataArray,
      timestampOffset = _ref2.timestampOffset,
      videoDuration = _ref2.videoDuration;

  // Nothing to add.
  if (!metadataArray) {
    return;
  }

  // Prefer WebKitDataCue where available, otherwise the standard VTTCue.
  var Cue = window__default["default"].WebKitDataCue || window__default["default"].VTTCue;
  var metadataTrack = inbandTextTracks.metadataTrack_;

  // Without a metadata track there is nowhere to attach cues.
  if (!metadataTrack) {
    return;
  }

  metadataArray.forEach(function (metadata) {
    var time = metadata.cueTime + timestampOffset; // if time isn't a finite number between 0 and Infinity, like NaN,
    // ignore this bit of metadata.
    // This likely occurs when you have an non-timed ID3 tag like TIT2,
    // which is the "Title/Songname/Content description" frame

    if (typeof time !== 'number' || window__default["default"].isNaN(time) || time < 0 || !(time < Infinity)) {
      return;
    }

    // One cue per ID3 frame; keep the raw frame reachable from the cue for
    // consumers, via both the deprecated `frame` and the current `value`.
    metadata.frames.forEach(function (frame) {
      var cue = new Cue(time, time, frame.value || frame.url || frame.data || '');
      cue.frame = frame;
      cue.value = frame;
      deprecateOldCue(cue);
      metadataTrack.addCue(cue);
    });
  });

  if (!metadataTrack.cues || !metadataTrack.cues.length) {
    return;
  } // Updating the metadeta cues so that
  // the endTime of each cue is the startTime of the next cue
  // the endTime of last cue is the duration of the video


  var cues = metadataTrack.cues;
  var cuesArray = []; // Create a copy of the TextTrackCueList...
  // ...disregarding cues with a falsey value

  for (var i = 0; i < cues.length; i++) {
    if (cues[i]) {
      cuesArray.push(cues[i]);
    }
  } // Group cues by their startTime value


  var cuesGroupedByStartTime = cuesArray.reduce(function (obj, cue) {
    var timeSlot = obj[cue.startTime] || [];
    timeSlot.push(cue);
    obj[cue.startTime] = timeSlot;
    return obj;
  }, {}); // Sort startTimes by ascending order

  var sortedStartTimes = Object.keys(cuesGroupedByStartTime).sort(function (a, b) {
    return Number(a) - Number(b);
  }); // Map each cue group's endTime to the next group's startTime

  sortedStartTimes.forEach(function (startTime, idx) {
    var cueGroup = cuesGroupedByStartTime[startTime];
    var nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration; // Map each cue's endTime the next group's startTime

    cueGroup.forEach(function (cue) {
      cue.endTime = nextTime;
    });
  });
};
/**
 * Create metadata text track on video.js if it does not exist
 *
 * @param {Object} inbandTextTracks a reference to current inbandTextTracks
 * @param {string} dispatchType the inband metadata track dispatch type
 * @param {Object} tech the video.js tech
 * @private
 */

var createMetadataTrackIfNotExists = function createMetadataTrackIfNotExists(inbandTextTracks, dispatchType, tech) {
  // Already created; nothing to do.
  if (inbandTextTracks.metadataTrack_) {
    return;
  }

  var metadataTrack = tech.addRemoteTextTrack({
    kind: 'metadata',
    label: 'Timed Metadata'
  }, false).track;

  metadataTrack.inBandMetadataTrackDispatchType = dispatchType;
  inbandTextTracks.metadataTrack_ = metadataTrack;
};
/**
 * Remove cues from a track on video.js.
 *
 * @param {Double} start start of where we should remove the cue
 * @param {Double} end end of where the we should remove the cue
 * @param {Object} track the text track to remove the cues from
 * @private
 */

var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
  if (!track || !track.cues) {
    return;
  }

  // Walk backwards so that removing a cue does not disturb the indices we
  // have yet to visit.
  var index = track.cues.length;

  while (index--) {
    var cue = track.cues[index];

    // Only remove cues that fall entirely within [start, end].
    if (cue.startTime >= start && cue.endTime <= end) {
      track.removeCue(cue);
    }
  }
};
/**
 * Remove duplicate cues from a track on video.js (a cue is considered a
 * duplicate if it has the same time interval and text as another)
 *
 * @param {Object} track the text track to remove the duplicate cues from
 * @private
 */

var removeDuplicateCuesFromTrack = function removeDuplicateCuesFromTrack(track) {
  var cues = track.cues;

  if (!cues) {
    return;
  }

  // For each cue, collect every later-seen cue with an identical interval and
  // text, then remove them. The list is re-read on each pass because
  // removeCue mutates it.
  for (var outer = 0; outer < cues.length; outer++) {
    var dupes = [];
    var seen = 0;

    for (var inner = 0; inner < cues.length; inner++) {
      var sameInterval = cues[outer].startTime === cues[inner].startTime && cues[outer].endTime === cues[inner].endTime;

      if (sameInterval && cues[outer].text === cues[inner].text) {
        seen++;

        // The first match is the cue itself (or its first occurrence);
        // everything after that is a duplicate.
        if (seen > 1) {
          dupes.push(cues[inner]);
        }
      }
    }

    dupes.forEach(function (dupe) {
      track.removeCue(dupe);
    });
  }
};
15078
/**
 * Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
 * front of current time.
 *
 * @param {Array} buffer
 *        The current buffer of gop information
 * @param {number} currentTime
 *        The current time
 * @param {Double} mapping
 *        Offset to map display time to stream presentation time
 * @return {Array}
 *         List of gops considered safe to append over
 */

var gopsSafeToAlignWith = function gopsSafeToAlignWith(buffer, currentTime, mapping) {
  if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
    return [];
  }

  // pts value for current time + 3 seconds to give a bit more wiggle room.
  var thresholdPts = Math.ceil((currentTime - mapping + 3) * clock.ONE_SECOND_IN_TS);

  // Skip past every gop at or before the threshold; the rest are safe.
  var firstSafeIndex = 0;

  while (firstSafeIndex < buffer.length && buffer[firstSafeIndex].pts <= thresholdPts) {
    firstSafeIndex++;
  }

  return buffer.slice(firstSafeIndex);
};
/**
 * Appends gop information (timing and byteLength) received by the transmuxer for the
 * gops appended in the last call to appendBuffer
 *
 * @param {Array} buffer
 *        The current buffer of gop information
 * @param {Array} gops
 *        List of new gop information
 * @param {boolean} replace
 *        If true, replace the buffer with the new gop information. If false, append the
 *        new gop information to the buffer in the right location of time.
 * @return {Array}
 *         Updated list of gop information
 */

var updateGopBuffer = function updateGopBuffer(buffer, gops, replace) {
  if (!gops.length) {
    return buffer;
  }

  if (replace) {
    // In safe append mode, completely overwrite the gop buffer with the most
    // recently appended data so future segments only align with gops that are
    // both ahead of current time and in the last appended segment.
    return gops.slice();
  }

  // Find where the new gops belong in pts order and drop everything in the
  // old buffer from that point on.
  var firstNewPts = gops[0].pts;
  var insertAt = 0;

  while (insertAt < buffer.length && buffer[insertAt].pts < firstNewPts) {
    insertAt++;
  }

  return buffer.slice(0, insertAt).concat(gops);
};
/**
 * Removes gop information in buffer that overlaps with provided start and end
 *
 * @param {Array} buffer
 *        The current buffer of gop information
 * @param {Double} start
 *        position to start the remove at
 * @param {Double} end
 *        position to end the remove at
 * @param {Double} mapping
 *        Offset to map display time to stream presentation time
 */

var removeGopBuffer = function removeGopBuffer(buffer, start, end, mapping) {
  var startPts = Math.ceil((start - mapping) * clock.ONE_SECOND_IN_TS);
  var endPts = Math.ceil((end - mapping) * clock.ONE_SECOND_IN_TS);
  var result = buffer.slice();

  // Scan backwards for the last gop at or before the end of the remove range.
  var removeEnd = buffer.length;

  while (removeEnd--) {
    if (buffer[removeEnd].pts <= endPts) {
      break;
    }
  }

  // The whole remove range lies before the buffer; nothing to remove.
  if (removeEnd === -1) {
    return result;
  }

  // Scan backwards from there for the last gop at or before the range start.
  var removeStart = removeEnd + 1;

  while (removeStart--) {
    if (buffer[removeStart].pts <= startPts) {
      break;
    }
  }

  // Clamp the remove range start to index 0.
  removeStart = Math.max(removeStart, 0);
  result.splice(removeStart, removeEnd - removeStart + 1);
  return result;
};
15192
// Shallow equality over own enumerable keys. Note: two missing values
// (both undefined/null) are deliberately NOT equal.
var shallowEqual = function shallowEqual(a, b) {
  // If either side is missing (including both), they are not equal.
  if (!a || !b) {
    return false;
  }

  // Same reference: trivially equal.
  if (a === b) {
    return true;
  }

  // Sort the key sets so they can be compared position by position.
  var keysA = Object.keys(a).sort();
  var keysB = Object.keys(b).sort();

  // Different number of keys: not equal.
  if (keysA.length !== keysB.length) {
    return false;
  }

  // Equal only when every sorted key matches and its values are identical.
  return keysA.every(function (key, i) {
    return key === keysB[i] && a[key] === b[key];
  });
};
15230
// https://www.w3.org/TR/WebIDL-1/#quotaexceedederror
// Legacy numeric DOMException code for QuotaExceededError (per the WebIDL spec above).
var QUOTA_EXCEEDED_ERR = 22;
15233
/**
 * The segment loader has no recourse except to fetch a segment in the
 * current playlist and use the internal timestamps in that segment to
 * generate a syncPoint. This function returns a good candidate index
 * for that process.
 *
 * @param {number} currentTimeline - the timeline to pick a segment from
 * @param {Array} segments - the segments array from a playlist.
 * @param {number} targetTime - cumulative duration to accumulate before choosing
 * @return {number} An index of a segment from the playlist to load
 */

var getSyncSegmentCandidate = function getSyncSegmentCandidate(currentTimeline, segments, targetTime) {
  segments = segments || [];
  var matchingIndices = [];
  var cumulativeDuration = 0;

  for (var i = 0; i < segments.length; i++) {
    var segment = segments[i];

    if (currentTimeline !== segment.timeline) {
      continue;
    }

    matchingIndices.push(i);
    cumulativeDuration += segment.duration;

    // Stop at the first segment that carries us past the target time.
    if (cumulativeDuration > targetTime) {
      return i;
    }
  }

  // No segment on the requested timeline at all.
  if (matchingIndices.length === 0) {
    return 0;
  }

  // default to the last timeline segment
  return matchingIndices[matchingIndices.length - 1];
}; // In the event of a quota exceeded error, keep at least one second of back buffer. This
// number was arbitrarily chosen and may be updated in the future, but seemed reasonable
// as a start to prevent any potential issues with removing content too close to the
// playhead.

var MIN_BACK_BUFFER = 1; // in seconds (see comment above)

// in ms
var CHECK_BUFFER_DELAY = 500;

// Returns true only for actual finite numbers (rejects NaN, +/-Infinity and non-numbers).
var finite = function finite(num) {
  return typeof num === 'number' && isFinite(num);
}; // With most content hovering around 30fps, if a segment has a duration less than a half
// frame at 30fps or one frame at 60fps, the bandwidth and throughput calculations will
// not accurately reflect the rest of the content.


var MIN_SEGMENT_DURATION_TO_SAVE_STATS = 1 / 60;
// Decides whether switching from `startingMedia` to a segment described by
// `trackInfo` would be an illegal media switch. Returns a human-readable
// reason string, or null when the switch is allowed (or not checked).
var illegalMediaSwitch = function illegalMediaSwitch(loaderType, startingMedia, trackInfo) {
  // Only guard the 'main' loader for now (narrows the scope of these checks),
  // and only when both the starting media and the new track info are known.
  if (loaderType !== 'main' || !startingMedia || !trackInfo) {
    return null;
  }

  var hadVideo = startingMedia.hasVideo;
  var hasVideo = trackInfo.hasVideo;
  var hasAudio = trackInfo.hasAudio;

  if (!hasAudio && !hasVideo) {
    return 'Neither audio nor video found in segment.';
  }

  if (hadVideo && !hasVideo) {
    return 'Only audio found in segment when we expected video.' + ' We can\'t switch to audio only from a stream that had video.' + ' To get rid of this message, please add codec information to the manifest.';
  }

  if (!hadVideo && hasVideo) {
    return 'Video found in segment when we expected only audio.' + ' We can\'t switch to a stream with video from an audio only stream.' + ' To get rid of this message, please add codec information to the manifest.';
  }

  return null;
};
/**
 * Calculates a time value that is safe to remove from the back buffer without interrupting
 * playback.
 *
 * @param {TimeRange} seekable
 *        The current seekable range
 * @param {number} currentTime
 *        The current time of the player
 * @param {number} targetDuration
 *        The target duration of the current playlist
 * @return {number}
 *         Time that is safe to remove from the back buffer without interrupting playback
 */

var safeBackBufferTrimTime = function safeBackBufferTrimTime(seekable, currentTime, targetDuration) {
  // Default: trim up to BACK_BUFFER_LENGTH seconds behind the playhead. A
  // reasonable default matters for high bitrate content and VOD/live streams
  // with large windows, where an overfilled buffer can throw APPEND_BUFFER_ERR.
  var trimTime = currentTime - Config.BACK_BUFFER_LENGTH;

  if (seekable.length) {
    // Live windows may hold less content than the full allowed back buffer;
    // don't keep content that has slid out of the seekable window.
    trimTime = Math.max(trimTime, seekable.start(0));
  }

  // Never trim within one target duration of the playhead, to avoid removing
  // the GOP currently being played (which causes playback stalls).
  var maxTrimTime = currentTime - targetDuration;
  return Math.min(maxTrimTime, trimTime);
};
15339
/**
 * Build a human-readable description of a segmentInfo object for logging:
 * media/part indexes, segment and part start/end times, timeline, how the
 * segment was selected, and the owning playlist id.
 *
 * @param {Object} segmentInfo the segment request descriptor
 * @return {string} a single-line summary of the segment request
 */
var segmentInfoString = function segmentInfoString(segmentInfo) {
  var startOfSegment = segmentInfo.startOfSegment;
  var duration = segmentInfo.duration;
  var segment = segmentInfo.segment;
  var part = segmentInfo.part;
  var index = segmentInfo.mediaIndex;
  var partIndex = segmentInfo.partIndex;
  var timeline = segmentInfo.timeline;
  var playlist = segmentInfo.playlist;
  var seq = playlist.mediaSequence;
  var id = playlist.id;
  var segments = playlist.segments === void 0 ? [] : playlist.segments;
  var lastSegmentIndex = segments.length - 1;
  var selection;

  // Record which strategy chose this segment.
  if (segmentInfo.getMediaInfoForTime) {
    selection = "getMediaInfoForTime (" + segmentInfo.getMediaInfoForTime + ")";
  } else if (segmentInfo.isSyncRequest) {
    selection = 'getSyncSegmentCandidate (isSyncRequest)';
  } else {
    selection = 'mediaIndex/partIndex increment';
  }

  var hasPartIndex = typeof partIndex === 'number';
  var name = segmentInfo.segment.uri ? 'segment' : 'pre-segment';
  var zeroBasedPartCount = hasPartIndex ? getKnownPartCount({
    preloadSegment: segment
  }) - 1 : 0;
  var pieces = [name + " [" + (seq + index) + "/" + (seq + lastSegmentIndex) + "]"];

  if (hasPartIndex) {
    pieces.push(" part [" + partIndex + "/" + zeroBasedPartCount + "]");
  }

  pieces.push(" segment start/end [" + segment.start + " => " + segment.end + "]");

  if (hasPartIndex) {
    pieces.push(" part start/end [" + part.start + " => " + part.end + "]");
  }

  pieces.push(" startOfSegment [" + startOfSegment + "]");
  pieces.push(" duration [" + duration + "]");
  pieces.push(" timeline [" + timeline + "]");
  pieces.push(" selected by [" + selection + "]");
  pieces.push(" playlist [" + id + "]");
  return pieces.join('');
};
15369
/**
 * Name of the timing-info property for a media type, e.g.
 * 'audio' -> 'audioTimingInfo'.
 *
 * @param {string} mediaType the media type ('audio' or 'video')
 * @return {string} the timing-info property name
 */
var timingInfoPropertyForMedia = function timingInfoPropertyForMedia(mediaType) {
  return '' + mediaType + 'TimingInfo';
};
/**
 * Returns the timestamp offset to use for the segment.
 *
 * @param {number} segmentTimeline
 *        The timeline of the segment
 * @param {number} currentTimeline
 *        The timeline currently being followed by the loader
 * @param {number} startOfSegment
 *        The estimated segment start
 * @param {TimeRange[]} buffered
 *        The loader's buffer
 * @param {boolean} overrideCheck
 *        If true, no checks are made to see if the timestamp offset value
 *        should be set, but sets it directly to a value.
 *
 * @return {number|null}
 *         Either a number representing a new timestamp offset, or null if the
 *         segment is part of the same timeline
 */


var timestampOffsetForSegment = function timestampOffsetForSegment(_ref) {
  var segmentTimeline = _ref.segmentTimeline,
      currentTimeline = _ref.currentTimeline,
      startOfSegment = _ref.startOfSegment,
      buffered = _ref.buffered,
      overrideCheck = _ref.overrideCheck;

  // Only change the timestamp offset when crossing a discontinuity (or when
  // explicitly overridden). Changing it merely because the segment start is
  // less than the current offset caused bad behavior on long-running live
  // streams in the past.
  var stayingOnTimeline = segmentTimeline === currentTimeline;

  if (stayingOnTimeline && !overrideCheck) {
    return null;
  }

  // When a rendition switch lands on an *older* timeline (e.g. the new
  // rendition's pre-discontinuity segment is requested to fill time the old
  // rendition already buffered past), using the buffered end would duplicate
  // content. The estimated startOfSegment is the better anchor there.
  if (segmentTimeline < currentTimeline) {
    return startOfSegment;
  }

  // Otherwise prefer the buffered end: post-discontinuity content should line
  // up with the end of the buffer as if it were time 0, and startOfSegment
  // (derived from the previous segment's end) is less reliable.
  if (buffered.length) {
    return buffered.end(buffered.length - 1);
  }

  return startOfSegment;
};
15446/**
15447 * Returns whether or not the loader should wait for a timeline change from the timeline
15448 * change controller before processing the segment.
15449 *
15450 * Primary timing in VHS goes by video. This is different from most media players, as
15451 * audio is more often used as the primary timing source. For the foreseeable future, VHS
15452 * will continue to use video as the primary timing source, due to the current logic and
15453 * expectations built around it.
15454
15455 * Since the timing follows video, in order to maintain sync, the video loader is
15456 * responsible for setting both audio and video source buffer timestamp offsets.
15457 *
15458 * Setting different values for audio and video source buffers could lead to
15459 * desyncing. The following examples demonstrate some of the situations where this
15460 * distinction is important. Note that all of these cases involve demuxed content. When
15461 * content is muxed, the audio and video are packaged together, therefore syncing
15462 * separate media playlists is not an issue.
15463 *
15464 * CASE 1: Audio prepares to load a new timeline before video:
15465 *
15466 * Timeline: 0 1
15467 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15468 * Audio Loader: ^
15469 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15470 * Video Loader ^
15471 *
15472 * In the above example, the audio loader is preparing to load the 6th segment, the first
15473 * after a discontinuity, while the video loader is still loading the 5th segment, before
15474 * the discontinuity.
15475 *
15476 * If the audio loader goes ahead and loads and appends the 6th segment before the video
15477 * loader crosses the discontinuity, then when appended, the 6th audio segment will use
15478 * the timestamp offset from timeline 0. This will likely lead to desyncing. In addition,
15479 * the audio loader must provide the audioAppendStart value to trim the content in the
15480 * transmuxer, and that value relies on the audio timestamp offset. Since the audio
15481 * timestamp offset is set by the video (main) loader, the audio loader shouldn't load the
15482 * segment until that value is provided.
15483 *
15484 * CASE 2: Video prepares to load a new timeline before audio:
15485 *
15486 * Timeline: 0 1
15487 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15488 * Audio Loader: ^
15489 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15490 * Video Loader ^
15491 *
15492 * In the above example, the video loader is preparing to load the 6th segment, the first
15493 * after a discontinuity, while the audio loader is still loading the 5th segment, before
15494 * the discontinuity.
15495 *
15496 * If the video loader goes ahead and loads and appends the 6th segment, then once the
15497 * segment is loaded and processed, both the video and audio timestamp offsets will be
15498 * set, since video is used as the primary timing source. This is to ensure content lines
15499 * up appropriately, as any modifications to the video timing are reflected by audio when
15500 * the video loader sets the audio and video timestamp offsets to the same value. However,
15501 * setting the timestamp offset for audio before audio has had a chance to change
15502 * timelines will likely lead to desyncing, as the audio loader will append segment 5 with
15503 * a timestamp intended to apply to segments from timeline 1 rather than timeline 0.
15504 *
15505 * CASE 3: When seeking, audio prepares to load a new timeline before video
15506 *
15507 * Timeline: 0 1
15508 * Audio Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15509 * Audio Loader: ^
15510 * Video Segments: 0 1 2 3 4 5 DISCO 6 7 8 9
15511 * Video Loader ^
15512 *
15513 * In the above example, both audio and video loaders are loading segments from timeline
15514 * 0, but imagine that the seek originated from timeline 1.
15515 *
15516 * When seeking to a new timeline, the timestamp offset will be set based on the expected
15517 * segment start of the loaded video segment. In order to maintain sync, the audio loader
15518 * must wait for the video loader to load its segment and update both the audio and video
15519 * timestamp offsets before it may load and append its own segment. This is the case
15520 * whether the seek results in a mismatched segment request (e.g., the audio loader
15521 * chooses to load segment 3 and the video loader chooses to load segment 4) or the
15522 * loaders choose to load the same segment index from each playlist, as the segments may
15523 * not be aligned perfectly, even for matching segment indexes.
15524 *
15525 * @param {Object} timelinechangeController
15526 * @param {number} currentTimeline
15527 * The timeline currently being followed by the loader
15528 * @param {number} segmentTimeline
15529 * The timeline of the segment being loaded
15530 * @param {('main'|'audio')} loaderType
15531 * The loader type
15532 * @param {boolean} audioDisabled
15533 * Whether the audio is disabled for the loader. This should only be true when the
15534 * loader may have muxed audio in its segment, but should not append it, e.g., for
15535 * the main loader when an alternate audio playlist is active.
15536 *
15537 * @return {boolean}
15538 * Whether the loader should wait for a timeline change from the timeline change
15539 * controller before processing the segment
15540 */
15541
var shouldWaitForTimelineChange = function shouldWaitForTimelineChange(_ref2) {
  var timelineChangeController = _ref2.timelineChangeController,
      currentTimeline = _ref2.currentTimeline,
      segmentTimeline = _ref2.segmentTimeline,
      loaderType = _ref2.loaderType,
      audioDisabled = _ref2.audioDisabled;

  // Staying on the same timeline never requires waiting.
  if (currentTimeline === segmentTimeline) {
    return false;
  }

  if (loaderType === 'audio') {
    var lastMainTimelineChange = timelineChangeController.lastTimelineChange({
      type: 'main'
    });

    // Audio waits when main has not changed timelines yet (no segment loaded)
    // or has not yet changed to the timeline audio wants to load, since main
    // is responsible for setting the audio timestamp offset.
    return !lastMainTimelineChange || lastMainTimelineChange.to !== segmentTimeline;
  }

  // Main only needs to wait when audio is demuxed (audioDisabled on main
  // means an alternate audio loader handles audio). Main must hold off until
  // audio is *pending* a change to the same timeline so both loaders cross
  // together; this includes the initial -1 -> 0 change, which is treated the
  // same as any other timeline change.
  if (loaderType === 'main' && audioDisabled) {
    var pendingAudioTimelineChange = timelineChangeController.pendingTimelineChange({
      type: 'audio'
    });
    var audioReadyToCross = pendingAudioTimelineChange && pendingAudioTimelineChange.to === segmentTimeline;
    return !audioReadyToCross;
  }

  return false;
};
/**
 * Compute the media duration described by a pair of timing infos: the larger
 * of the audio and video durations. A missing timing info, or one without
 * numeric start and end, contributes 0.
 *
 * @param {Object} audioTimingInfo audio timing info ({start, end}) or null
 * @param {Object} videoTimingInfo video timing info ({start, end}) or null
 * @return {number} the longest of the two durations, or 0
 */
var mediaDuration = function mediaDuration(audioTimingInfo, videoTimingInfo) {
  var durationOf = function durationOf(timingInfo) {
    if (!timingInfo || typeof timingInfo.start !== 'number' || typeof timingInfo.end !== 'number') {
      return 0;
    }

    return timingInfo.end - timingInfo.start;
  };

  return Math.max(durationOf(audioTimingInfo), durationOf(videoTimingInfo));
};
/**
 * Whether a segment's measured duration exceeds the allowed maximum.
 *
 * @param {Object} options
 * @param {number} options.segmentDuration the measured segment duration
 * @param {number} options.maxDuration the maximum allowed duration
 * @return {boolean} true when the segment is too long
 */
var segmentTooLong = function segmentTooLong(_ref3) {
  var segmentDuration = _ref3.segmentDuration,
      maxDuration = _ref3.maxDuration;

  // 0 duration segments are most likely due to metadata only segments or a lack of
  // information.
  if (!segmentDuration) {
    return false;
  }

  // HLS requires each EXTINF, rounded to the nearest integer, to be <= the
  // target duration (draft-pantos-http-live-streaming-23#section-4.3.3.1).
  // DASH target durations from mpd-parser can be approximate, so a small
  // fudge factor keeps the check from firing spuriously.
  var roundedDuration = Math.round(segmentDuration);
  return roundedDuration > maxDuration + TIME_FUDGE_FACTOR;
};
/**
 * Build a log message (with severity) when an HLS segment's measured duration
 * exceeds the playlist's target duration; returns null for non-HLS sources or
 * when the segment is within bounds.
 *
 * @param {Object} segmentInfo the segment request descriptor
 * @param {string} sourceType the source type ('hls' or 'dash')
 * @return {Object|null} {severity, message} or null when nothing to report
 */
var getTroublesomeSegmentDurationMessage = function getTroublesomeSegmentDurationMessage(segmentInfo, sourceType) {
  // Right now we aren't following DASH's timing model exactly, so only perform
  // this check for HLS content.
  if (sourceType !== 'hls') {
    return null;
  }

  var segmentDuration = mediaDuration(segmentInfo.audioTimingInfo, segmentInfo.videoTimingInfo);

  // A 0 duration means either a metadata-only segment or missing information;
  // neither is worth reporting.
  if (!segmentDuration) {
    return null;
  }

  var targetDuration = segmentInfo.playlist.targetDuration;
  var wayTooLong = segmentTooLong({
    segmentDuration: segmentDuration,
    maxDuration: targetDuration * 2
  });
  var slightlyTooLong = segmentTooLong({
    segmentDuration: segmentDuration,
    maxDuration: targetDuration
  });

  if (!wayTooLong && !slightlyTooLong) {
    return null;
  }

  return {
    severity: wayTooLong ? 'warn' : 'info',
    message: "Segment with index " + segmentInfo.mediaIndex + " " + ("from playlist " + segmentInfo.playlist.id + " ") + ("has a duration of " + segmentDuration + " ") + ("when the reported duration is " + segmentInfo.duration + " ") + ("and the target duration is " + targetDuration + ". ") + 'For HLS content, a duration in excess of the target duration may result in ' + 'playback issues. See the HLS specification section on EXT-X-TARGETDURATION for ' + 'more details: ' + 'https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.3.1'
  };
};
15663/**
15664 * An object that manages segment loading and appending.
15665 *
15666 * @class SegmentLoader
15667 * @param {Object} options required and optional options
15668 * @extends videojs.EventTarget
15669 */
15670
15671var SegmentLoader = /*#__PURE__*/function (_videojs$EventTarget) {
15672 _inheritsLoose__default["default"](SegmentLoader, _videojs$EventTarget);
15673
  /**
   * Construct a SegmentLoader.
   *
   * @param {Object} settings required loader collaborators and configuration
   *        (currentTime getter, mediaSource, sourceUpdater, syncController,
   *        timelineChangeController, decrypter, etc.)
   * @param {Object=} options additional options — NOTE(review): accepted but
   *        not read anywhere in this constructor body
   * @throws {TypeError} when settings, settings.currentTime, or
   *         settings.mediaSource are missing
   */
  function SegmentLoader(settings, options) {
    var _this;

    _this = _videojs$EventTarget.call(this) || this; // check pre-conditions

    if (!settings) {
      throw new TypeError('Initialization settings are required');
    }

    if (typeof settings.currentTime !== 'function') {
      throw new TypeError('No currentTime getter specified');
    }

    if (!settings.mediaSource) {
      throw new TypeError('No MediaSource specified');
    } // public properties


    _this.bandwidth = settings.bandwidth;
    _this.throughput = {
      rate: 0,
      count: 0
    };
    _this.roundTrip = NaN;

    _this.resetStats_();

    _this.mediaIndex = null;
    _this.partIndex = null; // private settings

    _this.hasPlayed_ = settings.hasPlayed;
    _this.currentTime_ = settings.currentTime;
    _this.seekable_ = settings.seekable;
    _this.seeking_ = settings.seeking;
    _this.duration_ = settings.duration;
    _this.mediaSource_ = settings.mediaSource;
    _this.vhs_ = settings.vhs;
    _this.loaderType_ = settings.loaderType;
    _this.currentMediaInfo_ = void 0;
    _this.startingMediaInfo_ = void 0;
    _this.segmentMetadataTrack_ = settings.segmentMetadataTrack;
    _this.goalBufferLength_ = settings.goalBufferLength;
    _this.sourceType_ = settings.sourceType;
    _this.sourceUpdater_ = settings.sourceUpdater;
    _this.inbandTextTracks_ = settings.inbandTextTracks;
    _this.state_ = 'INIT';
    _this.timelineChangeController_ = settings.timelineChangeController;
    _this.shouldSaveSegmentTimingInfo_ = true;
    _this.parse708captions_ = settings.parse708captions;
    _this.captionServices_ = settings.captionServices;
    _this.experimentalExactManifestTimings = settings.experimentalExactManifestTimings; // private instance variables

    _this.checkBufferTimeout_ = null;
    _this.error_ = void 0;
    _this.currentTimeline_ = -1;
    _this.pendingSegment_ = null;
    _this.xhrOptions_ = null;
    _this.pendingSegments_ = [];
    _this.audioDisabled_ = false;
    _this.isPendingTimestampOffset_ = false; // TODO possibly move gopBuffer and timeMapping info to a separate controller

    _this.gopBuffer_ = [];
    _this.timeMapping_ = 0;
    _this.safeAppend_ = videojs__default["default"].browser.IE_VERSION >= 11;
    _this.appendInitSegment_ = {
      audio: true,
      video: true
    };
    _this.playlistOfLastInitSegment_ = {
      audio: null,
      video: null
    };
    _this.callQueue_ = []; // If the segment loader prepares to load a segment, but does not have enough
    // information yet to start the loading process (e.g., if the audio loader wants to
    // load a segment from the next timeline but the main loader hasn't yet crossed that
    // timeline), then the load call will be added to the queue until it is ready to be
    // processed.

    _this.loadQueue_ = [];
    _this.metadataQueue_ = {
      id3: [],
      caption: []
    };
    _this.waitingOnRemove_ = false;
    _this.quotaExceededErrorRetryTimeout_ = null; // Fragmented mp4 playback

    _this.activeInitSegmentId_ = null;
    _this.initSegments_ = {}; // HLSe playback

    _this.cacheEncryptionKeys_ = settings.cacheEncryptionKeys;
    _this.keyCache_ = {};
    _this.decrypter_ = settings.decrypter; // Manages the tracking and generation of sync-points, mappings
    // between a time in the display time and a segment index within
    // a playlist

    _this.syncController_ = settings.syncController;
    _this.syncPoint_ = {
      segmentIndex: 0,
      time: 0
    };
    _this.transmuxer_ = _this.createTransmuxer_();

    _this.triggerSyncInfoUpdate_ = function () {
      return _this.trigger('syncinfoupdate');
    };

    _this.syncController_.on('syncinfoupdate', _this.triggerSyncInfoUpdate_);

    _this.mediaSource_.addEventListener('sourceopen', function () {
      if (!_this.isEndOfStream_()) {
        _this.ended_ = false;
      }
    }); // ...for determining the fetch location


    _this.fetchAtBuffer_ = false;
    _this.logger_ = logger("SegmentLoader[" + _this.loaderType_ + "]");
    // The 'state' accessor logs every transition and fires 'statechange' so
    // observers can react to loader state changes.
    Object.defineProperty(_assertThisInitialized__default["default"](_this), 'state', {
      get: function get() {
        return this.state_;
      },
      set: function set(newState) {
        if (newState !== this.state_) {
          this.logger_(this.state_ + " -> " + newState);
          this.state_ = newState;
          this.trigger('statechange');
        }
      }
    });

    _this.sourceUpdater_.on('ready', function () {
      if (_this.hasEnoughInfoToAppend_()) {
        _this.processCallQueue_();
      }
    }); // Only the main loader needs to listen for pending timeline changes, as the main
    // loader should wait for audio to be ready to change its timeline so that both main
    // and audio timelines change together. For more details, see the
    // shouldWaitForTimelineChange function.


    if (_this.loaderType_ === 'main') {
      _this.timelineChangeController_.on('pendingtimelinechange', function () {
        if (_this.hasEnoughInfoToAppend_()) {
          _this.processCallQueue_();
        }
      });
    } // The main loader only listens on pending timeline changes, but the audio loader,
    // since its loads follow main, needs to listen on timeline changes. For more details,
    // see the shouldWaitForTimelineChange function.


    if (_this.loaderType_ === 'audio') {
      _this.timelineChangeController_.on('timelinechange', function () {
        if (_this.hasEnoughInfoToLoad_()) {
          _this.processLoadQueue_();
        }

        if (_this.hasEnoughInfoToAppend_()) {
          _this.processCallQueue_();
        }
      });
    }

    return _this;
  }
15839
  var _proto = SegmentLoader.prototype; // all methods below are defined on the prototype
15841
15842 _proto.createTransmuxer_ = function createTransmuxer_() {
15843 return segmentTransmuxer.createTransmuxer({
15844 remux: false,
15845 alignGopsAtEnd: this.safeAppend_,
15846 keepOriginalTimestamps: true,
15847 parse708captions: this.parse708captions_,
15848 captionServices: this.captionServices_
15849 });
15850 }
15851 /**
15852 * reset all of our media stats
15853 *
15854 * @private
15855 */
15856 ;
15857
15858 _proto.resetStats_ = function resetStats_() {
15859 this.mediaBytesTransferred = 0;
15860 this.mediaRequests = 0;
15861 this.mediaRequestsAborted = 0;
15862 this.mediaRequestsTimedout = 0;
15863 this.mediaRequestsErrored = 0;
15864 this.mediaTransferDuration = 0;
15865 this.mediaSecondsLoaded = 0;
15866 this.mediaAppends = 0;
15867 }
15868 /**
15869 * dispose of the SegmentLoader and reset to the default state
15870 */
15871 ;
15872
  _proto.dispose = function dispose() {
    // Notify listeners before tearing anything down.
    this.trigger('dispose');
    this.state = 'DISPOSED';
    this.pause();
    this.abort_();

    if (this.transmuxer_) {
      // Stop the transmuxer so it can release its resources.
      this.transmuxer_.terminate();
    }

    this.resetStats_();

    // Cancel any scheduled buffer check.
    if (this.checkBufferTimeout_) {
      window__default["default"].clearTimeout(this.checkBufferTimeout_);
    }

    // Detach from the sync controller so this loader can be garbage collected.
    if (this.syncController_ && this.triggerSyncInfoUpdate_) {
      this.syncController_.off('syncinfoupdate', this.triggerSyncInfoUpdate_);
    }

    // Remove all event listeners registered on this loader.
    this.off();
  };
15895
15896 _proto.setAudio = function setAudio(enable) {
15897 this.audioDisabled_ = !enable;
15898
15899 if (enable) {
15900 this.appendInitSegment_.audio = true;
15901 } else {
15902 // remove current track audio if it gets disabled
15903 this.sourceUpdater_.removeAudio(0, this.duration_());
15904 }
15905 }
15906 /**
15907 * abort anything that is currently doing on with the SegmentLoader
15908 * and reset to a default state
15909 */
15910 ;
15911
15912 _proto.abort = function abort() {
15913 if (this.state !== 'WAITING') {
15914 if (this.pendingSegment_) {
15915 this.pendingSegment_ = null;
15916 }
15917
15918 return;
15919 }
15920
15921 this.abort_(); // We aborted the requests we were waiting on, so reset the loader's state to READY
15922 // since we are no longer "waiting" on any requests. XHR callback is not always run
15923 // when the request is aborted. This will prevent the loader from being stuck in the
15924 // WAITING state indefinitely.
15925
15926 this.state = 'READY'; // don't wait for buffer check timeouts to begin fetching the
15927 // next segment
15928
15929 if (!this.paused()) {
15930 this.monitorBuffer_();
15931 }
15932 }
15933 /**
15934 * abort all pending xhr requests and null any pending segements
15935 *
15936 * @private
15937 */
15938 ;
15939
15940 _proto.abort_ = function abort_() {
15941 if (this.pendingSegment_ && this.pendingSegment_.abortRequests) {
15942 this.pendingSegment_.abortRequests();
15943 } // clear out the segment being processed
15944
15945
15946 this.pendingSegment_ = null;
15947 this.callQueue_ = [];
15948 this.loadQueue_ = [];
15949 this.metadataQueue_.id3 = [];
15950 this.metadataQueue_.caption = [];
15951 this.timelineChangeController_.clearPendingTimelineChange(this.loaderType_);
15952 this.waitingOnRemove_ = false;
15953 window__default["default"].clearTimeout(this.quotaExceededErrorRetryTimeout_);
15954 this.quotaExceededErrorRetryTimeout_ = null;
15955 };
15956
15957 _proto.checkForAbort_ = function checkForAbort_(requestId) {
15958 // If the state is APPENDING, then aborts will not modify the state, meaning the first
15959 // callback that happens should reset the state to READY so that loading can continue.
15960 if (this.state === 'APPENDING' && !this.pendingSegment_) {
15961 this.state = 'READY';
15962 return true;
15963 }
15964
15965 if (!this.pendingSegment_ || this.pendingSegment_.requestId !== requestId) {
15966 return true;
15967 }
15968
15969 return false;
15970 }
15971 /**
15972 * set an error on the segment loader and null out any pending segements
15973 *
15974 * @param {Error} error the error to set on the SegmentLoader
15975 * @return {Error} the error that was set or that is currently set
15976 */
15977 ;
15978
15979 _proto.error = function error(_error) {
15980 if (typeof _error !== 'undefined') {
15981 this.logger_('error occurred:', _error);
15982 this.error_ = _error;
15983 }
15984
15985 this.pendingSegment_ = null;
15986 return this.error_;
15987 };
15988
15989 _proto.endOfStream = function endOfStream() {
15990 this.ended_ = true;
15991
15992 if (this.transmuxer_) {
15993 // need to clear out any cached data to prepare for the new segment
15994 segmentTransmuxer.reset(this.transmuxer_);
15995 }
15996
15997 this.gopBuffer_.length = 0;
15998 this.pause();
15999 this.trigger('ended');
16000 }
16001 /**
16002 * Indicates which time ranges are buffered
16003 *
16004 * @return {TimeRange}
16005 * TimeRange object representing the current buffered ranges
16006 */
16007 ;
16008
16009 _proto.buffered_ = function buffered_() {
16010 var trackInfo = this.getMediaInfo_();
16011
16012 if (!this.sourceUpdater_ || !trackInfo) {
16013 return videojs__default["default"].createTimeRanges();
16014 }
16015
16016 if (this.loaderType_ === 'main') {
16017 var hasAudio = trackInfo.hasAudio,
16018 hasVideo = trackInfo.hasVideo,
16019 isMuxed = trackInfo.isMuxed;
16020
16021 if (hasVideo && hasAudio && !this.audioDisabled_ && !isMuxed) {
16022 return this.sourceUpdater_.buffered();
16023 }
16024
16025 if (hasVideo) {
16026 return this.sourceUpdater_.videoBuffered();
16027 }
16028 } // One case that can be ignored for now is audio only with alt audio,
16029 // as we don't yet have proper support for that.
16030
16031
16032 return this.sourceUpdater_.audioBuffered();
16033 }
16034 /**
16035 * Gets and sets init segment for the provided map
16036 *
16037 * @param {Object} map
16038 * The map object representing the init segment to get or set
16039 * @param {boolean=} set
16040 * If true, the init segment for the provided map should be saved
16041 * @return {Object}
16042 * map object for desired init segment
16043 */
16044 ;
16045
16046 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
16047 if (set === void 0) {
16048 set = false;
16049 }
16050
16051 if (!map) {
16052 return null;
16053 }
16054
16055 var id = initSegmentId(map);
16056 var storedMap = this.initSegments_[id];
16057
16058 if (set && !storedMap && map.bytes) {
16059 this.initSegments_[id] = storedMap = {
16060 resolvedUri: map.resolvedUri,
16061 byterange: map.byterange,
16062 bytes: map.bytes,
16063 tracks: map.tracks,
16064 timescales: map.timescales
16065 };
16066 }
16067
16068 return storedMap || map;
16069 }
16070 /**
16071 * Gets and sets key for the provided key
16072 *
16073 * @param {Object} key
16074 * The key object representing the key to get or set
16075 * @param {boolean=} set
16076 * If true, the key for the provided key should be saved
16077 * @return {Object}
16078 * Key object for desired key
16079 */
16080 ;
16081
_proto.segmentKey = function segmentKey(key, set) {
  if (set === void 0) {
    set = false;
  }

  // no key object means nothing to resolve
  if (!key) {
    return null;
  }

  var cacheId = segmentKeyId(key);
  // TODO: We should use the HTTP Expires header to invalidate our cache per
  // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-6.2.3
  var cachedKey = this.keyCache_[cacheId];

  // save the key bytes when key caching is enabled, the caller asked to
  // store, and this is the first time we've seen bytes for this key
  if (set && this.cacheEncryptionKeys_ && key.bytes && !cachedKey) {
    cachedKey = {
      resolvedUri: key.resolvedUri,
      bytes: key.bytes
    };
    this.keyCache_[cacheId] = cachedKey;
  }

  var result = {
    resolvedUri: (cachedKey || key).resolvedUri
  };

  // only expose bytes that came from the cache
  if (cachedKey) {
    result.bytes = cachedKey.bytes;
  }

  return result;
}
16112 /**
16113 * Returns true if all configuration required for loading is present, otherwise false.
16114 *
16115 * @return {boolean} True if the all configuration is ready for loading
16116 * @private
16117 */
16118 ;
16119
_proto.couldBeginLoading_ = function couldBeginLoading_() {
  // loading requires a playlist; return the (falsy) playlist value itself
  // so the result matches the original short-circuit expression exactly
  if (!this.playlist_) {
    return this.playlist_;
  }

  // with a playlist, loading can begin whenever the loader is not paused
  return !this.paused();
}
16123 /**
16124 * load a playlist and start to fill the buffer
16125 */
16126 ;
16127
_proto.load = function load() {
  // un-pause by (re)starting the buffer monitor
  this.monitorBuffer_();

  // without a playlist there is nothing to load yet; keep waiting for one
  // to be specified
  if (!this.playlist_) {
    return;
  }

  // if all the configuration is ready, initialize and begin loading
  if (this.state === 'INIT' && this.couldBeginLoading_()) {
    return this.init_();
  }

  // transition to READY only when loading is possible and we aren't in the
  // middle of processing a segment (any state other than READY/INIT)
  var isIdleState = this.state === 'READY' || this.state === 'INIT';

  if (this.couldBeginLoading_() && isIdleState) {
    this.state = 'READY';
  }
}
16150 /**
16151 * Once all the starting parameters have been specified, begin
16152 * operation. This method should only be invoked from the INIT
16153 * state.
16154 *
16155 * @private
16156 */
16157 ;
16158
_proto.init_ = function init_() {
  this.state = 'READY';

  // if this is the audio segment loader, and it hasn't been inited before,
  // then any old audio data from the muxed content should be removed
  this.resetEverything();

  // kick off the buffer-fill loop
  return this.monitorBuffer_();
}
16166 /**
16167 * set a playlist on the segment loader
16168 *
16169 * @param {PlaylistLoader} media the playlist to set on the segment loader
16170 */
16171 ;
16172
_proto.playlist = function playlist(newPlaylist, options) {
  if (options === void 0) {
    options = {};
  }

  // ignore calls without a playlist (e.g. before a media playlist resolves)
  if (!newPlaylist) {
    return;
  }

  var oldPlaylist = this.playlist_;
  var segmentInfo = this.pendingSegment_;
  this.playlist_ = newPlaylist;
  this.xhrOptions_ = options; // when we haven't started playing yet, the start of a live playlist
  // is always our zero-time so force a sync update each time the playlist
  // is refreshed from the server
  //
  // Use the INIT state to determine if playback has started, as the playlist sync info
  // should be fixed once requests begin (as sync points are generated based on sync
  // info), but not before then.

  if (this.state === 'INIT') {
    newPlaylist.syncInfo = {
      mediaSequence: newPlaylist.mediaSequence,
      time: 0
    }; // Setting the date time mapping means mapping the program date time (if available)
    // to time 0 on the player's timeline. The playlist's syncInfo serves a similar
    // purpose, mapping the initial mediaSequence to time zero. Since the syncInfo can
    // be updated as the playlist is refreshed before the loader starts loading, the
    // program date time mapping needs to be updated as well.
    //
    // This mapping is only done for the main loader because a program date time should
    // map equivalently between playlists.

    if (this.loaderType_ === 'main') {
      this.syncController_.setDateTimeMappingForStart(newPlaylist);
    }
  }

  // prefer the playlist id for logging, falling back to its uri
  var oldId = null;

  if (oldPlaylist) {
    if (oldPlaylist.id) {
      oldId = oldPlaylist.id;
    } else if (oldPlaylist.uri) {
      oldId = oldPlaylist.uri;
    }
  }

  this.logger_("playlist update [" + oldId + " => " + (newPlaylist.id || newPlaylist.uri) + "]"); // in VOD, this is always a rendition switch (or we updated our syncInfo above)
  // in LIVE, we always want to update with new playlists (including refreshes)

  this.trigger('syncinfoupdate'); // if we were unpaused but waiting for a playlist, start
  // buffering now

  if (this.state === 'INIT' && this.couldBeginLoading_()) {
    return this.init_();
  }

  // a different uri means a rendition switch rather than a live refresh
  if (!oldPlaylist || oldPlaylist.uri !== newPlaylist.uri) {
    if (this.mediaIndex !== null) {
      // we must "resync" the segment loader when we switch renditions and
      // the segment loader is already synced to the previous rendition
      this.resyncLoader();
    }

    this.currentMediaInfo_ = void 0;
    this.trigger('playlistupdate'); // the rest of this function depends on `oldPlaylist` being defined

    return;
  } // we reloaded the same playlist so we are in a live scenario
  // and we will likely need to adjust the mediaIndex


  // number of segments that fell off the front of the live window
  var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
  this.logger_("live window shift [" + mediaSequenceDiff + "]"); // update the mediaIndex on the SegmentLoader
  // this is important because we can abort a request and this value must be
  // equal to the last appended mediaIndex

  if (this.mediaIndex !== null) {
    this.mediaIndex -= mediaSequenceDiff; // this can happen if we are going to load the first segment, but get a playlist
    // update during that. mediaIndex would go from 0 to -1 if mediaSequence in the
    // new playlist was incremented by 1.

    if (this.mediaIndex < 0) {
      this.mediaIndex = null;
      this.partIndex = null;
    } else {
      var segment = this.playlist_.segments[this.mediaIndex]; // partIndex should remain the same for the same segment
      // unless parts fell off of the playlist for this segment.
      // In that case we need to reset partIndex and resync

      if (this.partIndex && (!segment.parts || !segment.parts.length || !segment.parts[this.partIndex])) {
        var mediaIndex = this.mediaIndex;
        this.logger_("currently processing part (index " + this.partIndex + ") no longer exists.");
        this.resetLoader(); // We want to throw away the partIndex and the data associated with it,
        // as the part was dropped from our current playlists segment.
        // The mediaIndex will still be valid so keep that around.

        this.mediaIndex = mediaIndex;
      }
    }
  } // update the mediaIndex on the SegmentInfo object
  // this is important because we will update this.mediaIndex with this value
  // in `handleAppendsDone_` after the segment has been successfully appended


  if (segmentInfo) {
    segmentInfo.mediaIndex -= mediaSequenceDiff;

    if (segmentInfo.mediaIndex < 0) {
      segmentInfo.mediaIndex = null;
      segmentInfo.partIndex = null;
    } else {
      // we need to update the referenced segment so that timing information is
      // saved for the new playlist's segment, however, if the segment fell off the
      // playlist, we can leave the old reference and just lose the timing info
      if (segmentInfo.mediaIndex >= 0) {
        segmentInfo.segment = newPlaylist.segments[segmentInfo.mediaIndex];
      }

      if (segmentInfo.partIndex >= 0 && segmentInfo.segment.parts) {
        segmentInfo.part = segmentInfo.segment.parts[segmentInfo.partIndex];
      }
    }
  }

  // let the sync controller remember timing info for segments that expired
  this.syncController_.saveExpiredSegmentInfo(oldPlaylist, newPlaylist);
}
16301 /**
16302 * Prevent the loader from fetching additional segments. If there
16303 * is a segment request outstanding, it will finish processing
16304 * before the loader halts. A segment loader can be unpaused by
16305 * calling load().
16306 */
16307 ;
16308
_proto.pause = function pause() {
  // cancelling the scheduled buffer check is what "paused" means here;
  // an in-flight segment request is allowed to finish processing
  if (!this.checkBufferTimeout_) {
    return;
  }

  window__default["default"].clearTimeout(this.checkBufferTimeout_);
  this.checkBufferTimeout_ = null;
}
16315 /**
16316 * Returns whether the segment loader is fetching additional
16317 * segments when given the opportunity. This property can be
16318 * modified through calls to pause() and load().
16319 */
16320 ;
16321
_proto.paused = function paused() {
  // the loader is paused exactly when no buffer check is scheduled
  var isScheduled = this.checkBufferTimeout_ !== null;
  return !isScheduled;
}
16325 /**
16326 * Delete all the buffered data and reset the SegmentLoader
16327 *
16328 * @param {Function} [done] an optional callback to be executed when the remove
16329 * operation is complete
16330 */
16331 ;
16332
_proto.resetEverything = function resetEverything(done) {
  this.ended_ = false;

  // force init segments to be re-appended for both media types
  this.appendInitSegment_ = {
    audio: true,
    video: true
  };
  this.resetLoader();

  // remove from 0, the earliest point, to Infinity, to signify removal of
  // everything. VTT Segment Loader doesn't need to do anything but in the
  // regular SegmentLoader, we then clamp the value to duration if necessary.
  this.remove(0, Infinity, done);

  if (this.transmuxer_) {
    // clears fmp4 captions
    this.transmuxer_.postMessage({
      action: 'clearAllMp4Captions'
    });

    // reset the cache in the transmuxer
    this.transmuxer_.postMessage({
      action: 'reset'
    });
  }
}
16355 /**
16356 * Force the SegmentLoader to resync and start loading around the currentTime instead
16357 * of starting at the end of the buffer
16358 *
16359 * Useful for fast quality changes
16360 */
16361 ;
16362
_proto.resetLoader = function resetLoader() {
  // forget the buffered position so the next request is chosen around
  // currentTime instead of the end of the buffer, then resync
  this.fetchAtBuffer_ = false;
  this.resyncLoader();
}
16367 /**
16368 * Force the SegmentLoader to restart synchronization and make a conservative guess
16369 * before returning to the simple walk-forward method
16370 */
16371 ;
16372
_proto.resyncLoader = function resyncLoader() {
  if (this.transmuxer_) {
    // need to clear out any cached data to prepare for the new segment
    segmentTransmuxer.reset(this.transmuxer_);
  }

  // drop all position/sync state
  this.mediaIndex = null;
  this.partIndex = null;
  this.syncPoint_ = null;
  this.isPendingTimestampOffset_ = false;

  // throw away queued work and metadata
  this.callQueue_ = [];
  this.loadQueue_ = [];
  this.metadataQueue_.id3 = [];
  this.metadataQueue_.caption = [];

  // cancel any request that is in flight
  this.abort();

  if (this.transmuxer_) {
    this.transmuxer_.postMessage({
      action: 'clearParsedMp4Captions'
    });
  }
}
16395 /**
16396 * Remove any data in the source buffer between start and end times
16397 *
16398 * @param {number} start - the start time of the region to remove from the buffer
16399 * @param {number} end - the end time of the region to remove from the buffer
16400 * @param {Function} [done] - an optional callback to be executed when the remove
16401 * @param {boolean} force - force all remove operations to happen
16402 * operation is complete
16403 */
16404 ;
16405
_proto.remove = function remove(start, end, done, force) {
  if (done === void 0) {
    done = function done() {};
  }

  if (force === void 0) {
    force = false;
  }

  // clamp end to duration if we need to remove everything.
  // This is due to a browser bug that causes issues if we remove to Infinity.
  // videojs/videojs-contrib-hls#1225
  if (end === Infinity) {
    end = this.duration_();
  }

  // skip removes that would throw an error
  // commonly happens during a rendition switch at the start of a video
  // from start 0 to end 0
  if (end <= start) {
    // FIX: this message previously used ${} placeholders inside a
    // single-quoted string, so the values were never interpolated into
    // the log output; build the message with real concatenation instead
    this.logger_("skipping remove because end " + end + " is <= start " + start);
    return;
  }

  // nothing to remove if we haven't processed any media
  if (!this.sourceUpdater_ || !this.getMediaInfo_()) {
    this.logger_('skipping remove because no source updater or starting media info');
    return;
  }

  // set it to one to complete this function's removes
  var removesRemaining = 1;

  var removeFinished = function removeFinished() {
    removesRemaining--;

    // fire the caller's callback once every pending remove has called back
    if (removesRemaining === 0) {
      done();
    }
  };

  if (force || !this.audioDisabled_) {
    removesRemaining++;
    this.sourceUpdater_.removeAudio(start, end, removeFinished);
  }

  // While it would be better to only remove video if the main loader has video, this
  // should be safe with audio only as removeVideo will call back even if there's no
  // video buffer.
  //
  // In theory we can check to see if there's video before calling the remove, but in
  // the event that we're switching between renditions and from video to audio only
  // (when we add support for that), we may need to clear the video contents despite
  // what the new media will contain.
  if (force || this.loaderType_ === 'main') {
    this.gopBuffer_ = removeGopBuffer(this.gopBuffer_, start, end, this.timeMapping_);
    removesRemaining++;
    this.sourceUpdater_.removeVideo(start, end, removeFinished);
  }

  // remove any captions and ID3 tags
  for (var track in this.inbandTextTracks_) {
    removeCuesFromTrack(start, end, this.inbandTextTracks_[track]);
  }

  removeCuesFromTrack(start, end, this.segmentMetadataTrack_);

  // finished this function's removes
  removeFinished();
}
16475 /**
16476 * (re-)schedule monitorBufferTick_ to run as soon as possible
16477 *
16478 * @private
16479 */
16480 ;
16481
_proto.monitorBuffer_ = function monitorBuffer_() {
  // replace any pending check so only one timer is ever outstanding
  if (this.checkBufferTimeout_) {
    window__default["default"].clearTimeout(this.checkBufferTimeout_);
  }

  // schedule the next tick as soon as possible (1ms)
  this.checkBufferTimeout_ = window__default["default"].setTimeout(this.monitorBufferTick_.bind(this), 1);
}
16489 /**
16490 * As long as the SegmentLoader is in the READY state, periodically
16491 * invoke fillBuffer_().
16492 *
16493 * @private
16494 */
16495 ;
16496
_proto.monitorBufferTick_ = function monitorBufferTick_() {
  // only attempt to fill the buffer while in the READY state
  if (this.state === 'READY') {
    this.fillBuffer_();
  }

  // reschedule ourselves, first clearing any pending timer so that only
  // one buffer check is ever outstanding
  if (this.checkBufferTimeout_) {
    window__default["default"].clearTimeout(this.checkBufferTimeout_);
  }

  this.checkBufferTimeout_ = window__default["default"].setTimeout(this.monitorBufferTick_.bind(this), CHECK_BUFFER_DELAY);
}
16508 /**
 * fill the buffer with segments unless the sourceBuffers are
16510 * currently updating
16511 *
16512 * Note: this function should only ever be called by monitorBuffer_
16513 * and never directly
16514 *
16515 * @private
16516 */
16517 ;
16518
_proto.fillBuffer_ = function fillBuffer_() {
  // TODO since the source buffer maintains a queue, and we shouldn't call this function
  // except when we're ready for the next segment, this check can most likely be removed
  if (this.sourceUpdater_.updating()) {
    return;
  }

  // see if we need to begin loading immediately
  var nextRequest = this.chooseNextRequest_();

  if (!nextRequest) {
    return;
  }

  // a numeric timestampOffset means this request crosses a timeline
  // boundary, so record the pending timeline change before loading
  if (typeof nextRequest.timestampOffset === 'number') {
    this.isPendingTimestampOffset_ = false;
    this.timelineChangeController_.pendingTimelineChange({
      type: this.loaderType_,
      from: this.currentTimeline_,
      to: nextRequest.timeline
    });
  }

  this.loadSegment_(nextRequest);
}
16544 /**
16545 * Determines if we should call endOfStream on the media source based
 * on the state of the buffer or if the appended segment was the final
16547 * segment in the playlist.
16548 *
16549 * @param {number} [mediaIndex] the media index of segment we last appended
16550 * @param {Object} [playlist] a media playlist object
16551 * @return {boolean} do we need to call endOfStream on the MediaSource
16552 */
16553 ;
16554
_proto.isEndOfStream_ = function isEndOfStream_(mediaIndex, playlist, partIndex) {
  if (mediaIndex === void 0) {
    mediaIndex = this.mediaIndex;
  }

  if (playlist === void 0) {
    playlist = this.playlist_;
  }

  if (partIndex === void 0) {
    partIndex = this.partIndex;
  }

  // without a playlist or a media source there is nothing to end
  if (!playlist || !this.mediaSource_) {
    return false;
  }

  var currentSegment = typeof mediaIndex === 'number' && playlist.segments[mediaIndex];

  // mediaIndex is zero based but length is 1 based
  var isLastSegment = mediaIndex + 1 === playlist.segments.length;

  // true if there are no parts, or this is the last part
  var isLastPart = !currentSegment || !currentSegment.parts || partIndex + 1 === currentSegment.parts.length;

  // if we've buffered to the end of the video, we need to call endOfStream
  // so that MediaSources can trigger the `ended` event when it runs out of
  // buffered data instead of waiting indefinitely
  return playlist.endList && this.mediaSource_.readyState === 'open' && isLastSegment && isLastPart;
}
16582 /**
16583 * Determines what request should be made given current segment loader state.
16584 *
16585 * @return {Object} a request object that describes the segment/part to load
16586 */
16587 ;
16588
_proto.chooseNextRequest_ = function chooseNextRequest_() {
  // how much playable media is buffered ahead of the playhead
  var bufferedEnd = lastBufferedEnd(this.buffered_()) || 0;
  var bufferedTime = Math.max(0, bufferedEnd - this.currentTime_());
  // "preloaded": playback has not started yet but >= 1s is already buffered
  var preloaded = !this.hasPlayed_() && bufferedTime >= 1;
  var haveEnoughBuffer = bufferedTime >= this.goalBufferLength_();
  var segments = this.playlist_.segments; // return no segment if:
  // 1. we don't have segments
  // 2. The video has not yet played and we already downloaded a segment
  // 3. we already have enough buffered time

  if (!segments.length || preloaded || haveEnoughBuffer) {
    return null;
  }

  // lazily (re)establish a sync point; it is cleared on resync
  this.syncPoint_ = this.syncPoint_ || this.syncController_.getSyncPoint(this.playlist_, this.duration_(), this.currentTimeline_, this.currentTime_());
  var next = {
    partIndex: null,
    mediaIndex: null,
    startOfSegment: null,
    playlist: this.playlist_,
    // with no sync point, a sync request is needed to establish timing
    isSyncRequest: Boolean(!this.syncPoint_)
  };

  if (next.isSyncRequest) {
    next.mediaIndex = getSyncSegmentCandidate(this.currentTimeline_, segments, bufferedEnd);
  } else if (this.mediaIndex !== null) {
    // walk-forward: continue from the last appended segment/part
    var segment = segments[this.mediaIndex];
    var partIndex = typeof this.partIndex === 'number' ? this.partIndex : -1;
    next.startOfSegment = segment.end ? segment.end : bufferedEnd;

    if (segment.parts && segment.parts[partIndex + 1]) {
      // the current segment has another part to request
      next.mediaIndex = this.mediaIndex;
      next.partIndex = partIndex + 1;
    } else {
      // move on to the next full segment
      next.mediaIndex = this.mediaIndex + 1;
    }
  } else {
    // Find the segment containing the end of the buffer or current time.
    var _Playlist$getMediaInf = Playlist.getMediaInfoForTime({
      experimentalExactManifestTimings: this.experimentalExactManifestTimings,
      playlist: this.playlist_,
      currentTime: this.fetchAtBuffer_ ? bufferedEnd : this.currentTime_(),
      startingPartIndex: this.syncPoint_.partIndex,
      startingSegmentIndex: this.syncPoint_.segmentIndex,
      startTime: this.syncPoint_.time
    }),
        segmentIndex = _Playlist$getMediaInf.segmentIndex,
        startTime = _Playlist$getMediaInf.startTime,
        _partIndex = _Playlist$getMediaInf.partIndex;

    // record which time value located this segment (for logging/debugging)
    next.getMediaInfoForTime = this.fetchAtBuffer_ ? 'bufferedEnd' : 'currentTime';
    next.mediaIndex = segmentIndex;
    next.startOfSegment = startTime;
    next.partIndex = _partIndex;
  }

  var nextSegment = segments[next.mediaIndex];
  var nextPart = nextSegment && typeof next.partIndex === 'number' && nextSegment.parts && nextSegment.parts[next.partIndex]; // if the next segment index is invalid or
  // the next partIndex is invalid do not choose a next segment.

  if (!nextSegment || typeof next.partIndex === 'number' && !nextPart) {
    return null;
  } // if the next segment has parts, and we don't have a partIndex.
  // Set partIndex to 0


  if (typeof next.partIndex !== 'number' && nextSegment.parts) {
    next.partIndex = 0;
  }

  var ended = this.mediaSource_ && this.mediaSource_.readyState === 'ended'; // do not choose a next segment if all of the following:
  // 1. this is the last segment in the playlist
  // 2. end of stream has been called on the media source already
  // 3. the player is not seeking

  if (next.mediaIndex >= segments.length - 1 && ended && !this.seeking_()) {
    return null;
  }

  return this.generateSegmentInfo_(next);
};
16670
_proto.generateSegmentInfo_ = function generateSegmentInfo_(options) {
  var playlist = options.playlist,
      mediaIndex = options.mediaIndex,
      startOfSegment = options.startOfSegment,
      isSyncRequest = options.isSyncRequest,
      partIndex = options.partIndex,
      forceTimestampOffset = options.forceTimestampOffset,
      getMediaInfoForTime = options.getMediaInfoForTime;
  var segment = playlist.segments[mediaIndex];
  // FIX: guard segment.parts before indexing — a live playlist refresh can
  // drop the parts array for a segment while a numeric partIndex is still
  // held, which previously threw a TypeError here
  var part = typeof partIndex === 'number' && segment.parts && segment.parts[partIndex];
  var segmentInfo = {
    requestId: 'segment-loader-' + Math.random(),
    // resolve the segment URL relative to the playlist
    uri: part && part.resolvedUri || segment.resolvedUri,
    // the segment's mediaIndex at the time it was requested
    mediaIndex: mediaIndex,
    partIndex: part ? partIndex : null,
    // whether or not to update the SegmentLoader's state with this
    // segment's mediaIndex
    isSyncRequest: isSyncRequest,
    startOfSegment: startOfSegment,
    // the segment's playlist
    playlist: playlist,
    // unencrypted bytes of the segment
    bytes: null,
    // when a key is defined for this segment, the encrypted bytes
    encryptedBytes: null,
    // The target timestampOffset for this segment when we append it
    // to the source buffer
    timestampOffset: null,
    // The timeline that the segment is in
    timeline: segment.timeline,
    // The expected duration of the segment in seconds
    duration: part && part.duration || segment.duration,
    // retain the segment in case the playlist updates while doing an async process
    segment: segment,
    part: part,
    byteLength: 0,
    transmuxer: this.transmuxer_,
    // type of getMediaInfoForTime that was used to get this segment
    getMediaInfoForTime: getMediaInfoForTime
  };
  // a pending timestamp offset (or an explicit override) forces the
  // timestamp-offset check regardless of buffered state
  var overrideCheck = typeof forceTimestampOffset !== 'undefined' ? forceTimestampOffset : this.isPendingTimestampOffset_;
  segmentInfo.timestampOffset = this.timestampOffsetForSegment_({
    segmentTimeline: segment.timeline,
    currentTimeline: this.currentTimeline_,
    startOfSegment: startOfSegment,
    buffered: this.buffered_(),
    overrideCheck: overrideCheck
  });
  var audioBufferedEnd = lastBufferedEnd(this.sourceUpdater_.audioBuffered());

  if (typeof audioBufferedEnd === 'number') {
    // since the transmuxer is using the actual timing values, but the buffer is
    // adjusted by the timestamp offset, we must adjust the value here
    segmentInfo.audioAppendStart = audioBufferedEnd - this.sourceUpdater_.audioTimestampOffset();
  }

  if (this.sourceUpdater_.videoBuffered().length) {
    segmentInfo.gopsToAlignWith = gopsSafeToAlignWith(this.gopBuffer_, // since the transmuxer is using the actual timing values, but the time is
    // adjusted by the timestamp offset, we must adjust the value here
    this.currentTime_() - this.sourceUpdater_.videoTimestampOffset(), this.timeMapping_);
  }

  return segmentInfo;
} // get the timestampoffset for a segment,
16737 // added so that vtt segment loader can override and prevent
16738 // adding timestamp offsets.
16739 ;
16740
_proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_(options) {
  // thin delegation to the module-level helper; the VTT segment loader
  // overrides this method to prevent timestamp offsets from being added
  var offset = timestampOffsetForSegment(options);
  return offset;
}
16744 /**
16745 * Determines if the network has enough bandwidth to complete the current segment
16746 * request in a timely manner. If not, the request will be aborted early and bandwidth
16747 * updated to trigger a playlist switch.
16748 *
16749 * @param {Object} stats
16750 * Object containing stats about the request timing and size
16751 * @private
16752 */
16753 ;
16754
_proto.earlyAbortWhenNeeded_ = function earlyAbortWhenNeeded_(stats) {
  // skip the whole check when paused, when this is (approximately) the
  // lowest rendition, or when we can't estimate segment sizes
  if (this.vhs_.tech_.paused() || // Don't abort if the current playlist is on the lowestEnabledRendition
  // TODO: Replace using timeout with a boolean indicating whether this playlist is
  // the lowestEnabledRendition.
  !this.xhrOptions_.timeout || // Don't abort if we have no bandwidth information to estimate segment sizes
  !this.playlist_.attributes.BANDWIDTH) {
    return;
  } // Wait at least 1 second since the first byte of data has been received before
  // using the calculated bandwidth from the progress event to allow the bitrate
  // to stabilize


  if (Date.now() - (stats.firstBytesReceivedAt || Date.now()) < 1000) {
    return;
  }

  var currentTime = this.currentTime_();
  var measuredBandwidth = stats.bandwidth;
  var segmentDuration = this.pendingSegment_.duration;
  // estimated seconds until the current request finishes downloading
  var requestTimeRemaining = Playlist.estimateSegmentRequestTime(segmentDuration, measuredBandwidth, this.playlist_, stats.bytesReceived); // Subtract 1 from the timeUntilRebuffer so we still consider an early abort
  // if we are only left with less than 1 second when the request completes.
  // A negative timeUntilRebuffering indicates we are already rebuffering

  var timeUntilRebuffer$1 = timeUntilRebuffer(this.buffered_(), currentTime, this.vhs_.tech_.playbackRate()) - 1; // Only consider aborting early if the estimated time to finish the download
  // is larger than the estimated time until the player runs out of forward buffer

  if (requestTimeRemaining <= timeUntilRebuffer$1) {
    return;
  }

  // find a rendition that minimizes rebuffering at the measured bandwidth
  var switchCandidate = minRebufferMaxBandwidthSelector({
    master: this.vhs_.playlists.master,
    currentTime: currentTime,
    bandwidth: measuredBandwidth,
    duration: this.duration_(),
    segmentDuration: segmentDuration,
    timeUntilRebuffer: timeUntilRebuffer$1,
    currentTimeline: this.currentTimeline_,
    syncController: this.syncController_
  });

  if (!switchCandidate) {
    return;
  }

  var rebufferingImpact = requestTimeRemaining - timeUntilRebuffer$1;
  // seconds of rebuffering avoided by switching to the candidate rendition
  var timeSavedBySwitching = rebufferingImpact - switchCandidate.rebufferingImpact;
  var minimumTimeSaving = 0.5; // If we are already rebuffering, increase the amount of variance we add to the
  // potential round trip time of the new request so that we are not too aggressive
  // with switching to a playlist that might save us a fraction of a second.

  if (timeUntilRebuffer$1 <= TIME_FUDGE_FACTOR) {
    minimumTimeSaving = 1;
  }

  if (!switchCandidate.playlist || switchCandidate.playlist.uri === this.playlist_.uri || timeSavedBySwitching < minimumTimeSaving) {
    return;
  } // set the bandwidth to that of the desired playlist being sure to scale by
  // BANDWIDTH_VARIANCE and add one so the playlist selector does not exclude it
  // don't trigger a bandwidthupdate as the bandwidth is artificial


  this.bandwidth = switchCandidate.playlist.attributes.BANDWIDTH * Config.BANDWIDTH_VARIANCE + 1;
  this.trigger('earlyabort');
};
16820
_proto.handleAbort_ = function handleAbort_(segmentInfo) {
  // log which request was cancelled, then count it toward abort stats
  this.logger_("Aborting " + segmentInfoString(segmentInfo));
  this.mediaRequestsAborted += 1;
}
16825 /**
16826 * XHR `progress` event handler
16827 *
16828 * @param {Event}
16829 * The XHR `progress` event
16830 * @param {Object} simpleSegment
16831 * A simplified segment object copy
16832 * @private
16833 */
16834 ;
16835
_proto.handleProgress_ = function handleProgress_(event, simpleSegment) {
  // possibly abort the request early (and switch renditions) if the
  // measured bandwidth suggests it won't finish in time
  this.earlyAbortWhenNeeded_(simpleSegment.stats);

  // the request may have been aborted/superseded while checking above
  if (this.checkForAbort_(simpleSegment.requestId)) {
    return;
  }

  this.trigger('progress');
};
16845
_proto.handleTrackInfo_ = function handleTrackInfo_(simpleSegment, trackInfo) {
  this.earlyAbortWhenNeeded_(simpleSegment.stats);

  if (this.checkForAbort_(simpleSegment.requestId)) {
    return;
  }

  if (this.checkForIllegalMediaSwitch(trackInfo)) {
    return;
  }

  trackInfo = trackInfo || {};

  // When we have track info, determine what media types this loader is
  // dealing with. Guard against cases where we're not getting track info at
  // all until we are certain that all streams will provide it.
  var mediaInfoChanged = !shallowEqual(this.currentMediaInfo_, trackInfo);

  if (mediaInfoChanged) {
    // new media info means init segments must be re-appended
    this.appendInitSegment_ = {
      audio: true,
      video: true
    };
    this.startingMediaInfo_ = trackInfo;
    this.currentMediaInfo_ = trackInfo;
    this.logger_('trackinfo update', trackInfo);
    this.trigger('trackinfo');
  }

  // trackinfo may cause an abort if the trackinfo
  // causes a codec change to an unsupported codec.
  if (this.checkForAbort_(simpleSegment.requestId)) {
    return;
  }

  // set trackinfo on the pending segment so that it can append
  this.pendingSegment_.trackInfo = trackInfo;

  // check if any calls were waiting on the track info
  if (this.hasEnoughInfoToAppend_()) {
    this.processCallQueue_();
  }
};
16886
_proto.handleTimingInfo_ = function handleTimingInfo_(simpleSegment, mediaType, timeType, time) {
  this.earlyAbortWhenNeeded_(simpleSegment.stats);

  if (this.checkForAbort_(simpleSegment.requestId)) {
    return;
  }

  var pending = this.pendingSegment_;
  var timingInfoProperty = timingInfoPropertyForMedia(mediaType);

  // lazily create the per-media timing bucket, then record this time value
  if (!pending[timingInfoProperty]) {
    pending[timingInfoProperty] = {};
  }

  pending[timingInfoProperty][timeType] = time;
  this.logger_("timinginfo: " + mediaType + " - " + timeType + " - " + time);

  // check if any calls were waiting on the timing info
  if (this.hasEnoughInfoToAppend_()) {
    this.processCallQueue_();
  }
};
16904
_proto.handleCaptions_ = function handleCaptions_(simpleSegment, captionData) {
  var _this2 = this;

  this.earlyAbortWhenNeeded_(simpleSegment.stats);

  if (this.checkForAbort_(simpleSegment.requestId)) {
    return;
  } // This could only happen with fmp4 segments, but
  // should still not happen in general


  if (captionData.length === 0) {
    this.logger_('SegmentLoader received no captions from a caption event');
    return;
  }

  var segmentInfo = this.pendingSegment_; // Wait until we have some video data so that caption timing
  // can be adjusted by the timestamp offset

  if (!segmentInfo.hasAppendedData_) {
    // requeue this exact call to be replayed once data has been appended
    this.metadataQueue_.caption.push(this.handleCaptions_.bind(this, simpleSegment, captionData));
    return;
  }

  // prefer the video timestamp offset; fall back to audio when no video
  // offset has been set
  var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();
  var captionTracks = {}; // get total start/end and captions for each track/stream

  captionData.forEach(function (caption) {
    // caption.stream is actually a track name...
    // set to the existing values in tracks or default values
    captionTracks[caption.stream] = captionTracks[caption.stream] || {
      // Infinity, as any other value will be less than this
      startTime: Infinity,
      captions: [],
      // 0, as any other value will be more than this
      endTime: 0
    };
    var captionTrack = captionTracks[caption.stream];
    // widen the track's window to cover this caption (offset-adjusted)
    captionTrack.startTime = Math.min(captionTrack.startTime, caption.startTime + timestampOffset);
    captionTrack.endTime = Math.max(captionTrack.endTime, caption.endTime + timestampOffset);
    captionTrack.captions.push(caption);
  });
  Object.keys(captionTracks).forEach(function (trackName) {
    var _captionTracks$trackN = captionTracks[trackName],
        startTime = _captionTracks$trackN.startTime,
        endTime = _captionTracks$trackN.endTime,
        captions = _captionTracks$trackN.captions;
    var inbandTextTracks = _this2.inbandTextTracks_;

    _this2.logger_("adding cues from " + startTime + " -> " + endTime + " for " + trackName);

    createCaptionsTrackIfNotExists(inbandTextTracks, _this2.vhs_.tech_, trackName); // clear out any cues that start and end at the same time period for the same track.
    // We do this because a rendition change that also changes the timescale for captions
    // will result in captions being re-parsed for certain segments. If we add them again
    // without clearing we will have two of the same captions visible.

    removeCuesFromTrack(startTime, endTime, inbandTextTracks[trackName]);
    addCaptionData({
      captionArray: captions,
      inbandTextTracks: inbandTextTracks,
      timestampOffset: timestampOffset
    });
  }); // Reset stored captions since we added parsed
  // captions to a text track at this point

  if (this.transmuxer_) {
    this.transmuxer_.postMessage({
      action: 'clearParsedMp4Captions'
    });
  }
};
16976
/**
 * Handle ID3 frames parsed from a segment by adding them to the in-band
 * metadata text track. Defers itself via metadataQueue_ until the first
 * append so that the timestamp offset is known.
 *
 * @param {Object} simpleSegment - simplified segment object for the request
 * @param {Array} id3Frames - parsed ID3 frames
 * @param {string} dispatchType - in-band metadata track dispatch type
 * @private
 */
_proto.handleId3_ = function handleId3_(simpleSegment, id3Frames, dispatchType) {
  this.earlyAbortWhenNeeded_(simpleSegment.stats);

  if (this.checkForAbort_(simpleSegment.requestId)) {
    return;
  }

  var pending = this.pendingSegment_;

  // The timestamp offset is only established once data has been appended, so
  // defer this exact call until then.
  if (!pending.hasAppendedData_) {
    this.metadataQueue_.id3.push(this.handleId3_.bind(this, simpleSegment, id3Frames, dispatchType));
    return;
  }

  // Prefer the video timestamp offset, falling back to audio when video is unset.
  var timestampOffset = this.sourceUpdater_.videoTimestampOffset() === null ? this.sourceUpdater_.audioTimestampOffset() : this.sourceUpdater_.videoTimestampOffset();

  // There's potentially an issue where we could double add metadata if there's a muxed
  // audio/video source with a metadata track, and an alt audio with a metadata track.
  // However, this probably won't happen, and if it does it can be handled then.
  createMetadataTrackIfNotExists(this.inbandTextTracks_, dispatchType, this.vhs_.tech_);
  addMetadata({
    inbandTextTracks: this.inbandTextTracks_,
    metadataArray: id3Frames,
    timestampOffset: timestampOffset,
    videoDuration: this.duration_()
  });
};
17003
/**
 * Replay every deferred metadata handler (ID3 first, then captions) that was
 * queued while waiting for the first append, then empty both queues.
 *
 * @private
 */
_proto.processMetadataQueue_ = function processMetadataQueue_() {
  // Invoke each queued handler for one metadata type.
  var runAll = function runAll(queue) {
    queue.forEach(function (handler) {
      handler();
    });
  };

  runAll(this.metadataQueue_.id3);
  runAll(this.metadataQueue_.caption);
  // Reset the queues now that every deferred handler has run.
  this.metadataQueue_.id3 = [];
  this.metadataQueue_.caption = [];
};
17014
/**
 * Run every function that was queued while the loader lacked enough info to
 * append, emptying the queue first.
 *
 * @private
 */
_proto.processCallQueue_ = function processCallQueue_() {
  // Snapshot and reset the queue before invoking anything: a queued function
  // may decide it still can't proceed and push itself back onto
  // this.callQueue_, and those re-queued entries must not be wiped out.
  var queued = this.callQueue_;
  this.callQueue_ = [];
  queued.forEach(function (queuedFn) {
    queuedFn();
  });
};
17025
/**
 * Run every segment-load function that was queued while the loader lacked
 * enough info to load, emptying the queue first.
 *
 * @private
 */
_proto.processLoadQueue_ = function processLoadQueue_() {
  // Snapshot and reset the queue before invoking anything: a queued function
  // may decide it still lacks enough info and push itself back onto
  // this.loadQueue_, and those re-queued entries must not be wiped out.
  var queued = this.loadQueue_;
  this.loadQueue_ = [];
  queued.forEach(function (queuedFn) {
    queuedFn();
  });
}
/**
 * Determines whether the loader has enough info to load the next segment.
 *
 * @return {boolean}
 *         Whether or not the loader has enough info to load the next segment
 */
;
17043
_proto.hasEnoughInfoToLoad_ = function hasEnoughInfoToLoad_() {
  // Primary timing is driven by video, so only the audio loader ever needs to
  // hold off on loading.
  if (this.loaderType_ !== 'audio') {
    return true;
  }

  var segmentInfo = this.pendingSegment_;

  // A fill buffer must have already run to establish a pending segment before
  // there's enough info to load.
  if (!segmentInfo) {
    return false;
  }

  // The first segment can and should be loaded immediately so that source buffers are
  // created together (before appending). Source buffer creation uses the presence of
  // audio and video data to determine whether to create audio/video source buffers, and
  // uses processed (transmuxed or parsed) media to determine the types required.
  if (!this.getCurrentMediaInfo_()) {
    return true;
  }

  // Technically, instead of waiting to load a segment on timeline changes, a segment
  // can be requested and downloaded and only wait before it is transmuxed or parsed.
  // But in practice, there are a few reasons why it is better to wait until a loader
  // is ready to append that segment before requesting and downloading:
  //
  // 1. Because audio and main loaders cross discontinuities together, if this loader
  //    is waiting for the other to catch up, then instead of requesting another
  //    segment and using up more bandwidth, by not yet loading, more bandwidth is
  //    allotted to the loader currently behind.
  // 2. media-segment-request doesn't have to have logic to consider whether a segment
  //    is ready to be processed or not, isolating the queueing behavior to the loader.
  // 3. The audio loader bases some of its segment properties on timing information
  //    provided by the main loader, meaning that, if the logic for waiting on
  //    processing was in media-segment-request, then it would also need to know how
  //    to re-generate the segment information after the main loader caught up.
  return !shouldWaitForTimelineChange({
    timelineChangeController: this.timelineChangeController_,
    currentTimeline: this.currentTimeline_,
    segmentTimeline: segmentInfo.timeline,
    loaderType: this.loaderType_,
    audioDisabled: this.audioDisabled_
  });
};
17093
/**
 * Return track info for the given (or pending) segment when available,
 * otherwise the loader's current media info.
 *
 * @param {Object} [segmentInfo] - segment to read track info from; defaults
 *        to the pending segment
 * @return {Object|undefined} track info, or undefined if none is known yet
 * @private
 */
_proto.getCurrentMediaInfo_ = function getCurrentMediaInfo_(segmentInfo) {
  if (segmentInfo === void 0) {
    segmentInfo = this.pendingSegment_;
  }

  if (segmentInfo && segmentInfo.trackInfo) {
    return segmentInfo.trackInfo;
  }

  return this.currentMediaInfo_;
};
17101
/**
 * Return the best-known media info: the current info when available,
 * otherwise the starting media info.
 *
 * @param {Object} [segmentInfo] - segment to read track info from; defaults
 *        to the pending segment
 * @return {Object|undefined} media info, or undefined if none is known yet
 * @private
 */
_proto.getMediaInfo_ = function getMediaInfo_(segmentInfo) {
  if (segmentInfo === void 0) {
    segmentInfo = this.pendingSegment_;
  }

  var current = this.getCurrentMediaInfo_(segmentInfo);
  return current || this.startingMediaInfo_;
};
17109
/**
 * Determine whether the loader has everything it needs to append the pending
 * segment: a ready source updater, no outstanding remove/retry, known track
 * info, the required timing info, and no pending timeline change.
 *
 * @return {boolean} whether an append may proceed
 * @private
 */
_proto.hasEnoughInfoToAppend_ = function hasEnoughInfoToAppend_() {
  if (!this.sourceUpdater_.ready()) {
    return false;
  }

  // If content needs to be removed or the loader is waiting on an append
  // reattempt, then no additional content should be appended until the prior
  // append is resolved.
  if (this.waitingOnRemove_ || this.quotaExceededErrorRetryTimeout_) {
    return false;
  }

  var segmentInfo = this.pendingSegment_;
  var trackInfo = this.getCurrentMediaInfo_();

  // No segment to append any data for, or no information yet on what this
  // specific segment contains.
  if (!segmentInfo || !trackInfo) {
    return false;
  }

  var hasAudio = trackInfo.hasAudio;
  var hasVideo = trackInfo.hasVideo;
  var isMuxed = trackInfo.isMuxed;

  if (hasVideo && !segmentInfo.videoTimingInfo) {
    return false;
  }

  // muxed content only relies on video timing information for now.
  if (hasAudio && !this.audioDisabled_ && !isMuxed && !segmentInfo.audioTimingInfo) {
    return false;
  }

  return !shouldWaitForTimelineChange({
    timelineChangeController: this.timelineChangeController_,
    currentTimeline: this.currentTimeline_,
    segmentTimeline: segmentInfo.timeline,
    loaderType: this.loaderType_,
    audioDisabled: this.audioDisabled_
  });
};
17155
/**
 * Handle transmuxed/parsed segment data: cache init segments and keys, fix up
 * timing info, update the timestamp offset, validate sync requests, and hand
 * the bytes off for appending. Re-queues itself on callQueue_ when the loader
 * isn't yet ready to append.
 *
 * @param {Object} simpleSegment - simplified segment object for the request
 * @param {Object} result - transmux/parse result ({type, data, initSegment, ...})
 * @private
 */
_proto.handleData_ = function handleData_(simpleSegment, result) {
  this.earlyAbortWhenNeeded_(simpleSegment.stats);

  if (this.checkForAbort_(simpleSegment.requestId)) {
    return;
  } // If there's anything in the call queue, then this data came later and should be
  // executed after the calls currently queued.


  if (this.callQueue_.length || !this.hasEnoughInfoToAppend_()) {
    this.callQueue_.push(this.handleData_.bind(this, simpleSegment, result));
    return;
  }

  var segmentInfo = this.pendingSegment_; // update the time mapping so we can translate from display time to media time

  this.setTimeMapping_(segmentInfo.timeline); // for tracking overall stats

  this.updateMediaSecondsLoaded_(segmentInfo.segment); // Note that the state isn't changed from loading to appending. This is because abort
  // logic may change behavior depending on the state, and changing state too early may
  // inflate our estimates of bandwidth. In the future this should be re-examined to
  // note more granular states.
  // don't process and append data if the mediaSource is closed

  if (this.mediaSource_.readyState === 'closed') {
    return;
  } // if this request included an initialization segment, save that data
  // to the initSegment cache


  if (simpleSegment.map) {
    simpleSegment.map = this.initSegmentForMap(simpleSegment.map, true); // move over init segment properties to media request

    segmentInfo.segment.map = simpleSegment.map;
  } // if this request included a segment key, save that data in the cache


  if (simpleSegment.key) {
    this.segmentKey(simpleSegment.key, true);
  }

  segmentInfo.isFmp4 = simpleSegment.isFmp4;
  segmentInfo.timingInfo = segmentInfo.timingInfo || {};

  if (segmentInfo.isFmp4) {
    this.trigger('fmp4');
    // fmp4 timing comes straight from the probed audio/video timing info
    segmentInfo.timingInfo.start = segmentInfo[timingInfoPropertyForMedia(result.type)].start;
  } else {
    var trackInfo = this.getCurrentMediaInfo_();
    // the main loader with video present keys timing off of video frames
    var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
    var firstVideoFrameTimeForData;

    if (useVideoTimingInfo) {
      firstVideoFrameTimeForData = segmentInfo.videoTimingInfo.start;
    } // Segment loader knows more about segment timing than the transmuxer (in certain
    // aspects), so make any changes required for a more accurate start time.
    // Don't set the end time yet, as the segment may not be finished processing.


    segmentInfo.timingInfo.start = this.trueSegmentStart_({
      currentStart: segmentInfo.timingInfo.start,
      playlist: segmentInfo.playlist,
      mediaIndex: segmentInfo.mediaIndex,
      currentVideoTimestampOffset: this.sourceUpdater_.videoTimestampOffset(),
      useVideoTimingInfo: useVideoTimingInfo,
      firstVideoFrameTimeForData: firstVideoFrameTimeForData,
      videoTimingInfo: segmentInfo.videoTimingInfo,
      audioTimingInfo: segmentInfo.audioTimingInfo
    });
  } // Init segments for audio and video only need to be appended in certain cases. Now
  // that data is about to be appended, we can check the final cases to determine
  // whether we should append an init segment.


  this.updateAppendInitSegmentStatus(segmentInfo, result.type); // Timestamp offset should be updated once we get new data and have its timing info,
  // as we use the start of the segment to offset the best guess (playlist provided)
  // timestamp offset.

  this.updateSourceBufferTimestampOffset_(segmentInfo); // if this is a sync request we need to determine whether it should
  // be appended or not.

  if (segmentInfo.isSyncRequest) {
    // first save/update our timing info for this segment.
    // this is what allows us to choose an accurate segment
    // and the main reason we make a sync request.
    this.updateTimingInfoEnd_(segmentInfo);
    this.syncController_.saveSegmentTimingInfo({
      segmentInfo: segmentInfo,
      shouldSaveTimelineMapping: this.loaderType_ === 'main'
    });
    var next = this.chooseNextRequest_(); // If the sync request isn't the segment that would be requested next
    // after taking into account its timing info, do not append it.

    if (next.mediaIndex !== segmentInfo.mediaIndex || next.partIndex !== segmentInfo.partIndex) {
      this.logger_('sync segment was incorrect, not appending');
      return;
    } // otherwise append it like any other segment as our guess was correct.


    this.logger_('sync segment was correct, appending');
  } // Save some state so that in the future anything waiting on first append (and/or
  // timestamp offset(s)) can process immediately. While the extra state isn't optimal,
  // we need some notion of whether the timestamp offset or other relevant information
  // has had a chance to be set.


  segmentInfo.hasAppendedData_ = true; // Now that the timestamp offset should be set, we can append any waiting ID3 tags.

  this.processMetadataQueue_();
  this.appendData_(segmentInfo, result);
};
17267
/**
 * Decide whether init segments must be (re-)appended for this segment,
 * recording the decision on this.appendInitSegment_.
 *
 * @param {Object} segmentInfo - the segment about to be appended
 * @param {string} type - 'audio' or 'video'
 * @private
 */
_proto.updateAppendInitSegmentStatus = function updateAppendInitSegmentStatus(segmentInfo, type) {
  // alt audio doesn't manage timestamp offset
  var isMainLoader = this.loaderType_ === 'main';
  var hasNumericOffset = typeof segmentInfo.timestampOffset === 'number';

  // When the timestamp offset changed, the timeline may have changed too, so
  // init segments for both types must be re-appended. Chunks of partial data
  // that merely carry over a changed offset are excluded so an init segment
  // isn't appended per chunk.
  if (isMainLoader && hasNumericOffset && !segmentInfo.changedTimestampOffset) {
    this.appendInitSegment_ = {
      audio: true,
      video: true
    };
  }

  // A playlist change may mean the media config changed, so force an init
  // segment append for this type.
  if (this.playlistOfLastInitSegment_[type] !== segmentInfo.playlist) {
    this.appendInitSegment_[type] = true;
  }
};
17287
/**
 * Resolve which init segment bytes (if any) should be prepended to the next
 * append, preferring a map-specified (EXT-X-MAP) init segment, and update the
 * bookkeeping that prevents redundant init segment appends.
 *
 * @param {Object} options
 * @param {string} options.type - 'audio' or 'video'
 * @param {Object} options.initSegment - transmuxed init segment bytes, if any
 * @param {Object} options.map - EXT-X-MAP info for the segment, if any
 * @param {Object} options.playlist - playlist the segment belongs to
 * @return {Object|null} init segment bytes to append, or null if none needed
 * @private
 */
_proto.getInitSegmentAndUpdateState_ = function getInitSegmentAndUpdateState_(_ref4) {
  var type = _ref4.type,
      initSegment = _ref4.initSegment,
      map = _ref4.map,
      playlist = _ref4.playlist;

  // "The EXT-X-MAP tag specifies how to obtain the Media Initialization Section
  // (Section 3) required to parse the applicable Media Segments. It applies to every
  // Media Segment that appears after it in the Playlist until the next EXT-X-MAP tag
  // or until the end of the playlist."
  // https://tools.ietf.org/html/draft-pantos-http-live-streaming-23#section-4.3.2.5
  if (map) {
    var id = initSegmentId(map);

    // The same init segment is already active; no re-append needed.
    if (this.activeInitSegmentId_ === id) {
      return null;
    }

    // A map-specified init segment takes priority over any transmuxed (or
    // otherwise obtained) init segment. This call also caches the init
    // segment for later use.
    initSegment = this.initSegmentForMap(map, true).bytes;
    this.activeInitSegmentId_ = id;
  }

  // We used to always prepend init segments for video, however, that shouldn't
  // be necessary. Instead, we should only append on changes, similar to what
  // we've always done for audio. This is more important (though may not be
  // that important) for frame-by-frame appending for LHLS, simply because of
  // the increased quantity of appends.
  if (!initSegment || !this.appendInitSegment_[type]) {
    return null;
  }

  // Make sure we track the playlist that we last used for the init segment, so
  // that we can re-append the init segment in the event that we get data from
  // a new playlist. Discontinuities and track changes are handled in other
  // sections.
  this.playlistOfLastInitSegment_[type] = playlist;
  // Disable future init segment appends for this type, until a change makes
  // one necessary again.
  this.appendInitSegment_[type] = false;
  // Clear out the fmp4 active init segment id, since we are appending the
  // muxer init segment.
  this.activeInitSegmentId_ = null;
  return initSegment;
};
17335
/**
 * Recover from a QuotaExceededError raised by a source buffer append: if
 * there's back buffer to reclaim, clear it and retry the append after a
 * delay; otherwise error out the playlist so a smaller rendition can be
 * selected.
 *
 * @param {Object} options
 * @param {Object} options.segmentInfo - segment whose append failed
 * @param {string} options.type - 'audio' or 'video'
 * @param {Object} options.bytes - the bytes that failed to append
 * @param {Error} error - the quota exceeded error (unused; kept for the
 *        append-error callback signature)
 * @private
 */
_proto.handleQuotaExceededError_ = function handleQuotaExceededError_(_ref5, error) {
  var _this3 = this;

  var segmentInfo = _ref5.segmentInfo,
      type = _ref5.type,
      bytes = _ref5.bytes;
  var audioBuffered = this.sourceUpdater_.audioBuffered();
  var videoBuffered = this.sourceUpdater_.videoBuffered(); // For now we're ignoring any notion of gaps in the buffer, but they, in theory,
  // should be cleared out during the buffer removals. However, log in case it helps
  // debug.

  if (audioBuffered.length > 1) {
    this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the audio buffer: ' + timeRangesToArray(audioBuffered).join(', '));
  }

  if (videoBuffered.length > 1) {
    this.logger_('On QUOTA_EXCEEDED_ERR, found gaps in the video buffer: ' + timeRangesToArray(videoBuffered).join(', '));
  }

  // Overall buffered span per type (treating gaps as buffered, per the note above).
  var audioBufferStart = audioBuffered.length ? audioBuffered.start(0) : 0;
  var audioBufferEnd = audioBuffered.length ? audioBuffered.end(audioBuffered.length - 1) : 0;
  var videoBufferStart = videoBuffered.length ? videoBuffered.start(0) : 0;
  var videoBufferEnd = videoBuffered.length ? videoBuffered.end(videoBuffered.length - 1) : 0;

  if (audioBufferEnd - audioBufferStart <= MIN_BACK_BUFFER && videoBufferEnd - videoBufferStart <= MIN_BACK_BUFFER) {
    // Can't remove enough buffer to make room for new segment (or the browser doesn't
    // allow for appends of segments this size). In the future, it may be possible to
    // split up the segment and append in pieces, but for now, error out this playlist
    // in an attempt to switch to a more manageable rendition.
    this.logger_('On QUOTA_EXCEEDED_ERR, single segment too large to append to ' + 'buffer, triggering an error. ' + ("Appended byte length: " + bytes.byteLength + ", ") + ("audio buffer: " + timeRangesToArray(audioBuffered).join(', ') + ", ") + ("video buffer: " + timeRangesToArray(videoBuffered).join(', ') + ", "));
    this.error({
      message: 'Quota exceeded error with append of a single segment of content',
      excludeUntil: Infinity
    });
    this.trigger('error');
    return;
  } // To try to resolve the quota exceeded error, clear back buffer and retry. This means
  // that the segment-loader should block on future events until this one is handled, so
  // that it doesn't keep moving onto further segments. Adding the call to the call
  // queue will prevent further appends until waitingOnRemove_ and
  // quotaExceededErrorRetryTimeout_ are cleared.
  //
  // Note that this will only block the current loader. In the case of demuxed content,
  // the other load may keep filling as fast as possible. In practice, this should be
  // OK, as it is a rare case when either audio has a high enough bitrate to fill up a
  // source buffer, or video fills without enough room for audio to append (and without
  // the availability of clearing out seconds of back buffer to make room for audio).
  // But it might still be good to handle this case in the future as a TODO.


  this.waitingOnRemove_ = true;
  this.callQueue_.push(this.appendToSourceBuffer_.bind(this, {
    segmentInfo: segmentInfo,
    type: type,
    bytes: bytes
  }));
  var currentTime = this.currentTime_(); // Try to remove as much audio and video as possible to make room for new content
  // before retrying.

  var timeToRemoveUntil = currentTime - MIN_BACK_BUFFER;
  this.logger_("On QUOTA_EXCEEDED_ERR, removing audio/video from 0 to " + timeToRemoveUntil);
  this.remove(0, timeToRemoveUntil, function () {
    _this3.logger_("On QUOTA_EXCEEDED_ERR, retrying append in " + MIN_BACK_BUFFER + "s");

    _this3.waitingOnRemove_ = false; // wait the length of time alotted in the back buffer to prevent wasted
    // attempts (since we can't clear less than the minimum)

    _this3.quotaExceededErrorRetryTimeout_ = window__default["default"].setTimeout(function () {
      _this3.logger_('On QUOTA_EXCEEDED_ERR, re-processing call queue');

      _this3.quotaExceededErrorRetryTimeout_ = null;

      _this3.processCallQueue_();
    }, MIN_BACK_BUFFER * 1000);
  }, true);
};
17412
/**
 * Callback for source buffer append results: route quota exceeded errors to
 * their recovery path and surface all other append errors as 'appenderror'.
 *
 * @param {Object} options
 * @param {Object} options.segmentInfo - segment whose append was attempted
 * @param {string} options.type - 'audio' or 'video'
 * @param {Object} options.bytes - the bytes that were appended
 * @param {Error} error - the append error, if any
 * @private
 */
_proto.handleAppendError_ = function handleAppendError_(_ref6, error) {
  var segmentInfo = _ref6.segmentInfo,
      type = _ref6.type,
      bytes = _ref6.bytes;

  // if there's no error, nothing to do
  if (!error) {
    return;
  }

  if (error.code === QUOTA_EXCEEDED_ERR) {
    // A quota exceeded error should be recoverable with a future re-append,
    // so route it to the recovery path instead of triggering an append error.
    this.handleQuotaExceededError_({
      segmentInfo: segmentInfo,
      type: type,
      bytes: bytes
    });
    return;
  }

  this.logger_('Received non QUOTA_EXCEEDED_ERR on append', error);
  this.error(type + " append of " + bytes.length + "b failed for segment " + ("#" + segmentInfo.mediaIndex + " in playlist " + segmentInfo.playlist.id));
  // If an append errors, we often can't recover.
  // (see https://w3c.github.io/media-source/#sourcebuffer-append-error).
  //
  // Trigger a special error so that it can be handled separately from normal,
  // recoverable errors.
  this.trigger('appenderror');
};
17443
/**
 * Append segment data (optionally preceded by an init segment) to the
 * matching source buffer via the source updater.
 *
 * @param {Object} options
 * @param {Object} options.segmentInfo - segment being appended
 * @param {string} options.type - 'audio' or 'video'
 * @param {Object} [options.initSegment] - init segment bytes to prepend
 * @param {Object} options.data - segment data bytes
 * @param {Object} [options.bytes] - pre-built combined bytes (re-append path)
 * @private
 */
_proto.appendToSourceBuffer_ = function appendToSourceBuffer_(_ref7) {
  var segmentInfo = _ref7.segmentInfo,
      type = _ref7.type,
      initSegment = _ref7.initSegment,
      data = _ref7.data,
      bytes = _ref7.bytes;

  // On a re-append (e.g., retrying after a quota exceeded error) the combined
  // bytes were already built, so only build them when absent.
  if (!bytes) {
    var parts = [data];
    var totalByteLength = data.byteLength;

    if (initSegment) {
      // If the media initialization segment is changing, append it before the
      // content segment.
      parts.unshift(initSegment);
      totalByteLength += initSegment.byteLength;
    }

    // Technically we should be OK appending the init segment separately,
    // however, we haven't yet tested that, and prepending is how we have
    // always done things.
    bytes = concatSegments({
      bytes: totalByteLength,
      segments: parts
    });
  }

  this.sourceUpdater_.appendBuffer({
    segmentInfo: segmentInfo,
    type: type,
    bytes: bytes
  }, this.handleAppendError_.bind(this, {
    segmentInfo: segmentInfo,
    type: type,
    bytes: bytes
  }));
};
17481
/**
 * Record transmuxer-reported timing info on the pending segment under
 * '<type>TimingInfo', ignoring callbacks for stale requests.
 *
 * @param {string} type - 'audio' or 'video'
 * @param {string} requestId - id of the request the timing info belongs to
 * @param {Object} segmentTimingInfo - timing values from the transmuxer
 * @private
 */
_proto.handleSegmentTimingInfo_ = function handleSegmentTimingInfo_(type, requestId, segmentTimingInfo) {
  // Only accept timing info for the in-flight request.
  if (!this.pendingSegment_ || requestId !== this.pendingSegment_.requestId) {
    return;
  }

  var segment = this.pendingSegment_.segment;
  var timingInfoProperty = type + "TimingInfo";

  if (!segment[timingInfoProperty]) {
    segment[timingInfoProperty] = {};
  }

  var timingInfo = segment[timingInfoProperty];
  timingInfo.transmuxerPrependedSeconds = segmentTimingInfo.prependedContentDuration || 0;
  timingInfo.transmuxedPresentationStart = segmentTimingInfo.start.presentation;
  timingInfo.transmuxedDecodeStart = segmentTimingInfo.start.decode;
  timingInfo.transmuxedPresentationEnd = segmentTimingInfo.end.presentation;
  timingInfo.transmuxedDecodeEnd = segmentTimingInfo.end.decode;
  // mainly used as a reference for debugging
  timingInfo.baseMediaDecodeTime = segmentTimingInfo.baseMediaDecodeTime;
};
17502
/**
 * Append a transmux result for the pending segment, resolving any init
 * segment that must be prepended first.
 *
 * @param {Object} segmentInfo - segment being appended
 * @param {Object} result - transmux/parse result ({type, data, initSegment})
 * @private
 */
_proto.appendData_ = function appendData_(segmentInfo, result) {
  var type = result.type,
      data = result.data;

  // Nothing to append.
  if (!data || !data.byteLength) {
    return;
  }

  // Skip audio appends when an alternate audio loader owns the audio buffer.
  if (type === 'audio' && this.audioDisabled_) {
    return;
  }

  var initSegment = this.getInitSegmentAndUpdateState_({
    type: type,
    initSegment: result.initSegment,
    playlist: segmentInfo.playlist,
    // Only fmp4 segments carry an EXT-X-MAP init segment.
    map: segmentInfo.isFmp4 ? segmentInfo.segment.map : null
  });
  this.appendToSourceBuffer_({
    segmentInfo: segmentInfo,
    type: type,
    initSegment: initSegment,
    data: data
  });
}
/**
 * load a specific segment from a request into the buffer
 *
 * @private
 */
;
17534
_proto.loadSegment_ = function loadSegment_(segmentInfo) {
  var _this4 = this;

  this.state = 'WAITING';
  this.pendingSegment_ = segmentInfo;
  this.trimBackBuffer_(segmentInfo);

  // A numeric timestamp offset signals a potential timeline change, so any
  // captions the transmuxer parsed from earlier mp4 segments no longer apply.
  if (typeof segmentInfo.timestampOffset === 'number' && this.transmuxer_) {
    this.transmuxer_.postMessage({
      action: 'clearAllMp4Captions'
    });
  }

  if (this.hasEnoughInfoToLoad_()) {
    this.updateTransmuxerAndRequestSegment_(segmentInfo);
    return;
  }

  // Not ready yet (audio loader waiting on main loader timing); defer the
  // request until processLoadQueue_ runs.
  this.loadQueue_.push(function () {
    // regenerate the audioAppendStart, timestampOffset, etc as they
    // may have changed since this function was added to the queue.
    var options = _extends__default["default"]({}, segmentInfo, {
      forceTimestampOffset: true
    });

    _extends__default["default"](segmentInfo, _this4.generateSegmentInfo_(options));

    _this4.isPendingTimestampOffset_ = false;

    _this4.updateTransmuxerAndRequestSegment_(segmentInfo);
  });
};
17569
/**
 * Prepare the transmuxer for the segment (resetting it across timestamp
 * offset changes) and kick off the media segment request, wiring up all of
 * the loader's callback handlers.
 *
 * @param {Object} segmentInfo - the segment to request
 * @private
 */
_proto.updateTransmuxerAndRequestSegment_ = function updateTransmuxerAndRequestSegment_(segmentInfo) {
  var _this5 = this;

  // We'll update the source buffer's timestamp offset once we have transmuxed data, but
  // the transmuxer still needs to be updated before then.
  //
  // Even though keepOriginalTimestamps is set to true for the transmuxer, timestamp
  // offset must be passed to the transmuxer for stream correcting adjustments.
  if (this.shouldUpdateTransmuxerTimestampOffset_(segmentInfo.timestampOffset)) {
    this.gopBuffer_.length = 0; // gopsToAlignWith was set before the GOP buffer was cleared

    segmentInfo.gopsToAlignWith = [];
    this.timeMapping_ = 0; // reset values in the transmuxer since a discontinuity should start fresh

    this.transmuxer_.postMessage({
      action: 'reset'
    });
    this.transmuxer_.postMessage({
      action: 'setTimestampOffset',
      timestampOffset: segmentInfo.timestampOffset
    });
  }

  var simpleSegment = this.createSimplifiedSegmentObj_(segmentInfo);
  var isEndOfStream = this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex);
  var isWalkingForward = this.mediaIndex !== null;
  var isDiscontinuity = segmentInfo.timeline !== this.currentTimeline_ && // currentTimeline starts at -1, so we shouldn't end the timeline switching to 0,
  // the first timeline
  segmentInfo.timeline > 0;
  // the transmuxer is told the timeline ended when the stream ends or when we
  // cross a discontinuity while walking forward through segments
  var isEndOfTimeline = isEndOfStream || isWalkingForward && isDiscontinuity;
  this.logger_("Requesting " + segmentInfoString(segmentInfo)); // If there's an init segment associated with this segment, but it is not cached (identified by a lack of bytes),
  // then this init segment has never been seen before and should be appended.
  //
  // At this point the content type (audio/video or both) is not yet known, but it should be safe to set
  // both to true and leave the decision of whether to append the init segment to append time.

  if (simpleSegment.map && !simpleSegment.map.bytes) {
    this.logger_('going to request init segment.');
    this.appendInitSegment_ = {
      video: true,
      audio: true
    };
  }

  segmentInfo.abortRequests = mediaSegmentRequest({
    xhr: this.vhs_.xhr,
    xhrOptions: this.xhrOptions_,
    decryptionWorker: this.decrypter_,
    segment: simpleSegment,
    abortFn: this.handleAbort_.bind(this, segmentInfo),
    progressFn: this.handleProgress_.bind(this),
    trackInfoFn: this.handleTrackInfo_.bind(this),
    timingInfoFn: this.handleTimingInfo_.bind(this),
    videoSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'video', segmentInfo.requestId),
    audioSegmentTimingInfoFn: this.handleSegmentTimingInfo_.bind(this, 'audio', segmentInfo.requestId),
    captionsFn: this.handleCaptions_.bind(this),
    isEndOfTimeline: isEndOfTimeline,
    endedTimelineFn: function endedTimelineFn() {
      _this5.logger_('received endedtimeline callback');
    },
    id3Fn: this.handleId3_.bind(this),
    dataFn: this.handleData_.bind(this),
    doneFn: this.segmentRequestFinished_.bind(this),
    onTransmuxerLog: function onTransmuxerLog(_ref8) {
      var message = _ref8.message,
          level = _ref8.level,
          stream = _ref8.stream;

      _this5.logger_(segmentInfoString(segmentInfo) + " logged from transmuxer stream " + stream + " as a " + level + ": " + message);
    }
  });
}
/**
 * trim the back buffer so that we don't have too much data
 * in the source buffer
 *
 * @private
 *
 * @param {Object} segmentInfo - the current segment
 */
;
17651
_proto.trimBackBuffer_ = function trimBackBuffer_(segmentInfo) {
  // Chrome has a hard limit of 150MB of buffer and a very conservative
  // "garbage collector". We manually clear out the old buffer to ensure we
  // don't trigger the QuotaExceeded error on the source buffer during
  // subsequent appends.
  var removeToTime = safeBackBufferTrimTime(this.seekable_(), this.currentTime_(), this.playlist_.targetDuration || 10);

  if (removeToTime > 0) {
    this.remove(0, removeToTime);
  }
}
/**
 * created a simplified copy of the segment object with just the
 * information necessary to perform the XHR and decryption
 *
 * @private
 *
 * @param {Object} segmentInfo - the current segment
 * @return {Object} a simplified segment object copy
 */
;
17673
_proto.createSimplifiedSegmentObj_ = function createSimplifiedSegmentObj_(segmentInfo) {
  var segment = segmentInfo.segment;
  var part = segmentInfo.part;
  // Parts (LL-HLS partial segments) override the segment-level URI/byterange.
  var simpleSegment = {
    resolvedUri: part ? part.resolvedUri : segment.resolvedUri,
    byterange: part ? part.byterange : segment.byterange,
    requestId: segmentInfo.requestId,
    transmuxer: segmentInfo.transmuxer,
    audioAppendStart: segmentInfo.audioAppendStart,
    gopsToAlignWith: segmentInfo.gopsToAlignWith,
    part: segmentInfo.part
  };
  var previousSegment = segmentInfo.playlist.segments[segmentInfo.mediaIndex - 1];

  if (previousSegment && previousSegment.timeline === segment.timeline) {
    // The baseStartTime of a segment is used to handle rollover when probing the TS
    // segment to retrieve timing information. Since the probe only looks at the media's
    // times (e.g., PTS and DTS values of the segment), and doesn't consider the
    // player's time (e.g., player.currentTime()), baseStartTime should reflect the
    // media time as well. transmuxedDecodeEnd represents the end time of a segment, in
    // seconds of media time, so should be used here. The previous segment is used since
    // the end of the previous segment should represent the beginning of the current
    // segment, so long as they are on the same timeline.
    var previousTimingInfo = previousSegment.videoTimingInfo || previousSegment.audioTimingInfo;

    if (previousTimingInfo) {
      simpleSegment.baseStartTime = previousTimingInfo.transmuxedDecodeEnd;
    }
  }

  if (segment.key) {
    // if the media sequence is greater than 2^32, the IV will be incorrect
    // assuming 10s segments, that would be about 1300 years
    var iv = segment.key.iv || new Uint32Array([0, 0, 0, segmentInfo.mediaIndex + segmentInfo.playlist.mediaSequence]);
    simpleSegment.key = this.segmentKey(segment.key);
    simpleSegment.key.iv = iv;
  }

  if (segment.map) {
    simpleSegment.map = this.initSegmentForMap(segment.map);
  }

  return simpleSegment;
};
17718
17719 _proto.saveTransferStats_ = function saveTransferStats_(stats) {
17720 // every request counts as a media request even if it has been aborted
17721 // or canceled due to a timeout
17722 this.mediaRequests += 1;
17723
17724 if (stats) {
17725 this.mediaBytesTransferred += stats.bytesReceived;
17726 this.mediaTransferDuration += stats.roundTripTime;
17727 }
17728 };
17729
17730 _proto.saveBandwidthRelatedStats_ = function saveBandwidthRelatedStats_(duration, stats) {
17731 // byteLength will be used for throughput, and should be based on bytes receieved,
17732 // which we only know at the end of the request and should reflect total bytes
17733 // downloaded rather than just bytes processed from components of the segment
17734 this.pendingSegment_.byteLength = stats.bytesReceived;
17735
17736 if (duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
17737 this.logger_("Ignoring segment's bandwidth because its duration of " + duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
17738 return;
17739 }
17740
17741 this.bandwidth = stats.bandwidth;
17742 this.roundTrip = stats.roundTripTime;
17743 };
17744
  /**
   * Handles a segment request timeout: records the timeout, forces bandwidth
   * to a minimal value so the next ABR evaluation takes emergency action, and
   * notifies listeners.
   *
   * @private
   */
  _proto.handleTimeout_ = function handleTimeout_() {
    // although the VTT segment loader bandwidth isn't really used, it's good to
    // maintain functionality between segment loaders
    this.mediaRequestsTimedout += 1;
    this.bandwidth = 1;
    this.roundTrip = NaN;
    this.trigger('bandwidthupdate');
  }
17753 /**
17754 * Handle the callback from the segmentRequest function and set the
17755 * associated SegmentLoader state and errors if necessary
17756 *
17757 * @private
17758 */
17759 ;
17760
  _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
    // TODO handle special cases, e.g., muxed audio/video but only audio in the segment
    // check the call queue directly since this function doesn't need to deal with any
    // data, and can continue even if the source buffers are not set up and we didn't get
    // any data from the segment
    if (this.callQueue_.length) {
      this.callQueue_.push(this.segmentRequestFinished_.bind(this, error, simpleSegment, result));
      return;
    }

    // record transfer stats unconditionally: even requests that are about to
    // be discarded below still count as media requests
    this.saveTransferStats_(simpleSegment.stats); // The request was aborted and the SegmentLoader has already been reset

    if (!this.pendingSegment_) {
      return;
    } // the request was aborted and the SegmentLoader has already started
    // another request. this can happen when the timeout for an aborted
    // request triggers due to a limitation in the XHR library
    // do not count this as any sort of request or we risk double-counting


    if (simpleSegment.requestId !== this.pendingSegment_.requestId) {
      return;
    } // an error occurred from the active pendingSegment_ so reset everything


    if (error) {
      this.pendingSegment_ = null;
      this.state = 'READY'; // aborts are not a true error condition and nothing corrective needs to be done

      if (error.code === REQUEST_ERRORS.ABORTED) {
        return;
      }

      this.pause(); // the error is really just that at least one of the requests timed-out
      // set the bandwidth to a very low value and trigger an ABR switch to
      // take emergency action

      if (error.code === REQUEST_ERRORS.TIMEOUT) {
        this.handleTimeout_();
        return;
      } // if control-flow has arrived here, then the error is real
      // emit an error event to blacklist the current playlist


      this.mediaRequestsErrored += 1;
      this.error(error);
      this.trigger('error');
      return;
    }

    var segmentInfo = this.pendingSegment_; // the response was a success so set any bandwidth stats the request
    // generated for ABR purposes

    this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
    segmentInfo.endOfAllRequests = simpleSegment.endOfAllRequests;

    if (result.gopInfo) {
      this.gopBuffer_ = updateGopBuffer(this.gopBuffer_, result.gopInfo, this.safeAppend_);
    } // Although we may have already started appending on progress, we shouldn't switch the
    // state away from loading until we are officially done loading the segment data.


    this.state = 'APPENDING'; // used for testing

    this.trigger('appending');
    this.waitForAppendsToComplete_(segmentInfo);
  };
17828
17829 _proto.setTimeMapping_ = function setTimeMapping_(timeline) {
17830 var timelineMapping = this.syncController_.mappingForTimeline(timeline);
17831
17832 if (timelineMapping !== null) {
17833 this.timeMapping_ = timelineMapping;
17834 }
17835 };
17836
17837 _proto.updateMediaSecondsLoaded_ = function updateMediaSecondsLoaded_(segment) {
17838 if (typeof segment.start === 'number' && typeof segment.end === 'number') {
17839 this.mediaSecondsLoaded += segment.end - segment.start;
17840 } else {
17841 this.mediaSecondsLoaded += segment.duration;
17842 }
17843 };
17844
17845 _proto.shouldUpdateTransmuxerTimestampOffset_ = function shouldUpdateTransmuxerTimestampOffset_(timestampOffset) {
17846 if (timestampOffset === null) {
17847 return false;
17848 } // note that we're potentially using the same timestamp offset for both video and
17849 // audio
17850
17851
17852 if (this.loaderType_ === 'main' && timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
17853 return true;
17854 }
17855
17856 if (!this.audioDisabled_ && timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
17857 return true;
17858 }
17859
17860 return false;
17861 };
17862
17863 _proto.trueSegmentStart_ = function trueSegmentStart_(_ref9) {
17864 var currentStart = _ref9.currentStart,
17865 playlist = _ref9.playlist,
17866 mediaIndex = _ref9.mediaIndex,
17867 firstVideoFrameTimeForData = _ref9.firstVideoFrameTimeForData,
17868 currentVideoTimestampOffset = _ref9.currentVideoTimestampOffset,
17869 useVideoTimingInfo = _ref9.useVideoTimingInfo,
17870 videoTimingInfo = _ref9.videoTimingInfo,
17871 audioTimingInfo = _ref9.audioTimingInfo;
17872
17873 if (typeof currentStart !== 'undefined') {
17874 // if start was set once, keep using it
17875 return currentStart;
17876 }
17877
17878 if (!useVideoTimingInfo) {
17879 return audioTimingInfo.start;
17880 }
17881
17882 var previousSegment = playlist.segments[mediaIndex - 1]; // The start of a segment should be the start of the first full frame contained
17883 // within that segment. Since the transmuxer maintains a cache of incomplete data
17884 // from and/or the last frame seen, the start time may reflect a frame that starts
17885 // in the previous segment. Check for that case and ensure the start time is
17886 // accurate for the segment.
17887
17888 if (mediaIndex === 0 || !previousSegment || typeof previousSegment.start === 'undefined' || previousSegment.end !== firstVideoFrameTimeForData + currentVideoTimestampOffset) {
17889 return firstVideoFrameTimeForData;
17890 }
17891
17892 return videoTimingInfo.start;
17893 };
17894
  /**
   * After a segment request finishes, waits for any outstanding source buffer
   * appends (audio and/or video, as applicable) to complete before finishing
   * the segment via checkAppendsDone_ / handleAppendsDone_. Segments with no
   * appended data are completed immediately.
   *
   * @param {Object} segmentInfo - the current segment
   * @private
   */
  _proto.waitForAppendsToComplete_ = function waitForAppendsToComplete_(segmentInfo) {
    var trackInfo = this.getCurrentMediaInfo_(segmentInfo);

    if (!trackInfo) {
      this.error({
        message: 'No starting media returned, likely due to an unsupported media format.',
        blacklistDuration: Infinity
      });
      this.trigger('error');
      return;
    } // Although transmuxing is done, appends may not yet be finished. Throw a marker
    // on each queue this loader is responsible for to ensure that the appends are
    // complete.


    var hasAudio = trackInfo.hasAudio,
        hasVideo = trackInfo.hasVideo,
        isMuxed = trackInfo.isMuxed;
    var waitForVideo = this.loaderType_ === 'main' && hasVideo;
    var waitForAudio = !this.audioDisabled_ && hasAudio && !isMuxed;
    segmentInfo.waitingOnAppends = 0; // segments with no data

    if (!segmentInfo.hasAppendedData_) {
      if (!segmentInfo.timingInfo && typeof segmentInfo.timestampOffset === 'number') {
        // When there's no audio or video data in the segment, there's no audio or video
        // timing information.
        //
        // If there's no audio or video timing information, then the timestamp offset
        // can't be adjusted to the appropriate value for the transmuxer and source
        // buffers.
        //
        // Therefore, the next segment should be used to set the timestamp offset.
        this.isPendingTimestampOffset_ = true;
      } // override settings for metadata only segments


      segmentInfo.timingInfo = {
        start: 0
      };
      segmentInfo.waitingOnAppends++;

      if (!this.isPendingTimestampOffset_) {
        // update the timestampoffset
        this.updateSourceBufferTimestampOffset_(segmentInfo); // make sure the metadata queue is processed even though we have
        // no video/audio data.

        this.processMetadataQueue_();
      } // append is "done" instantly with no data.


      this.checkAppendsDone_(segmentInfo);
      return;
    } // Since source updater could call back synchronously, do the increments first.


    if (waitForVideo) {
      segmentInfo.waitingOnAppends++;
    }

    if (waitForAudio) {
      segmentInfo.waitingOnAppends++;
    }

    if (waitForVideo) {
      this.sourceUpdater_.videoQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
    }

    if (waitForAudio) {
      this.sourceUpdater_.audioQueueCallback(this.checkAppendsDone_.bind(this, segmentInfo));
    }
  };
17966
  /**
   * Decrements the outstanding-append counter for the segment and, once every
   * expected append has reported back, finalizes the segment.
   *
   * @param {Object} segmentInfo - the current segment
   * @private
   */
  _proto.checkAppendsDone_ = function checkAppendsDone_(segmentInfo) {
    // the loader may have been aborted or moved on to another request while
    // an append was in flight
    if (this.checkForAbort_(segmentInfo.requestId)) {
      return;
    }

    segmentInfo.waitingOnAppends--;

    if (segmentInfo.waitingOnAppends === 0) {
      this.handleAppendsDone_();
    }
  };
17978
17979 _proto.checkForIllegalMediaSwitch = function checkForIllegalMediaSwitch(trackInfo) {
17980 var illegalMediaSwitchError = illegalMediaSwitch(this.loaderType_, this.getCurrentMediaInfo_(), trackInfo);
17981
17982 if (illegalMediaSwitchError) {
17983 this.error({
17984 message: illegalMediaSwitchError,
17985 blacklistDuration: Infinity
17986 });
17987 this.trigger('error');
17988 return true;
17989 }
17990
17991 return false;
17992 };
17993
  /**
   * Shifts the source buffers' timestampOffset so that this segment's media
   * start time lands at its intended position on the player timeline. Runs
   * only on the main loader, only when a start time is known, and at most
   * once per segment.
   *
   * @param {Object} segmentInfo - the current segment
   * @private
   */
  _proto.updateSourceBufferTimestampOffset_ = function updateSourceBufferTimestampOffset_(segmentInfo) {
    if (segmentInfo.timestampOffset === null || // we don't yet have the start for whatever media type (video or audio) has
    // priority, timing-wise, so we must wait
    typeof segmentInfo.timingInfo.start !== 'number' || // already updated the timestamp offset for this segment
    segmentInfo.changedTimestampOffset || // the alt audio loader should not be responsible for setting the timestamp offset
    this.loaderType_ !== 'main') {
      return;
    }

    var didChange = false; // Primary timing goes by video, and audio is trimmed in the transmuxer, meaning that
    // the timing info here comes from video. In the event that the audio is longer than
    // the video, this will trim the start of the audio.
    // This also trims any offset from 0 at the beginning of the media

    segmentInfo.timestampOffset -= segmentInfo.timingInfo.start; // In the event that there are part segment downloads, each will try to update the
    // timestamp offset. Retaining this bit of state prevents us from updating in the
    // future (within the same segment), however, there may be a better way to handle it.

    segmentInfo.changedTimestampOffset = true;

    if (segmentInfo.timestampOffset !== this.sourceUpdater_.videoTimestampOffset()) {
      this.sourceUpdater_.videoTimestampOffset(segmentInfo.timestampOffset);
      didChange = true;
    }

    if (segmentInfo.timestampOffset !== this.sourceUpdater_.audioTimestampOffset()) {
      this.sourceUpdater_.audioTimestampOffset(segmentInfo.timestampOffset);
      didChange = true;
    }

    if (didChange) {
      this.trigger('timestampoffset');
    }
  };
18028
18029 _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_(segmentInfo) {
18030 segmentInfo.timingInfo = segmentInfo.timingInfo || {};
18031 var trackInfo = this.getMediaInfo_();
18032 var useVideoTimingInfo = this.loaderType_ === 'main' && trackInfo && trackInfo.hasVideo;
18033 var prioritizedTimingInfo = useVideoTimingInfo && segmentInfo.videoTimingInfo ? segmentInfo.videoTimingInfo : segmentInfo.audioTimingInfo;
18034
18035 if (!prioritizedTimingInfo) {
18036 return;
18037 }
18038
18039 segmentInfo.timingInfo.end = typeof prioritizedTimingInfo.end === 'number' ? // End time may not exist in a case where we aren't parsing the full segment (one
18040 // current example is the case of fmp4), so use the rough duration to calculate an
18041 // end time.
18042 prioritizedTimingInfo.end : prioritizedTimingInfo.start + segmentInfo.duration;
18043 }
18044 /**
18045 * callback to run when appendBuffer is finished. detects if we are
18046 * in a good state to do things with the data we got, or if we need
18047 * to wait for more
18048 *
18049 * @private
18050 */
18051 ;
18052
  _proto.handleAppendsDone_ = function handleAppendsDone_() {
    // appendsdone can cause an abort
    if (this.pendingSegment_) {
      this.trigger('appendsdone');
    }

    // re-check: an appendsdone handler above may have aborted and cleared
    // pendingSegment_
    if (!this.pendingSegment_) {
      this.state = 'READY'; // TODO should this move into this.checkForAbort to speed up requests post abort in
      // all appending cases?

      if (!this.paused()) {
        this.monitorBuffer_();
      }

      return;
    }

    var segmentInfo = this.pendingSegment_; // Now that the end of the segment has been reached, we can set the end time. It's
    // best to wait until all appends are done so we're sure that the primary media is
    // finished (and we have its end time).

    this.updateTimingInfoEnd_(segmentInfo);

    if (this.shouldSaveSegmentTimingInfo_) {
      // Timeline mappings should only be saved for the main loader. This is for multiple
      // reasons:
      //
      // 1) Only one mapping is saved per timeline, meaning that if both the audio loader
      //    and the main loader try to save the timeline mapping, whichever comes later
      //    will overwrite the first. In theory this is OK, as the mappings should be the
      //    same, however, it breaks for (2)
      // 2) In the event of a live stream, the initial live point will make for a somewhat
      //    arbitrary mapping. If audio and video streams are not perfectly in-sync, then
      //    the mapping will be off for one of the streams, dependent on which one was
      //    first saved (see (1)).
      // 3) Primary timing goes by video in VHS, so the mapping should be video.
      //
      // Since the audio loader will wait for the main loader to load the first segment,
      // the main loader will save the first timeline mapping, and ensure that there won't
      // be a case where audio loads two segments without saving a mapping (thus leading
      // to missing segment timing info).
      this.syncController_.saveSegmentTimingInfo({
        segmentInfo: segmentInfo,
        shouldSaveTimelineMapping: this.loaderType_ === 'main'
      });
    }

    var segmentDurationMessage = getTroublesomeSegmentDurationMessage(segmentInfo, this.sourceType_);

    if (segmentDurationMessage) {
      if (segmentDurationMessage.severity === 'warn') {
        videojs__default["default"].log.warn(segmentDurationMessage.message);
      } else {
        this.logger_(segmentDurationMessage.message);
      }
    }

    this.recordThroughput_(segmentInfo);
    this.pendingSegment_ = null;
    this.state = 'READY';

    if (segmentInfo.isSyncRequest) {
      this.trigger('syncinfoupdate'); // if the sync request was not appended
      // then it was not the correct segment.
      // throw it away and use the data it gave us
      // to get the correct one.

      if (!segmentInfo.hasAppendedData_) {
        this.logger_("Throwing away un-appended sync request " + segmentInfoString(segmentInfo));
        return;
      }
    }

    this.logger_("Appended " + segmentInfoString(segmentInfo));
    this.addSegmentMetadataCue_(segmentInfo);
    this.fetchAtBuffer_ = true;

    if (this.currentTimeline_ !== segmentInfo.timeline) {
      this.timelineChangeController_.lastTimelineChange({
        type: this.loaderType_,
        from: this.currentTimeline_,
        to: segmentInfo.timeline
      }); // If audio is not disabled, the main segment loader is responsible for updating
      // the audio timeline as well. If the content is video only, this won't have any
      // impact.

      if (this.loaderType_ === 'main' && !this.audioDisabled_) {
        this.timelineChangeController_.lastTimelineChange({
          type: 'audio',
          from: this.currentTimeline_,
          to: segmentInfo.timeline
        });
      }
    }

    this.currentTimeline_ = segmentInfo.timeline; // We must update the syncinfo to recalculate the seekable range before
    // the following conditional otherwise it may consider this a bad "guess"
    // and attempt to resync when the post-update seekable window and live
    // point would mean that this was the perfect segment to fetch

    this.trigger('syncinfoupdate');
    var segment = segmentInfo.segment; // If we previously appended a segment that ends more than 3 targetDurations before
    // the currentTime_ that means that our conservative guess was too conservative.
    // In that case, reset the loader state so that we try to use any information gained
    // from the previous request to create a new, more accurate, sync-point.

    if (segment.end && this.currentTime_() - segment.end > segmentInfo.playlist.targetDuration * 3) {
      this.resetEverything();
      return;
    }

    var isWalkingForward = this.mediaIndex !== null; // Don't do a rendition switch unless we have enough time to get a sync segment
    // and conservatively guess

    if (isWalkingForward) {
      this.trigger('bandwidthupdate');
    }

    this.trigger('progress');
    this.mediaIndex = segmentInfo.mediaIndex;
    this.partIndex = segmentInfo.partIndex; // any time an update finishes and the last segment is in the
    // buffer, end the stream. this ensures the "ended" event will
    // fire if playback reaches that point.

    if (this.isEndOfStream_(segmentInfo.mediaIndex, segmentInfo.playlist, segmentInfo.partIndex)) {
      this.endOfStream();
    } // used for testing


    this.trigger('appended');

    if (segmentInfo.hasAppendedData_) {
      this.mediaAppends++;
    }

    if (!this.paused()) {
      this.monitorBuffer_();
    }
  }
18192 /**
18193 * Records the current throughput of the decrypt, transmux, and append
18194 * portion of the semgment pipeline. `throughput.rate` is a the cumulative
18195 * moving average of the throughput. `throughput.count` is the number of
18196 * data points in the average.
18197 *
18198 * @private
18199 * @param {Object} segmentInfo the object returned by loadSegment
18200 */
18201 ;
18202
18203 _proto.recordThroughput_ = function recordThroughput_(segmentInfo) {
18204 if (segmentInfo.duration < MIN_SEGMENT_DURATION_TO_SAVE_STATS) {
18205 this.logger_("Ignoring segment's throughput because its duration of " + segmentInfo.duration + (" is less than the min to record " + MIN_SEGMENT_DURATION_TO_SAVE_STATS));
18206 return;
18207 }
18208
18209 var rate = this.throughput.rate; // Add one to the time to ensure that we don't accidentally attempt to divide
18210 // by zero in the case where the throughput is ridiculously high
18211
18212 var segmentProcessingTime = Date.now() - segmentInfo.endOfAllRequests + 1; // Multiply by 8000 to convert from bytes/millisecond to bits/second
18213
18214 var segmentProcessingThroughput = Math.floor(segmentInfo.byteLength / segmentProcessingTime * 8 * 1000); // This is just a cumulative moving average calculation:
18215 // newAvg = oldAvg + (sample - oldAvg) / (sampleCount + 1)
18216
18217 this.throughput.rate += (segmentProcessingThroughput - rate) / ++this.throughput.count;
18218 }
18219 /**
18220 * Adds a cue to the segment-metadata track with some metadata information about the
18221 * segment
18222 *
18223 * @private
18224 * @param {Object} segmentInfo
18225 * the object returned by loadSegment
18226 * @method addSegmentMetadataCue_
18227 */
18228 ;
18229
18230 _proto.addSegmentMetadataCue_ = function addSegmentMetadataCue_(segmentInfo) {
18231 if (!this.segmentMetadataTrack_) {
18232 return;
18233 }
18234
18235 var segment = segmentInfo.segment;
18236 var start = segment.start;
18237 var end = segment.end; // Do not try adding the cue if the start and end times are invalid.
18238
18239 if (!finite(start) || !finite(end)) {
18240 return;
18241 }
18242
18243 removeCuesFromTrack(start, end, this.segmentMetadataTrack_);
18244 var Cue = window__default["default"].WebKitDataCue || window__default["default"].VTTCue;
18245 var value = {
18246 custom: segment.custom,
18247 dateTimeObject: segment.dateTimeObject,
18248 dateTimeString: segment.dateTimeString,
18249 bandwidth: segmentInfo.playlist.attributes.BANDWIDTH,
18250 resolution: segmentInfo.playlist.attributes.RESOLUTION,
18251 codecs: segmentInfo.playlist.attributes.CODECS,
18252 byteLength: segmentInfo.byteLength,
18253 uri: segmentInfo.uri,
18254 timeline: segmentInfo.timeline,
18255 playlist: segmentInfo.playlist.id,
18256 start: start,
18257 end: end
18258 };
18259 var data = JSON.stringify(value);
18260 var cue = new Cue(start, end, data); // Attach the metadata to the value property of the cue to keep consistency between
18261 // the differences of WebKitDataCue in safari and VTTCue in other browsers
18262
18263 cue.value = value;
18264 this.segmentMetadataTrack_.addCue(cue);
18265 };
18266
18267 return SegmentLoader;
18268}(videojs__default["default"].EventTarget);
18269
// No-operation placeholder used where a callback is required but nothing needs to happen.
function noop() {}
18271
/**
 * Upper-cases the first character of a string (e.g. 'audio' -> 'Audio').
 * Non-string values are returned untouched.
 *
 * @param {string} string the value to transform
 * @return {string} the value with its first character upper-cased
 */
var toTitleCase = function toTitleCase(string) {
  if (typeof string !== 'string') {
    return string;
  }

  return string.charAt(0).toUpperCase() + string.slice(1);
};
18281
// The two per-type source buffers managed by the source updater.
var bufferTypes = ['video', 'audio'];
18283
/**
 * Whether the given buffer type is busy: either its SourceBuffer reports
 * `updating` or an async queue action of that type is still pending.
 *
 * @param {string} type 'audio' or 'video'
 * @param {Object} sourceUpdater the source updater to inspect
 * @return {boolean|Object} truthy when the buffer type is busy
 */
var _updating = function updating(type, sourceUpdater) {
  var buffer = sourceUpdater[type + "Buffer"];
  var bufferIsUpdating = buffer && buffer.updating;

  return bufferIsUpdating || sourceUpdater.queuePending[type];
};
18288
/**
 * Finds the index of the next queue entry of the given source buffer type.
 * Returns null when no such entry exists, or when a 'mediaSource' entry
 * (which requires both source buffers) appears first — that entry must be
 * allowed to process before any later per-type entries.
 *
 * @param {string} type 'audio' or 'video'
 * @param {Object[]} queue the pending action queue
 * @return {number|null} index of the next entry of `type`, or null
 */
var nextQueueIndexOfType = function nextQueueIndexOfType(type, queue) {
  for (var i = 0; i < queue.length; i++) {
    var entryType = queue[i].type;

    if (entryType === 'mediaSource') {
      // A media source entry blocks per-type processing until it has run.
      return null;
    }

    if (entryType === type) {
      return i;
    }
  }

  return null;
};
18306
/**
 * Runs the next runnable action from the source updater's queue for the given
 * type. 'mediaSource' entries at the head of the queue require both source
 * buffers to be idle and block later per-type entries until they process;
 * per-type ('audio'/'video') entries run only when the updater is ready and
 * that buffer type is not busy.
 *
 * @param {string} type 'audio', 'video', or 'mediaSource'
 * @param {Object} sourceUpdater the source updater whose queue to process
 */
var shiftQueue = function shiftQueue(type, sourceUpdater) {
  if (sourceUpdater.queue.length === 0) {
    return;
  }

  var queueIndex = 0;
  var queueEntry = sourceUpdater.queue[queueIndex];

  if (queueEntry.type === 'mediaSource') {
    if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
      sourceUpdater.queue.shift();
      queueEntry.action(sourceUpdater);

      if (queueEntry.doneFn) {
        queueEntry.doneFn();
      } // Only specific source buffer actions must wait for async updateend events. Media
      // Source actions process synchronously. Therefore, both audio and video source
      // buffers are now clear to process the next queue entries.


      shiftQueue('audio', sourceUpdater);
      shiftQueue('video', sourceUpdater);
    } // Media Source actions require both source buffers, so if the media source action
    // couldn't process yet (because one or both source buffers are busy), block other
    // queue actions until both are available and the media source action can process.


    return;
  }

  if (type === 'mediaSource') {
    // If the queue was shifted by a media source action (this happens when pushing a
    // media source action onto the queue), then it wasn't from an updateend event from an
    // audio or video source buffer, so there's no change from previous state, and no
    // processing should be done.
    return;
  } // Media source queue entries don't need to consider whether the source updater is
  // started (i.e., source buffers are created) as they don't need the source buffers, but
  // source buffer queue entries do.


  if (!sourceUpdater.ready() || sourceUpdater.mediaSource.readyState === 'closed' || _updating(type, sourceUpdater)) {
    return;
  }

  if (queueEntry.type !== type) {
    queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);

    if (queueIndex === null) {
      // Either there's no queue entry that uses this source buffer type in the queue, or
      // there's a media source queue entry before the next entry of this type, in which
      // case wait for that action to process first.
      return;
    }

    queueEntry = sourceUpdater.queue[queueIndex];
  }

  sourceUpdater.queue.splice(queueIndex, 1); // Keep a record that this source buffer type is in use.
  //
  // The queue pending operation must be set before the action is performed in the event
  // that the action results in a synchronous event that is acted upon. For instance, if
  // an exception is thrown that can be handled, it's possible that new actions will be
  // appended to an empty queue and immediately executed, but would not have the correct
  // pending information if this property was set after the action was performed.

  sourceUpdater.queuePending[type] = queueEntry;
  queueEntry.action(type, sourceUpdater);

  if (!queueEntry.doneFn) {
    // synchronous operation, process next entry
    sourceUpdater.queuePending[type] = null;
    shiftQueue(type, sourceUpdater);
    return;
  }
};
18383
/**
 * Detaches the updater's event listeners from a source buffer and clears the
 * stored buffer and codec for that type. Safe to call when the buffer was
 * never created.
 *
 * @param {string} type 'audio' or 'video'
 * @param {Object} sourceUpdater the source updater that owns the buffer
 */
var cleanupBuffer = function cleanupBuffer(type, sourceUpdater) {
  var buffer = sourceUpdater[type + "Buffer"];

  if (!buffer) {
    return;
  }

  var titleType = toTitleCase(type);

  buffer.removeEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
  buffer.removeEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
  sourceUpdater.codecs[type] = null;
  sourceUpdater[type + "Buffer"] = null;
};
18397
/**
 * Whether `sourceBuffer` is currently attached to `mediaSource`.
 * SourceBufferList is array-like but not an Array, so indexOf is borrowed
 * from Array.prototype.
 *
 * @param {MediaSource} mediaSource the media source to check
 * @param {SourceBuffer} sourceBuffer the source buffer to look for
 * @return {boolean} true when both exist and the buffer is attached
 */
var inSourceBuffers = function inSourceBuffers(mediaSource, sourceBuffer) {
  if (!mediaSource || !sourceBuffer) {
    // preserve the original short-circuit result (null/undefined/false)
    return mediaSource && sourceBuffer;
  }

  return Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
};
18401
18402var actions = {
18403 appendBuffer: function appendBuffer(bytes, segmentInfo, onError) {
18404 return function (type, sourceUpdater) {
18405 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
18406 // or the media source does not contain this source buffer.
18407
18408 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18409 return;
18410 }
18411
18412 sourceUpdater.logger_("Appending segment " + segmentInfo.mediaIndex + "'s " + bytes.length + " bytes to " + type + "Buffer");
18413
18414 try {
18415 sourceBuffer.appendBuffer(bytes);
18416 } catch (e) {
18417 sourceUpdater.logger_("Error with code " + e.code + " " + (e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') + ("when appending segment " + segmentInfo.mediaIndex + " to " + type + "Buffer"));
18418 sourceUpdater.queuePending[type] = null;
18419 onError(e);
18420 }
18421 };
18422 },
18423 remove: function remove(start, end) {
18424 return function (type, sourceUpdater) {
18425 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
18426 // or the media source does not contain this source buffer.
18427
18428 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18429 return;
18430 }
18431
18432 sourceUpdater.logger_("Removing " + start + " to " + end + " from " + type + "Buffer");
18433
18434 try {
18435 sourceBuffer.remove(start, end);
18436 } catch (e) {
18437 sourceUpdater.logger_("Remove " + start + " to " + end + " from " + type + "Buffer failed");
18438 }
18439 };
18440 },
18441 timestampOffset: function timestampOffset(offset) {
18442 return function (type, sourceUpdater) {
18443 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
18444 // or the media source does not contain this source buffer.
18445
18446 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18447 return;
18448 }
18449
18450 sourceUpdater.logger_("Setting " + type + "timestampOffset to " + offset);
18451 sourceBuffer.timestampOffset = offset;
18452 };
18453 },
18454 callback: function callback(_callback) {
18455 return function (type, sourceUpdater) {
18456 _callback();
18457 };
18458 },
18459 endOfStream: function endOfStream(error) {
18460 return function (sourceUpdater) {
18461 if (sourceUpdater.mediaSource.readyState !== 'open') {
18462 return;
18463 }
18464
18465 sourceUpdater.logger_("Calling mediaSource endOfStream(" + (error || '') + ")");
18466
18467 try {
18468 sourceUpdater.mediaSource.endOfStream(error);
18469 } catch (e) {
18470 videojs__default["default"].log.warn('Failed to call media source endOfStream', e);
18471 }
18472 };
18473 },
18474 duration: function duration(_duration) {
18475 return function (sourceUpdater) {
18476 sourceUpdater.logger_("Setting mediaSource duration to " + _duration);
18477
18478 try {
18479 sourceUpdater.mediaSource.duration = _duration;
18480 } catch (e) {
18481 videojs__default["default"].log.warn('Failed to set media source duration', e);
18482 }
18483 };
18484 },
18485 abort: function abort() {
18486 return function (type, sourceUpdater) {
18487 if (sourceUpdater.mediaSource.readyState !== 'open') {
18488 return;
18489 }
18490
18491 var sourceBuffer = sourceUpdater[type + "Buffer"]; // can't do anything if the media source / source buffer is null
18492 // or the media source does not contain this source buffer.
18493
18494 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18495 return;
18496 }
18497
18498 sourceUpdater.logger_("calling abort on " + type + "Buffer");
18499
18500 try {
18501 sourceBuffer.abort();
18502 } catch (e) {
18503 videojs__default["default"].log.warn("Failed to abort on " + type + "Buffer", e);
18504 }
18505 };
18506 },
18507 addSourceBuffer: function addSourceBuffer(type, codec) {
18508 return function (sourceUpdater) {
18509 var titleType = toTitleCase(type);
18510 var mime = codecs_js.getMimeForCodec(codec);
18511 sourceUpdater.logger_("Adding " + type + "Buffer with codec " + codec + " to mediaSource");
18512 var sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
18513 sourceBuffer.addEventListener('updateend', sourceUpdater["on" + titleType + "UpdateEnd_"]);
18514 sourceBuffer.addEventListener('error', sourceUpdater["on" + titleType + "Error_"]);
18515 sourceUpdater.codecs[type] = codec;
18516 sourceUpdater[type + "Buffer"] = sourceBuffer;
18517 };
18518 },
18519 removeSourceBuffer: function removeSourceBuffer(type) {
18520 return function (sourceUpdater) {
18521 var sourceBuffer = sourceUpdater[type + "Buffer"];
18522 cleanupBuffer(type, sourceUpdater); // can't do anything if the media source / source buffer is null
18523 // or the media source does not contain this source buffer.
18524
18525 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18526 return;
18527 }
18528
18529 sourceUpdater.logger_("Removing " + type + "Buffer with codec " + sourceUpdater.codecs[type] + " from mediaSource");
18530
18531 try {
18532 sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
18533 } catch (e) {
18534 videojs__default["default"].log.warn("Failed to removeSourceBuffer " + type + "Buffer", e);
18535 }
18536 };
18537 },
18538 changeType: function changeType(codec) {
18539 return function (type, sourceUpdater) {
18540 var sourceBuffer = sourceUpdater[type + "Buffer"];
18541 var mime = codecs_js.getMimeForCodec(codec); // can't do anything if the media source / source buffer is null
18542 // or the media source does not contain this source buffer.
18543
18544 if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
18545 return;
18546 } // do not update codec if we don't need to.
18547
18548
18549 if (sourceUpdater.codecs[type] === codec) {
18550 return;
18551 }
18552
18553 sourceUpdater.logger_("changing " + type + "Buffer codec from " + sourceUpdater.codecs[type] + " to " + codec);
18554 sourceBuffer.changeType(mime);
18555 sourceUpdater.codecs[type] = codec;
18556 };
18557 }
18558};
18559
var pushQueue = function pushQueue(_ref) {
  // Build the queue entry from the named options of the single options
  // argument; `sourceUpdater` itself is not stored on the entry.
  var entry = {
    type: _ref.type,
    action: _ref.action,
    doneFn: _ref.doneFn,
    name: _ref.name
  };

  // Queue the operation for this buffer type, then kick the queue so the
  // action runs as soon as the corresponding source buffer is able.
  _ref.sourceUpdater.queue.push(entry);

  shiftQueue(_ref.type, _ref.sourceUpdater);
};
18574
var onUpdateend = function onUpdateend(type, sourceUpdater) {
  return function (e) {
    // An updateend can arrive without a matching queued action: the w3c spec
    // does not define every case that fires one (setting the media source
    // duration, for instance, may fire updateend on source buffers). When
    // there is no pending action for this buffer type we simply advance the
    // queue; otherwise we complete the pending action first.
    var pending = sourceUpdater.queuePending[type];

    if (pending) {
      var doneFn = pending.doneFn;
      sourceUpdater.queuePending[type] = null;

      // report any source buffer error (stored by the error listener) to the
      // completion callback
      if (doneFn) {
        doneFn(sourceUpdater[type + "Error_"]);
      }
    }

    shiftQueue(type, sourceUpdater);
  };
};
/**
 * A queue of callbacks to be serialized and applied when a
 * MediaSource and its associated SourceBuffers are not in the
 * updating state. It is used by the segment loader to update the
 * underlying SourceBuffers when new data is loaded, for instance.
 *
 * @class SourceUpdater
 * @param {MediaSource} mediaSource the MediaSource to create SourceBuffers from
 */
18606
18607
var SourceUpdater = /*#__PURE__*/function (_videojs$EventTarget) {
  _inheritsLoose__default["default"](SourceUpdater, _videojs$EventTarget);

  /**
   * @param {MediaSource} mediaSource
   *        The MediaSource whose SourceBuffers this updater will manage.
   */
  function SourceUpdater(mediaSource) {
    var _this;

    _this = _videojs$EventTarget.call(this) || this;
    _this.mediaSource = mediaSource;

    // mediaSource-level actions (addSourceBuffer etc.) can only run once the
    // media source is open, so re-shift the mediaSource queue on sourceopen
    _this.sourceopenListener_ = function () {
      return shiftQueue('mediaSource', _assertThisInitialized__default["default"](_this));
    };

    _this.mediaSource.addEventListener('sourceopen', _this.sourceopenListener_);

    _this.logger_ = logger('SourceUpdater'); // initial timestamp offset is 0

    _this.audioTimestampOffset_ = 0;
    _this.videoTimestampOffset_ = 0;
    _this.queue = [];
    _this.queuePending = {
      audio: null,
      video: null
    };
    _this.delayedAudioAppendQueue_ = [];
    _this.videoAppendQueued_ = false;
    _this.codecs = {};
    _this.onVideoUpdateEnd_ = onUpdateend('video', _assertThisInitialized__default["default"](_this));
    _this.onAudioUpdateEnd_ = onUpdateend('audio', _assertThisInitialized__default["default"](_this));

    _this.onVideoError_ = function (e) {
      // used for debugging
      _this.videoError_ = e;
    };

    _this.onAudioError_ = function (e) {
      // used for debugging
      _this.audioError_ = e;
    };

    _this.createdSourceBuffers_ = false;
    _this.initializedEme_ = false;
    _this.triggeredReady_ = false;
    return _this;
  }

  var _proto = SourceUpdater.prototype;

  /**
   * Mark EME (encrypted media) initialization as finished and attempt to
   * trigger 'ready' (which also requires created source buffers).
   */
  _proto.initializedEme = function initializedEme() {
    this.initializedEme_ = true;
    this.triggerReady();
  };

  /**
   * Whether source buffers have been created yet.
   *
   * @return {boolean}
   *         true once createSourceBuffers has run
   */
  _proto.hasCreatedSourceBuffers = function hasCreatedSourceBuffers() {
    // if false, likely waiting on one of the segment loaders to get enough data to create
    // source buffers
    return this.createdSourceBuffers_;
  };

  /**
   * Whether EME initialization has completed.
   *
   * @return {boolean}
   *         true once initializedEme has been called
   */
  _proto.hasInitializedAnyEme = function hasInitializedAnyEme() {
    return this.initializedEme_;
  };

  /**
   * Whether the updater is ready for appends: source buffers exist and EME
   * initialization is complete.
   *
   * @return {boolean}
   *         the readiness of this source updater
   */
  _proto.ready = function ready() {
    return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
  };

  /**
   * Create source buffers for the given codecs (once only) and trigger the
   * 'createdsourcebuffers' event, then attempt to trigger 'ready'.
   *
   * @param {Object} codecs
   *        Map of buffer type ('audio'/'video') to codec string.
   */
  _proto.createSourceBuffers = function createSourceBuffers(codecs) {
    if (this.hasCreatedSourceBuffers()) {
      // already created them before
      return;
    } // the intial addOrChangeSourceBuffers will always be
    // two add buffers.


    this.addOrChangeSourceBuffers(codecs);
    this.createdSourceBuffers_ = true;
    this.trigger('createdsourcebuffers');
    this.triggerReady();
  };

  /**
   * Trigger the 'ready' event exactly once, and only when ready() is true.
   */
  _proto.triggerReady = function triggerReady() {
    // only allow ready to be triggered once, this prevents the case
    // where:
    // 1. we trigger createdsourcebuffers
    // 2. ie 11 synchronously initializates eme
    // 3. the synchronous initialization causes us to trigger ready
    // 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
    if (this.ready() && !this.triggeredReady_) {
      this.triggeredReady_ = true;
      this.trigger('ready');
    }
  };

  /**
   * Add a type of source buffer to the media source.
   *
   * @param {string} type
   *        The type of source buffer to add.
   *
   * @param {string} codec
   *        The codec to add the source buffer with.
   */
  _proto.addSourceBuffer = function addSourceBuffer(type, codec) {
    pushQueue({
      type: 'mediaSource',
      sourceUpdater: this,
      action: actions.addSourceBuffer(type, codec),
      name: 'addSourceBuffer'
    });
  };

  /**
   * call abort on a source buffer.
   *
   * @param {string} type
   *        The type of source buffer to call abort on.
   */
  _proto.abort = function abort(type) {
    pushQueue({
      type: type,
      sourceUpdater: this,
      action: actions.abort(type),
      name: 'abort'
    });
  };

  /**
   * Call removeSourceBuffer and remove a specific type
   * of source buffer on the mediaSource.
   *
   * @param {string} type
   *        The type of source buffer to remove.
   */
  _proto.removeSourceBuffer = function removeSourceBuffer(type) {
    if (!this.canRemoveSourceBuffer()) {
      videojs__default["default"].log.error('removeSourceBuffer is not supported!');
      return;
    }

    pushQueue({
      type: 'mediaSource',
      sourceUpdater: this,
      action: actions.removeSourceBuffer(type),
      name: 'removeSourceBuffer'
    });
  };

  /**
   * Whether or not the removeSourceBuffer function is supported
   * on the mediaSource.
   *
   * @return {boolean}
   *          if removeSourceBuffer can be called.
   */
  _proto.canRemoveSourceBuffer = function canRemoveSourceBuffer() {
    // IE reports that it supports removeSourceBuffer, but often throws
    // errors when attempting to use the function. So we report that it
    // does not support removeSourceBuffer. As of Firefox 83 removeSourceBuffer
    // throws errors, so we report that it does not support this as well.
    return !videojs__default["default"].browser.IE_VERSION && !videojs__default["default"].browser.IS_FIREFOX && window__default["default"].MediaSource && window__default["default"].MediaSource.prototype && typeof window__default["default"].MediaSource.prototype.removeSourceBuffer === 'function';
  };

  /**
   * Whether or not the changeType function is supported
   * on our SourceBuffers.
   *
   * @return {boolean}
   *         if changeType can be called.
   */
  SourceUpdater.canChangeType = function canChangeType() {
    return window__default["default"].SourceBuffer && window__default["default"].SourceBuffer.prototype && typeof window__default["default"].SourceBuffer.prototype.changeType === 'function';
  };

  /**
   * Whether or not the changeType function is supported
   * on our SourceBuffers.
   *
   * @return {boolean}
   *         if changeType can be called.
   */
  _proto.canChangeType = function canChangeType() {
    return this.constructor.canChangeType();
  };

  /**
   * Call the changeType function on a source buffer, given the code and type.
   *
   * @param {string} type
   *        The type of source buffer to call changeType on.
   *
   * @param {string} codec
   *        The codec string to change type with on the source buffer.
   */
  _proto.changeType = function changeType(type, codec) {
    if (!this.canChangeType()) {
      videojs__default["default"].log.error('changeType is not supported!');
      return;
    }

    pushQueue({
      type: type,
      sourceUpdater: this,
      action: actions.changeType(codec),
      name: 'changeType'
    });
  };

  /**
   * Add source buffers with a codec or, if they are already created,
   * call changeType on source buffers using changeType.
   *
   * @param {Object} codecs
   *        Codecs to switch to
   */
  _proto.addOrChangeSourceBuffers = function addOrChangeSourceBuffers(codecs) {
    var _this2 = this;

    if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
      throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
    }

    Object.keys(codecs).forEach(function (type) {
      var codec = codecs[type];

      if (!_this2.hasCreatedSourceBuffers()) {
        return _this2.addSourceBuffer(type, codec);
      }

      if (_this2.canChangeType()) {
        _this2.changeType(type, codec);
      }
    });
  };

  /**
   * Queue an update to append an ArrayBuffer.
   *
   * @param {MediaObject} object containing audioBytes and/or videoBytes
   * @param {Function} done the function to call when done
   * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
   */
  _proto.appendBuffer = function appendBuffer(options, doneFn) {
    var _this3 = this;

    var segmentInfo = options.segmentInfo,
        type = options.type,
        bytes = options.bytes;
    this.processedAppend_ = true;

    // delay audio appends until the first video append has been queued, so
    // that video (which drives the timestamp offset) always leads
    if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
      this.delayedAudioAppendQueue_.push([options, doneFn]);
      this.logger_("delayed audio append of " + bytes.length + " until video append");
      return;
    } // In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
    // not be fired. This means that the queue will be blocked until the next action
    // taken by the segment-loader. Provide a mechanism for segment-loader to handle
    // these errors by calling the doneFn with the specific error.


    var onError = doneFn;
    pushQueue({
      type: type,
      sourceUpdater: this,
      action: actions.appendBuffer(bytes, segmentInfo || {
        mediaIndex: -1
      }, onError),
      doneFn: doneFn,
      name: 'appendBuffer'
    });

    if (type === 'video') {
      this.videoAppendQueued_ = true;

      if (!this.delayedAudioAppendQueue_.length) {
        return;
      }

      // flush any audio appends that were waiting on the first video append
      var queue = this.delayedAudioAppendQueue_.slice();
      this.logger_("queuing delayed audio " + queue.length + " appendBuffers");
      this.delayedAudioAppendQueue_.length = 0;
      queue.forEach(function (que) {
        _this3.appendBuffer.apply(_this3, que);
      });
    }
  };

  /**
   * Get the audio buffer's buffered timerange.
   *
   * @return {TimeRange}
   *         The audio buffer's buffered time range
   */
  _proto.audioBuffered = function audioBuffered() {
    // no media source/source buffer or it isn't in the media sources
    // source buffer list
    if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
      return videojs__default["default"].createTimeRange();
    }

    return this.audioBuffer.buffered ? this.audioBuffer.buffered : videojs__default["default"].createTimeRange();
  };

  /**
   * Get the video buffer's buffered timerange.
   *
   * @return {TimeRange}
   *         The video buffer's buffered time range
   */
  _proto.videoBuffered = function videoBuffered() {
    // no media source/source buffer or it isn't in the media sources
    // source buffer list
    if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
      return videojs__default["default"].createTimeRange();
    }

    return this.videoBuffer.buffered ? this.videoBuffer.buffered : videojs__default["default"].createTimeRange();
  };

  /**
   * Get a combined video/audio buffer's buffered timerange.
   *
   * @return {TimeRange}
   *         the combined time range
   */
  _proto.buffered = function buffered() {
    var video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
    var audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;

    if (audio && !video) {
      return this.audioBuffered();
    }

    if (video && !audio) {
      return this.videoBuffered();
    }

    return bufferIntersection(this.audioBuffered(), this.videoBuffered());
  };

  /**
   * Add a callback to the queue that will set duration on the mediaSource.
   *
   * @param {number} duration
   *        The duration to set
   *
   * @param {Function} [doneFn]
   *        function to run after duration has been set.
   */
  _proto.setDuration = function setDuration(duration, doneFn) {
    if (doneFn === void 0) {
      doneFn = noop;
    }

    // In order to set the duration on the media source, it's necessary to wait for all
    // source buffers to no longer be updating. "If the updating attribute equals true on
    // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
    // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
    pushQueue({
      type: 'mediaSource',
      sourceUpdater: this,
      action: actions.duration(duration),
      name: 'duration',
      doneFn: doneFn
    });
  };

  /**
   * Add a mediaSource endOfStream call to the queue
   *
   * @param {Error} [error]
   *        Call endOfStream with an error
   *
   * @param {Function} [doneFn]
   *        A function that should be called when the
   *        endOfStream call has finished.
   */
  _proto.endOfStream = function endOfStream(error, doneFn) {
    if (error === void 0) {
      error = null;
    }

    if (doneFn === void 0) {
      doneFn = noop;
    }

    // MediaSource#endOfStream only accepts a string error category
    if (typeof error !== 'string') {
      error = undefined;
    } // In order to set the duration on the media source, it's necessary to wait for all
    // source buffers to no longer be updating. "If the updating attribute equals true on
    // any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
    // abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).


    pushQueue({
      type: 'mediaSource',
      sourceUpdater: this,
      action: actions.endOfStream(error),
      name: 'endOfStream',
      doneFn: doneFn
    });
  };

  /**
   * Queue an update to remove a time range from the buffer.
   *
   * @param {number} start where to start the removal
   * @param {number} end where to end the removal
   * @param {Function} [done=noop] optional callback to be executed when the remove
   * operation is complete
   * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
   */
  _proto.removeAudio = function removeAudio(start, end, done) {
    if (done === void 0) {
      done = noop;
    }

    if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
      done();
      return;
    }

    pushQueue({
      type: 'audio',
      sourceUpdater: this,
      action: actions.remove(start, end),
      doneFn: done,
      name: 'remove'
    });
  };

  /**
   * Queue an update to remove a time range from the buffer.
   *
   * @param {number} start where to start the removal
   * @param {number} end where to end the removal
   * @param {Function} [done=noop] optional callback to be executed when the remove
   * operation is complete
   * @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
   */
  _proto.removeVideo = function removeVideo(start, end, done) {
    if (done === void 0) {
      done = noop;
    }

    if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
      done();
      return;
    }

    pushQueue({
      type: 'video',
      sourceUpdater: this,
      action: actions.remove(start, end),
      doneFn: done,
      name: 'remove'
    });
  };

  /**
   * Whether the underlying sourceBuffer is updating or not
   *
   * @return {boolean} the updating status of the SourceBuffer
   */
  _proto.updating = function updating() {
    // the audio/video source buffer is updating
    if (_updating('audio', this) || _updating('video', this)) {
      return true;
    }

    return false;
  };

  /**
   * Set/get the timestampoffset on the audio SourceBuffer
   *
   * @return {number} the timestamp offset
   */
  _proto.audioTimestampOffset = function audioTimestampOffset(offset) {
    if (typeof offset !== 'undefined' && this.audioBuffer && // no point in updating if it's the same
    this.audioTimestampOffset_ !== offset) {
      pushQueue({
        type: 'audio',
        sourceUpdater: this,
        action: actions.timestampOffset(offset),
        name: 'timestampOffset'
      });
      this.audioTimestampOffset_ = offset;
    }

    return this.audioTimestampOffset_;
  };

  /**
   * Set/get the timestampoffset on the video SourceBuffer
   *
   * @return {number} the timestamp offset
   */
  _proto.videoTimestampOffset = function videoTimestampOffset(offset) {
    // FIX: compare against the cached videoTimestampOffset_ value, matching
    // audioTimestampOffset above. The previous code read
    // `this.videoTimestampOffset` (no underscore), which is this method
    // itself and therefore never equal to `offset`, so a redundant
    // timestampOffset action was queued even when the offset was unchanged.
    if (typeof offset !== 'undefined' && this.videoBuffer && // no point in updating if it's the same
    this.videoTimestampOffset_ !== offset) {
      pushQueue({
        type: 'video',
        sourceUpdater: this,
        action: actions.timestampOffset(offset),
        name: 'timestampOffset'
      });
      this.videoTimestampOffset_ = offset;
    }

    return this.videoTimestampOffset_;
  };

  /**
   * Add a function to the queue that will be called
   * when it is its turn to run in the audio queue.
   *
   * @param {Function} callback
   *        The callback to queue.
   */
  _proto.audioQueueCallback = function audioQueueCallback(callback) {
    if (!this.audioBuffer) {
      return;
    }

    pushQueue({
      type: 'audio',
      sourceUpdater: this,
      action: actions.callback(callback),
      name: 'callback'
    });
  };

  /**
   * Add a function to the queue that will be called
   * when it is its turn to run in the video queue.
   *
   * @param {Function} callback
   *        The callback to queue.
   */
  _proto.videoQueueCallback = function videoQueueCallback(callback) {
    if (!this.videoBuffer) {
      return;
    }

    pushQueue({
      type: 'video',
      sourceUpdater: this,
      action: actions.callback(callback),
      name: 'callback'
    });
  };

  /**
   * dispose of the source updater and the underlying sourceBuffer
   */
  _proto.dispose = function dispose() {
    var _this4 = this;

    this.trigger('dispose');
    bufferTypes.forEach(function (type) {
      _this4.abort(type);

      if (_this4.canRemoveSourceBuffer()) {
        _this4.removeSourceBuffer(type);
      } else {
        // when removeSourceBuffer isn't supported, clean up the buffer's
        // listeners/references once its queue drains
        _this4[type + "QueueCallback"](function () {
          return cleanupBuffer(type, _this4);
        });
      }
    });
    this.videoAppendQueued_ = false;
    this.delayedAudioAppendQueue_.length = 0;

    if (this.sourceopenListener_) {
      this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
    }

    this.off();
  };

  return SourceUpdater;
}(videojs__default["default"].EventTarget);
19213
/**
 * Decode a Uint8Array of UTF-8 bytes into a string.
 *
 * Converts bytes to a binary string in bounded chunks before applying the
 * escape/decodeURIComponent UTF-8 decode. The previous single
 * `String.fromCharCode.apply(null, uintArray)` call spreads every byte as a
 * function argument, which throws (RangeError / stack overflow) once the
 * array exceeds the engine's maximum argument count — easily reached by real
 * media segments.
 *
 * @param {Uint8Array} uintArray bytes to decode as UTF-8
 * @return {string} the decoded string
 */
var uint8ToUtf8 = function uint8ToUtf8(uintArray) {
  // stay well under every engine's max-arguments limit
  var CHUNK_SIZE = 0x8000;
  var binary = '';

  for (var i = 0; i < uintArray.length; i += CHUNK_SIZE) {
    var chunk = uintArray.subarray ? uintArray.subarray(i, i + CHUNK_SIZE) : uintArray.slice(i, i + CHUNK_SIZE);
    binary += String.fromCharCode.apply(null, chunk);
  }

  return decodeURIComponent(escape(binary));
};
19217
// Two WebVTT line terminators ('\n\n') as bytes. Per the WebVTT spec's file
// structure, two or more line terminators must separate the header from the
// rest of the file, so these bytes are appended after init segments.
var VTT_LINE_TERMINATORS = new Uint8Array(Array.prototype.map.call('\n\n', function (char) {
  return char.charCodeAt(0);
}));
19221/**
19222 * An object that manages segment loading and appending.
19223 *
19224 * @class VTTSegmentLoader
19225 * @param {Object} options required and optional options
19226 * @extends videojs.EventTarget
19227 */
19228
19229var VTTSegmentLoader = /*#__PURE__*/function (_SegmentLoader) {
19230 _inheritsLoose__default["default"](VTTSegmentLoader, _SegmentLoader);
19231
19232 function VTTSegmentLoader(settings, options) {
19233 var _this;
19234
19235 if (options === void 0) {
19236 options = {};
19237 }
19238
19239 _this = _SegmentLoader.call(this, settings, options) || this; // SegmentLoader requires a MediaSource be specified or it will throw an error;
19240 // however, VTTSegmentLoader has no need of a media source, so delete the reference
19241
19242 _this.mediaSource_ = null;
19243 _this.subtitlesTrack_ = null;
19244 _this.loaderType_ = 'subtitle';
19245 _this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks; // The VTT segment will have its own time mappings. Saving VTT segment timing info in
19246 // the sync controller leads to improper behavior.
19247
19248 _this.shouldSaveSegmentTimingInfo_ = false;
19249 return _this;
19250 }
19251
19252 var _proto = VTTSegmentLoader.prototype;
19253
19254 _proto.createTransmuxer_ = function createTransmuxer_() {
19255 // don't need to transmux any subtitles
19256 return null;
19257 }
19258 /**
19259 * Indicates which time ranges are buffered
19260 *
19261 * @return {TimeRange}
19262 * TimeRange object representing the current buffered ranges
19263 */
19264 ;
19265
19266 _proto.buffered_ = function buffered_() {
19267 if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
19268 return videojs__default["default"].createTimeRanges();
19269 }
19270
19271 var cues = this.subtitlesTrack_.cues;
19272 var start = cues[0].startTime;
19273 var end = cues[cues.length - 1].startTime;
19274 return videojs__default["default"].createTimeRanges([[start, end]]);
19275 }
19276 /**
19277 * Gets and sets init segment for the provided map
19278 *
19279 * @param {Object} map
19280 * The map object representing the init segment to get or set
19281 * @param {boolean=} set
19282 * If true, the init segment for the provided map should be saved
19283 * @return {Object}
19284 * map object for desired init segment
19285 */
19286 ;
19287
19288 _proto.initSegmentForMap = function initSegmentForMap(map, set) {
19289 if (set === void 0) {
19290 set = false;
19291 }
19292
19293 if (!map) {
19294 return null;
19295 }
19296
19297 var id = initSegmentId(map);
19298 var storedMap = this.initSegments_[id];
19299
19300 if (set && !storedMap && map.bytes) {
19301 // append WebVTT line terminators to the media initialization segment if it exists
19302 // to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
19303 // requires two or more WebVTT line terminators between the WebVTT header and the
19304 // rest of the file
19305 var combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
19306 var combinedSegment = new Uint8Array(combinedByteLength);
19307 combinedSegment.set(map.bytes);
19308 combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
19309 this.initSegments_[id] = storedMap = {
19310 resolvedUri: map.resolvedUri,
19311 byterange: map.byterange,
19312 bytes: combinedSegment
19313 };
19314 }
19315
19316 return storedMap || map;
19317 }
19318 /**
19319 * Returns true if all configuration required for loading is present, otherwise false.
19320 *
19321 * @return {boolean} True if the all configuration is ready for loading
19322 * @private
19323 */
19324 ;
19325
19326 _proto.couldBeginLoading_ = function couldBeginLoading_() {
19327 return this.playlist_ && this.subtitlesTrack_ && !this.paused();
19328 }
19329 /**
19330 * Once all the starting parameters have been specified, begin
19331 * operation. This method should only be invoked from the INIT
19332 * state.
19333 *
19334 * @private
19335 */
19336 ;
19337
19338 _proto.init_ = function init_() {
19339 this.state = 'READY';
19340 this.resetEverything();
19341 return this.monitorBuffer_();
19342 }
19343 /**
19344 * Set a subtitle track on the segment loader to add subtitles to
19345 *
19346 * @param {TextTrack=} track
19347 * The text track to add loaded subtitles to
19348 * @return {TextTrack}
19349 * Returns the subtitles track
19350 */
19351 ;
19352
19353 _proto.track = function track(_track) {
19354 if (typeof _track === 'undefined') {
19355 return this.subtitlesTrack_;
19356 }
19357
19358 this.subtitlesTrack_ = _track; // if we were unpaused but waiting for a sourceUpdater, start
19359 // buffering now
19360
19361 if (this.state === 'INIT' && this.couldBeginLoading_()) {
19362 this.init_();
19363 }
19364
19365 return this.subtitlesTrack_;
19366 }
19367 /**
19368 * Remove any data in the source buffer between start and end times
19369 *
19370 * @param {number} start - the start time of the region to remove from the buffer
19371 * @param {number} end - the end time of the region to remove from the buffer
19372 */
19373 ;
19374
19375 _proto.remove = function remove(start, end) {
19376 removeCuesFromTrack(start, end, this.subtitlesTrack_);
19377 }
19378 /**
19379 * fill the buffer with segements unless the sourceBuffers are
19380 * currently updating
19381 *
19382 * Note: this function should only ever be called by monitorBuffer_
19383 * and never directly
19384 *
19385 * @private
19386 */
19387 ;
19388
19389 _proto.fillBuffer_ = function fillBuffer_() {
19390 var _this2 = this;
19391
19392 // see if we need to begin loading immediately
19393 var segmentInfo = this.chooseNextRequest_();
19394
19395 if (!segmentInfo) {
19396 return;
19397 }
19398
19399 if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
19400 // We don't have the timestamp offset that we need to sync subtitles.
19401 // Rerun on a timestamp offset or user interaction.
19402 var checkTimestampOffset = function checkTimestampOffset() {
19403 _this2.state = 'READY';
19404
19405 if (!_this2.paused()) {
19406 // if not paused, queue a buffer check as soon as possible
19407 _this2.monitorBuffer_();
19408 }
19409 };
19410
19411 this.syncController_.one('timestampoffset', checkTimestampOffset);
19412 this.state = 'WAITING_ON_TIMELINE';
19413 return;
19414 }
19415
19416 this.loadSegment_(segmentInfo);
19417 } // never set a timestamp offset for vtt segments.
19418 ;
19419
19420 _proto.timestampOffsetForSegment_ = function timestampOffsetForSegment_() {
19421 return null;
19422 };
19423
19424 _proto.chooseNextRequest_ = function chooseNextRequest_() {
19425 return this.skipEmptySegments_(_SegmentLoader.prototype.chooseNextRequest_.call(this));
19426 }
19427 /**
19428 * Prevents the segment loader from requesting segments we know contain no subtitles
19429 * by walking forward until we find the next segment that we don't know whether it is
19430 * empty or not.
19431 *
19432 * @param {Object} segmentInfo
19433 * a segment info object that describes the current segment
19434 * @return {Object}
19435 * a segment info object that describes the current segment
19436 */
19437 ;
19438
19439 _proto.skipEmptySegments_ = function skipEmptySegments_(segmentInfo) {
19440 while (segmentInfo && segmentInfo.segment.empty) {
19441 // stop at the last possible segmentInfo
19442 if (segmentInfo.mediaIndex + 1 >= segmentInfo.playlist.segments.length) {
19443 segmentInfo = null;
19444 break;
19445 }
19446
19447 segmentInfo = this.generateSegmentInfo_({
19448 playlist: segmentInfo.playlist,
19449 mediaIndex: segmentInfo.mediaIndex + 1,
19450 startOfSegment: segmentInfo.startOfSegment + segmentInfo.duration,
19451 isSyncRequest: segmentInfo.isSyncRequest
19452 });
19453 }
19454
19455 return segmentInfo;
19456 };
19457
19458 _proto.stopForError = function stopForError(error) {
19459 this.error(error);
19460 this.state = 'READY';
19461 this.pause();
19462 this.trigger('error');
19463 }
19464 /**
19465 * append a decrypted segement to the SourceBuffer through a SourceUpdater
19466 *
19467 * @private
19468 */
19469 ;
19470
  /**
   * Handles a completed (or failed) VTT segment request: records stats, waits
   * for vtt.js if it has not loaded yet, parses cues, updates cue timing, and
   * appends the cues to the subtitle track.
   *
   * @param {Object} error - request error, if any (code from REQUEST_ERRORS)
   * @param {Object} simpleSegment - the requested segment's stats and bytes
   * @param {Object} result - parse result, passed through on vttjs retry
   * @private
   */
  _proto.segmentRequestFinished_ = function segmentRequestFinished_(error, simpleSegment, result) {
    var _this3 = this;

    // without a track to append to there is nothing to do
    if (!this.subtitlesTrack_) {
      this.state = 'READY';
      return;
    }

    this.saveTransferStats_(simpleSegment.stats); // the request was aborted

    if (!this.pendingSegment_) {
      this.state = 'READY';
      this.mediaRequestsAborted += 1;
      return;
    }

    if (error) {
      if (error.code === REQUEST_ERRORS.TIMEOUT) {
        this.handleTimeout_();
      }

      if (error.code === REQUEST_ERRORS.ABORTED) {
        this.mediaRequestsAborted += 1;
      } else {
        this.mediaRequestsErrored += 1;
      }

      this.stopForError(error);
      return;
    }

    var segmentInfo = this.pendingSegment_; // although the VTT segment loader bandwidth isn't really used, it's good to
    // maintain functionality between segment loaders

    this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
    this.state = 'APPENDING'; // used for tests

    this.trigger('appending');
    var segment = segmentInfo.segment;

    if (segment.map) {
      segment.map.bytes = simpleSegment.map.bytes;
    }

    segmentInfo.bytes = simpleSegment.bytes; // Make sure that vttjs has loaded, otherwise, wait till it finished loading

    if (typeof window__default["default"].WebVTT !== 'function' && this.subtitlesTrack_ && this.subtitlesTrack_.tech_) {
      var loadHandler;

      // if vtt.js fails to load, stop trying and surface the error
      var errorHandler = function errorHandler() {
        _this3.subtitlesTrack_.tech_.off('vttjsloaded', loadHandler);

        _this3.stopForError({
          message: 'Error loading vtt.js'
        });

        return;
      };

      // once vtt.js becomes available, re-run this handler to parse the segment
      loadHandler = function loadHandler() {
        _this3.subtitlesTrack_.tech_.off('vttjserror', errorHandler);

        _this3.segmentRequestFinished_(error, simpleSegment, result);
      };

      this.state = 'WAITING_ON_VTTJS';
      this.subtitlesTrack_.tech_.one('vttjsloaded', loadHandler);
      this.subtitlesTrack_.tech_.one('vttjserror', errorHandler);
      return;
    }

    segment.requested = true;

    try {
      this.parseVTTCues_(segmentInfo);
    } catch (e) {
      this.stopForError({
        message: e.message
      });
      return;
    }

    this.updateTimeMapping_(segmentInfo, this.syncController_.timelines[segmentInfo.timeline], this.playlist_);

    // derive segment timing from the parsed cues when possible, otherwise fall
    // back to the playlist's notion of where the segment starts and ends
    if (segmentInfo.cues.length) {
      segmentInfo.timingInfo = {
        start: segmentInfo.cues[0].startTime,
        end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
      };
    } else {
      segmentInfo.timingInfo = {
        start: segmentInfo.startOfSegment,
        end: segmentInfo.startOfSegment + segmentInfo.duration
      };
    }

    if (segmentInfo.isSyncRequest) {
      this.trigger('syncinfoupdate');
      this.pendingSegment_ = null;
      this.state = 'READY';
      return;
    }

    segmentInfo.byteLength = segmentInfo.bytes.byteLength;
    this.mediaSecondsLoaded += segment.duration; // Create VTTCue instances for each cue in the new segment and add them to
    // the subtitle track

    segmentInfo.cues.forEach(function (cue) {
      _this3.subtitlesTrack_.addCue(_this3.featuresNativeTextTracks_ ? new window__default["default"].VTTCue(cue.startTime, cue.endTime, cue.text) : cue);
    }); // Remove any duplicate cues from the subtitle track. The WebVTT spec allows
    // cues to have identical time-intervals, but if the text is also identical
    // we can safely assume it is a duplicate that can be removed (ex. when a cue
    // "overlaps" VTT segments)

    removeDuplicateCuesFromTrack(this.subtitlesTrack_);
    this.handleAppendsDone_();
  };
19588
  // Video/audio caption data is not supported by the VTT loader, so incoming
  // data events are intentionally ignored.
  _proto.handleData_ = function handleData_() {// noop as we shouldn't be getting video/audio data captions
    // that we do not support here.
  };
19592
  // Timing info end is derived from the parsed cues in segmentRequestFinished_,
  // so there is nothing to update here.
  _proto.updateTimingInfoEnd_ = function updateTimingInfoEnd_() {// noop
  }
19595 /**
19596 * Uses the WebVTT parser to parse the segment response
19597 *
19598 * @param {Object} segmentInfo
19599 * a segment info object that describes the current segment
19600 * @private
19601 */
19602 ;
19603
19604 _proto.parseVTTCues_ = function parseVTTCues_(segmentInfo) {
19605 var decoder;
19606 var decodeBytesToString = false;
19607
19608 if (typeof window__default["default"].TextDecoder === 'function') {
19609 decoder = new window__default["default"].TextDecoder('utf8');
19610 } else {
19611 decoder = window__default["default"].WebVTT.StringDecoder();
19612 decodeBytesToString = true;
19613 }
19614
19615 var parser = new window__default["default"].WebVTT.Parser(window__default["default"], window__default["default"].vttjs, decoder);
19616 segmentInfo.cues = [];
19617 segmentInfo.timestampmap = {
19618 MPEGTS: 0,
19619 LOCAL: 0
19620 };
19621 parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
19622
19623 parser.ontimestampmap = function (map) {
19624 segmentInfo.timestampmap = map;
19625 };
19626
19627 parser.onparsingerror = function (error) {
19628 videojs__default["default"].log.warn('Error encountered when parsing cues: ' + error.message);
19629 };
19630
19631 if (segmentInfo.segment.map) {
19632 var mapData = segmentInfo.segment.map.bytes;
19633
19634 if (decodeBytesToString) {
19635 mapData = uint8ToUtf8(mapData);
19636 }
19637
19638 parser.parse(mapData);
19639 }
19640
19641 var segmentData = segmentInfo.bytes;
19642
19643 if (decodeBytesToString) {
19644 segmentData = uint8ToUtf8(segmentData);
19645 }
19646
19647 parser.parse(segmentData);
19648 parser.flush();
19649 }
19650 /**
19651 * Updates the start and end times of any cues parsed by the WebVTT parser using
19652 * the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
19653 * from the SyncController
19654 *
19655 * @param {Object} segmentInfo
19656 * a segment info object that describes the current segment
19657 * @param {Object} mappingObj
19658 * object containing a mapping from TS to media time
19659 * @param {Object} playlist
19660 * the playlist object containing the segment
19661 * @private
19662 */
19663 ;
19664
  _proto.updateTimeMapping_ = function updateTimeMapping_(segmentInfo, mappingObj, playlist) {
    var segment = segmentInfo.segment;

    if (!mappingObj) {
      // If the sync controller does not have a mapping of TS to Media Time for the
      // timeline, then we don't have enough information to update the cue
      // start/end times
      return;
    }

    if (!segmentInfo.cues.length) {
      // If there are no cues, we also do not have enough information to figure out
      // segment timing. Mark that the segment contains no cues so we don't re-request
      // an empty segment.
      segment.empty = true;
      return;
    }

    var timestampmap = segmentInfo.timestampmap;
    // offset = (MPEGTS timestamp in seconds) - (local vtt time) + (TS-to-media mapping)
    var diff = timestampmap.MPEGTS / clock.ONE_SECOND_IN_TS - timestampmap.LOCAL + mappingObj.mapping;
    segmentInfo.cues.forEach(function (cue) {
      // First convert cue time to TS time using the timestamp-map provided within the vtt
      cue.startTime += diff;
      cue.endTime += diff;
    });

    if (!playlist.syncInfo) {
      var firstStart = segmentInfo.cues[0].startTime;
      var lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
      // conservative estimate of the segment's media time: the earlier of the
      // first cue's start or the last cue's start minus the segment duration
      playlist.syncInfo = {
        mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
        time: Math.min(firstStart, lastStart - segment.duration)
      };
    }
  };
19700
19701 return VTTSegmentLoader;
19702}(SegmentLoader);
19703
19704/**
19705 * @file ad-cue-tags.js
19706 */
19707/**
19708 * Searches for an ad cue that overlaps with the given mediaTime
19709 *
19710 * @param {Object} track
19711 * the track to find the cue for
19712 *
19713 * @param {number} mediaTime
19714 * the time to find the cue at
19715 *
19716 * @return {Object|null}
19717 * the found cue or null
19718 */
19719
var findAdCue = function findAdCue(track, mediaTime) {
  var trackCues = track.cues;

  // linear scan for the first ad cue whose [adStartTime, adEndTime] interval
  // contains the requested media time (inclusive on both ends)
  for (var idx = 0; idx < trackCues.length; idx++) {
    var candidate = trackCues[idx];
    var containsTime = mediaTime >= candidate.adStartTime && mediaTime <= candidate.adEndTime;

    if (containsTime) {
      return candidate;
    }
  }

  return null;
};
/**
 * Creates and/or extends ad cues on the given track to reflect the
 * EXT-X-CUE-OUT / EXT-X-CUE-OUT-CONT / EXT-X-CUE-IN tags found on the
 * playlist's segments.
 *
 * @param {Object} media - the media playlist whose segments carry cue tags
 * @param {Object} track - the text track to add/update ad cues on
 * @param {number} [offset=0] - media time at the start of the playlist
 */
var updateAdCues = function updateAdCues(media, track, offset) {
  if (offset === void 0) {
    offset = 0;
  }

  if (!media.segments) {
    return;
  }

  var mediaTime = offset;
  var cue;

  for (var i = 0; i < media.segments.length; i++) {
    var segment = media.segments[i];

    if (!cue) {
      // Since the cues will span for at least the segment duration, adding a fudge
      // factor of half segment duration will prevent duplicate cues from being
      // created when timing info is not exact (e.g. cue start time initialized
      // at 10.006677, but next call mediaTime is 10.003332 )
      cue = findAdCue(track, mediaTime + segment.duration / 2);
    }

    if (cue) {
      if ('cueIn' in segment) {
        // Found a CUE-IN so end the cue
        cue.endTime = mediaTime;
        cue.adEndTime = mediaTime;
        mediaTime += segment.duration;
        cue = null;
        continue;
      }

      if (mediaTime < cue.endTime) {
        // Already processed this mediaTime for this cue
        mediaTime += segment.duration;
        continue;
      } // otherwise extend cue until a CUE-IN is found


      cue.endTime += segment.duration;
    } else {
      if ('cueOut' in segment) {
        cue = new window__default["default"].VTTCue(mediaTime, mediaTime + segment.duration, segment.cueOut);
        cue.adStartTime = mediaTime; // Assumes tag format to be
        // #EXT-X-CUE-OUT:30

        cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
        track.addCue(cue);
      }

      if ('cueOutCont' in segment) {
        // Entered into the middle of an ad cue
        // Assumes tag format to be
        // #EXT-X-CUE-OUT-CONT:10/30
        var _segment$cueOutCont$s = segment.cueOutCont.split('/').map(parseFloat),
            adOffset = _segment$cueOutCont$s[0],
            adTotal = _segment$cueOutCont$s[1];

        cue = new window__default["default"].VTTCue(mediaTime, mediaTime + segment.duration, '');
        cue.adStartTime = mediaTime - adOffset;
        cue.adEndTime = cue.adStartTime + adTotal;
        track.addCue(cue);
      }
    }

    mediaTime += segment.duration;
  }
};
19802
19803// synchronize expired playlist segments.
19804// the max media sequence diff is 48 hours of live stream
19805// content with two second segments. Anything larger than that
19806// will likely be invalid.
19807
var MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC = 86400; // 86400 two-second segments === 48 hours
// Each strategy produces a candidate sync-point ({time, segmentIndex,
// partIndex}) for a playlist, or null when it cannot.
var syncPointStrategies = [// Strategy "VOD": Handle the VOD-case where the sync-point is *always*
// the equivalence display-time 0 === segment-index 0
{
  name: 'VOD',
  run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
    if (duration !== Infinity) {
      var syncPoint = {
        time: 0,
        segmentIndex: 0,
        partIndex: null
      };
      return syncPoint;
    }

    return null;
  }
}, // Strategy "ProgramDateTime": We have a program-date-time tag in this playlist
{
  name: 'ProgramDateTime',
  run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
    if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
      return null;
    }

    var syncPoint = null;
    var lastDistance = null;
    var partsAndSegments = getPartsAndSegments(playlist);
    currentTime = currentTime || 0;

    for (var i = 0; i < partsAndSegments.length; i++) {
      // start from the end and loop backwards for live
      // or start from the front and loop forwards for non-live
      var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
      var partAndSegment = partsAndSegments[index];
      var segment = partAndSegment.segment;
      var datetimeMapping = syncController.timelineToDatetimeMappings[segment.timeline];

      if (!datetimeMapping) {
        continue;
      }

      if (segment.dateTimeObject) {
        var segmentTime = segment.dateTimeObject.getTime() / 1000;
        var start = segmentTime + datetimeMapping; // take part duration into account.

        if (segment.parts && typeof partAndSegment.partIndex === 'number') {
          for (var z = 0; z < partAndSegment.partIndex; z++) {
            start += segment.parts[z].duration;
          }
        }

        var distance = Math.abs(currentTime - start); // Once the distance begins to increase, or if distance is 0, we have passed
        // currentTime and can stop looking for better candidates

        if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
          break;
        }

        lastDistance = distance;
        syncPoint = {
          time: start,
          segmentIndex: partAndSegment.segmentIndex,
          partIndex: partAndSegment.partIndex
        };
      }
    }

    return syncPoint;
  }
}, // Strategy "Segment": We have a known time mapping for a timeline and a
// segment in the current timeline with timing data
{
  name: 'Segment',
  run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
    var syncPoint = null;
    var lastDistance = null;
    currentTime = currentTime || 0;
    var partsAndSegments = getPartsAndSegments(playlist);

    for (var i = 0; i < partsAndSegments.length; i++) {
      // start from the end and loop backwards for live
      // or start from the front and loop forwards for non-live
      var index = playlist.endList || currentTime === 0 ? i : partsAndSegments.length - (i + 1);
      var partAndSegment = partsAndSegments[index];
      var segment = partAndSegment.segment;
      var start = partAndSegment.part && partAndSegment.part.start || segment && segment.start;

      if (segment.timeline === currentTimeline && typeof start !== 'undefined') {
        var distance = Math.abs(currentTime - start); // Once the distance begins to increase, we have passed
        // currentTime and can stop looking for better candidates

        if (lastDistance !== null && lastDistance < distance) {
          break;
        }

        if (!syncPoint || lastDistance === null || lastDistance >= distance) {
          lastDistance = distance;
          syncPoint = {
            time: start,
            segmentIndex: partAndSegment.segmentIndex,
            partIndex: partAndSegment.partIndex
          };
        }
      }
    }

    return syncPoint;
  }
}, // Strategy "Discontinuity": We have a discontinuity with a known
// display-time
{
  name: 'Discontinuity',
  run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
    var syncPoint = null;
    currentTime = currentTime || 0;

    if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
      var lastDistance = null;

      for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
        var segmentIndex = playlist.discontinuityStarts[i];
        var discontinuity = playlist.discontinuitySequence + i + 1;
        var discontinuitySync = syncController.discontinuities[discontinuity];

        if (discontinuitySync) {
          var distance = Math.abs(currentTime - discontinuitySync.time); // Once the distance begins to increase, we have passed
          // currentTime and can stop looking for better candidates

          if (lastDistance !== null && lastDistance < distance) {
            break;
          }

          if (!syncPoint || lastDistance === null || lastDistance >= distance) {
            lastDistance = distance;
            syncPoint = {
              time: discontinuitySync.time,
              segmentIndex: segmentIndex,
              partIndex: null
            };
          }
        }
      }
    }

    return syncPoint;
  }
}, // Strategy "Playlist": We have a playlist with a known mapping of
// segment index to display time
{
  name: 'Playlist',
  run: function run(syncController, playlist, duration, currentTimeline, currentTime) {
    if (playlist.syncInfo) {
      var syncPoint = {
        time: playlist.syncInfo.time,
        segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence,
        partIndex: null
      };
      return syncPoint;
    }

    return null;
  }
}];
19972
var SyncController = /*#__PURE__*/function (_videojs$EventTarget) {
  _inheritsLoose__default["default"](SyncController, _videojs$EventTarget);

  /**
   * Tracks mappings between "media time" and "display time" (per timeline),
   * discontinuity locations, and ProgramDateTime mappings so segment loaders
   * can locate sync-points across playlists and variants.
   *
   * @param {Object} options - currently unused
   */
  function SyncController(options) {
    var _this;

    _this = _videojs$EventTarget.call(this) || this; // ...for synching across variants

    _this.timelines = [];
    _this.discontinuities = [];
    _this.timelineToDatetimeMappings = {};
    _this.logger_ = logger('SyncController');
    return _this;
  }
  /**
   * Find a sync-point for the playlist specified
   *
   * A sync-point is defined as a known mapping from display-time to
   * a segment-index in the current playlist.
   *
   * @param {Playlist} playlist
   *        The playlist that needs a sync-point
   * @param {number} duration
   *        Duration of the MediaSource (Infinite if playing a live source)
   * @param {number} currentTimeline
   *        The last timeline from which a segment was loaded
   * @return {Object}
   *        A sync-point object
   */


  var _proto = SyncController.prototype;
20005
20006 _proto.getSyncPoint = function getSyncPoint(playlist, duration, currentTimeline, currentTime) {
20007 var syncPoints = this.runStrategies_(playlist, duration, currentTimeline, currentTime);
20008
20009 if (!syncPoints.length) {
20010 // Signal that we need to attempt to get a sync-point manually
20011 // by fetching a segment in the playlist and constructing
20012 // a sync-point from that information
20013 return null;
20014 } // Now find the sync-point that is closest to the currentTime because
20015 // that should result in the most accurate guess about which segment
20016 // to fetch
20017
20018
20019 return this.selectSyncPoint_(syncPoints, {
20020 key: 'time',
20021 value: currentTime
20022 });
20023 }
20024 /**
20025 * Calculate the amount of time that has expired off the playlist during playback
20026 *
20027 * @param {Playlist} playlist
20028 * Playlist object to calculate expired from
20029 * @param {number} duration
20030 * Duration of the MediaSource (Infinity if playling a live source)
20031 * @return {number|null}
20032 * The amount of time that has expired off the playlist during playback. Null
20033 * if no sync-points for the playlist can be found.
20034 */
20035 ;
20036
20037 _proto.getExpiredTime = function getExpiredTime(playlist, duration) {
20038 if (!playlist || !playlist.segments) {
20039 return null;
20040 }
20041
20042 var syncPoints = this.runStrategies_(playlist, duration, playlist.discontinuitySequence, 0); // Without sync-points, there is not enough information to determine the expired time
20043
20044 if (!syncPoints.length) {
20045 return null;
20046 }
20047
20048 var syncPoint = this.selectSyncPoint_(syncPoints, {
20049 key: 'segmentIndex',
20050 value: 0
20051 }); // If the sync-point is beyond the start of the playlist, we want to subtract the
20052 // duration from index 0 to syncPoint.segmentIndex instead of adding.
20053
20054 if (syncPoint.segmentIndex > 0) {
20055 syncPoint.time *= -1;
20056 }
20057
20058 return Math.abs(syncPoint.time + sumDurations({
20059 defaultDuration: playlist.targetDuration,
20060 durationList: playlist.segments,
20061 startIndex: syncPoint.segmentIndex,
20062 endIndex: 0
20063 }));
20064 }
20065 /**
20066 * Runs each sync-point strategy and returns a list of sync-points returned by the
20067 * strategies
20068 *
20069 * @private
20070 * @param {Playlist} playlist
20071 * The playlist that needs a sync-point
20072 * @param {number} duration
20073 * Duration of the MediaSource (Infinity if playing a live source)
20074 * @param {number} currentTimeline
20075 * The last timeline from which a segment was loaded
20076 * @return {Array}
20077 * A list of sync-point objects
20078 */
20079 ;
20080
20081 _proto.runStrategies_ = function runStrategies_(playlist, duration, currentTimeline, currentTime) {
20082 var syncPoints = []; // Try to find a sync-point in by utilizing various strategies...
20083
20084 for (var i = 0; i < syncPointStrategies.length; i++) {
20085 var strategy = syncPointStrategies[i];
20086 var syncPoint = strategy.run(this, playlist, duration, currentTimeline, currentTime);
20087
20088 if (syncPoint) {
20089 syncPoint.strategy = strategy.name;
20090 syncPoints.push({
20091 strategy: strategy.name,
20092 syncPoint: syncPoint
20093 });
20094 }
20095 }
20096
20097 return syncPoints;
20098 }
20099 /**
20100 * Selects the sync-point nearest the specified target
20101 *
20102 * @private
20103 * @param {Array} syncPoints
20104 * List of sync-points to select from
20105 * @param {Object} target
20106 * Object specifying the property and value we are targeting
20107 * @param {string} target.key
20108 * Specifies the property to target. Must be either 'time' or 'segmentIndex'
20109 * @param {number} target.value
20110 * The value to target for the specified key.
20111 * @return {Object}
20112 * The sync-point nearest the target
20113 */
20114 ;
20115
20116 _proto.selectSyncPoint_ = function selectSyncPoint_(syncPoints, target) {
20117 var bestSyncPoint = syncPoints[0].syncPoint;
20118 var bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
20119 var bestStrategy = syncPoints[0].strategy;
20120
20121 for (var i = 1; i < syncPoints.length; i++) {
20122 var newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
20123
20124 if (newDistance < bestDistance) {
20125 bestDistance = newDistance;
20126 bestSyncPoint = syncPoints[i].syncPoint;
20127 bestStrategy = syncPoints[i].strategy;
20128 }
20129 }
20130
20131 this.logger_("syncPoint for [" + target.key + ": " + target.value + "] chosen with strategy" + (" [" + bestStrategy + "]: [time:" + bestSyncPoint.time + ",") + (" segmentIndex:" + bestSyncPoint.segmentIndex) + (typeof bestSyncPoint.partIndex === 'number' ? ",partIndex:" + bestSyncPoint.partIndex : '') + ']');
20132 return bestSyncPoint;
20133 }
20134 /**
20135 * Save any meta-data present on the segments when segments leave
20136 * the live window to the playlist to allow for synchronization at the
20137 * playlist level later.
20138 *
20139 * @param {Playlist} oldPlaylist - The previous active playlist
20140 * @param {Playlist} newPlaylist - The updated and most current playlist
20141 */
20142 ;
20143
  _proto.saveExpiredSegmentInfo = function saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
    var mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence; // Ignore large media sequence gaps

    if (mediaSequenceDiff > MAX_MEDIA_SEQUENCE_DIFF_FOR_SYNC) {
      videojs__default["default"].log.warn("Not saving expired segment info. Media sequence gap " + mediaSequenceDiff + " is too large.");
      return;
    } // When a segment expires from the playlist and it has a start time
    // save that information as a possible sync-point reference in future


    // walk from the most recently expired segment backwards, keeping the first
    // (latest) one that has a known start time
    for (var i = mediaSequenceDiff - 1; i >= 0; i--) {
      var lastRemovedSegment = oldPlaylist.segments[i];

      if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
        newPlaylist.syncInfo = {
          mediaSequence: oldPlaylist.mediaSequence + i,
          time: lastRemovedSegment.start
        };
        this.logger_("playlist refresh sync: [time:" + newPlaylist.syncInfo.time + "," + (" mediaSequence: " + newPlaylist.syncInfo.mediaSequence + "]"));
        this.trigger('syncinfoupdate');
        break;
      }
    }
  }
20168 /**
20169 * Save the mapping from playlist's ProgramDateTime to display. This should only happen
20170 * before segments start to load.
20171 *
20172 * @param {Playlist} playlist - The currently active playlist
20173 */
20174 ;
20175
20176 _proto.setDateTimeMappingForStart = function setDateTimeMappingForStart(playlist) {
20177 // It's possible for the playlist to be updated before playback starts, meaning time
20178 // zero is not yet set. If, during these playlist refreshes, a discontinuity is
20179 // crossed, then the old time zero mapping (for the prior timeline) would be retained
20180 // unless the mappings are cleared.
20181 this.timelineToDatetimeMappings = {};
20182
20183 if (playlist.segments && playlist.segments.length && playlist.segments[0].dateTimeObject) {
20184 var firstSegment = playlist.segments[0];
20185 var playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
20186 this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
20187 }
20188 }
20189 /**
20190 * Calculates and saves timeline mappings, playlist sync info, and segment timing values
20191 * based on the latest timing information.
20192 *
20193 * @param {Object} options
20194 * Options object
20195 * @param {SegmentInfo} options.segmentInfo
20196 * The current active request information
20197 * @param {boolean} options.shouldSaveTimelineMapping
20198 * If there's a timeline change, determines if the timeline mapping should be
20199 * saved for timeline mapping and program date time mappings.
20200 */
20201 ;
20202
  _proto.saveSegmentTimingInfo = function saveSegmentTimingInfo(_ref) {
    var segmentInfo = _ref.segmentInfo,
        shouldSaveTimelineMapping = _ref.shouldSaveTimelineMapping;
    var didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(segmentInfo, segmentInfo.timingInfo, shouldSaveTimelineMapping);
    var segment = segmentInfo.segment;

    if (didCalculateSegmentTimeMapping) {
      this.saveDiscontinuitySyncInfo_(segmentInfo); // If the playlist does not have sync information yet, record that information
      // now with segment timing information

      if (!segmentInfo.playlist.syncInfo) {
        segmentInfo.playlist.syncInfo = {
          mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
          time: segment.start
        };
      }
    }

    var dateTime = segment.dateTimeObject;

    // a discontinuity that carries a ProgramDateTime also anchors its
    // timeline's datetime mapping (used by the ProgramDateTime sync strategy)
    if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
      this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
    }
  };
20227
20228 _proto.timestampOffsetForTimeline = function timestampOffsetForTimeline(timeline) {
20229 if (typeof this.timelines[timeline] === 'undefined') {
20230 return null;
20231 }
20232
20233 return this.timelines[timeline].time;
20234 };
20235
20236 _proto.mappingForTimeline = function mappingForTimeline(timeline) {
20237 if (typeof this.timelines[timeline] === 'undefined') {
20238 return null;
20239 }
20240
20241 return this.timelines[timeline].mapping;
20242 }
20243 /**
20244 * Use the "media time" for a segment to generate a mapping to "display time" and
20245 * save that display time to the segment.
20246 *
20247 * @private
20248 * @param {SegmentInfo} segmentInfo
20249 * The current active request information
20250 * @param {Object} timingInfo
20251 * The start and end time of the current segment in "media time"
20252 * @param {boolean} shouldSaveTimelineMapping
20253 * If there's a timeline change, determines if the timeline mapping should be
20254 * saved in timelines.
20255 * @return {boolean}
20256 * Returns false if segment time mapping could not be calculated
20257 */
20258 ;
20259
  _proto.calculateSegmentTimeMapping_ = function calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
    // TODO: remove side effects
    var segment = segmentInfo.segment;
    var part = segmentInfo.part;
    var mappingObj = this.timelines[segmentInfo.timeline];
    var start;
    var end;

    if (typeof segmentInfo.timestampOffset === 'number') {
      // a timestamp offset was set for this request, so it establishes a fresh
      // mapping for the segment's timeline
      mappingObj = {
        time: segmentInfo.startOfSegment,
        mapping: segmentInfo.startOfSegment - timingInfo.start
      };

      if (shouldSaveTimelineMapping) {
        this.timelines[segmentInfo.timeline] = mappingObj;
        this.trigger('timestampoffset');
        this.logger_("time mapping for timeline " + segmentInfo.timeline + ": " + ("[time: " + mappingObj.time + "] [mapping: " + mappingObj.mapping + "]"));
      }

      start = segmentInfo.startOfSegment;
      end = timingInfo.end + mappingObj.mapping;
    } else if (mappingObj) {
      // reuse the previously saved mapping for this timeline
      start = timingInfo.start + mappingObj.mapping;
      end = timingInfo.end + mappingObj.mapping;
    } else {
      return false;
    }

    if (part) {
      part.start = start;
      part.end = end;
    } // If we don't have a segment start yet or the start value we got
    // is less than our current segment.start value, save a new start value.
    // We have to do this because parts will have segment timing info saved
    // multiple times and we want segment start to be the earliest part start
    // value for that segment.


    if (!segment.start || start < segment.start) {
      segment.start = start;
    }

    segment.end = end;
    return true;
  }
20306 /**
20307 * Each time we have discontinuity in the playlist, attempt to calculate the location
20308 * in display of the start of the discontinuity and save that. We also save an accuracy
20309 * value so that we save values with the most accuracy (closest to 0.)
20310 *
20311 * @private
20312 * @param {SegmentInfo} segmentInfo - The current active request information
20313 */
20314 ;
20315
  _proto.saveDiscontinuitySyncInfo_ = function saveDiscontinuitySyncInfo_(segmentInfo) {
    var playlist = segmentInfo.playlist;
    var segment = segmentInfo.segment; // If the current segment is a discontinuity then we know exactly where
    // the start of the range and it's accuracy is 0 (greater accuracy values
    // mean more approximation)

    if (segment.discontinuity) {
      this.discontinuities[segment.timeline] = {
        time: segment.start,
        accuracy: 0
      };
    } else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
      // Search for future discontinuities that we can provide better timing
      // information for and save that information for sync purposes
      for (var i = 0; i < playlist.discontinuityStarts.length; i++) {
        var segmentIndex = playlist.discontinuityStarts[i];
        var discontinuity = playlist.discontinuitySequence + i + 1;
        var mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
        // accuracy = segment distance to the discontinuity; lower is better
        var accuracy = Math.abs(mediaIndexDiff);

        // only overwrite a saved value when this estimate is more accurate
        if (!this.discontinuities[discontinuity] || this.discontinuities[discontinuity].accuracy > accuracy) {
          var time = void 0;

          if (mediaIndexDiff < 0) {
            // discontinuity is before this segment: walk durations backwards
            time = segment.start - sumDurations({
              defaultDuration: playlist.targetDuration,
              durationList: playlist.segments,
              startIndex: segmentInfo.mediaIndex,
              endIndex: segmentIndex
            });
          } else {
            // discontinuity is after this segment: walk durations forwards
            time = segment.end + sumDurations({
              defaultDuration: playlist.targetDuration,
              durationList: playlist.segments,
              startIndex: segmentInfo.mediaIndex + 1,
              endIndex: segmentIndex
            });
          }

          this.discontinuities[discontinuity] = {
            time: time,
            accuracy: accuracy
          };
        }
      }
    }
  };
20363
  // Notify listeners that this controller is going away, then remove all of
  // its event listeners.
  _proto.dispose = function dispose() {
    this.trigger('dispose');
    this.off();
  };
20368
20369 return SyncController;
20370}(videojs__default["default"].EventTarget);
20371
20372/**
20373 * The TimelineChangeController acts as a source for segment loaders to listen for and
20374 * keep track of latest and pending timeline changes. This is useful to ensure proper
20375 * sync, as each loader may need to make a consideration for what timeline the other
20376 * loader is on before making changes which could impact the other loader's media.
20377 *
20378 * @class TimelineChangeController
20379 * @extends videojs.EventTarget
20380 */
20381
var TimelineChangeController = /*#__PURE__*/function (_videojs$EventTarget) {
  _inheritsLoose__default["default"](TimelineChangeController, _videojs$EventTarget);

  function TimelineChangeController() {
    var _this = _videojs$EventTarget.call(this) || this;

    // Per-loader-type maps of in-flight and most-recently-completed
    // timeline switches.
    _this.pendingTimelineChanges_ = {};
    _this.lastTimelineChanges_ = {};
    return _this;
  }

  var _proto = TimelineChangeController.prototype;

  /**
   * Drop any pending timeline change recorded for a loader type and notify
   * listeners.
   *
   * @param {string} type the loader type
   */
  _proto.clearPendingTimelineChange = function clearPendingTimelineChange(type) {
    this.pendingTimelineChanges_[type] = null;
    this.trigger('pendingtimelinechange');
  };

  /**
   * Record (when both `from` and `to` are numbers) or simply read the
   * pending timeline change for a loader type.
   *
   * @param {Object} _ref
   * @param {string} _ref.type the loader type
   * @param {number} [_ref.from] the timeline being switched away from
   * @param {number} [_ref.to] the timeline being switched to
   * @return {Object|null|undefined} the pending change entry for `type`
   */
  _proto.pendingTimelineChange = function pendingTimelineChange(_ref) {
    var type = _ref.type;
    var from = _ref.from;
    var to = _ref.to;
    var hasCompleteChange = typeof from === 'number' && typeof to === 'number';

    if (hasCompleteChange) {
      this.pendingTimelineChanges_[type] = {
        type: type,
        from: from,
        to: to
      };
      this.trigger('pendingtimelinechange');
    }

    return this.pendingTimelineChanges_[type];
  };

  /**
   * Record (when both `from` and `to` are numbers) or simply read the most
   * recent completed timeline change for a loader type. Recording a
   * completed change clears any pending entry for that type.
   *
   * @param {Object} _ref2
   * @param {string} _ref2.type the loader type
   * @param {number} [_ref2.from] the timeline switched away from
   * @param {number} [_ref2.to] the timeline switched to
   * @return {Object|undefined} the last completed change entry for `type`
   */
  _proto.lastTimelineChange = function lastTimelineChange(_ref2) {
    var type = _ref2.type;
    var from = _ref2.from;
    var to = _ref2.to;
    var hasCompleteChange = typeof from === 'number' && typeof to === 'number';

    if (hasCompleteChange) {
      this.lastTimelineChanges_[type] = {
        type: type,
        from: from,
        to: to
      };
      delete this.pendingTimelineChanges_[type];
      this.trigger('timelinechange');
    }

    return this.lastTimelineChanges_[type];
  };

  /**
   * Tear down: emit `dispose`, reset both change maps, and remove all
   * event listeners.
   */
  _proto.dispose = function dispose() {
    this.trigger('dispose');
    this.pendingTimelineChanges_ = {};
    this.lastTimelineChanges_ = {};
    this.off();
  };

  return TimelineChangeController;
}(videojs__default["default"].EventTarget);
20445
20446/* rollup-plugin-worker-factory start for worker!/Users/bcasey/Projects/videojs-http-streaming/src/decrypter-worker.js */
20447var workerCode = transform(getWorkerString(function () {
20448
  // Emulates a CommonJS module environment for bundled dependencies: builds a
  // stub `module` object (with `path`, `exports` and a scoped `require`),
  // runs the factory `fn` against it, and returns whatever the factory put on
  // `module.exports`.
  function createCommonjsModule(fn, basedir, module) {
    return module = {
      path: basedir,
      exports: {},
      require: function require(path, base) {
        // fall back to this module's own path when no base is given
        return commonjsRequire(path, base === undefined || base === null ? module.path : base);
      }
    }, fn(module, module.exports), module.exports;
  }
20458
  // Bundler stub: any dynamic require() reaching this point was not
  // statically resolved at build time, so fail loudly rather than return
  // something bogus.
  function commonjsRequire() {
    throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
  }
20462
  // Babel runtime helper (bundled): implements ES2015 `class` member
  // installation for transpiled code.
  var createClass = createCommonjsModule(function (module) {
    // Copy each descriptor onto the target with class-member semantics:
    // non-enumerable unless stated, configurable, and writable when it is a
    // data property.
    function _defineProperties(target, props) {
      for (var i = 0; i < props.length; i++) {
        var descriptor = props[i];
        descriptor.enumerable = descriptor.enumerable || false;
        descriptor.configurable = true;
        if ("value" in descriptor) descriptor.writable = true;
        Object.defineProperty(target, descriptor.key, descriptor);
      }
    }

    // Attach prototype (instance) members and static members to a
    // constructor function, returning the constructor.
    function _createClass(Constructor, protoProps, staticProps) {
      if (protoProps) _defineProperties(Constructor.prototype, protoProps);
      if (staticProps) _defineProperties(Constructor, staticProps);
      return Constructor;
    }

    module.exports = _createClass;
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });
  // Babel runtime helper (bundled): Object.setPrototypeOf with a __proto__
  // fallback. Note it reassigns `module.exports` on first call so subsequent
  // calls skip the feature detection.
  var setPrototypeOf = createCommonjsModule(function (module) {
    function _setPrototypeOf(o, p) {
      module.exports = _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
        o.__proto__ = p;
        return o;
      };

      module.exports["default"] = module.exports, module.exports.__esModule = true;
      return _setPrototypeOf(o, p);
    }

    module.exports = _setPrototypeOf;
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });
  // Babel runtime helper (bundled): wires up prototype-chain inheritance for
  // both instances (subClass.prototype) and the constructors themselves,
  // without the extra runtime checks of the full `_inherits` helper.
  var inheritsLoose = createCommonjsModule(function (module) {
    function _inheritsLoose(subClass, superClass) {
      subClass.prototype = Object.create(superClass.prototype);
      subClass.prototype.constructor = subClass;
      setPrototypeOf(subClass, superClass);
    }

    module.exports = _inheritsLoose;
    module.exports["default"] = module.exports, module.exports.__esModule = true;
  });
20507 /**
20508 * @file stream.js
20509 */
20510
20511 /**
   * A lightweight readable stream implementation that handles event dispatching.
20513 *
20514 * @class Stream
20515 */
20516
20517 var Stream = /*#__PURE__*/function () {
20518 function Stream() {
20519 this.listeners = {};
20520 }
20521 /**
20522 * Add a listener for a specified event type.
20523 *
20524 * @param {string} type the event name
20525 * @param {Function} listener the callback to be invoked when an event of
20526 * the specified type occurs
20527 */
20528
20529
20530 var _proto = Stream.prototype;
20531
20532 _proto.on = function on(type, listener) {
20533 if (!this.listeners[type]) {
20534 this.listeners[type] = [];
20535 }
20536
20537 this.listeners[type].push(listener);
20538 }
20539 /**
20540 * Remove a listener for a specified event type.
20541 *
20542 * @param {string} type the event name
20543 * @param {Function} listener a function previously registered for this
20544 * type of event through `on`
20545 * @return {boolean} if we could turn it off or not
20546 */
20547 ;
20548
20549 _proto.off = function off(type, listener) {
20550 if (!this.listeners[type]) {
20551 return false;
20552 }
20553
20554 var index = this.listeners[type].indexOf(listener); // TODO: which is better?
20555 // In Video.js we slice listener functions
20556 // on trigger so that it does not mess up the order
20557 // while we loop through.
20558 //
20559 // Here we slice on off so that the loop in trigger
20560 // can continue using it's old reference to loop without
20561 // messing up the order.
20562
20563 this.listeners[type] = this.listeners[type].slice(0);
20564 this.listeners[type].splice(index, 1);
20565 return index > -1;
20566 }
20567 /**
20568 * Trigger an event of the specified type on this stream. Any additional
20569 * arguments to this function are passed as parameters to event listeners.
20570 *
20571 * @param {string} type the event name
20572 */
20573 ;
20574
20575 _proto.trigger = function trigger(type) {
20576 var callbacks = this.listeners[type];
20577
20578 if (!callbacks) {
20579 return;
20580 } // Slicing the arguments on every invocation of this method
20581 // can add a significant amount of overhead. Avoid the
20582 // intermediate object creation for the common case of a
20583 // single callback argument
20584
20585
20586 if (arguments.length === 2) {
20587 var length = callbacks.length;
20588
20589 for (var i = 0; i < length; ++i) {
20590 callbacks[i].call(this, arguments[1]);
20591 }
20592 } else {
20593 var args = Array.prototype.slice.call(arguments, 1);
20594 var _length = callbacks.length;
20595
20596 for (var _i = 0; _i < _length; ++_i) {
20597 callbacks[_i].apply(this, args);
20598 }
20599 }
20600 }
20601 /**
20602 * Destroys the stream and cleans up.
20603 */
20604 ;
20605
20606 _proto.dispose = function dispose() {
20607 this.listeners = {};
20608 }
20609 /**
20610 * Forwards all `data` events on this stream to the destination stream. The
20611 * destination stream should provide a method `push` to receive the data
20612 * events as they arrive.
20613 *
20614 * @param {Stream} destination the stream that will receive all `data` events
20615 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
20616 */
20617 ;
20618
20619 _proto.pipe = function pipe(destination) {
20620 this.on('data', function (data) {
20621 destination.push(data);
20622 });
20623 };
20624
20625 return Stream;
20626 }();
20627 /*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
20628
20629 /**
20630 * Returns the subarray of a Uint8Array without PKCS#7 padding.
20631 *
20632 * @param padded {Uint8Array} unencrypted bytes that have been padded
20633 * @return {Uint8Array} the unpadded bytes
20634 * @see http://tools.ietf.org/html/rfc5652
20635 */
20636
20637
20638 function unpad(padded) {
20639 return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
20640 }
20641 /*! @name aes-decrypter @version 3.1.2 @license Apache-2.0 */
20642
20643 /**
20644 * @file aes.js
20645 *
20646 * This file contains an adaptation of the AES decryption algorithm
   * from the Stanford Javascript Cryptography Library. That work is
20648 * covered by the following copyright and permissions notice:
20649 *
20650 * Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
20651 * All rights reserved.
20652 *
20653 * Redistribution and use in source and binary forms, with or without
20654 * modification, are permitted provided that the following conditions are
20655 * met:
20656 *
20657 * 1. Redistributions of source code must retain the above copyright
20658 * notice, this list of conditions and the following disclaimer.
20659 *
20660 * 2. Redistributions in binary form must reproduce the above
20661 * copyright notice, this list of conditions and the following
20662 * disclaimer in the documentation and/or other materials provided
20663 * with the distribution.
20664 *
20665 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
20666 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
20667 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20668 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
20669 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
20670 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
20671 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
20672 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
20673 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
20674 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
20675 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
20676 *
20677 * The views and conclusions contained in the software and documentation
20678 * are those of the authors and should not be interpreted as representing
20679 * official policies, either expressed or implied, of the authors.
20680 */
20681
20682 /**
20683 * Expand the S-box tables.
20684 *
20685 * @private
20686 */
20687
20688
  /**
   * Expand the AES S-box and round lookup tables.
   *
   * Produces two table sets: tables[0] for encryption and tables[1] for
   * decryption. Within each set, entries 0-3 are the round tables (S-box
   * combined with MixColumns, rotated one byte per entry) and entry 4 is the
   * S-box itself (inverse S-box on the decryption side).
   *
   * @private
   */
  var precompute = function precompute() {
    var tables = [[[], [], [], [], []], [[], [], [], [], []]];
    var encTable = tables[0];
    var decTable = tables[1];
    var sbox = encTable[4];
    var sboxInv = decTable[4];
    var i;
    var x;
    var xInv;
    var d = [];
    var th = [];
    var x2;
    var x4;
    var x8;
    var s;
    var tEnc;
    var tDec; // Compute double (d) and third (th) tables over GF(2^8)

    for (i = 0; i < 256; i++) {
      th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
    }

    // Step x through the field (x ^= d[x], i.e. multiply by 3) and xInv
    // inversely (via th) until the S-box is fully populated
    for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
      // Compute sbox entry: affine transform over the bits of xInv
      s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
      s = s >> 8 ^ s & 255 ^ 99;
      sbox[x] = s;
      sboxInv[s] = x; // Compute MixColumns

      x8 = d[x4 = d[x2 = d[x]]];
      tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
      tEnc = d[s] * 0x101 ^ s * 0x1010100;

      // Fill all four round tables with byte-rotated copies
      for (i = 0; i < 4; i++) {
        encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
        decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
      }
    } // Compactify. Considerable speedup on Firefox.


    for (i = 0; i < 5; i++) {
      encTable[i] = encTable[i].slice(0);
      decTable[i] = decTable[i].slice(0);
    }

    return tables;
  };

  // Shared, lazily-computed cache of the tables above; populated by the first
  // AES construction (see below) and copied per instance afterwards.
  var aesTables = null;
20738 /**
20739 * Schedule out an AES key for both encryption and decryption. This
20740 * is a low-level class. Use a cipher mode to do bulk encryption.
20741 *
20742 * @class AES
20743 * @param key {Array} The key as an array of 4, 6 or 8 words.
20744 */
20745
  var AES = /*#__PURE__*/function () {
    /**
     * Expand the given key into encryption and decryption key schedules.
     *
     * @param {Array} key the key as an array of 4, 6 or 8 32-bit words
     *        (AES-128/192/256 respectively)
     * @throws {Error} when the key is not 4, 6 or 8 words long
     */
    function AES(key) {
      /**
       * The expanded S-box and inverse S-box tables. These will be computed
       * on the client so that we don't have to send them down the wire.
       *
       * There are two tables, _tables[0] is for encryption and
       * _tables[1] is for decryption.
       *
       * The first 4 sub-tables are the expanded S-box with MixColumns. The
       * last (_tables[01][4]) is the S-box itself.
       *
       * @private
       */
      // if we have yet to precompute the S-box tables
      // do so now
      if (!aesTables) {
        aesTables = precompute();
      } // then make a copy of that object for use


      this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
      var i;
      var j;
      var tmp;
      var sbox = this._tables[0][4];
      var decTable = this._tables[1];
      var keyLen = key.length;
      var rcon = 1;

      if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
        throw new Error('Invalid aes key size');
      }

      var encKey = key.slice(0);
      var decKey = [];
      this._key = [encKey, decKey]; // schedule encryption keys

      for (i = keyLen; i < 4 * keyLen + 28; i++) {
        tmp = encKey[i - 1]; // apply sbox

        if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
          tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon

          if (i % keyLen === 0) {
            tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
            rcon = rcon << 1 ^ (rcon >> 7) * 283;
          }
        }

        encKey[i] = encKey[i - keyLen] ^ tmp;
      } // schedule decryption keys (encryption schedule reversed, with the
      // inverse MixColumns applied to the inner rounds)


      for (j = 0; i; j++, i--) {
        tmp = encKey[j & 3 ? i : i - 4];

        if (i <= 4 || j < 4) {
          decKey[j] = tmp;
        } else {
          decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
        }
      }
    }
    /**
     * Decrypt 16 bytes, specified as four 32-bit words.
     *
     * @param {number} encrypted0 the first word to decrypt
     * @param {number} encrypted1 the second word to decrypt
     * @param {number} encrypted2 the third word to decrypt
     * @param {number} encrypted3 the fourth word to decrypt
     * @param {Int32Array} out the array to write the decrypted words
     * into
     * @param {number} offset the offset into the output array to start
     * writing results
     * @return {Array} The plaintext.
     */


    var _proto = AES.prototype;

    _proto.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
      var key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data

      var a = encrypted0 ^ key[0];
      var b = encrypted3 ^ key[1];
      var c = encrypted2 ^ key[2];
      var d = encrypted1 ^ key[3];
      var a2;
      var b2;
      var c2; // key.length === 2 ?

      var nInnerRounds = key.length / 4 - 2;
      var i;
      var kIndex = 4;
      var table = this._tables[1]; // load up the tables

      var table0 = table[0];
      var table1 = table[1];
      var table2 = table[2];
      var table3 = table[3];
      var sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.

      for (i = 0; i < nInnerRounds; i++) {
        a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
        b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
        c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
        d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
        kIndex += 4;
        a = a2;
        b = b2;
        c = c2;
      } // Last round: plain S-box only (no MixColumns), rotating the state
      // variables as each output word is written.


      for (i = 0; i < 4; i++) {
        out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
        a2 = a;
        a = b;
        b = c;
        c = d;
        d = a2;
      }
    };

    return AES;
  }();
20873 /**
20874 * A wrapper around the Stream class to use setTimeout
20875 * and run stream "jobs" Asynchronously
20876 *
20877 * @class AsyncStream
20878 * @extends Stream
20879 */
20880
20881
  var AsyncStream = /*#__PURE__*/function (_Stream) {
    inheritsLoose(AsyncStream, _Stream);

    function AsyncStream() {
      var _this;

      // (the argument passed to the super call is ignored; Stream's
      // constructor takes no parameters)
      _this = _Stream.call(this, Stream) || this;
      _this.jobs = []; // FIFO queue of pending job functions
      _this.delay = 1; // ms between jobs, keeps the event loop responsive
      _this.timeout_ = null; // pending setTimeout id, or null when idle
      return _this;
    }
    /**
     * process an async job: run the oldest queued job, then schedule the
     * next one (if any) after `delay` ms
     *
     * @private
     */


    var _proto = AsyncStream.prototype;

    _proto.processJob_ = function processJob_() {
      this.jobs.shift()();

      if (this.jobs.length) {
        this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
      } else {
        this.timeout_ = null;
      }
    }
    /**
     * push a job into the stream, starting the processing loop if it was
     * idle
     *
     * @param {Function} job the job to push into the stream
     */
    ;

    _proto.push = function push(job) {
      this.jobs.push(job);

      if (!this.timeout_) {
        this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
      }
    };

    return AsyncStream;
  }(Stream);
20929 /**
20930 * Convert network-order (big-endian) bytes into their little-endian
20931 * representation.
20932 */
20933
20934
20935 var ntoh = function ntoh(word) {
20936 return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
20937 };
20938 /**
20939 * Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
20940 *
20941 * @param {Uint8Array} encrypted the encrypted bytes
20942 * @param {Uint32Array} key the bytes of the decryption key
20943 * @param {Uint32Array} initVector the initialization vector (IV) to
20944 * use for the first round of CBC.
20945 * @return {Uint8Array} the decrypted bytes
20946 *
20947 * @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
20948 * @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
20949 * @see https://tools.ietf.org/html/rfc2315
20950 */
20951
20952
  var decrypt = function decrypt(encrypted, key, initVector) {
    // word-level access to the encrypted bytes
    var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
    var decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output

    var decrypted = new Uint8Array(encrypted.byteLength);
    var decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
    // decrypted data

    var init0;
    var init1;
    var init2;
    var init3;
    var encrypted0;
    var encrypted1;
    var encrypted2;
    var encrypted3; // iteration variable

    var wordIx; // pull out the words of the IV to ensure we don't modify the
    // passed-in reference and easier access

    init0 = initVector[0];
    init1 = initVector[1];
    init2 = initVector[2];
    init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
    // to each decrypted block

    for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
      // convert big-endian (network order) words into little-endian
      // (javascript order)
      encrypted0 = ntoh(encrypted32[wordIx]);
      encrypted1 = ntoh(encrypted32[wordIx + 1]);
      encrypted2 = ntoh(encrypted32[wordIx + 2]);
      encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block

      decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
      // plaintext

      decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
      decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
      decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
      decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round: CBC chains each block's IV from
      // the previous block's ciphertext

      init0 = encrypted0;
      init1 = encrypted1;
      init2 = encrypted2;
      init3 = encrypted3;
    }

    return decrypted;
  };
21004 /**
21005 * The `Decrypter` class that manages decryption of AES
21006 * data through `AsyncStream` objects and the `decrypt`
21007 * function
21008 *
21009 * @param {Uint8Array} encrypted the encrypted bytes
21010 * @param {Uint32Array} key the bytes of the decryption key
21011 * @param {Uint32Array} initVector the initialization vector (IV) to
21012 * @param {Function} done the function to run when done
21013 * @class Decrypter
21014 */
21015
21016
  var Decrypter = /*#__PURE__*/function () {
    /**
     * Queue the decryption of `encrypted` in STEP-word chunks on an
     * AsyncStream, invoking `done(null, plaintext)` once all chunks (and
     * PKCS#7 unpadding) have completed.
     */
    function Decrypter(encrypted, key, initVector, done) {
      var step = Decrypter.STEP;
      var encrypted32 = new Int32Array(encrypted.buffer);
      var decrypted = new Uint8Array(encrypted.byteLength);
      var i = 0;
      this.asyncStream_ = new AsyncStream(); // split up the encryption job and do the individual chunks asynchronously

      this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));

      for (i = step; i < encrypted32.length; i += step) {
        // CBC: each subsequent chunk's IV is the last ciphertext block of the
        // previous chunk, byte-swapped into little-endian words
        initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
        this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
      } // invoke the done() callback when everything is finished


      this.asyncStream_.push(function () {
        // remove pkcs#7 padding from the decrypted bytes
        done(null, unpad(decrypted));
      });
    }
    /**
     * a getter for step, the number of 32-bit words to process per chunk
     * (the jsdoc elsewhere calls these "bytes"; the value is used as an
     * Int32Array subarray length)
     *
     * @return {number} the value of step 32000
     */


    var _proto = Decrypter.prototype;
    /**
     * Build a job function that decrypts one chunk into the shared output
     * buffer when run by the AsyncStream.
     *
     * @private
     */

    _proto.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
      return function () {
        var bytes = decrypt(encrypted, key, initVector);
        decrypted.set(bytes, encrypted.byteOffset);
      };
    };

    createClass(Decrypter, null, [{
      key: "STEP",
      get: function get() {
        // 4 * 8000;
        return 32000;
      }
    }]);
    return Decrypter;
  }();
21066 /**
21067 * @file bin-utils.js
21068 */
21069
21070 /**
21071 * Creates an object for sending to a web worker modifying properties that are TypedArrays
 * into a new object with separated properties for the buffer, byteOffset, and byteLength.
21073 *
21074 * @param {Object} message
21075 * Object of properties and values to send to the web worker
21076 * @return {Object}
21077 * Modified message with TypedArray values expanded
21078 * @function createTransferableMessage
21079 */
21080
21081
21082 var createTransferableMessage = function createTransferableMessage(message) {
21083 var transferable = {};
21084 Object.keys(message).forEach(function (key) {
21085 var value = message[key];
21086
21087 if (ArrayBuffer.isView(value)) {
21088 transferable[key] = {
21089 bytes: value.buffer,
21090 byteOffset: value.byteOffset,
21091 byteLength: value.byteLength
21092 };
21093 } else {
21094 transferable[key] = value;
21095 }
21096 });
21097 return transferable;
21098 };
21099 /* global self */
21100
21101 /**
21102 * Our web worker interface so that things can talk to aes-decrypter
21103 * that will be running in a web worker. the scope is passed to this by
21104 * webworkify.
21105 */
21106
21107
  // Worker entry point: rebuild typed-array views over the transferred
  // buffers, run the async decryption, and post the plaintext back to the
  // main thread (transferring its buffer rather than copying it).
  self.onmessage = function (event) {
    var data = event.data;
    var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
    var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
    var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
    /* eslint-disable no-new, handle-callback-err */

    new Decrypter(encrypted, key, iv, function (err, bytes) {
      self.postMessage(createTransferableMessage({
        source: data.source,
        decrypted: bytes
      }), [bytes.buffer]);
    });
    /* eslint-enable */
  };
21123}));
// NOTE(review): `factory` and `workerCode` come from the worker-factory
// bundling helpers earlier in the file; this presumably wraps the stringified
// worker above as a Web Worker constructor — confirm against the
// rollup-plugin-worker-factory output. This `Decrypter` is distinct from the
// in-worker Decrypter class of the same name.
var Decrypter = factory(workerCode);
21125/* rollup-plugin-worker-factory end for worker!/Users/bcasey/Projects/videojs-http-streaming/src/decrypter-worker.js */
21126
21127/**
21128 * Convert the properties of an HLS track into an audioTrackKind.
21129 *
21130 * @private
21131 */
21132
/**
 * Convert the properties of an HLS track into an audioTrackKind.
 *
 * Default renditions map to 'main' and non-default ones to 'alternative',
 * except that any track whose characteristics include
 * 'public.accessibility.describes-video' is reported as 'main-desc'.
 *
 * @private
 */
var audioTrackKind_ = function audioTrackKind_(properties) {
  var describesVideo = properties.characteristics &&
    properties.characteristics.indexOf('public.accessibility.describes-video') >= 0;

  if (describesVideo) {
    return 'main-desc';
  }

  return properties.default ? 'main' : 'alternative';
};
21142/**
21143 * Pause provided segment loader and playlist loader if active
21144 *
21145 * @param {SegmentLoader} segmentLoader
21146 * SegmentLoader to pause
21147 * @param {Object} mediaType
21148 * Active media type
21149 * @function stopLoaders
21150 */
21151
21152
/**
 * Pause the provided segment loader and, when one is active, the media
 * type's playlist loader (clearing the active reference).
 *
 * @param {SegmentLoader} segmentLoader
 *        SegmentLoader to pause
 * @param {Object} mediaType
 *        Active media type
 * @function stopLoaders
 */
var stopLoaders = function stopLoaders(segmentLoader, mediaType) {
  segmentLoader.abort();
  segmentLoader.pause();

  var activeLoader = mediaType && mediaType.activePlaylistLoader;

  if (activeLoader) {
    activeLoader.pause();
    mediaType.activePlaylistLoader = null;
  }
};
21162/**
21163 * Start loading provided segment loader and playlist loader
21164 *
21165 * @param {PlaylistLoader} playlistLoader
21166 * PlaylistLoader to start loading
21167 * @param {Object} mediaType
21168 * Active media type
21169 * @function startLoaders
21170 */
21171
var startLoaders = function startLoaders(playlistLoader, mediaType) {
  // Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
  // playlist loader, so only the playlist loader needs an explicit load() here.
  mediaType.activePlaylistLoader = playlistLoader;
  playlistLoader.load();
};
21178/**
21179 * Returns a function to be called when the media group changes. It performs a
21180 * non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
21181 * change of group is merely a rendition switch of the same content at another encoding,
21182 * rather than a change of content, such as switching audio from English to Spanish.
21183 *
21184 * @param {string} type
21185 * MediaGroup type
21186 * @param {Object} settings
21187 * Object containing required information for media groups
21188 * @return {Function}
21189 * Handler for a non-destructive resync of SegmentLoader when the active media
21190 * group changes.
21191 * @function onGroupChanged
21192 */
21193
var onGroupChanged = function onGroupChanged(type, settings) {
  return function () {
    var _settings$segmentLoad = settings.segmentLoaders,
        segmentLoader = _settings$segmentLoad[type],
        mainSegmentLoader = _settings$segmentLoad.main,
        mediaType = settings.mediaTypes[type];
    var activeTrack = mediaType.activeTrack();
    var activeGroup = mediaType.getActiveGroup();
    var previousActiveLoader = mediaType.activePlaylistLoader;
    var lastGroup = mediaType.lastGroup_; // the group did not change do nothing

    if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
      return;
    }

    mediaType.lastGroup_ = activeGroup;
    mediaType.lastTrack_ = activeTrack;
    // always halt the current loaders before deciding how (or whether) to
    // restart them for the new group
    stopLoaders(segmentLoader, mediaType);

    if (!activeGroup || activeGroup.isMasterPlaylist) {
      // there is no group active or active group is a main playlist and won't change
      return;
    }

    if (!activeGroup.playlistLoader) {
      if (previousActiveLoader) {
        // The previous group had a playlist loader but the new active group does not
        // this means we are switching from demuxed to muxed audio. In this case we want to
        // do a destructive reset of the main segment loader and not restart the audio
        // loaders.
        mainSegmentLoader.resetEverything();
      }

      return;
    } // Non-destructive resync: same content at a different encoding, so the
    // buffer is preserved


    segmentLoader.resyncLoader();
    startLoaders(activeGroup.playlistLoader, mediaType);
  };
};
/**
 * Returns a handler invoked while the active media group is changing: it
 * forgets the cached group and halts segment fetching for the type until the
 * new group settles.
 *
 * @param {string} type
 *        MediaGroup type
 * @param {Object} settings
 *        Object containing required information for media groups
 * @return {Function}
 *        Handler that aborts and pauses the type's segment loader
 * @function onGroupChanging
 */
var onGroupChanging = function onGroupChanging(type, settings) {
  return function () {
    var segmentLoader = settings.segmentLoaders[type];
    var mediaType = settings.mediaTypes[type];

    mediaType.lastGroup_ = null;
    segmentLoader.abort();
    segmentLoader.pause();
  };
};
21244/**
21245 * Returns a function to be called when the media track changes. It performs a
21246 * destructive reset of the SegmentLoader to ensure we start loading as close to
21247 * currentTime as possible.
21248 *
21249 * @param {string} type
21250 * MediaGroup type
21251 * @param {Object} settings
21252 * Object containing required information for media groups
21253 * @return {Function}
21254 * Handler for a destructive reset of SegmentLoader when the active media
21255 * track changes.
21256 * @function onTrackChanged
21257 */
21258
var onTrackChanged = function onTrackChanged(type, settings) {
  return function () {
    var masterPlaylistLoader = settings.masterPlaylistLoader,
        _settings$segmentLoad2 = settings.segmentLoaders,
        segmentLoader = _settings$segmentLoad2[type],
        mainSegmentLoader = _settings$segmentLoad2.main,
        mediaType = settings.mediaTypes[type];
    var activeTrack = mediaType.activeTrack();
    var activeGroup = mediaType.getActiveGroup();
    var previousActiveLoader = mediaType.activePlaylistLoader;
    var lastTrack = mediaType.lastTrack_; // track did not change, do nothing

    if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
      return;
    }

    mediaType.lastGroup_ = activeGroup;
    mediaType.lastTrack_ = activeTrack;
    // always halt the current loaders before deciding how to restart them
    stopLoaders(segmentLoader, mediaType);

    if (!activeGroup) {
      // there is no group active so we do not want to restart loaders
      return;
    }

    if (activeGroup.isMasterPlaylist) {
      // track did not change, do nothing
      if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
        return;
      }

      var mpc = settings.vhs.masterPlaylistController_;
      var newPlaylist = mpc.selectPlaylist(); // media will not change do nothing

      if (mpc.media() === newPlaylist) {
        return;
      }

      mediaType.logger_("track change. Switching master audio from " + lastTrack.id + " to " + activeTrack.id);
      masterPlaylistLoader.pause();
      mainSegmentLoader.resetEverything();
      mpc.fastQualityChange_(newPlaylist);
      return;
    }

    if (type === 'AUDIO') {
      if (!activeGroup.playlistLoader) {
        // when switching from demuxed audio/video to muxed audio/video (noted by no
        // playlist loader for the audio group), we want to do a destructive reset of the
        // main segment loader and not restart the audio loaders
        mainSegmentLoader.setAudio(true); // don't have to worry about disabling the audio of the audio segment loader since
        // it should be stopped

        mainSegmentLoader.resetEverything();
        return;
      } // although the segment loader is an audio segment loader, call the setAudio
      // function to ensure it is prepared to re-append the init segment (or handle other
      // config changes)


      segmentLoader.setAudio(true);
      mainSegmentLoader.setAudio(false);
    }

    if (previousActiveLoader === activeGroup.playlistLoader) {
      // Nothing has actually changed. This can happen because track change events can fire
      // multiple times for a "single" change. One for enabling the new active track, and
      // one for disabling the track that was active
      startLoaders(activeGroup.playlistLoader, mediaType);
      return;
    }

    if (segmentLoader.track) {
      // For WebVTT, set the new text track in the segmentloader
      segmentLoader.track(activeTrack);
    } // destructive reset: start loading as close to currentTime as possible


    segmentLoader.resetEverything();
    startLoaders(activeGroup.playlistLoader, mediaType);
  };
};
var onError = {
  /**
   * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
   * an error.
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Error handler. Logs warning (or error if the playlist is blacklisted) to
   *         console and switches back to default audio track.
   * @function onError.AUDIO
   */
  AUDIO: function AUDIO(type, settings) {
    return function () {
      var segmentLoader = settings.segmentLoaders[type],
          mediaType = settings.mediaTypes[type],
          blacklistCurrentPlaylist = settings.blacklistCurrentPlaylist;
      stopLoaders(segmentLoader, mediaType); // switch back to default audio track

      var activeTrack = mediaType.activeTrack();
      var activeGroup = mediaType.activeGroup();
      var id = (activeGroup.filter(function (group) {
        return group.default;
      })[0] || activeGroup[0]).id;
      var defaultTrack = mediaType.tracks[id];

      if (activeTrack === defaultTrack) {
        // Default track encountered an error. All we can do now is blacklist the current
        // rendition and hope another will switch audio groups
        blacklistCurrentPlaylist({
          message: 'Problem encountered loading the default audio track.'
        });
        return;
      }

      // fix: add the missing separator so the concatenated warning does not
      // render as "...track.Switching back to default."
      videojs__default["default"].log.warn('Problem encountered loading the alternate audio track. ' + 'Switching back to default.');

      // re-enable only the default track; disabling all others ensures the
      // subsequent onTrackChanged() sees the default as the active track
      for (var trackId in mediaType.tracks) {
        mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
      }

      mediaType.onTrackChanged();
    };
  },

  /**
   * Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
   * an error.
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Error handler. Logs warning to console and disables the active subtitle track
   * @function onError.SUBTITLES
   */
  SUBTITLES: function SUBTITLES(type, settings) {
    return function () {
      var segmentLoader = settings.segmentLoaders[type],
          mediaType = settings.mediaTypes[type];
      // fix: add the missing separator so the concatenated warning does not
      // render as "...track.Disabling subtitle track."
      videojs__default["default"].log.warn('Problem encountered loading the subtitle track. ' + 'Disabling subtitle track.');
      stopLoaders(segmentLoader, mediaType);
      var track = mediaType.activeTrack();

      if (track) {
        track.mode = 'disabled';
      }

      mediaType.onTrackChanged();
    };
  }
};
var setupListeners = {
  /**
   * Setup event listeners for audio playlist loader
   *
   * @param {string} type
   *        MediaGroup type
   * @param {PlaylistLoader|null} playlistLoader
   *        PlaylistLoader to register listeners on
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function setupListeners.AUDIO
   */
  AUDIO: function AUDIO(type, playlistLoader, settings) {
    if (!playlistLoader) {
      // no playlist loader means audio will be muxed with the video
      return;
    }

    var tech = settings.tech;
    var requestOptions = settings.requestOptions;
    var segmentLoader = settings.segmentLoaders[type];

    playlistLoader.on('loadedmetadata', function () {
      var media = playlistLoader.media();
      segmentLoader.playlist(media, requestOptions);

      // begin downloading segments when the video is already playing, or when
      // this is a VOD stream and the preload setting permits
      if (!tech.paused() || (media.endList && tech.preload() !== 'none')) {
        segmentLoader.load();
      }
    });

    playlistLoader.on('loadedplaylist', function () {
      segmentLoader.playlist(playlistLoader.media(), requestOptions);

      // keep the segment loader running whenever the player is unpaused
      if (!tech.paused()) {
        segmentLoader.load();
      }
    });

    playlistLoader.on('error', onError[type](type, settings));
  },

  /**
   * Setup event listeners for subtitle playlist loader
   *
   * @param {string} type
   *        MediaGroup type
   * @param {PlaylistLoader|null} playlistLoader
   *        PlaylistLoader to register listeners on
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function setupListeners.SUBTITLES
   */
  SUBTITLES: function SUBTITLES(type, playlistLoader, settings) {
    var tech = settings.tech;
    var requestOptions = settings.requestOptions;
    var segmentLoader = settings.segmentLoaders[type];
    var mediaType = settings.mediaTypes[type];

    playlistLoader.on('loadedmetadata', function () {
      var media = playlistLoader.media();
      segmentLoader.playlist(media, requestOptions);
      segmentLoader.track(mediaType.activeTrack());

      // begin downloading segments when the video is already playing, or when
      // this is a VOD stream and the preload setting permits
      if (!tech.paused() || (media.endList && tech.preload() !== 'none')) {
        segmentLoader.load();
      }
    });

    playlistLoader.on('loadedplaylist', function () {
      segmentLoader.playlist(playlistLoader.media(), requestOptions);

      // keep the segment loader running whenever the player is unpaused
      if (!tech.paused()) {
        segmentLoader.load();
      }
    });

    playlistLoader.on('error', onError[type](type, settings));
  }
};
var initialize = {
  /**
   * Setup PlaylistLoaders and AudioTracks for the audio groups
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function initialize.AUDIO
   */
  'AUDIO': function AUDIO(type, settings) {
    var vhs = settings.vhs,
        sourceType = settings.sourceType,
        segmentLoader = settings.segmentLoaders[type],
        requestOptions = settings.requestOptions,
        mediaGroups = settings.master.mediaGroups,
        _settings$mediaTypes$ = settings.mediaTypes[type],
        groups = _settings$mediaTypes$.groups,
        tracks = _settings$mediaTypes$.tracks,
        logger_ = _settings$mediaTypes$.logger_,
        masterPlaylistLoader = settings.masterPlaylistLoader;
    var audioOnlyMaster = isAudioOnly(masterPlaylistLoader.master); // force a default if we have none

    if (!mediaGroups[type] || Object.keys(mediaGroups[type]).length === 0) {
      // synthesize a single "main" group with one default variant so the rest
      // of the setup can treat muxed-audio streams uniformly
      mediaGroups[type] = {
        main: {
          default: {
            default: true
          }
        }
      };

      if (audioOnlyMaster) {
        mediaGroups[type].main.default.playlists = masterPlaylistLoader.master.playlists;
      }
    }

    for (var groupId in mediaGroups[type]) {
      if (!groups[groupId]) {
        groups[groupId] = [];
      }

      for (var variantLabel in mediaGroups[type][groupId]) {
        var properties = mediaGroups[type][groupId][variantLabel];
        var playlistLoader = void 0;

        if (audioOnlyMaster) {
          logger_("AUDIO group '" + groupId + "' label '" + variantLabel + "' is a master playlist");
          properties.isMasterPlaylist = true;
          playlistLoader = null; // if vhs-json was provided as the source, and the media playlist was resolved,
          // use the resolved media playlist object
        } else if (sourceType === 'vhs-json' && properties.playlists) {
          playlistLoader = new PlaylistLoader(properties.playlists[0], vhs, requestOptions);
        } else if (properties.resolvedUri) {
          playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions); // TODO: dash isn't the only type with properties.playlists
          // should we even have properties.playlists in this check.
        } else if (properties.playlists && sourceType === 'dash') {
          playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
        } else {
          // no resolvedUri means the audio is muxed with the video when using this
          // audio track
          playlistLoader = null;
        }

        properties = videojs__default["default"].mergeOptions({
          id: variantLabel,
          playlistLoader: playlistLoader
        }, properties);
        setupListeners[type](type, properties.playlistLoader, settings);
        groups[groupId].push(properties);

        // tracks are shared across groups: one AudioTrack per variant label,
        // created on first sight and left disabled until selection
        if (typeof tracks[variantLabel] === 'undefined') {
          var track = new videojs__default["default"].AudioTrack({
            id: variantLabel,
            kind: audioTrackKind_(properties),
            enabled: false,
            language: properties.language,
            default: properties.default,
            label: variantLabel
          });
          tracks[variantLabel] = track;
        }
      }
    } // setup single error event handler for the segment loader


    segmentLoader.on('error', onError[type](type, settings));
  },

  /**
   * Setup PlaylistLoaders and TextTracks for the subtitle groups
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function initialize.SUBTITLES
   */
  'SUBTITLES': function SUBTITLES(type, settings) {
    var tech = settings.tech,
        vhs = settings.vhs,
        sourceType = settings.sourceType,
        segmentLoader = settings.segmentLoaders[type],
        requestOptions = settings.requestOptions,
        mediaGroups = settings.master.mediaGroups,
        _settings$mediaTypes$2 = settings.mediaTypes[type],
        groups = _settings$mediaTypes$2.groups,
        tracks = _settings$mediaTypes$2.tracks,
        masterPlaylistLoader = settings.masterPlaylistLoader;

    for (var groupId in mediaGroups[type]) {
      if (!groups[groupId]) {
        groups[groupId] = [];
      }

      for (var variantLabel in mediaGroups[type][groupId]) {
        if (mediaGroups[type][groupId][variantLabel].forced) {
          // Subtitle playlists with the forced attribute are not selectable in Safari.
          // According to Apple's HLS Authoring Specification:
          //   If content has forced subtitles and regular subtitles in a given language,
          //   the regular subtitles track in that language MUST contain both the forced
          //   subtitles and the regular subtitles for that language.
          // Because of this requirement and that Safari does not add forced subtitles,
          // forced subtitles are skipped here to maintain consistent experience across
          // all platforms
          continue;
        }

        var properties = mediaGroups[type][groupId][variantLabel];
        var playlistLoader = void 0;

        if (sourceType === 'hls') {
          playlistLoader = new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
        } else if (sourceType === 'dash') {
          var playlists = properties.playlists.filter(function (p) {
            return p.excludeUntil !== Infinity;
          });

          if (!playlists.length) {
            // NOTE(review): this `return` bails out of initializing ALL remaining
            // subtitle variants (and skips the segment-loader error handler below),
            // not just this fully-excluded variant -- confirm that is intended;
            // a `continue` would skip only this variant.
            return;
          }

          // NOTE(review): the filtered `playlists` above is only used for the
          // length check; the loader is still seeded with properties.playlists[0],
          // which may itself be excluded -- confirm this matches upstream intent.
          playlistLoader = new DashPlaylistLoader(properties.playlists[0], vhs, requestOptions, masterPlaylistLoader);
        } else if (sourceType === 'vhs-json') {
          playlistLoader = new PlaylistLoader( // if the vhs-json object included the media playlist, use the media playlist
          // as provided, otherwise use the resolved URI to load the playlist
          properties.playlists ? properties.playlists[0] : properties.resolvedUri, vhs, requestOptions);
        }

        properties = videojs__default["default"].mergeOptions({
          id: variantLabel,
          playlistLoader: playlistLoader
        }, properties);
        setupListeners[type](type, properties.playlistLoader, settings);
        groups[groupId].push(properties);

        // create a remote TextTrack per variant label on first sight; tracks
        // start disabled (never auto-enabled for subtitles)
        if (typeof tracks[variantLabel] === 'undefined') {
          var track = tech.addRemoteTextTrack({
            id: variantLabel,
            kind: 'subtitles',
            default: properties.default && properties.autoselect,
            language: properties.language,
            label: variantLabel
          }, false).track;
          tracks[variantLabel] = track;
        }
      }
    } // setup single error event handler for the segment loader


    segmentLoader.on('error', onError[type](type, settings));
  },

  /**
   * Setup TextTracks for the closed-caption groups
   *
   * @param {String} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @function initialize['CLOSED-CAPTIONS']
   */
  'CLOSED-CAPTIONS': function CLOSEDCAPTIONS(type, settings) {
    var tech = settings.tech,
        mediaGroups = settings.master.mediaGroups,
        _settings$mediaTypes$3 = settings.mediaTypes[type],
        groups = _settings$mediaTypes$3.groups,
        tracks = _settings$mediaTypes$3.tracks;

    for (var groupId in mediaGroups[type]) {
      if (!groups[groupId]) {
        groups[groupId] = [];
      }

      for (var variantLabel in mediaGroups[type][groupId]) {
        var properties = mediaGroups[type][groupId][variantLabel]; // Look for either 608 (CCn) or 708 (SERVICEn) caption services

        if (!/^(?:CC|SERVICE)/.test(properties.instreamId)) {
          continue;
        }

        // allow player-configured overrides (label/language/default) for a
        // given caption service, keyed by instream id
        var captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
        var newProps = {
          label: variantLabel,
          language: properties.language,
          instreamId: properties.instreamId,
          default: properties.default && properties.autoselect
        };

        if (captionServices[newProps.instreamId]) {
          newProps = videojs__default["default"].mergeOptions(newProps, captionServices[newProps.instreamId]);
        }

        if (newProps.default === undefined) {
          // avoid passing `default: undefined` through to addRemoteTextTrack
          delete newProps.default;
        } // No PlaylistLoader is required for Closed-Captions because the captions are
        // embedded within the video stream


        groups[groupId].push(videojs__default["default"].mergeOptions({
          id: variantLabel
        }, properties));

        if (typeof tracks[variantLabel] === 'undefined') {
          // NOTE(review): tracks are keyed by the variant label but the track's
          // id is the instream id (e.g. "CC1") -- confirm consumers expect this.
          var track = tech.addRemoteTextTrack({
            id: newProps.instreamId,
            kind: 'captions',
            default: newProps.default,
            language: newProps.language,
            label: newProps.label
          }, false).track;
          tracks[variantLabel] = track;
        }
      }
    }
  }
};
21729
/**
 * Recursively determines whether `media` matches any entry in `list`,
 * descending into each entry's nested `playlists` array when present.
 *
 * @param {Object[]} list
 *        Group property objects, possibly containing nested playlists
 * @param {Object} media
 *        The playlist to look for
 * @return {boolean}
 *         true when a match is found at any depth
 */
var groupMatch = function groupMatch(list, media) {
  return list.some(function (entry) {
    if (playlistMatch(media, entry)) {
      return true;
    }

    return Boolean(entry.playlists && groupMatch(entry.playlists, media));
  });
};
21743/**
21744 * Returns a function used to get the active group of the provided type
21745 *
21746 * @param {string} type
21747 * MediaGroup type
21748 * @param {Object} settings
21749 * Object containing required information for media groups
21750 * @return {Function}
21751 * Function that returns the active media group for the provided type. Takes an
21752 * optional parameter {TextTrack} track. If no track is provided, a list of all
21753 * variants in the group, otherwise the variant corresponding to the provided
21754 * track is returned.
21755 * @function activeGroup
21756 */
21757
21758
var activeGroup = function activeGroup(type, settings) {
  return function (track) {
    var masterPlaylistLoader = settings.masterPlaylistLoader;
    var groups = settings.mediaTypes[type].groups;
    var media = masterPlaylistLoader.media();

    // no media selected yet means there can be no active group
    if (!media) {
      return null;
    }

    var variants = null;

    // prefer the group named by the media's own attribute (e.g. AUDIO="grp")
    if (media.attributes[type]) {
      variants = groups[media.attributes[type]];
    }

    var groupKeys = Object.keys(groups);

    if (!variants) {
      if (type === 'AUDIO' && groupKeys.length > 1 && isAudioOnly(settings.master)) {
        // audio-only source with several groups: locate the group that
        // actually contains the masterPlaylistLoader's current media
        for (var idx = 0; idx < groupKeys.length; idx++) {
          var candidates = groups[groupKeys[idx]];

          if (groupMatch(candidates, media)) {
            variants = candidates;
            break;
          }
        }
      } else if (groups.main) {
        // use the main group if it exists
        variants = groups.main;
      } else if (groupKeys.length === 1) {
        // only one group, use that one
        variants = groups[groupKeys[0]];
      }
    }

    // no track argument at all: return the whole variant list
    if (typeof track === 'undefined') {
      return variants;
    }

    // track === null means no track is currently active, so there is no
    // corresponding group; also bail when no variant list was found
    if (track === null || !variants) {
      return null;
    }

    // find the variant matching the provided track
    for (var v = 0; v < variants.length; v++) {
      if (variants[v].id === track.id) {
        return variants[v];
      }
    }

    return null;
  };
};
var activeTrack = {
  /**
   * Returns a function used to get the active track of type provided
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Function that returns the active media track for the provided type. Returns
   *         null if no track is active
   * @function activeTrack.AUDIO
   */
  AUDIO: function AUDIO(type, settings) {
    return function () {
      var tracks = settings.mediaTypes[type].tracks;
      var trackIds = Object.keys(tracks);

      // the active audio track is the first (and only) enabled one
      for (var i = 0; i < trackIds.length; i++) {
        var candidate = tracks[trackIds[i]];

        if (candidate.enabled) {
          return candidate;
        }
      }

      return null;
    };
  },

  /**
   * Returns a function used to get the active track of type provided
   *
   * @param {string} type
   *        MediaGroup type
   * @param {Object} settings
   *        Object containing required information for media groups
   * @return {Function}
   *         Function that returns the active media track for the provided type. Returns
   *         null if no track is active
   * @function activeTrack.SUBTITLES
   */
  SUBTITLES: function SUBTITLES(type, settings) {
    return function () {
      var tracks = settings.mediaTypes[type].tracks;
      var trackIds = Object.keys(tracks);

      // a subtitle track counts as active when it is showing or hidden
      // (hidden tracks still receive cues)
      for (var i = 0; i < trackIds.length; i++) {
        var candidate = tracks[trackIds[i]];

        if (candidate.mode === 'showing' || candidate.mode === 'hidden') {
          return candidate;
        }
      }

      return null;
    };
  }
};
/**
 * Returns a function that resolves the active group for the given media type
 * by first looking up the active track and then mapping it to its group.
 * Yields null when no track of that type is currently active.
 *
 * @param {string} type
 *        MediaGroup type
 * @param {Object} _ref
 *        Object with a `mediaTypes` store of per-type getters
 * @return {Function}
 *         Zero-argument getter for the active group (or null)
 */
var getActiveGroup = function getActiveGroup(type, _ref) {
  var mediaTypes = _ref.mediaTypes;
  return function () {
    var currentTrack = mediaTypes[type].activeTrack();

    return currentTrack ? mediaTypes[type].activeGroup(currentTrack) : null;
  };
};
21878/**
21879 * Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
21880 * Closed-Captions) specified in the master manifest.
21881 *
21882 * @param {Object} settings
21883 * Object containing required information for setting up the media groups
21884 * @param {Tech} settings.tech
21885 * The tech of the player
21886 * @param {Object} settings.requestOptions
21887 * XHR request options used by the segment loaders
21888 * @param {PlaylistLoader} settings.masterPlaylistLoader
21889 * PlaylistLoader for the master source
21890 * @param {VhsHandler} settings.vhs
21891 * VHS SourceHandler
21892 * @param {Object} settings.master
21893 * The parsed master manifest
21894 * @param {Object} settings.mediaTypes
21895 * Object to store the loaders, tracks, and utility methods for each media type
21896 * @param {Function} settings.blacklistCurrentPlaylist
21897 * Blacklists the current rendition and forces a rendition switch.
21898 * @function setupMediaGroups
21899 */
21900
var setupMediaGroups = function setupMediaGroups(settings) {
  // create loaders/tracks for each media type first; the getters installed
  // below depend on the groups/tracks populated here
  ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach(function (type) {
    initialize[type](type, settings);
  });
  var mediaTypes = settings.mediaTypes,
      masterPlaylistLoader = settings.masterPlaylistLoader,
      tech = settings.tech,
      vhs = settings.vhs,
      _settings$segmentLoad3 = settings.segmentLoaders,
      audioSegmentLoader = _settings$segmentLoad3['AUDIO'],
      mainSegmentLoader = _settings$segmentLoad3.main; // setup active group and track getters and change event handlers

  ['AUDIO', 'SUBTITLES'].forEach(function (type) {
    mediaTypes[type].activeGroup = activeGroup(type, settings);
    mediaTypes[type].activeTrack = activeTrack[type](type, settings);
    mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
    mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
    mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
    mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
  }); // DO NOT enable the default subtitle or caption track.
  // DO enable the default audio track

  var audioGroup = mediaTypes.AUDIO.activeGroup();

  if (audioGroup) {
    // NOTE: despite the name, `groupId` holds the id of the default (or first)
    // VARIANT in the group, which is also the key of its AudioTrack
    var groupId = (audioGroup.filter(function (group) {
      return group.default;
    })[0] || audioGroup[0]).id;
    mediaTypes.AUDIO.tracks[groupId].enabled = true;
    mediaTypes.AUDIO.onGroupChanged();
    mediaTypes.AUDIO.onTrackChanged();
    var activeAudioGroup = mediaTypes.AUDIO.getActiveGroup(); // a similar check for handling setAudio on each loader is run again each time the
    // track is changed, but needs to be handled here since the track may not be considered
    // changed on the first call to onTrackChanged

    if (!activeAudioGroup.playlistLoader) {
      // either audio is muxed with video or the stream is audio only
      mainSegmentLoader.setAudio(true);
    } else {
      // audio is demuxed
      mainSegmentLoader.setAudio(false);
      audioSegmentLoader.setAudio(true);
    }
  }

  // re-resolve the active groups whenever the master playlist's selected
  // media changes (the group attribute may differ between renditions)
  masterPlaylistLoader.on('mediachange', function () {
    ['AUDIO', 'SUBTITLES'].forEach(function (type) {
      return mediaTypes[type].onGroupChanged();
    });
  });
  masterPlaylistLoader.on('mediachanging', function () {
    ['AUDIO', 'SUBTITLES'].forEach(function (type) {
      return mediaTypes[type].onGroupChanging();
    });
  }); // custom audio track change event handler for usage event

  var onAudioTrackChanged = function onAudioTrackChanged() {
    mediaTypes.AUDIO.onTrackChanged();
    tech.trigger({
      type: 'usage',
      name: 'vhs-audio-change'
    });
    tech.trigger({
      type: 'usage',
      name: 'hls-audio-change'
    });
  };

  tech.audioTracks().addEventListener('change', onAudioTrackChanged);
  tech.remoteTextTracks().addEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
  vhs.on('dispose', function () {
    tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
    tech.remoteTextTracks().removeEventListener('change', mediaTypes.SUBTITLES.onTrackChanged);
  }); // clear existing audio tracks and add the ones we just created

  tech.clearTracks('audio');

  for (var id in mediaTypes.AUDIO.tracks) {
    tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
  }
};
21982/**
21983 * Creates skeleton object used to store the loaders, tracks, and utility methods for each
21984 * media type
21985 *
21986 * @return {Object}
21987 * Object to store the loaders, tracks, and utility methods for each media type
21988 * @function createMediaTypes
21989 */
21990
/**
 * Creates skeleton object used to store the loaders, tracks, and utility methods for
 * each media type (AUDIO, SUBTITLES, CLOSED-CAPTIONS).
 *
 * @return {Object}
 *         Object to store the loaders, tracks, and utility methods for each media type
 * @function createMediaTypes
 */
var createMediaTypes = function createMediaTypes() {
  return ['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].reduce(function (mediaTypes, type) {
    // every getter/handler starts as a no-op; setupMediaGroups installs the
    // real implementations later
    mediaTypes[type] = {
      groups: {},
      tracks: {},
      activePlaylistLoader: null,
      activeGroup: noop,
      activeTrack: noop,
      getActiveGroup: noop,
      onGroupChanged: noop,
      onTrackChanged: noop,
      lastTrack_: null,
      logger_: logger("MediaGroups[" + type + "]")
    };
    return mediaTypes;
  }, {});
};
22009
// two minutes; presumably the window within which an abort after a rendition
// switch counts as "early" for blacklisting purposes -- confirm at use site
var ABORT_EARLY_BLACKLIST_SECONDS = 60 * 2;
// module-level handle to the external Vhs implementation; assigned from
// options.externVhs in the MasterPlaylistController constructor
var Vhs$1; // SegmentLoader stats that need to have each loader's
// values summed to calculate the final value

var loaderStats = ['mediaRequests', 'mediaRequestsAborted', 'mediaRequestsTimedout', 'mediaRequestsErrored', 'mediaTransferDuration', 'mediaBytesTransferred', 'mediaAppends'];
22015
/**
 * Totals a named SegmentLoader stat across the audio and main segment loaders.
 * Must be invoked with a MasterPlaylistController (or any object exposing
 * `audioSegmentLoader_` and `mainSegmentLoader_`) as `this`.
 *
 * @param {string} stat
 *        Name of the stat property to sum
 * @return {number}
 *         Combined value from both loaders
 */
var sumLoaderStat = function sumLoaderStat(stat) {
  var audioValue = this.audioSegmentLoader_[stat];
  var mainValue = this.mainSegmentLoader_[stat];

  return audioValue + mainValue;
};
22019
/**
 * Decides whether the player should switch from the current playlist to the
 * proposed next playlist, logging the reason for every decision.
 *
 * @param {Object} _ref
 *        currentPlaylist, nextPlaylist, forwardBuffer, bufferLowWaterLine,
 *        bufferHighWaterLine, duration, experimentalBufferBasedABR, log
 * @return {boolean}
 *         true when switching to nextPlaylist is allowed
 */
var shouldSwitchToMedia = function shouldSwitchToMedia(_ref) {
  var currentPlaylist = _ref.currentPlaylist;
  var nextPlaylist = _ref.nextPlaylist;
  var forwardBuffer = _ref.forwardBuffer;
  var bufferLowWaterLine = _ref.bufferLowWaterLine;
  var bufferHighWaterLine = _ref.bufferHighWaterLine;
  var duration = _ref.duration;
  var experimentalBufferBasedABR = _ref.experimentalBufferBasedABR;
  var log = _ref.log;

  // we have no other playlist to switch to
  if (!nextPlaylist) {
    videojs__default["default"].log.warn('We received no playlist to switch to. Please check your stream.');
    return false;
  }

  var currentId = currentPlaylist && currentPlaylist.id || 'null';
  var sharedLogLine = "allowing switch " + currentId + " -> " + nextPlaylist.id;

  if (!currentPlaylist) {
    log(sharedLogLine + " as current playlist is not set");
    return true;
  }

  // no need to switch if playlist is the same
  if (nextPlaylist.id === currentPlaylist.id) {
    return false;
  }

  // If the playlist is live, then we want to not take low water line into account.
  // This is because in LIVE, the player plays 3 segments from the end of the
  // playlist, and if `BUFFER_LOW_WATER_LINE` is greater than the duration availble
  // in those segments, a viewer will never experience a rendition upswitch.
  if (!currentPlaylist.endList) {
    log(sharedLogLine + " as current playlist is live");
    return true;
  }

  var maxBufferLowWaterLine = experimentalBufferBasedABR ? Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE : Config.MAX_BUFFER_LOW_WATER_LINE;

  // For the same reason as LIVE, we ignore the low water line when the VOD
  // duration is below the max potential low water line
  if (duration < maxBufferLowWaterLine) {
    log(sharedLogLine + " as duration < max low water line (" + duration + " < " + maxBufferLowWaterLine + ")");
    return true;
  }

  var nextBandwidth = nextPlaylist.attributes.BANDWIDTH;
  var currBandwidth = currentPlaylist.attributes.BANDWIDTH;

  // when switching down, if our buffer is lower than the high water line,
  // we can switch down
  if (nextBandwidth < currBandwidth && (!experimentalBufferBasedABR || forwardBuffer < bufferHighWaterLine)) {
    var downLogLine = sharedLogLine + " as next bandwidth < current bandwidth (" + nextBandwidth + " < " + currBandwidth + ")";

    if (experimentalBufferBasedABR) {
      downLogLine += " and forwardBuffer < bufferHighWaterLine (" + forwardBuffer + " < " + bufferHighWaterLine + ")";
    }

    log(downLogLine);
    return true;
  }

  // and if our buffer is higher than the low water line,
  // we can switch up
  if ((!experimentalBufferBasedABR || nextBandwidth > currBandwidth) && forwardBuffer >= bufferLowWaterLine) {
    var upLogLine = sharedLogLine + " as forwardBuffer >= bufferLowWaterLine (" + forwardBuffer + " >= " + bufferLowWaterLine + ")";

    if (experimentalBufferBasedABR) {
      upLogLine += " and next bandwidth > current bandwidth (" + nextBandwidth + " > " + currBandwidth + ")";
    }

    log(upLogLine);
    return true;
  }

  log("not " + sharedLogLine + " as no switching criteria met");
  return false;
};
22096/**
22097 * the master playlist controller controller all interactons
22098 * between playlists and segmentloaders. At this time this mainly
22099 * involves a master playlist and a series of audio playlists
22100 * if they are available
22101 *
22102 * @class MasterPlaylistController
22103 * @extends videojs.EventTarget
22104 */
22105
22106
22107var MasterPlaylistController = /*#__PURE__*/function (_videojs$EventTarget) {
22108 _inheritsLoose__default["default"](MasterPlaylistController, _videojs$EventTarget);
22109
/**
 * Construct the MasterPlaylistController: wires together the playlist
 * loader(s), the three segment loaders (main/audio/vtt), the SourceUpdater,
 * sync and timeline controllers, and (optionally) the buffer-based ABR timer
 * for a single source.
 *
 * @param {Object} options - `src` and `tech` are required; the remaining
 *        destructured properties tune behavior. NOTE(review): `options.externVhs`
 *        is stored into the module-level `Vhs$1` used by other methods.
 */
function MasterPlaylistController(options) {
  var _this;

  _this = _videojs$EventTarget.call(this) || this;
  var src = options.src,
      handleManifestRedirects = options.handleManifestRedirects,
      withCredentials = options.withCredentials,
      tech = options.tech,
      bandwidth = options.bandwidth,
      externVhs = options.externVhs,
      useCueTags = options.useCueTags,
      blacklistDuration = options.blacklistDuration,
      enableLowInitialPlaylist = options.enableLowInitialPlaylist,
      sourceType = options.sourceType,
      cacheEncryptionKeys = options.cacheEncryptionKeys,
      experimentalBufferBasedABR = options.experimentalBufferBasedABR,
      experimentalLeastPixelDiffSelector = options.experimentalLeastPixelDiffSelector,
      captionServices = options.captionServices;

  if (!src) {
    throw new Error('A non-empty playlist URL or JSON manifest string is required');
  }

  var maxPlaylistRetries = options.maxPlaylistRetries;

  // Default to unlimited playlist retries when not configured.
  if (maxPlaylistRetries === null || typeof maxPlaylistRetries === 'undefined') {
    maxPlaylistRetries = Infinity;
  }

  // Module-level reference used elsewhere (e.g. triggerPresenceUsage_).
  Vhs$1 = externVhs;
  _this.experimentalBufferBasedABR = Boolean(experimentalBufferBasedABR);
  _this.experimentalLeastPixelDiffSelector = Boolean(experimentalLeastPixelDiffSelector);
  _this.withCredentials = withCredentials;
  _this.tech_ = tech;
  _this.vhs_ = tech.vhs;
  _this.sourceType_ = sourceType;
  _this.useCueTags_ = useCueTags;
  _this.blacklistDuration = blacklistDuration;
  _this.maxPlaylistRetries = maxPlaylistRetries;
  _this.enableLowInitialPlaylist = enableLowInitialPlaylist;

  // Ad cues parsed from the playlist are surfaced on a dedicated metadata track.
  if (_this.useCueTags_) {
    _this.cueTagsTrack_ = _this.tech_.addTextTrack('metadata', 'ad-cues');
    _this.cueTagsTrack_.inBandMetadataTrackDispatchType = '';
  }

  // Options forwarded with every playlist/segment request.
  _this.requestOptions_ = {
    withCredentials: withCredentials,
    handleManifestRedirects: handleManifestRedirects,
    maxPlaylistRetries: maxPlaylistRetries,
    timeout: null
  };

  _this.on('error', _this.pauseLoading);

  _this.mediaTypes_ = createMediaTypes();
  _this.mediaSource = new window__default["default"].MediaSource();

  // Pre-bind MediaSource handlers so the same references can be removed later.
  _this.handleDurationChange_ = _this.handleDurationChange_.bind(_assertThisInitialized__default["default"](_this));
  _this.handleSourceOpen_ = _this.handleSourceOpen_.bind(_assertThisInitialized__default["default"](_this));
  _this.handleSourceEnded_ = _this.handleSourceEnded_.bind(_assertThisInitialized__default["default"](_this));

  _this.mediaSource.addEventListener('durationchange', _this.handleDurationChange_);

  // load the media source into the player
  _this.mediaSource.addEventListener('sourceopen', _this.handleSourceOpen_);

  _this.mediaSource.addEventListener('sourceended', _this.handleSourceEnded_);

  // we don't have to handle sourceclose since dispose will handle termination of
  // everything, and the MediaSource should not be detached without a proper disposal
  _this.seekable_ = videojs__default["default"].createTimeRanges();
  _this.hasPlayed_ = false;
  _this.syncController_ = new SyncController(options);
  _this.segmentMetadataTrack_ = tech.addRemoteTextTrack({
    kind: 'metadata',
    label: 'segment-metadata'
  }, false).track;
  _this.decrypter_ = new Decrypter();
  _this.sourceUpdater_ = new SourceUpdater(_this.mediaSource);
  _this.inbandTextTracks_ = {};
  _this.timelineChangeController_ = new TimelineChangeController();

  // Settings shared by the three segment loaders created below.
  var segmentLoaderSettings = {
    vhs: _this.vhs_,
    parse708captions: options.parse708captions,
    captionServices: captionServices,
    mediaSource: _this.mediaSource,
    currentTime: _this.tech_.currentTime.bind(_this.tech_),
    seekable: function seekable() {
      return _this.seekable();
    },
    seeking: function seeking() {
      return _this.tech_.seeking();
    },
    duration: function duration() {
      return _this.duration();
    },
    hasPlayed: function hasPlayed() {
      return _this.hasPlayed_;
    },
    goalBufferLength: function goalBufferLength() {
      return _this.goalBufferLength();
    },
    bandwidth: bandwidth,
    syncController: _this.syncController_,
    decrypter: _this.decrypter_,
    sourceType: _this.sourceType_,
    inbandTextTracks: _this.inbandTextTracks_,
    cacheEncryptionKeys: cacheEncryptionKeys,
    sourceUpdater: _this.sourceUpdater_,
    timelineChangeController: _this.timelineChangeController_,
    experimentalExactManifestTimings: options.experimentalExactManifestTimings
  };

  // The source type check not only determines whether a special DASH playlist loader
  // should be used, but also covers the case where the provided src is a vhs-json
  // manifest object (instead of a URL). In the case of vhs-json, the default
  // PlaylistLoader should be used.
  _this.masterPlaylistLoader_ = _this.sourceType_ === 'dash' ? new DashPlaylistLoader(src, _this.vhs_, _this.requestOptions_) : new PlaylistLoader(src, _this.vhs_, _this.requestOptions_);

  _this.setupMasterPlaylistLoaderListeners_();

  // setup segment loaders
  // combined audio/video or just video when alternate audio track is selected
  _this.mainSegmentLoader_ = new SegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
    segmentMetadataTrack: _this.segmentMetadataTrack_,
    loaderType: 'main'
  }), options);

  // alternate audio track
  _this.audioSegmentLoader_ = new SegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
    loaderType: 'audio'
  }), options);
  _this.subtitleSegmentLoader_ = new VTTSegmentLoader(videojs__default["default"].mergeOptions(segmentLoaderSettings, {
    loaderType: 'vtt',
    featuresNativeTextTracks: _this.tech_.featuresNativeTextTracks
  }), options);

  _this.setupSegmentLoaderListeners_();

  // With buffer-based ABR, rendition checks run on a timer that follows the
  // tech's play/pause state (see startABRTimer_/stopABRTimer_).
  if (_this.experimentalBufferBasedABR) {
    _this.masterPlaylistLoader_.one('loadedplaylist', function () {
      return _this.startABRTimer_();
    });

    _this.tech_.on('pause', function () {
      return _this.stopABRTimer_();
    });

    _this.tech_.on('play', function () {
      return _this.startABRTimer_();
    });
  }

  // Create SegmentLoader stat-getters
  // mediaRequests_
  // mediaRequestsAborted_
  // mediaRequestsTimedout_
  // mediaRequestsErrored_
  // mediaTransferDuration_
  // mediaBytesTransferred_
  // mediaAppends_
  loaderStats.forEach(function (stat) {
    _this[stat + '_'] = sumLoaderStat.bind(_assertThisInitialized__default["default"](_this), stat);
  });
  _this.logger_ = logger('MPC');
  _this.triggeredFmp4Usage = false;

  // With preload="none", defer the first playlist request until playback starts.
  if (_this.tech_.preload() === 'none') {
    _this.loadOnPlay_ = function () {
      _this.loadOnPlay_ = null;

      _this.masterPlaylistLoader_.load();
    };

    _this.tech_.one('play', _this.loadOnPlay_);
  } else {
    _this.masterPlaylistLoader_.load();
  }

  // Startup-performance stats; -1 until `loadeddata` has fired.
  _this.timeToLoadedData__ = -1;
  _this.mainAppendsToLoadedData__ = -1;
  _this.audioAppendsToLoadedData__ = -1;
  var event = _this.tech_.preload() === 'none' ? 'play' : 'loadstart';

  // start the first frame timer on loadstart or play (for preload none)
  _this.tech_.one(event, function () {
    var timeToLoadedDataStart = Date.now();

    _this.tech_.one('loadeddata', function () {
      _this.timeToLoadedData__ = Date.now() - timeToLoadedDataStart;
      _this.mainAppendsToLoadedData__ = _this.mainSegmentLoader_.mediaAppends;
      _this.audioAppendsToLoadedData__ = _this.audioSegmentLoader_.mediaAppends;
    });
  });

  return _this;
}
22304
var _proto = MasterPlaylistController.prototype;

// Main-loader appends performed before `loadeddata` fired (-1 until known).
_proto.mainAppendsToLoadedData_ = function mainAppendsToLoadedData_() {
  return this.mainAppendsToLoadedData__;
};

// Audio-loader appends performed before `loadeddata` fired (-1 until known).
_proto.audioAppendsToLoadedData_ = function audioAppendsToLoadedData_() {
  return this.audioAppendsToLoadedData__;
};

// Combined main+audio appends before `loadeddata`, or -1 while either count
// is still unknown.
_proto.appendsToLoadedData_ = function appendsToLoadedData_() {
  var main = this.mainAppendsToLoadedData_();
  var audio = this.audioAppendsToLoadedData_();

  return main === -1 || audio === -1 ? -1 : main + audio;
};

// Milliseconds from first load/play until `loadeddata` (-1 until measured).
_proto.timeToLoadedData_ = function timeToLoadedData_() {
  return this.timeToLoadedData__;
}
/**
 * Run selectPlaylist and switch to the new playlist if we should
 *
 * @private
 *
 */
;
22336
_proto.checkABR_ = function checkABR_() {
  // Ask the selector for the best rendition and only act when a switch is
  // actually warranted.
  var candidate = this.selectPlaylist();

  if (!candidate || !this.shouldSwitchToMedia_(candidate)) {
    return;
  }

  this.switchMedia_(candidate, 'abr');
};
22344
/**
 * Switch the active media playlist, logging and firing a usage event
 * whenever the rendition actually changes.
 *
 * @param {Object} playlist - the playlist to switch to
 * @param {string} cause - why the switch happened (used in the log line and usage name)
 * @param {number} [delay] - optional delay forwarded to the playlist loader
 * @private
 */
_proto.switchMedia_ = function switchMedia_(playlist, cause, delay) {
  var previous = this.media();
  var previousId = previous && (previous.id || previous.uri);
  var nextId = playlist.id || playlist.uri;

  // Only log/report when there was an old rendition and it differs.
  if (previousId && previousId !== nextId) {
    this.logger_("switch media " + previousId + " -> " + nextId + " from " + cause);
    this.tech_.trigger({
      type: 'usage',
      name: "vhs-rendition-change-" + cause
    });
  }

  this.masterPlaylistLoader_.media(playlist, delay);
}
/**
 * Start a timer that periodically calls checkABR_
 *
 * @private
 */
;
22366
_proto.startABRTimer_ = function startABRTimer_() {
  var self = this;

  // Clear any existing interval first so only one timer ever runs.
  this.stopABRTimer_();
  this.abrTimer_ = window__default["default"].setInterval(function () {
    self.checkABR_();
  }, 250);
}
/**
 * Stop the timer that periodically calls checkABR_
 *
 * @private
 */
;
22381
_proto.stopABRTimer_ = function stopABRTimer_() {
  // if we're scrubbing, we don't need to pause.
  // This getter will be added to Video.js in version 7.11.
  var isScrubbing = this.tech_.scrubbing && this.tech_.scrubbing();

  if (isScrubbing) {
    return;
  }

  window__default["default"].clearInterval(this.abrTimer_);
  this.abrTimer_ = null;
}
/**
 * Get a list of playlists for the currently selected audio playlist
 *
 * @return {Array} the array of audio playlists
 */
;
22398
_proto.getAudioTrackPlaylists_ = function getAudioTrackPlaylists_() {
  var master = this.master();
  var defaultPlaylists = master && master.playlists || [];

  // Without audio media groups the audio tracks can only live in the
  // master's playlists array; fall back to that (or an empty array).
  if (!master || !master.mediaGroups || !master.mediaGroups.AUDIO) {
    return defaultPlaylists;
  }

  var AUDIO = master.mediaGroups.AUDIO;
  var groupKeys = Object.keys(AUDIO);
  var track;

  // Determine the relevant track: the active one when media types are set
  // up, otherwise the default track of the default group from the master.
  if (Object.keys(this.mediaTypes_.AUDIO.groups).length) {
    track = this.mediaTypes_.AUDIO.activeTrack();
  } else {
    // default group is `main` or just the first group.
    var defaultGroup = AUDIO.main || groupKeys.length && AUDIO[groupKeys[0]];

    for (var label in defaultGroup) {
      if (defaultGroup[label].default) {
        track = {
          label: label
        };
        break;
      }
    }
  }

  // no active track no playlists.
  if (!track) {
    return defaultPlaylists;
  }

  var playlists = [];

  // Collect every playlist that could serve the selected track.
  for (var group in AUDIO) {
    if (!AUDIO[group][track.label]) {
      continue;
    }

    var properties = AUDIO[group][track.label];

    if (properties.playlists && properties.playlists.length) {
      playlists.push.apply(playlists, properties.playlists);
    } else if (properties.uri) {
      playlists.push(properties);
    } else if (master.playlists.length) {
      // if an audio group does not have a uri
      // see if we have main playlists that use it as a group.
      // if we do then add those to the playlists list.
      for (var i = 0; i < master.playlists.length; i++) {
        var playlist = master.playlists[i];

        if (playlist.attributes && playlist.attributes.AUDIO && playlist.attributes.AUDIO === group) {
          playlists.push(playlist);
        }
      }
    }
  }

  return playlists.length ? playlists : defaultPlaylists;
}
/**
 * Register event handlers on the master playlist loader. A helper
 * function for construction time.
 *
 * @private
 */
;
22473
_proto.setupMasterPlaylistLoaderListeners_ = function setupMasterPlaylistLoaderListeners_() {
  var _this3 = this;

  // Fired once the first media playlist has been parsed: configure request
  // timeouts, start segment loading for VOD when preload permits, wire up
  // alternate media groups and announce the initially selected media.
  this.masterPlaylistLoader_.on('loadedmetadata', function () {
    var media = _this3.masterPlaylistLoader_.media();

    // 1.5x the target duration, in milliseconds, used as the request timeout.
    var requestTimeout = media.targetDuration * 1.5 * 1000;

    // If we don't have any more available playlists, we don't want to
    // timeout the request.
    if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
      _this3.requestOptions_.timeout = 0;
    } else {
      _this3.requestOptions_.timeout = requestTimeout;
    }

    // if this isn't a live video and preload permits, start
    // downloading segments
    if (media.endList && _this3.tech_.preload() !== 'none') {
      _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);

      _this3.mainSegmentLoader_.load();
    }

    setupMediaGroups({
      sourceType: _this3.sourceType_,
      segmentLoaders: {
        AUDIO: _this3.audioSegmentLoader_,
        SUBTITLES: _this3.subtitleSegmentLoader_,
        main: _this3.mainSegmentLoader_
      },
      tech: _this3.tech_,
      requestOptions: _this3.requestOptions_,
      masterPlaylistLoader: _this3.masterPlaylistLoader_,
      vhs: _this3.vhs_,
      master: _this3.master(),
      mediaTypes: _this3.mediaTypes_,
      blacklistCurrentPlaylist: _this3.blacklistCurrentPlaylist.bind(_this3)
    });

    _this3.triggerPresenceUsage_(_this3.master(), media);

    _this3.setupFirstPlay();

    if (!_this3.mediaTypes_.AUDIO.activePlaylistLoader || _this3.mediaTypes_.AUDIO.activePlaylistLoader.media()) {
      _this3.trigger('selectedinitialmedia');
    } else {
      // We must wait for the active audio playlist loader to
      // finish setting up before triggering this event so the
      // representations API and EME setup is correct
      _this3.mediaTypes_.AUDIO.activePlaylistLoader.one('loadedmetadata', function () {
        _this3.trigger('selectedinitialmedia');
      });
    }
  });

  // Fired on every playlist load/refresh: on the very first load (no media
  // selected yet) choose the initial rendition; then forward the updated
  // media playlist to handleUpdatedMediaPlaylist.
  this.masterPlaylistLoader_.on('loadedplaylist', function () {
    if (_this3.loadOnPlay_) {
      _this3.tech_.off('play', _this3.loadOnPlay_);
    }

    var updatedPlaylist = _this3.masterPlaylistLoader_.media();

    if (!updatedPlaylist) {
      // exclude any variants that are not supported by the browser before selecting
      // an initial media as the playlist selectors do not consider browser support
      _this3.excludeUnsupportedVariants_();

      var selectedMedia;

      if (_this3.enableLowInitialPlaylist) {
        selectedMedia = _this3.selectInitialPlaylist();
      }

      if (!selectedMedia) {
        selectedMedia = _this3.selectPlaylist();
      }

      if (!selectedMedia || !_this3.shouldSwitchToMedia_(selectedMedia)) {
        return;
      }

      _this3.initialMedia_ = selectedMedia;

      _this3.switchMedia_(_this3.initialMedia_, 'initial');

      // Under the standard case where a source URL is provided, loadedplaylist will
      // fire again since the playlist will be requested. In the case of vhs-json
      // (where the manifest object is provided as the source), when the media
      // playlist's `segments` list is already available, a media playlist won't be
      // requested, and loadedplaylist won't fire again, so the playlist handler must be
      // called on its own here.
      var haveJsonSource = _this3.sourceType_ === 'vhs-json' && _this3.initialMedia_.segments;

      if (!haveJsonSource) {
        return;
      }

      updatedPlaylist = _this3.initialMedia_;
    }

    _this3.handleUpdatedMediaPlaylist(updatedPlaylist);
  });

  // Playlist request failures: blacklist the current playlist so another
  // rendition can be tried.
  this.masterPlaylistLoader_.on('error', function () {
    _this3.blacklistCurrentPlaylist(_this3.masterPlaylistLoader_.error);
  });

  // A media switch is starting: stop in-flight segment work for the old media.
  this.masterPlaylistLoader_.on('mediachanging', function () {
    _this3.mainSegmentLoader_.abort();

    _this3.mainSegmentLoader_.pause();
  });

  // A media switch completed: reconfigure timeouts, restart the main segment
  // loader on the new playlist and re-emit `mediachange` on the tech.
  this.masterPlaylistLoader_.on('mediachange', function () {
    var media = _this3.masterPlaylistLoader_.media();

    var requestTimeout = media.targetDuration * 1.5 * 1000;

    // If we don't have any more available playlists, we don't want to
    // timeout the request.
    if (isLowestEnabledRendition(_this3.masterPlaylistLoader_.master, _this3.masterPlaylistLoader_.media())) {
      _this3.requestOptions_.timeout = 0;
    } else {
      _this3.requestOptions_.timeout = requestTimeout;
    }

    // TODO: Create a new event on the PlaylistLoader that signals
    // that the segments have changed in some way and use that to
    // update the SegmentLoader instead of doing it twice here and
    // on `loadedplaylist`
    _this3.mainSegmentLoader_.playlist(media, _this3.requestOptions_);

    _this3.mainSegmentLoader_.load();

    _this3.tech_.trigger({
      type: 'mediachange',
      bubbles: true
    });
  });

  // A live playlist refresh came back unchanged: if playback is stuck at the
  // end of that playlist, blacklist it and surface `playliststuck`.
  this.masterPlaylistLoader_.on('playlistunchanged', function () {
    var updatedPlaylist = _this3.masterPlaylistLoader_.media();

    // ignore unchanged playlists that have already been
    // excluded for not-changing. We likely just have a really slowly updating
    // playlist.
    if (updatedPlaylist.lastExcludeReason_ === 'playlist-unchanged') {
      return;
    }

    var playlistOutdated = _this3.stuckAtPlaylistEnd_(updatedPlaylist);

    if (playlistOutdated) {
      // Playlist has stopped updating and we're stuck at its end. Try to
      // blacklist it and switch to another playlist in the hope that that
      // one is updating (and give the player a chance to re-adjust to the
      // safe live point).
      _this3.blacklistCurrentPlaylist({
        message: 'Playlist no longer updating.',
        reason: 'playlist-unchanged'
      });

      // useful for monitoring QoS
      _this3.tech_.trigger('playliststuck');
    }
  });

  // Rendition enable/disable: emit the current (vhs-*) and legacy (hls-*)
  // usage event pairs.
  this.masterPlaylistLoader_.on('renditiondisabled', function () {
    _this3.tech_.trigger({
      type: 'usage',
      name: 'vhs-rendition-disabled'
    });

    _this3.tech_.trigger({
      type: 'usage',
      name: 'hls-rendition-disabled'
    });
  });
  this.masterPlaylistLoader_.on('renditionenabled', function () {
    _this3.tech_.trigger({
      type: 'usage',
      name: 'vhs-rendition-enabled'
    });

    _this3.tech_.trigger({
      type: 'usage',
      name: 'hls-rendition-enabled'
    });
  });
}
/**
 * Given an updated media playlist (whether it was loaded for the first time, or
 * refreshed for live playlists), update any relevant properties and state to reflect
 * changes in the media that should be accounted for (e.g., cues and duration).
 *
 * @param {Object} updatedPlaylist the updated media playlist object
 *
 * @private
 */
;
22667
_proto.handleUpdatedMediaPlaylist = function handleUpdatedMediaPlaylist(updatedPlaylist) {
  if (this.useCueTags_) {
    this.updateAdCues_(updatedPlaylist);
  }

  // TODO: Create a new event on the PlaylistLoader that signals
  // that the segments have changed in some way and use that to
  // update the SegmentLoader instead of doing it twice here and
  // on `mediachange`
  this.mainSegmentLoader_.playlist(updatedPlaylist, this.requestOptions_);
  this.updateDuration(!updatedPlaylist.endList);

  // If the player isn't paused, ensure that the segment loader is running,
  // as it is possible that it was temporarily stopped while waiting for
  // a playlist (e.g., in case the playlist errored and we re-requested it).
  if (this.tech_.paused()) {
    return;
  }

  this.mainSegmentLoader_.load();

  if (this.audioSegmentLoader_) {
    this.audioSegmentLoader_.load();
  }
}
/**
 * A helper function for triggering presence usage events once per source
 *
 * @private
 */
;
22696
_proto.triggerPresenceUsage_ = function triggerPresenceUsage_(master, media) {
  var tech = this.tech_;
  var mediaGroups = master.mediaGroups || {};

  // Guard with empty objects: when the fallback above is taken (or a parser
  // omits a group type) `mediaGroups.AUDIO` / `mediaGroups.SUBTITLES` would be
  // undefined and the `Object.keys` calls below would throw.
  var audioGroups = mediaGroups.AUDIO || {};
  var subtitleGroups = mediaGroups.SUBTITLES || {};
  var audioGroupKeys = Object.keys(audioGroups);
  var defaultDemuxed = true;

  // Audio is only considered demuxed when every alternate audio rendition
  // has its own playlist uri.
  for (var mediaGroup in audioGroups) {
    for (var label in audioGroups[mediaGroup]) {
      var properties = audioGroups[mediaGroup][label];

      if (!properties.uri) {
        defaultDemuxed = false;
      }
    }
  }

  // Each feature is reported twice: the current vhs-* name plus the legacy
  // hls-* name, for backwards compatibility.
  var triggerUsagePair = function triggerUsagePair(feature) {
    tech.trigger({
      type: 'usage',
      name: 'vhs-' + feature
    });
    tech.trigger({
      type: 'usage',
      name: 'hls-' + feature
    });
  };

  if (defaultDemuxed) {
    triggerUsagePair('demuxed');
  }

  if (Object.keys(subtitleGroups).length) {
    triggerUsagePair('webvtt');
  }

  if (Vhs$1.Playlist.isAes(media)) {
    triggerUsagePair('aes');
  }

  // More than one rendition in the first audio group means alternate audio.
  if (audioGroupKeys.length && Object.keys(audioGroups[audioGroupKeys[0]]).length > 1) {
    triggerUsagePair('alternate-audio');
  }

  if (this.useCueTags_) {
    triggerUsagePair('playlist-cue-tags');
  }
};
22767
// Decide whether to switch from the current media playlist to `nextPlaylist`,
// delegating the policy to the module-level shouldSwitchToMedia helper.
_proto.shouldSwitchToMedia_ = function shouldSwitchToMedia_(nextPlaylist) {
  var currentPlaylist = this.masterPlaylistLoader_.media();
  var buffered = this.tech_.buffered();

  // Seconds buffered ahead of the playhead (0 when nothing is buffered).
  var forwardBuffer = 0;

  if (buffered.length) {
    forwardBuffer = buffered.end(buffered.length - 1) - this.tech_.currentTime();
  }

  return shouldSwitchToMedia({
    currentPlaylist: currentPlaylist,
    nextPlaylist: nextPlaylist,
    forwardBuffer: forwardBuffer,
    bufferLowWaterLine: this.bufferLowWaterLine(),
    bufferHighWaterLine: this.bufferHighWaterLine(),
    duration: this.duration(),
    experimentalBufferBasedABR: this.experimentalBufferBasedABR,
    log: this.logger_
  });
}
/**
 * Register event handlers on the segment loaders. A helper function
 * for construction time.
 *
 * @private
 */
;
22792
_proto.setupSegmentLoaderListeners_ = function setupSegmentLoaderListeners_() {
  var _this4 = this;

  // With the legacy (non-buffer-based) ABR, rendition switches are evaluated
  // on every bandwidth update from the main segment loader.
  if (!this.experimentalBufferBasedABR) {
    this.mainSegmentLoader_.on('bandwidthupdate', function () {
      var nextPlaylist = _this4.selectPlaylist();

      if (_this4.shouldSwitchToMedia_(nextPlaylist)) {
        _this4.switchMedia_(nextPlaylist, 'bandwidthupdate');
      }

      _this4.tech_.trigger('bandwidthupdate');
    });
    this.mainSegmentLoader_.on('progress', function () {
      _this4.trigger('progress');
    });
  }

  // Segment download errors blacklist the current playlist; append errors
  // are surfaced as this controller's own 'error' event.
  this.mainSegmentLoader_.on('error', function () {
    _this4.blacklistCurrentPlaylist(_this4.mainSegmentLoader_.error());
  });
  this.mainSegmentLoader_.on('appenderror', function () {
    _this4.error = _this4.mainSegmentLoader_.error_;

    _this4.trigger('error');
  });
  this.mainSegmentLoader_.on('syncinfoupdate', function () {
    _this4.onSyncInfoUpdate_();
  });
  this.mainSegmentLoader_.on('timestampoffset', function () {
    _this4.tech_.trigger({
      type: 'usage',
      name: 'vhs-timestamp-offset'
    });

    _this4.tech_.trigger({
      type: 'usage',
      name: 'hls-timestamp-offset'
    });
  });
  this.audioSegmentLoader_.on('syncinfoupdate', function () {
    _this4.onSyncInfoUpdate_();
  });
  this.audioSegmentLoader_.on('appenderror', function () {
    _this4.error = _this4.audioSegmentLoader_.error_;

    _this4.trigger('error');
  });
  this.mainSegmentLoader_.on('ended', function () {
    _this4.logger_('main segment loader ended');

    _this4.onEndOfStream();
  });
  this.mainSegmentLoader_.on('earlyabort', function (event) {
    // never try to early abort with the new ABR algorithm
    if (_this4.experimentalBufferBasedABR) {
      return;
    }

    _this4.delegateLoaders_('all', ['abort']);

    _this4.blacklistCurrentPlaylist({
      message: 'Aborted early because there isn\'t enough bandwidth to complete the ' + 'request without rebuffering.'
    }, ABORT_EARLY_BLACKLIST_SECONDS);
  });

  // Keep the SourceBuffer codecs in sync with the track info reported by
  // either loader; create the source buffers on the first track info.
  var updateCodecs = function updateCodecs() {
    if (!_this4.sourceUpdater_.hasCreatedSourceBuffers()) {
      return _this4.tryToCreateSourceBuffers_();
    }

    var codecs = _this4.getCodecsOrExclude_();

    // no codecs means that the playlist was excluded
    if (!codecs) {
      return;
    }

    _this4.sourceUpdater_.addOrChangeSourceBuffers(codecs);
  };

  this.mainSegmentLoader_.on('trackinfo', updateCodecs);
  this.audioSegmentLoader_.on('trackinfo', updateCodecs);

  // The fmp4 usage events fire at most once per source (guarded by
  // triggeredFmp4Usage), from whichever loader sees fmp4 content first.
  this.mainSegmentLoader_.on('fmp4', function () {
    if (!_this4.triggeredFmp4Usage) {
      _this4.tech_.trigger({
        type: 'usage',
        name: 'vhs-fmp4'
      });

      _this4.tech_.trigger({
        type: 'usage',
        name: 'hls-fmp4'
      });

      _this4.triggeredFmp4Usage = true;
    }
  });
  this.audioSegmentLoader_.on('fmp4', function () {
    if (!_this4.triggeredFmp4Usage) {
      _this4.tech_.trigger({
        type: 'usage',
        name: 'vhs-fmp4'
      });

      _this4.tech_.trigger({
        type: 'usage',
        name: 'hls-fmp4'
      });

      _this4.triggeredFmp4Usage = true;
    }
  });
  this.audioSegmentLoader_.on('ended', function () {
    _this4.logger_('audioSegmentLoader ended');

    _this4.onEndOfStream();
  });
};
22912
/**
 * Total seconds of media loaded across the audio and main segment loaders.
 *
 * Note: the previous implementation wrapped the sum in a single-argument
 * `Math.max(...)`, which is an identity operation; the sum is now returned
 * directly (behavior unchanged).
 *
 * @return {number} combined mediaSecondsLoaded of both segment loaders
 * @private
 */
_proto.mediaSecondsLoaded_ = function mediaSecondsLoaded_() {
  return this.audioSegmentLoader_.mediaSecondsLoaded + this.mainSegmentLoader_.mediaSecondsLoaded;
}
/**
 * Call load on our SegmentLoaders
 */
;
22920
_proto.load = function load() {
  // The main loader always loads; alternate loaders only when their
  // playlist loader is active.
  this.mainSegmentLoader_.load();

  var mediaTypes = this.mediaTypes_;

  if (mediaTypes.AUDIO.activePlaylistLoader) {
    this.audioSegmentLoader_.load();
  }

  if (mediaTypes.SUBTITLES.activePlaylistLoader) {
    this.subtitleSegmentLoader_.load();
  }
}
/**
 * Re-tune playback quality level for the current player
 * conditions without performing destructive actions, like
 * removing already buffered content
 *
 * @private
 * @deprecated
 */
;
22941
_proto.smoothQualityChange_ = function smoothQualityChange_(media) {
  // Deprecated: simply defers to fastQualityChange_, defaulting to the
  // selector's current pick when no media is provided.
  this.fastQualityChange_(media === void 0 ? this.selectPlaylist() : media);
}
/**
 * Re-tune playback quality level for the current player
 * conditions. This method will perform destructive actions like removing
 * already buffered content in order to readjust the currently active
 * playlist quickly. This is good for manual quality changes
 *
 * @private
 */
;
22958
_proto.fastQualityChange_ = function fastQualityChange_(media) {
  var self = this;

  if (media === void 0) {
    media = this.selectPlaylist();
  }

  if (media === this.masterPlaylistLoader_.media()) {
    this.logger_('skipping fastQualityChange because new media is same as old');
    return;
  }

  this.switchMedia_(media, 'fast-quality');

  // Delete all buffered data to allow an immediate quality switch, then seek to give
  // the browser a kick to remove any cached frames from the previous rendtion (.04 seconds
  // ahead is roughly the minimum that will accomplish this across a variety of content
  // in IE and Edge, but seeking in place is sufficient on all other browsers)
  // Edge/IE bug: https://developer.microsoft.com/en-us/microsoft-edge/platform/issues/14600375/
  // Chrome bug: https://bugs.chromium.org/p/chromium/issues/detail?id=651904
  this.mainSegmentLoader_.resetEverything(function () {
    // Since this is not a typical seek, we avoid the seekTo method which can cause segments
    // from the previously enabled rendition to load before the new playlist has finished loading
    var browser = videojs__default["default"].browser;
    var nudge = browser.IE_VERSION || browser.IS_EDGE ? 0.04 : 0;

    self.tech_.setCurrentTime(self.tech_.currentTime() + nudge);
  });

  // don't need to reset audio as it is reset when media changes
}
/**
 * Begin playback.
 */
;
22992
_proto.play = function play() {
  // First play may need live-point setup; when it handles everything, stop here.
  if (this.setupFirstPlay()) {
    return;
  }

  // Replaying after the stream ended restarts from the beginning.
  if (this.tech_.ended()) {
    this.tech_.setCurrentTime(0);
  }

  if (this.hasPlayed_) {
    this.load();
  }

  var seekable = this.tech_.seekable();

  // if the viewer has paused and we fell out of the live window,
  // seek forward to the live point
  if (this.tech_.duration() === Infinity) {
    // Guard on seekable.length: calling seekable.start(0) on an empty
    // TimeRanges throws, and a live stream may briefly have no seekable
    // range yet.
    if (seekable.length && this.tech_.currentTime() < seekable.start(0)) {
      return this.tech_.setCurrentTime(seekable.end(seekable.length - 1));
    }
  }
}
/**
 * Seek to the latest media position if this is a live video and the
 * player and video are loaded and initialized.
 */
;
23020
_proto.setupFirstPlay = function setupFirstPlay() {
  var _this6 = this;

  var media = this.masterPlaylistLoader_.media();

  // Check that everything is ready to begin buffering for the first call to play
  // If 1) there is no active media
  // 2) the player is paused
  // 3) the first play has already been setup
  // then exit early
  if (!media || this.tech_.paused() || this.hasPlayed_) {
    return false;
  }

  // when the video is a live stream
  if (!media.endList) {
    var seekable = this.seekable();

    if (!seekable.length) {
      // without a seekable range, the player cannot seek to begin buffering at the live
      // point
      return false;
    }

    if (videojs__default["default"].browser.IE_VERSION && this.tech_.readyState() === 0) {
      // IE11 throws an InvalidStateError if you try to set currentTime while the
      // readyState is 0, so it must be delayed until the tech fires loadedmetadata.
      this.tech_.one('loadedmetadata', function () {
        _this6.trigger('firstplay');

        _this6.tech_.setCurrentTime(seekable.end(0));

        _this6.hasPlayed_ = true;
      });
      // Setup is deferred to the loadedmetadata handler above.
      return false;
    }

    // trigger firstplay to inform the source handler to ignore the next seek event
    this.trigger('firstplay');

    // seek to the live point
    this.tech_.setCurrentTime(seekable.end(0));
  }

  this.hasPlayed_ = true;

  // we can begin loading now that everything is ready
  this.load();
  return true;
}
/**
 * handle the sourceopen event on the MediaSource
 *
 * @private
 */
;
23074
_proto.handleSourceOpen_ = function handleSourceOpen_() {
  // Only attempt to create the source buffer if none already exist.
  // handleSourceOpen is also called when we are "re-opening" a source buffer
  // after `endOfStream` has been called (in response to a seek for instance)
  this.tryToCreateSourceBuffers_();

  // if autoplay is enabled, begin playback. This is duplicative of
  // code in video.js but is required because play() must be invoked
  // *after* the media source has opened.
  if (this.tech_.autoplay()) {
    var playPromise = this.tech_.play();

    // Catch/silence error when a pause interrupts a play request
    // on browsers which return a promise
    if (typeof playPromise !== 'undefined' && typeof playPromise.then === 'function') {
      playPromise.then(null, function (e) {});
    }
  }

  this.trigger('sourceopen');
}
/**
 * handle the sourceended event on the MediaSource
 *
 * @private
 */
;
23100
_proto.handleSourceEnded_ = function handleSourceEnded_() {
  var metadataTrack = this.inbandTextTracks_.metadataTrack_;

  if (!metadataTrack) {
    return;
  }

  var cues = metadataTrack.cues;

  if (!cues || !cues.length) {
    return;
  }

  var duration = this.duration();
  var lastCue = cues[cues.length - 1];

  // Extend the final metadata cue to the stream duration, clamping
  // non-finite durations to Number.MAX_VALUE.
  lastCue.endTime = isNaN(duration) || Math.abs(duration) === Infinity ? Number.MAX_VALUE : duration;
}
/**
 * handle the durationchange event on the MediaSource
 *
 * @private
 */
;
23121
_proto.handleDurationChange_ = function handleDurationChange_() {
  // Re-emit the MediaSource's durationchange event on the tech so the player
  // (and any external listeners) learn that the reported duration changed.
  this.tech_.trigger('durationchange');
}
23125 /**
23126 * Calls endOfStream on the media source when all active stream types have called
23127 * endOfStream
23128 *
   * @private
23132 */
23133 ;
23134
_proto.onEndOfStream = function onEndOfStream() {
  // Call endOfStream on the media source once every active stream type has
  // signalled that it is finished loading segments.
  var ended = this.mainSegmentLoader_.ended_;

  // an active audio playlist loader means alternate audio is in use
  if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
    var mainMediaInfo = this.mainSegmentLoader_.getCurrentMediaInfo_();
    var mainMayHaveVideo = !mainMediaInfo || mainMediaInfo.hasVideo;

    if (mainMayHaveVideo) {
      // while we do not yet know whether the main loader contains video, or we
      // definitively know it does, both the main and audio segment loaders
      // must have ended before the stream can end
      ended = ended && this.audioSegmentLoader_.ended_;
    } else {
      // the main loader is known to carry no video, so only the audio
      // segment loader's ended state matters
      ended = this.audioSegmentLoader_.ended_;
    }
  }

  if (!ended) {
    return;
  }

  this.stopABRTimer_();
  this.sourceUpdater_.endOfStream();
}
23159 /**
23160 * Check if a playlist has stopped being updated
23161 *
23162 * @param {Object} playlist the media playlist object
23163 * @return {boolean} whether the playlist has stopped being updated or not
23164 */
23165 ;
23166
_proto.stuckAtPlaylistEnd_ = function stuckAtPlaylistEnd_(playlist) {
  // Determine whether playback has stalled at the end of a playlist that has
  // stopped being updated.
  var seekable = this.seekable();

  if (!seekable.length) {
    // without a seekable range there is not enough information to decide
    return false;
  }

  var expired = this.syncController_.getExpiredTime(playlist, this.duration());

  if (expired === null) {
    return false;
  }

  // compute the playlist end without the safe live-edge offset; while there
  // is still content to play we should not report being stuck
  var playlistEnd = Vhs$1.Playlist.playlistEnd(playlist, expired);
  var currentTime = this.tech_.currentTime();
  var buffered = this.tech_.buffered();

  if (!buffered.length) {
    // with nothing buffered, stuck means the playhead has reached the
    // absolute end of the playlist
    return playlistEnd - currentTime <= SAFE_TIME_DELTA;
  }

  var lastBufferedEnd = buffered.end(buffered.length - 1);
  var bufferNearlyEmpty = lastBufferedEnd - currentTime <= SAFE_TIME_DELTA;
  var bufferAtPlaylistEnd = playlistEnd - lastBufferedEnd <= SAFE_TIME_DELTA;

  // stuck when very little buffer remains and that buffer already reaches the
  // absolute end of the playlist
  return bufferNearlyEmpty && bufferAtPlaylistEnd;
}
23197 /**
23198 * Blacklists a playlist when an error occurs for a set amount of time
23199 * making it unavailable for selection by the rendition selection algorithm
23200 * and then forces a new playlist (rendition) selection.
23201 *
23202 * @param {Object=} error an optional error that may include the playlist
23203 * to blacklist
23204 * @param {number=} blacklistDuration an optional number of seconds to blacklist the
23205 * playlist
23206 */
23207 ;
23208
_proto.blacklistCurrentPlaylist = function blacklistCurrentPlaylist(error, blacklistDuration) {
  // default to an empty error object so the property accesses below are safe
  if (error === void 0) {
    error = {};
  }

  // If the `error` was generated by the playlist loader, it will contain
  // the playlist we were trying to load (but failed) and that should be
  // blacklisted instead of the currently selected playlist which is likely
  // out-of-date in this scenario
  var currentPlaylist = error.playlist || this.masterPlaylistLoader_.media();
  // duration precedence: explicit argument, then the error's suggestion,
  // then the controller-wide default
  blacklistDuration = blacklistDuration || error.blacklistDuration || this.blacklistDuration; // If there is no current playlist, then an error occurred while we were
  // trying to load the master OR while we were disposing of the tech

  if (!currentPlaylist) {
    this.error = error;

    // if the media source is not open yet, surface the error directly;
    // otherwise end the stream with a network error
    if (this.mediaSource.readyState !== 'open') {
      this.trigger('error');
    } else {
      this.sourceUpdater_.endOfStream('network');
    }

    return;
  }

  currentPlaylist.playlistErrors_++;
  var playlists = this.masterPlaylistLoader_.master.playlists;
  var enabledPlaylists = playlists.filter(isEnabled);
  var isFinalRendition = enabledPlaylists.length === 1 && enabledPlaylists[0] === currentPlaylist; // Don't blacklist the only playlist unless it was blacklisted
  // forever

  if (playlists.length === 1 && blacklistDuration !== Infinity) {
    videojs__default["default"].log.warn("Problem encountered with playlist " + currentPlaylist.id + ". " + 'Trying again since it is the only playlist.');
    this.tech_.trigger('retryplaylist'); // if this is a final rendition, we should delay

    return this.masterPlaylistLoader_.load(isFinalRendition);
  }

  if (isFinalRendition) {
    // Since we're on the final non-blacklisted playlist, and we're about to blacklist
    // it, instead of erring the player or retrying this playlist, clear out the current
    // blacklist. This allows other playlists to be attempted in case any have been
    // fixed.
    var reincluded = false;
    playlists.forEach(function (playlist) {
      // skip current playlist which is about to be blacklisted
      if (playlist === currentPlaylist) {
        return;
      }

      var excludeUntil = playlist.excludeUntil; // a playlist cannot be reincluded if it wasn't excluded to begin with.

      // playlists excluded forever (Infinity) stay excluded
      if (typeof excludeUntil !== 'undefined' && excludeUntil !== Infinity) {
        reincluded = true;
        delete playlist.excludeUntil;
      }
    });

    if (reincluded) {
      videojs__default["default"].log.warn('Removing other playlists from the exclusion list because the last ' + 'rendition is about to be excluded.'); // Technically we are retrying a playlist, in that we are simply retrying a previous
      // playlist. This is needed for users relying on the retryplaylist event to catch a
      // case where the player might be stuck and looping through "dead" playlists.

      this.tech_.trigger('retryplaylist');
    }
  } // Blacklist this playlist


  var excludeUntil;

  // exceeding the retry limit excludes the playlist permanently
  if (currentPlaylist.playlistErrors_ > this.maxPlaylistRetries) {
    excludeUntil = Infinity;
  } else {
    excludeUntil = Date.now() + blacklistDuration * 1000;
  }

  currentPlaylist.excludeUntil = excludeUntil;

  if (error.reason) {
    currentPlaylist.lastExcludeReason_ = error.reason;
  }

  this.tech_.trigger('blacklistplaylist');
  // fire both the vhs- and legacy hls-prefixed usage events
  this.tech_.trigger({
    type: 'usage',
    name: 'vhs-rendition-blacklisted'
  });
  this.tech_.trigger({
    type: 'usage',
    name: 'hls-rendition-blacklisted'
  }); // TODO: should we select a new playlist if this blacklist wasn't for the currentPlaylist?
  // Would be something like media().id !== currentPlaylist.id and we would need something
  // like `pendingMedia` in playlist loaders to check against that too. This will prevent us
  // from loading a new playlist on any blacklist.
  // Select a new playlist

  var nextPlaylist = this.selectPlaylist();

  if (!nextPlaylist) {
    this.error = 'Playback cannot continue. No available working or supported playlists.';
    this.trigger('error');
    return;
  }

  // internal problems are logged quietly; external problems warn loudly
  var logFn = error.internal ? this.logger_ : videojs__default["default"].log.warn;
  var errorMessage = error.message ? ' ' + error.message : '';
  logFn((error.internal ? 'Internal problem' : 'Problem') + " encountered with playlist " + currentPlaylist.id + "." + (errorMessage + " Switching to playlist " + nextPlaylist.id + ".")); // if audio group changed reset audio loaders

  if (nextPlaylist.attributes.AUDIO !== currentPlaylist.attributes.AUDIO) {
    this.delegateLoaders_('audio', ['abort', 'pause']);
  } // if subtitle group changed reset subtitle loaders


  if (nextPlaylist.attributes.SUBTITLES !== currentPlaylist.attributes.SUBTITLES) {
    this.delegateLoaders_('subtitle', ['abort', 'pause']);
  }

  this.delegateLoaders_('main', ['abort', 'pause']);
  var delayDuration = nextPlaylist.targetDuration / 2 * 1000 || 5 * 1000;
  var shouldDelay = typeof nextPlaylist.lastRequest === 'number' && Date.now() - nextPlaylist.lastRequest <= delayDuration; // delay if it's a final rendition or if the last refresh is sooner than half targetDuration

  return this.switchMedia_(nextPlaylist, 'exclude', isFinalRendition || shouldDelay);
}
23332 /**
23333 * Pause all segment/playlist loaders
23334 */
23335 ;
23336
_proto.pauseLoading = function pauseLoading() {
  // abort any in-flight requests and pause every playlist/segment loader,
  // then stop the adaptive-bitrate selection timer
  this.delegateLoaders_('all', ['abort', 'pause']);
  this.stopABRTimer_();
}
23341 /**
23342 * Call a set of functions in order on playlist loaders, segment loaders,
23343 * or both types of loaders.
23344 *
23345 * @param {string} filter
23346 * Filter loaders that should call fnNames using a string. Can be:
23347 * * all - run on all loaders
23348 * * audio - run on all audio loaders
23349 * * subtitle - run on all subtitle loaders
23350 * * main - run on the main/master loaders
23351 *
23352 * @param {Array|string} fnNames
23353 * A string or array of function names to call.
23354 */
23355 ;
23356
_proto.delegateLoaders_ = function delegateLoaders_(filter, fnNames) {
  // Invoke a list of function names, in order, on the loaders selected by
  // `filter` ('all', 'main', 'audio' or 'subtitle').
  var controller = this;
  var matchesAll = filter === 'all';
  var targets = [];

  // the master playlist loader is the "main" playlist loader
  if (matchesAll || filter === 'main') {
    targets.push(this.masterPlaylistLoader_);
  }

  // collect the media-group types whose active playlist loaders should be
  // included for this filter
  var groupTypes = [];

  if (matchesAll || filter === 'audio') {
    groupTypes.push('AUDIO');
  }

  if (matchesAll || filter === 'subtitle') {
    groupTypes.push('CLOSED-CAPTIONS');
    groupTypes.push('SUBTITLES');
  }

  groupTypes.forEach(function (groupType) {
    var mediaType = controller.mediaTypes_[groupType];
    var activeLoader = mediaType && mediaType.activePlaylistLoader;

    if (activeLoader) {
      targets.push(activeLoader);
    }
  });

  // segment loaders are stored by name: mainSegmentLoader_, etc.
  ['main', 'audio', 'subtitle'].forEach(function (name) {
    var segmentLoader = controller[name + "SegmentLoader_"];

    if (segmentLoader && (filter === name || matchesAll)) {
      targets.push(segmentLoader);
    }
  });

  targets.forEach(function (loader) {
    fnNames.forEach(function (fnName) {
      // not every loader implements every delegated function
      if (typeof loader[fnName] === 'function') {
        loader[fnName]();
      }
    });
  });
}
23400 /**
23401 * set the current time on all segment loaders
23402 *
23403 * @param {TimeRange} currentTime the current time to set
23404 * @return {TimeRange} the current time
23405 */
23406 ;
23407
_proto.setCurrentTime = function setCurrentTime(currentTime) {
  // Seek all segment loaders to `currentTime`.
  var bufferedRange = findRange(this.tech_.buffered(), currentTime);
  var media = this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media();

  if (!media) {
    // return immediately when the metadata is not ready yet
    return 0;
  }

  // it's clearly an edge-case but don't throw an error if asked to seek
  // within an empty playlist
  if (!media.segments) {
    return 0;
  }

  // if the seek target is already buffered, continue buffering as usual
  if (bufferedRange && bufferedRange.length) {
    return currentTime;
  }

  // cancel outstanding requests so buffering can restart at the new location
  this.mainSegmentLoader_.resetEverything();
  this.mainSegmentLoader_.abort();

  if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
    this.audioSegmentLoader_.resetEverything();
    this.audioSegmentLoader_.abort();
  }

  if (this.mediaTypes_.SUBTITLES.activePlaylistLoader) {
    this.subtitleSegmentLoader_.resetEverything();
    this.subtitleSegmentLoader_.abort();
  }

  // restart the segment loaders in case they were paused
  this.load();
}
23445 /**
23446 * get the current duration
23447 *
23448 * @return {TimeRange} the duration
23449 */
23450 ;
23451
_proto.duration = function duration() {
  // Report the duration of the current media; 0 until a playlist has loaded.
  var media = this.masterPlaylistLoader_ && this.masterPlaylistLoader_.media();

  if (!media) {
    // no playlists loaded yet, so a duration cannot be determined
    return 0;
  }

  // For live content, don't rely on the media source for duration: setting
  // the native MediaSource's duration to Infinity has unwanted consequences
  // for seekable behavior (https://github.com/w3c/media-source/issues/5).
  // The spec resolves this via setLiveSeekableRange
  // (https://github.com/w3c/media-source/pull/92), but few browsers support
  // it (https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange),
  // so until then simply report Infinity for live playlists.
  if (!media.endList) {
    return Infinity;
  }

  // This is a VOD presentation, so it is safe to rely on the media source's
  // duration when available ...
  if (this.mediaSource) {
    return this.mediaSource.duration;
  }

  // ... otherwise fall back to a playlist-calculated estimate
  return Vhs$1.Playlist.duration(media);
}
23486 /**
23487 * check the seekable range
23488 *
23489 * @return {TimeRange} the seekable range
23490 */
23491 ;
23492
_proto.seekable = function seekable() {
  // return the cached seekable range, maintained by onSyncInfoUpdate_
  return this.seekable_;
};
23496
_proto.onSyncInfoUpdate_ = function onSyncInfoUpdate_() {
  // Recompute the cached seekable range when new sync info arrives, and fire
  // 'seekablechanged' on the tech only when the range actually changed.
  var audioSeekable;

  if (!this.masterPlaylistLoader_) {
    return;
  }

  var media = this.masterPlaylistLoader_.media();

  if (!media) {
    return;
  }

  var expired = this.syncController_.getExpiredTime(media, this.duration());

  if (expired === null) {
    // not enough information to update seekable
    return;
  }

  var master = this.masterPlaylistLoader_.master;
  var mainSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));

  if (mainSeekable.length === 0) {
    return;
  }

  // when alternate audio is active, compute its seekable range too so the
  // final range can be limited to times both streams can play
  if (this.mediaTypes_.AUDIO.activePlaylistLoader) {
    media = this.mediaTypes_.AUDIO.activePlaylistLoader.media();
    expired = this.syncController_.getExpiredTime(media, this.duration());

    if (expired === null) {
      return;
    }

    audioSeekable = Vhs$1.Playlist.seekable(media, expired, Vhs$1.Playlist.liveEdgeDelay(master, media));

    if (audioSeekable.length === 0) {
      return;
    }
  }

  // remember the previous range so a no-op update can be detected below
  var oldEnd;
  var oldStart;

  if (this.seekable_ && this.seekable_.length) {
    oldEnd = this.seekable_.end(0);
    oldStart = this.seekable_.start(0);
  }

  if (!audioSeekable) {
    // seekable has been calculated based on buffering video data so it
    // can be returned directly
    this.seekable_ = mainSeekable;
  } else if (audioSeekable.start(0) > mainSeekable.end(0) || mainSeekable.start(0) > audioSeekable.end(0)) {
    // seekables are pretty far off, rely on main
    this.seekable_ = mainSeekable;
  } else {
    // otherwise use the intersection of the audio and main seekable ranges
    this.seekable_ = videojs__default["default"].createTimeRanges([[audioSeekable.start(0) > mainSeekable.start(0) ? audioSeekable.start(0) : mainSeekable.start(0), audioSeekable.end(0) < mainSeekable.end(0) ? audioSeekable.end(0) : mainSeekable.end(0)]]);
  } // seekable is the same as last time


  if (this.seekable_ && this.seekable_.length) {
    if (this.seekable_.end(0) === oldEnd && this.seekable_.start(0) === oldStart) {
      return;
    }
  }

  this.logger_("seekable updated [" + printableRange(this.seekable_) + "]");
  this.tech_.trigger('seekablechanged');
}
23568 /**
23569 * Update the player duration
23570 */
23571 ;
23572
_proto.updateDuration = function updateDuration(isLive) {
  // drop any previously deferred update before scheduling or running this one
  if (this.updateDuration_) {
    this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
    this.updateDuration_ = null;
  }

  // the duration can only be set once the media source is open, so defer
  // this call until 'sourceopen' fires when it is not
  if (this.mediaSource.readyState !== 'open') {
    this.updateDuration_ = this.updateDuration.bind(this, isLive);
    this.mediaSource.addEventListener('sourceopen', this.updateDuration_);
    return;
  }

  if (isLive) {
    var seekable = this.seekable();

    if (!seekable.length) {
      return;
    } // Even in the case of a live playlist, the native MediaSource's duration should not
    // be set to Infinity (even though this would be expected for a live playlist), since
    // setting the native MediaSource's duration to infinity ends up with consequences to
    // seekable behavior. See https://github.com/w3c/media-source/issues/5 for details.
    //
    // This is resolved in the spec by https://github.com/w3c/media-source/pull/92,
    // however, few browsers have support for setLiveSeekableRange()
    // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/setLiveSeekableRange
    //
    // Until a time when the duration of the media source can be set to infinity, and a
    // seekable range specified across browsers, the duration should be greater than or
    // equal to the last possible seekable value.
    // MediaSource duration starts as NaN
    // It is possible (and probable) that this case will never be reached for many
    // sources, since the MediaSource reports duration as the highest value without
    // accounting for timestamp offset. For example, if the timestamp offset is -100 and
    // we buffered times 0 to 100 with real times of 100 to 200, even though current
    // time will be between 0 and 100, the native media source may report the duration
    // as 200. However, since we report duration separate from the media source (as
    // Infinity), and as long as the native media source duration value is greater than
    // our reported seekable range, seeks will work as expected. The large number as
    // duration for live is actually a strategy used by some players to work around the
    // issue of live seekable ranges cited above.


    if (isNaN(this.mediaSource.duration) || this.mediaSource.duration < seekable.end(seekable.length - 1)) {
      this.sourceUpdater_.setDuration(seekable.end(seekable.length - 1));
    }

    return;
  }

  // VOD: use the playlist-calculated duration, but never report less than
  // the end of what has already been buffered
  var buffered = this.tech_.buffered();
  var duration = Vhs$1.Playlist.duration(this.masterPlaylistLoader_.media());

  if (buffered.length > 0) {
    duration = Math.max(duration, buffered.end(buffered.length - 1));
  }

  // only touch the media source when the value actually changed
  if (this.mediaSource.duration !== duration) {
    this.sourceUpdater_.setDuration(duration);
  }
}
23633 /**
23634 * dispose of the MasterPlaylistController and everything
23635 * that it controls
23636 */
23637 ;
23638
_proto.dispose = function dispose() {
  // Tear down the MasterPlaylistController and everything it controls.
  var controller = this;

  this.trigger('dispose');
  this.decrypter_.terminate();
  this.masterPlaylistLoader_.dispose();
  this.mainSegmentLoader_.dispose();

  if (this.loadOnPlay_) {
    this.tech_.off('play', this.loadOnPlay_);
  }

  // dispose the playlist loader of every audio and subtitle group
  ['AUDIO', 'SUBTITLES'].forEach(function (type) {
    var groups = controller.mediaTypes_[type].groups;

    for (var id in groups) {
      groups[id].forEach(function (group) {
        if (group.playlistLoader) {
          group.playlistLoader.dispose();
        }
      });
    }
  });

  this.audioSegmentLoader_.dispose();
  this.subtitleSegmentLoader_.dispose();
  this.sourceUpdater_.dispose();
  this.timelineChangeController_.dispose();
  this.stopABRTimer_();

  // remove any deferred duration update registered by updateDuration()
  if (this.updateDuration_) {
    this.mediaSource.removeEventListener('sourceopen', this.updateDuration_);
  }

  // detach the remaining MediaSource event handlers
  this.mediaSource.removeEventListener('durationchange', this.handleDurationChange_);
  this.mediaSource.removeEventListener('sourceopen', this.handleSourceOpen_);
  this.mediaSource.removeEventListener('sourceended', this.handleSourceEnded_);
  this.off();
}
23678 /**
23679 * return the master playlist object if we have one
23680 *
23681 * @return {Object} the master playlist object that we parsed
23682 */
23683 ;
23684
_proto.master = function master() {
  // the parsed master playlist object, as maintained by the playlist loader
  return this.masterPlaylistLoader_.master;
}
23688 /**
23689 * return the currently selected playlist
23690 *
23691 * @return {Object} the currently selected playlist object that we parsed
23692 */
23693 ;
23694
_proto.media = function media() {
  // playlist loader will not return media if it has not been fully loaded;
  // fall back to the media selected at setup time in that case
  return this.masterPlaylistLoader_.media() || this.initialMedia_;
};
23699
_proto.areMediaTypesKnown_ = function areMediaTypesKnown_() {
  // Whether every active segment loader has loaded enough to report its media
  // (codec) info -- a prerequisite for creating source buffers.
  var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;
  var hasMainMediaInfo = !!this.mainSegmentLoader_.getCurrentMediaInfo_();

  // without a dedicated audio loader, the audio info comes along with main;
  // otherwise the audio segment loader must report its own media info
  var hasAudioMediaInfo = usingAudioLoader ? !!this.audioSegmentLoader_.getCurrentMediaInfo_() : true;

  // both must be known before codecs can be determined
  return hasMainMediaInfo && hasAudioMediaInfo;
};
23713
_proto.getCodecsOrExclude_ = function getCodecsOrExclude_() {
  // Determine the audio/video codec strings to use when creating source
  // buffers. When codecs cannot be determined, are unsupported, or imply an
  // impossible codec switch, the current playlist is blacklisted and
  // `undefined` is returned.
  var _this9 = this;

  var media = {
    main: this.mainSegmentLoader_.getCurrentMediaInfo_() || {},
    audio: this.audioSegmentLoader_.getCurrentMediaInfo_() || {}
  }; // set "main" media equal to video

  media.video = media.main;
  var playlistCodecs = codecsForPlaylist(this.master(), this.media());
  var codecs = {};
  var usingAudioLoader = !!this.mediaTypes_.AUDIO.activePlaylistLoader;

  // prefer codecs declared in the playlist, then probed codecs, then defaults
  if (media.main.hasVideo) {
    codecs.video = playlistCodecs.video || media.main.videoCodec || codecs_js.DEFAULT_VIDEO_CODEC;
  }

  // muxed content carries its audio codec on the video mime type
  if (media.main.isMuxed) {
    codecs.video += "," + (playlistCodecs.audio || media.main.audioCodec || codecs_js.DEFAULT_AUDIO_CODEC);
  }

  if (media.main.hasAudio && !media.main.isMuxed || media.audio.hasAudio || usingAudioLoader) {
    codecs.audio = playlistCodecs.audio || media.main.audioCodec || media.audio.audioCodec || codecs_js.DEFAULT_AUDIO_CODEC; // set audio isFmp4 so we use the correct "supports" function below

    media.audio.isFmp4 = media.main.hasAudio && !media.main.isMuxed ? media.main.isFmp4 : media.audio.isFmp4;
  } // no codecs, no playback.


  if (!codecs.audio && !codecs.video) {
    this.blacklistCurrentPlaylist({
      playlist: this.media(),
      message: 'Could not determine codecs for playlist.',
      blacklistDuration: Infinity
    });
    return;
  } // fmp4 relies on browser support, while ts relies on muxer support


  var supportFunction = function supportFunction(isFmp4, codec) {
    return isFmp4 ? codecs_js.browserSupportsCodec(codec) : codecs_js.muxerSupportsCodec(codec);
  };

  // collect unsupported codecs keyed by which component ('browser'/'muxer')
  // fails to support them
  var unsupportedCodecs = {};
  var unsupportedAudio;
  ['video', 'audio'].forEach(function (type) {
    if (codecs.hasOwnProperty(type) && !supportFunction(media[type].isFmp4, codecs[type])) {
      var supporter = media[type].isFmp4 ? 'browser' : 'muxer';
      unsupportedCodecs[supporter] = unsupportedCodecs[supporter] || [];
      unsupportedCodecs[supporter].push(codecs[type]);

      if (type === 'audio') {
        unsupportedAudio = supporter;
      }
    }
  });

  // when the alternate audio track's codec is unsupported, exclude every
  // other variant in the same audio group so it will not be retried
  if (usingAudioLoader && unsupportedAudio && this.media().attributes.AUDIO) {
    var audioGroup = this.media().attributes.AUDIO;
    this.master().playlists.forEach(function (variant) {
      var variantAudioGroup = variant.attributes && variant.attributes.AUDIO;

      if (variantAudioGroup === audioGroup && variant !== _this9.media()) {
        variant.excludeUntil = Infinity;
      }
    });
    this.logger_("excluding audio group " + audioGroup + " as " + unsupportedAudio + " does not support codec(s): \"" + codecs.audio + "\"");
  } // if we have any unsupported codecs blacklist this playlist.


  if (Object.keys(unsupportedCodecs).length) {
    // build a message like: browser does not support codec(s): "..."
    var message = Object.keys(unsupportedCodecs).reduce(function (acc, supporter) {
      if (acc) {
        acc += ', ';
      }

      acc += supporter + " does not support codec(s): \"" + unsupportedCodecs[supporter].join(',') + "\"";
      return acc;
    }, '') + '.';
    this.blacklistCurrentPlaylist({
      playlist: this.media(),
      internal: true,
      message: message,
      blacklistDuration: Infinity
    });
    return;
  } // check if codec switching is happening


  if (this.sourceUpdater_.hasCreatedSourceBuffers() && !this.sourceUpdater_.canChangeType()) {
    var switchMessages = [];
    ['video', 'audio'].forEach(function (type) {
      // compare only the codec family (e.g. 'avc1' vs 'hvc1'), case-insensitively
      var newCodec = (codecs_js.parseCodecs(_this9.sourceUpdater_.codecs[type] || '')[0] || {}).type;
      var oldCodec = (codecs_js.parseCodecs(codecs[type] || '')[0] || {}).type;

      if (newCodec && oldCodec && newCodec.toLowerCase() !== oldCodec.toLowerCase()) {
        switchMessages.push("\"" + _this9.sourceUpdater_.codecs[type] + "\" -> \"" + codecs[type] + "\"");
      }
    });

    if (switchMessages.length) {
      this.blacklistCurrentPlaylist({
        playlist: this.media(),
        message: "Codec switching not supported: " + switchMessages.join(', ') + ".",
        blacklistDuration: Infinity,
        internal: true
      });
      return;
    }
  } // TODO: when using the muxer shouldn't we just return
  // the codecs that the muxer outputs?


  return codecs;
}
23828 /**
   * Create source buffers and exclude any incompatible renditions.
23830 *
23831 * @private
23832 */
23833 ;
23834
_proto.tryToCreateSourceBuffers_ = function tryToCreateSourceBuffers_() {
  // Create source buffers once the media source is open, the media/codec
  // info is known, and no source buffers have been created yet.
  var mediaSourceReady = this.mediaSource.readyState === 'open';

  if (!mediaSourceReady || this.sourceUpdater_.hasCreatedSourceBuffers()) {
    return;
  }

  if (!this.areMediaTypesKnown_()) {
    return;
  }

  var codecs = this.getCodecsOrExclude_();

  // getCodecsOrExclude_ returns nothing when the playlist was excluded
  if (!codecs) {
    return;
  }

  this.sourceUpdater_.createSourceBuffers(codecs);
  var codecString = [codecs.video, codecs.audio].filter(Boolean).join(',');
  this.excludeIncompatibleVariants_(codecString);
}
23856 /**
23857 * Excludes playlists with codecs that are unsupported by the muxer and browser.
23858 */
23859 ;
23860
_proto.excludeUnsupportedVariants_ = function excludeUnsupportedVariants_() {
  // Exclude (forever) any variant whose declared codecs are supported by
  // neither the muxer nor the browser, so the ABR algorithm never selects it.
  var _this10 = this;

  var playlists = this.master().playlists;
  var ids = []; // TODO: why don't we have a property to loop through all
  // playlist? Why did we ever mix indexes and keys?

  Object.keys(playlists).forEach(function (key) {
    var variant = playlists[key]; // check if we already processed this playlist.

    if (ids.indexOf(variant.id) !== -1) {
      return;
    }

    ids.push(variant.id);

    // Pass the master playlist *object* to codecsForPlaylist -- previously the
    // `master` method itself was passed (`_this10.master`), which made every
    // `master.playlists`/`master.mediaGroups` lookup inside codecsForPlaylist
    // silently resolve to undefined (e.g. audio-only detection always failed).
    // Matches the other call sites, which pass the master playlist object.
    var codecs = codecsForPlaylist(_this10.master(), variant);
    var unsupported = [];

    if (codecs.audio && !codecs_js.muxerSupportsCodec(codecs.audio) && !codecs_js.browserSupportsCodec(codecs.audio)) {
      unsupported.push("audio codec " + codecs.audio);
    }

    if (codecs.video && !codecs_js.muxerSupportsCodec(codecs.video) && !codecs_js.browserSupportsCodec(codecs.video)) {
      unsupported.push("video codec " + codecs.video);
    }

    // TTML-in-fmp4 text tracks are not supported
    if (codecs.text && codecs.text === 'stpp.ttml.im1t') {
      unsupported.push("text codec " + codecs.text);
    }

    if (unsupported.length) {
      variant.excludeUntil = Infinity;

      _this10.logger_("excluding " + variant.id + " for unsupported: " + unsupported.join(', '));
    }
  });
}
23898 /**
23899 * Blacklist playlists that are known to be codec or
23900 * stream-incompatible with the SourceBuffer configuration. For
23901 * instance, Media Source Extensions would cause the video element to
23902 * stall waiting for video data if you switched from a variant with
23903 * video and audio to an audio-only one.
23904 *
23905 * @param {Object} media a media playlist compatible with the current
23906 * set of SourceBuffers. Variants in the current master playlist that
23907 * do not appear to have compatible codec or stream configurations
23908 * will be excluded from the default playlist selection algorithm
23909 * indefinitely.
23910 * @private
23911 */
23912 ;
23913
_proto.excludeIncompatibleVariants_ = function excludeIncompatibleVariants_(codecString) {
  // Permanently exclude variants that are stream- or codec-incompatible with
  // the source buffers just created from `codecString` (e.g. switching from a
  // video+audio variant to an audio-only one would stall MSE playback).
  var _this11 = this;

  var ids = [];
  var playlists = this.master().playlists;
  // parse the codecs the source buffers were created with, for comparison
  var codecs = unwrapCodecList(codecs_js.parseCodecs(codecString));
  var codecCount_ = codecCount(codecs);
  var videoDetails = codecs.video && codecs_js.parseCodecs(codecs.video)[0] || null;
  var audioDetails = codecs.audio && codecs_js.parseCodecs(codecs.audio)[0] || null;
  Object.keys(playlists).forEach(function (key) {
    var variant = playlists[key]; // check if we already processed this playlist.
    // or it if it is already excluded forever.

    if (ids.indexOf(variant.id) !== -1 || variant.excludeUntil === Infinity) {
      return;
    }

    ids.push(variant.id);
    var blacklistReasons = []; // get codecs from the playlist for this variant

    var variantCodecs = codecsForPlaylist(_this11.masterPlaylistLoader_.master, variant);
    var variantCodecCount = codecCount(variantCodecs); // if no codecs are listed, we cannot determine that this
    // variant is incompatible. Wait for mux.js to probe

    if (!variantCodecs.audio && !variantCodecs.video) {
      return;
    } // TODO: we can support this by removing the
    // old media source and creating a new one, but it will take some work.
    // The number of streams cannot change


    if (variantCodecCount !== codecCount_) {
      blacklistReasons.push("codec count \"" + variantCodecCount + "\" !== \"" + codecCount_ + "\"");
    } // only exclude playlists by codec change, if codecs cannot switch
    // during playback.


    if (!_this11.sourceUpdater_.canChangeType()) {
      var variantVideoDetails = variantCodecs.video && codecs_js.parseCodecs(variantCodecs.video)[0] || null;
      var variantAudioDetails = variantCodecs.audio && codecs_js.parseCodecs(variantCodecs.audio)[0] || null; // the video codec cannot change

      if (variantVideoDetails && videoDetails && variantVideoDetails.type.toLowerCase() !== videoDetails.type.toLowerCase()) {
        blacklistReasons.push("video codec \"" + variantVideoDetails.type + "\" !== \"" + videoDetails.type + "\"");
      } // the audio codec cannot change


      if (variantAudioDetails && audioDetails && variantAudioDetails.type.toLowerCase() !== audioDetails.type.toLowerCase()) {
        blacklistReasons.push("audio codec \"" + variantAudioDetails.type + "\" !== \"" + audioDetails.type + "\"");
      }
    }

    if (blacklistReasons.length) {
      variant.excludeUntil = Infinity;

      _this11.logger_("blacklisting " + variant.id + ": " + blacklistReasons.join(' && '));
    }
  });
};
23972
/**
 * Refresh ad cues on the cue tags track, anchored to the start of the
 * seekable window (0 when there is no seekable range, e.g. VOD start).
 *
 * @param {Object} media - the media playlist to pull ad cue tags from
 * @private
 */
_proto.updateAdCues_ = function updateAdCues_(media) {
  var cueOffset = 0;
  var seekableRanges = this.seekable();

  if (seekableRanges.length) {
    cueOffset = seekableRanges.start(0);
  }

  updateAdCues(media, this.cueTagsTrack_, cueOffset);
};
/**
 * Calculates the desired forward buffer length based on current time
 *
 * @return {number} Desired forward buffer length in seconds
 */
23989
_proto.goalBufferLength = function goalBufferLength() {
  // The goal grows linearly with playback position: it starts at
  // GOAL_BUFFER_LENGTH and is capped at MAX_GOAL_BUFFER_LENGTH (the cap is
  // never allowed below the starting value).
  var now = this.tech_.currentTime();
  var base = Config.GOAL_BUFFER_LENGTH;
  var ceiling = Math.max(base, Config.MAX_GOAL_BUFFER_LENGTH);
  return Math.min(base + now * Config.GOAL_BUFFER_LENGTH_RATE, ceiling);
};
/**
 * Calculates the desired buffer low water line based on current time
 *
 * @return {number} Desired buffer low water line in seconds
 */
24003
/**
 * Calculates the buffer low water line for the current time. Scales
 * linearly like goalBufferLength; the experimental buffer-based ABR mode
 * substitutes a higher cap.
 *
 * @return {number} Desired buffer low water line in seconds
 */
_proto.bufferLowWaterLine = function bufferLowWaterLine() {
  var now = this.tech_.currentTime();
  var base = Config.BUFFER_LOW_WATER_LINE;
  var standardCap = Math.max(base, Config.MAX_BUFFER_LOW_WATER_LINE);
  var experimentalCap = Math.max(base, Config.EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE);
  var cap = this.experimentalBufferBasedABR ? experimentalCap : standardCap;
  return Math.min(base + now * Config.BUFFER_LOW_WATER_LINE_RATE, cap);
};
24012
// Static upper buffering bound read from Config; unlike the low water line
// it does not scale with the current time.
_proto.bufferHighWaterLine = function bufferHighWaterLine() {
  return Config.BUFFER_HIGH_WATER_LINE;
};
24016
24017 return MasterPlaylistController;
24018}(videojs__default["default"].EventTarget);
24019
24020/**
24021 * Returns a function that acts as the Enable/disable playlist function.
24022 *
24023 * @param {PlaylistLoader} loader - The master playlist loader
24024 * @param {string} playlistID - id of the playlist
24025 * @param {Function} changePlaylistFn - A function to be called after a
24026 * playlist's enabled-state has been changed. Will NOT be called if a
24027 * playlist's enabled-state is unchanged
24028 * @param {boolean=} enable - Value to set the playlist enabled-state to
24029 * or if undefined returns the current enabled-state for the playlist
24030 * @return {Function} Function for setting/getting enabled
24031 */
24032
var enableFunction = function enableFunction(loader, playlistID, changePlaylistFn) {
  return function (enable) {
    var playlist = loader.master.playlists[playlistID];
    var incompatible = isIncompatible(playlist);
    var currentlyEnabled = isEnabled(playlist);

    // getter form: called with no argument, just report the current state
    if (typeof enable === 'undefined') {
      return currentlyEnabled;
    }

    // setter form: flip the disabled flag to match the requested state
    if (enable) {
      delete playlist.disabled;
    } else {
      playlist.disabled = true;
    }

    // Only notify when the enabled-state actually changed on a playlist
    // that is not permanently incompatible.
    if (enable !== currentlyEnabled && !incompatible) {
      // Ensure the outside world knows about our changes
      changePlaylistFn();
      loader.trigger(enable ? 'renditionenabled' : 'renditiondisabled');
    }

    return enable;
  };
};
24063/**
24064 * The representation object encapsulates the publicly visible information
24065 * in a media playlist along with a setter/getter-type function (enabled)
24066 * for changing the enabled-state of a particular playlist entry
24067 *
24068 * @class Representation
24069 */
24070
24071
var Representation = function Representation(vhsHandler, playlist, id) {
  var mpc = vhsHandler.masterPlaylistController_;
  // Select and bind the quality-change implementation: smooth or fast.
  var changeType = vhsHandler.options_.smoothQualityChange ? 'smooth' : 'fast';
  var qualityChangeFunction = mpc[changeType + "QualityChange_"].bind(mpc);

  // RESOLUTION and BANDWIDTH attributes are optional in the playlist
  if (playlist.attributes) {
    var resolution = playlist.attributes.RESOLUTION;
    this.width = resolution && resolution.width;
    this.height = resolution && resolution.height;
    this.bandwidth = playlist.attributes.BANDWIDTH;
  }

  this.codecs = codecsForPlaylist(mpc.master(), playlist);
  this.playlist = playlist;

  // The id is simply the ordinality of the media playlist
  // within the master playlist
  this.id = id;

  // Partially-apply enableFunction to create a playlist-specific
  // enabled() setter/getter
  this.enabled = enableFunction(vhsHandler.playlists, playlist.id, qualityChangeFunction);
};
24095/**
24096 * A mixin function that adds the `representations` api to an instance
24097 * of the VhsHandler class
24098 *
24099 * @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
24100 * representation API into
24101 */
24102
24103
var renditionSelectionMixin = function renditionSelectionMixin(vhsHandler) {
  // Add a single API-specific function to the VhsHandler instance
  vhsHandler.representations = function () {
    var master = vhsHandler.masterPlaylistController_.master();
    var playlists = isAudioOnly(master) ? vhsHandler.masterPlaylistController_.getAudioTrackPlaylists_() : master.playlists;

    if (!playlists) {
      return [];
    }

    // Only compatible playlists are surfaced as representations.
    var compatible = playlists.filter(function (media) {
      return !isIncompatible(media);
    });

    return compatible.map(function (media) {
      return new Representation(vhsHandler, media, media.id);
    });
  };
};
24121
24122/**
24123 * @file playback-watcher.js
24124 *
24125 * Playback starts, and now my watch begins. It shall not end until my death. I shall
24126 * take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
24127 * and win no glory. I shall live and die at my post. I am the corrector of the underflow.
24128 * I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
24129 * my life and honor to the Playback Watch, for this Player and all the Players to come.
24130 */
24131
24132var timerCancelEvents = ['seeking', 'seeked', 'pause', 'playing', 'error'];
24133/**
24134 * @class PlaybackWatcher
24135 */
24136
24137var PlaybackWatcher = /*#__PURE__*/function () {
24138 /**
24139 * Represents an PlaybackWatcher object.
24140 *
24141 * @class
24142 * @param {Object} options an object that includes the tech and settings
24143 */
  function PlaybackWatcher(options) {
    var _this = this;

    this.masterPlaylistController_ = options.masterPlaylistController;
    this.tech_ = options.tech;
    this.seekable = options.seekable;
    this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
    this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
    this.media = options.media;
    // stall-detection state used by checkCurrentTime_
    this.consecutiveUpdates = 0;
    this.lastRecordedTime = null;
    // timer_ holds a pending skipTheGap_ timeout; checkCurrentTimeTimeout_
    // drives the monitorCurrentTime_ polling loop
    this.timer_ = null;
    this.checkCurrentTimeTimeout_ = null;
    this.logger_ = logger('PlaybackWatcher');
    this.logger_('initialize');

    var playHandler = function playHandler() {
      return _this.monitorCurrentTime_();
    };

    var canPlayHandler = function canPlayHandler() {
      return _this.monitorCurrentTime_();
    };

    var waitingHandler = function waitingHandler() {
      return _this.techWaiting_();
    };

    var cancelTimerHandler = function cancelTimerHandler() {
      return _this.cancelTimer_();
    };

    var mpc = this.masterPlaylistController_;
    var loaderTypes = ['main', 'subtitle', 'audio'];
    var loaderChecks = {};
    // Wire per-loader stalled-download detection for each segment loader type.
    loaderTypes.forEach(function (type) {
      loaderChecks[type] = {
        reset: function reset() {
          return _this.resetSegmentDownloads_(type);
        },
        updateend: function updateend() {
          return _this.checkSegmentDownloads_(type);
        }
      };
      mpc[type + "SegmentLoader_"].on('appendsdone', loaderChecks[type].updateend); // If a rendition switch happens during a playback stall where the buffer
      // isn't changing we want to reset. We cannot assume that the new rendition
      // will also be stalled, until after new appends.

      mpc[type + "SegmentLoader_"].on('playlistupdate', loaderChecks[type].reset); // Playback stalls should not be detected right after seeking.
      // This prevents one segment playlists (single vtt or single segment content)
      // from being detected as stalling. As the buffer will not change in those cases, since
      // the buffer is the entire video duration.

      _this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
    });
    /**
     * We check if a seek was into a gap through the following steps:
     * 1. We get a seeking event and we do not get a seeked event. This means that
     *    a seek was attempted but not completed.
     * 2. We run `fixesBadSeeks_` on segment loader appends. This means that we already
     *    removed everything from our buffer and appended a segment, and should be ready
     *    to check for gaps.
     */

    var setSeekingHandlers = function setSeekingHandlers(fn) {
      ['main', 'audio'].forEach(function (type) {
        mpc[type + "SegmentLoader_"][fn]('appended', _this.seekingAppendCheck_);
      });
    };

    this.seekingAppendCheck_ = function () {
      if (_this.fixesBadSeeks_()) {
        _this.consecutiveUpdates = 0;
        _this.lastRecordedTime = _this.tech_.currentTime();
        setSeekingHandlers('off');
      }
    };

    this.clearSeekingAppendCheck_ = function () {
      return setSeekingHandlers('off');
    };

    this.watchForBadSeeking_ = function () {
      _this.clearSeekingAppendCheck_();

      setSeekingHandlers('on');
    };

    this.tech_.on('seeked', this.clearSeekingAppendCheck_);
    this.tech_.on('seeking', this.watchForBadSeeking_);
    this.tech_.on('waiting', waitingHandler);
    this.tech_.on(timerCancelEvents, cancelTimerHandler);
    this.tech_.on('canplay', canPlayHandler);
    /*
      An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
      is surfaced in one of two ways:
      1)  The `waiting` event is fired before the player has buffered content, making it impossible
          to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
          we can check if playback is stalled due to a gap, and skip the gap if necessary.
      2)  A source with a gap at the beginning of the stream is loaded programmatically while the player
          is in a playing state. To catch this case, it's important that our one-time play listener is setup
          even if the player is in a playing state
    */

    this.tech_.one('play', playHandler); // Define the dispose function to clean up our events

    this.dispose = function () {
      _this.clearSeekingAppendCheck_();

      _this.logger_('dispose');

      _this.tech_.off('waiting', waitingHandler);

      _this.tech_.off(timerCancelEvents, cancelTimerHandler);

      _this.tech_.off('canplay', canPlayHandler);

      _this.tech_.off('play', playHandler);

      _this.tech_.off('seeking', _this.watchForBadSeeking_);

      _this.tech_.off('seeked', _this.clearSeekingAppendCheck_);

      // unhook the per-loader stalled-download listeners registered above
      loaderTypes.forEach(function (type) {
        mpc[type + "SegmentLoader_"].off('appendsdone', loaderChecks[type].updateend);
        mpc[type + "SegmentLoader_"].off('playlistupdate', loaderChecks[type].reset);

        _this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
      });

      if (_this.checkCurrentTimeTimeout_) {
        window__default["default"].clearTimeout(_this.checkCurrentTimeTimeout_);
      }

      _this.cancelTimer_();
    };
  }
24281 /**
24282 * Periodically check current time to see if playback stopped
24283 *
24284 * @private
24285 */
24286
24287
24288 var _proto = PlaybackWatcher.prototype;
24289
_proto.monitorCurrentTime_ = function monitorCurrentTime_() {
  var win = window__default["default"];

  this.checkCurrentTime_();

  // Re-arm the polling timer, clearing any previous one so only a single
  // monitor loop is ever active. 250ms is the interval Webkit uses for
  // timeupdate (Firefox uses 15ms, 42ms would be 24fps).
  if (this.checkCurrentTimeTimeout_) {
    win.clearTimeout(this.checkCurrentTimeTimeout_);
  }

  this.checkCurrentTimeTimeout_ = win.setTimeout(this.monitorCurrentTime_.bind(this), 250);
};
/**
 * Reset stalled download stats for a specific type of loader
 *
 * @param {string} type
 *        The segment loader type to check.
 *
 * @listens SegmentLoader#playlistupdate
 * @listens Tech#seeking
 * @listens Tech#seeked
 */
24311
_proto.resetSegmentDownloads_ = function resetSegmentDownloads_(type) {
  var loader = this.masterPlaylistController_[type + "SegmentLoader_"];
  var stalledKey = type + "StalledDownloads_";

  if (this[stalledKey] > 0) {
    this.logger_("resetting possible stalled download count for " + type + " loader");
  }

  // zero the stall counter and snapshot the loader's current buffer so the
  // next appendsdone check has a baseline to compare against
  this[stalledKey] = 0;
  this[type + "Buffered_"] = loader.buffered_();
};
/**
 * Checks on every segment `appendsdone` to see
 * if segment appends are making progress. If they are not
 * and we are still downloading bytes. We blacklist the playlist.
 *
 * @param {string} type
 *        The segment loader type to check.
 *
 * @listens SegmentLoader#appendsdone
 */
24333
_proto.checkSegmentDownloads_ = function checkSegmentDownloads_(type) {
  var mpc = this.masterPlaylistController_;
  var loader = mpc[type + "SegmentLoader_"];
  var buffered = loader.buffered_();
  var isBufferedDifferent = isRangeDifferent(this[type + "Buffered_"], buffered);
  this[type + "Buffered_"] = buffered; // if another watcher is going to fix the issue or
  // the buffered value for this loader changed
  // appends are working

  if (isBufferedDifferent) {
    this.resetSegmentDownloads_(type);
    return;
  }

  // the append finished without growing the buffer: count it as a
  // possibly-stalled download
  this[type + "StalledDownloads_"]++;
  this.logger_("found #" + this[type + "StalledDownloads_"] + " " + type + " appends that did not increase buffer (possible stalled download)", {
    playlistId: loader.playlist_ && loader.playlist_.id,
    buffered: timeRangesToArray(buffered)
  }); // after 10 possibly stalled appends with no reset, exclude

  if (this[type + "StalledDownloads_"] < 10) {
    return;
  }

  this.logger_(type + " loader stalled download exclusion");
  this.resetSegmentDownloads_(type);
  this.tech_.trigger({
    type: 'usage',
    name: "vhs-" + type + "-download-exclusion"
  });

  // subtitle stalls are reported but never exclude the current playlist
  if (type === 'subtitle') {
    return;
  } // TODO: should we exclude audio tracks rather than main tracks
  // when type is audio?


  mpc.blacklistCurrentPlaylist({
    message: "Excessive " + type + " segment downloading detected."
  }, Infinity);
}
/**
 * The purpose of this function is to emulate the "waiting" event on
 * browsers that do not emit it when they are waiting for more
 * data to continue playback
 *
 * @private
 */
;
24383
_proto.checkCurrentTime_ = function checkCurrentTime_() {
  // nothing to detect while paused or mid-seek
  if (this.tech_.paused() || this.tech_.seeking()) {
    return;
  }

  var currentTime = this.tech_.currentTime();
  var buffered = this.tech_.buffered();

  if (this.lastRecordedTime === currentTime && (!buffered.length || currentTime + SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
    // If current time is at the end of the final buffered region, then any playback
    // stall is most likely caused by buffering in a low bandwidth environment. The tech
    // should fire a `waiting` event in this scenario, but due to browser and tech
    // inconsistencies. Calling `techWaiting_` here allows us to simulate
    // responding to a native `waiting` event when the tech fails to emit one.
    return this.techWaiting_();
  }

  if (this.consecutiveUpdates >= 5 && currentTime === this.lastRecordedTime) {
    // time has been stuck for 5+ consecutive checks: treat as a stall
    this.consecutiveUpdates++;
    this.waiting_();
  } else if (currentTime === this.lastRecordedTime) {
    this.consecutiveUpdates++;
  } else {
    // playback progressed; reset the stall counter and remember the new time
    this.consecutiveUpdates = 0;
    this.lastRecordedTime = currentTime;
  }
}
/**
 * Cancels any pending timers and resets the 'timeupdate' mechanism
 * designed to detect that we are stalled
 *
 * @private
 */
;
24418
_proto.cancelTimer_ = function cancelTimer_() {
  // reset the stall counter used by checkCurrentTime_
  this.consecutiveUpdates = 0;

  if (this.timer_) {
    this.logger_('cancelTimer_');
    // drop any pending skipTheGap_ callback scheduled by techWaiting_
    clearTimeout(this.timer_);
  }

  this.timer_ = null;
}
/**
 * Fixes situations where there's a bad seek
 *
 * @return {boolean} whether an action was taken to fix the seek
 * @private
 */
;
24436
_proto.fixesBadSeeks_ = function fixesBadSeeks_() {
  var seeking = this.tech_.seeking();

  // only relevant while a seek is in flight
  if (!seeking) {
    return false;
  } // TODO: It's possible that these seekable checks should be moved out of this function
  // and into a function that runs on seekablechange. It's also possible that we only need
  // afterSeekableWindow as the buffered check at the bottom is good enough to handle before
  // seekable range.


  var seekable = this.seekable();
  var currentTime = this.tech_.currentTime();
  var isAfterSeekableRange = this.afterSeekableWindow_(seekable, currentTime, this.media(), this.allowSeeksWithinUnsafeLiveWindow);
  var seekTo;

  if (isAfterSeekableRange) {
    var seekableEnd = seekable.end(seekable.length - 1); // sync to live point (if VOD, our seekable was updated and we're simply adjusting)

    seekTo = seekableEnd;
  }

  if (this.beforeSeekableWindow_(seekable, currentTime)) {
    var seekableStart = seekable.start(0); // sync to the beginning of the live window
    // provide a buffer of .1 seconds to handle rounding/imprecise numbers

    seekTo = seekableStart + ( // if the playlist is too short and the seekable range is an exact time (can
    // happen in live with a 3 segment playlist), then don't use a time delta
    seekableStart === seekable.end(0) ? 0 : SAFE_TIME_DELTA);
  }

  if (typeof seekTo !== 'undefined') {
    this.logger_("Trying to seek outside of seekable at time " + currentTime + " with " + ("seekable range " + printableRange(seekable) + ". Seeking to ") + (seekTo + "."));
    this.tech_.setCurrentTime(seekTo);
    return true;
  }

  var sourceUpdater = this.masterPlaylistController_.sourceUpdater_;
  var buffered = this.tech_.buffered();
  // only consult a buffer type that actually has a source buffer attached
  var audioBuffered = sourceUpdater.audioBuffer ? sourceUpdater.audioBuffered() : null;
  var videoBuffered = sourceUpdater.videoBuffer ? sourceUpdater.videoBuffered() : null; // verify that at least two segment durations have been
  // appended before checking for a gap.

  var twoSegmentDurations = (this.media().targetDuration - TIME_FUDGE_FACTOR) * 2;
  var bufferedToCheck = [audioBuffered, videoBuffered];

  for (var i = 0; i < bufferedToCheck.length; i++) {
    // skip null buffered
    if (!bufferedToCheck[i]) {
      continue;
    }

    var timeAhead = timeAheadOf(bufferedToCheck[i], currentTime); // if we are less than two video/audio segment durations behind,
    // we haven't appended enough to call this a bad seek.

    if (timeAhead < twoSegmentDurations) {
      return false;
    }
  }

  var nextRange = findNextRange(buffered, currentTime); // we have appended enough content, but we don't have anything buffered
  // to seek over the gap

  if (nextRange.length === 0) {
    return false;
  }

  // seek just past the start of the buffered region beyond the gap
  seekTo = nextRange.start(0) + SAFE_TIME_DELTA;
  this.logger_("Buffered region starts (" + nextRange.start(0) + ") " + (" just beyond seek point (" + currentTime + "). Seeking to " + seekTo + "."));
  this.tech_.setCurrentTime(seekTo);
  return true;
}
/**
 * Handler for situations when we determine the player is waiting.
 *
 * @private
 */
;
24515
_proto.waiting_ = function waiting_() {
  // defer to the tech-waiting checks first; if any of them corrected (or
  // scheduled a correction for) the stall there is nothing left to do
  if (this.techWaiting_()) {
    return;
  } // All tech waiting checks failed. Use last resort correction


  var currentTime = this.tech_.currentTime();
  var buffered = this.tech_.buffered();
  var currentRange = findRange(buffered, currentTime); // Sometimes the player can stall for unknown reasons within a contiguous buffered
  // region with no indication that anything is amiss (seen in Firefox). Seeking to
  // currentTime is usually enough to kickstart the player. This checks that the player
  // is currently within a buffered region before attempting a corrective seek.
  // Chrome does not appear to continue `timeupdate` events after a `waiting` event
  // until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
  // make sure there is ~3 seconds of forward buffer before taking any corrective action
  // to avoid triggering an `unknownwaiting` event when the network is slow.

  if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
    this.cancelTimer_();
    this.tech_.setCurrentTime(currentTime);
    this.logger_("Stopped at " + currentTime + " while inside a buffered region " + ("[" + currentRange.start(0) + " -> " + currentRange.end(0) + "]. Attempting to resume ") + 'playback by seeking to the current time.'); // unknown waiting corrections may be useful for monitoring QoS

    this.tech_.trigger({
      type: 'usage',
      name: 'vhs-unknown-waiting'
    });
    this.tech_.trigger({
      type: 'usage',
      name: 'hls-unknown-waiting'
    });
    return;
  }
}
/**
 * Handler for situations when the tech fires a `waiting` event
 *
 * @return {boolean}
 *         True if an action (or none) was needed to correct the waiting. False if no
 *         checks passed
 * @private
 */
;
24558
_proto.techWaiting_ = function techWaiting_() {
  var seekable = this.seekable();
  var currentTime = this.tech_.currentTime();

  if (this.tech_.seeking() || this.timer_ !== null) {
    // Tech is seeking or already waiting on another action, no action needed
    return true;
  }

  if (this.beforeSeekableWindow_(seekable, currentTime)) {
    var livePoint = seekable.end(seekable.length - 1);
    this.logger_("Fell out of live window at time " + currentTime + ". Seeking to " + ("live point (seekable end) " + livePoint));
    this.cancelTimer_();
    this.tech_.setCurrentTime(livePoint); // live window resyncs may be useful for monitoring QoS

    this.tech_.trigger({
      type: 'usage',
      name: 'vhs-live-resync'
    });
    this.tech_.trigger({
      type: 'usage',
      name: 'hls-live-resync'
    });
    return true;
  }

  // Use the watcher's own masterPlaylistController_ reference (assigned in
  // the constructor) instead of reaching through `this.tech_.vhs`. This
  // matches how fixesBadSeeks_ obtains the source updater and avoids a
  // dependency on the tech's `vhs` property being populated.
  var sourceUpdater = this.masterPlaylistController_.sourceUpdater_;
  var buffered = this.tech_.buffered();
  var videoUnderflow = this.videoUnderflow_({
    audioBuffered: sourceUpdater.audioBuffered(),
    videoBuffered: sourceUpdater.videoBuffered(),
    currentTime: currentTime
  });

  if (videoUnderflow) {
    // Even though the video underflowed and was stuck in a gap, the audio overplayed
    // the gap, leading currentTime into a buffered range. Seeking to currentTime
    // allows the video to catch up to the audio position without losing any audio
    // (only suffering ~3 seconds of frozen video and a pause in audio playback).
    this.cancelTimer_();
    this.tech_.setCurrentTime(currentTime); // video underflow may be useful for monitoring QoS

    this.tech_.trigger({
      type: 'usage',
      name: 'vhs-video-underflow'
    });
    this.tech_.trigger({
      type: 'usage',
      name: 'hls-video-underflow'
    });
    return true;
  }

  var nextRange = findNextRange(buffered, currentTime); // check for gap

  if (nextRange.length > 0) {
    var difference = nextRange.start(0) - currentTime;
    this.logger_("Stopped at " + currentTime + ", setting timer for " + difference + ", seeking " + ("to " + nextRange.start(0)));
    this.cancelTimer_();
    // schedule skipTheGap_ for when playback should have reached the gap;
    // currentTime is forwarded so the callback can verify nothing moved
    this.timer_ = setTimeout(this.skipTheGap_.bind(this), difference * 1000, currentTime);
    return true;
  } // All checks failed. Returning false to indicate failure to correct waiting


  return false;
};
24625
_proto.afterSeekableWindow_ = function afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow) {
  if (allowSeeksWithinUnsafeLiveWindow === void 0) {
    allowSeeksWithinUnsafeLiveWindow = false;
  }

  // we can't make a solid case if there's no seekable, default to false
  if (!seekable.length) {
    return false;
  }

  var seekableEnd = seekable.end(seekable.length - 1);
  var allowedEnd = seekableEnd + SAFE_TIME_DELTA;

  // For live playlists (no EXT-X-ENDLIST) the caller may opt in to a wider
  // "unsafe" window of three target durations past the seekable end.
  if (!playlist.endList && allowSeeksWithinUnsafeLiveWindow) {
    allowedEnd = seekableEnd + playlist.targetDuration * 3;
  }

  return currentTime > allowedEnd;
};
24649
_proto.beforeSeekableWindow_ = function beforeSeekableWindow_(seekable, currentTime) {
  if (!seekable.length) {
    return false;
  }

  var windowStart = seekable.start(0);

  // A seekable start of 0 identifies a VOD stream, and playback can't fall
  // before 0 — so only a live window (start > 0) can be fallen out of, and
  // only when the current time trails the start by more than the safe delta.
  return windowStart > 0 && currentTime < windowStart - this.liveRangeSafeTimeDelta;
};
24658
_proto.videoUnderflow_ = function videoUnderflow_(_ref) {
  var videoBuffered = _ref.videoBuffered,
      audioBuffered = _ref.audioBuffered,
      currentTime = _ref.currentTime;

  // audio only content will not have video underflow :)
  // NOTE(review): this returns undefined rather than false; callers only
  // use the result as a truthy/falsy check, so the behavior is equivalent.
  if (!videoBuffered) {
    return;
  }

  var gap; // find a gap in demuxed content.

  if (videoBuffered.length && audioBuffered.length) {
    // in Chrome audio will continue to play for ~3s when we run out of video
    // so we have to check that the video buffer did have some buffer in the
    // past.
    var lastVideoRange = findRange(videoBuffered, currentTime - 3);
    var videoRange = findRange(videoBuffered, currentTime);
    var audioRange = findRange(audioBuffered, currentTime);

    // audio still buffered at currentTime, video not, but video was
    // buffered shortly before: the video buffer underflowed
    if (audioRange.length && !videoRange.length && lastVideoRange.length) {
      gap = {
        start: lastVideoRange.end(0),
        end: audioRange.end(0)
      };
    } // find a gap in muxed content.

  } else {
    var nextRange = findNextRange(videoBuffered, currentTime); // Even if there is no available next range, there is still a possibility we are
    // stuck in a gap due to video underflow.

    if (!nextRange.length) {
      gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
    }
  }

  if (gap) {
    this.logger_("Encountered a gap in video from " + gap.start + " to " + gap.end + ". " + ("Seeking to current time " + currentTime));
    return true;
  }

  return false;
}
/**
 * Timer callback. If playback still has not proceeded, then we seek
 * to the start of the next buffered region.
 *
 * @private
 */
;
24709
_proto.skipTheGap_ = function skipTheGap_(scheduledCurrentTime) {
  var buffered = this.tech_.buffered();
  var currentTime = this.tech_.currentTime();
  var nextRange = findNextRange(buffered, currentTime);
  this.cancelTimer_();

  // bail when there is no buffered region ahead to jump into, or when
  // playback progressed since this timer was scheduled (only seek if we
  // still have not played)
  if (nextRange.length === 0 || currentTime !== scheduledCurrentTime) {
    return;
  }

  this.logger_('skipTheGap_:', 'currentTime:', currentTime, 'scheduled currentTime:', scheduledCurrentTime, 'nextRange start:', nextRange.start(0));

  // seek just past the start of the next buffered region
  this.tech_.setCurrentTime(nextRange.start(0) + TIME_FUDGE_FACTOR);

  this.tech_.trigger({
    type: 'usage',
    name: 'vhs-gap-skip'
  });
  this.tech_.trigger({
    type: 'usage',
    name: 'hls-gap-skip'
  });
};
24732
_proto.gapFromVideoUnderflow_ = function gapFromVideoUnderflow_(buffered, currentTime) {
  // At least in Chrome, when the video buffer has a gap the audio keeps
  // playing for ~3 seconds past the gap start (video underflow handling),
  // so the player's time lands past the gap and findNextRange finds
  // nothing. Example:
  //
  //   video buffer:   0 => 10.1, 10.2 => 20
  //   audio buffer:   0 => 20
  //   overall buffer: 0 => 10.1, 10.2 => 20
  //   current time:   13
  //
  // The video froze at 10 (the gap), audio ran on until ~13, and current
  // time is now inside a buffered region even though we're stuck. To detect
  // this, look for a gap that starts roughly 3 seconds (+/- 1 second)
  // behind the current time.
  var gaps = findGaps(buffered);

  for (var i = 0; i < gaps.length; i++) {
    var gapStart = gaps.start(i);
    var gapEnd = gaps.end(i);
    var distanceBack = currentTime - gapStart;

    // gap starts between 2 and 4 seconds back from the current time
    if (distanceBack < 4 && distanceBack > 2) {
      return {
        start: gapStart,
        end: gapEnd
      };
    }
  }

  return null;
};
24772
24773 return PlaybackWatcher;
24774}();
24775
// Defaults for the reload-source-on-error plugin.
var defaultOptions = {
  // minimum number of seconds that must elapse between error-triggered
  // source reloads; reloads inside this window are canceled
  errorInterval: 30,
  // default strategy for obtaining the source to reload: reuse the tech's
  // current source object (or the player's) and hand it to `next`
  getSource: function getSource(next) {
    var tech = this.tech({
      IWillNotUseThisInPlugins: true
    });
    var sourceObj = tech.currentSource_ || this.currentSource();
    return next(sourceObj);
  }
};
24786/**
24787 * Main entry point for the plugin
24788 *
24789 * @param {Player} player a reference to a videojs Player instance
24790 * @param {Object} [options] an object with plugin options
24791 * @private
24792 */
24793
var initPlugin = function initPlugin(player, options) {
  // Timestamp (ms) of the last reload attempt; gates the errorInterval throttle.
  var lastCalled = 0;
  // Playback position (seconds) to restore after the source is reloaded.
  var seekTo = 0;
  var localOptions = videojs__default["default"].mergeOptions(defaultOptions, options);
  player.ready(function () {
    player.trigger({
      type: 'usage',
      name: 'vhs-error-reload-initialized'
    });
    player.trigger({
      type: 'usage',
      name: 'hls-error-reload-initialized'
    });
  });
  /**
   * Player modifications to perform that must wait until `loadedmetadata`
   * has been triggered
   *
   * @private
   */

  var loadedMetadataHandler = function loadedMetadataHandler() {
    // seekTo is 0 for live streams (duration === Infinity) or a start-of-stream
    // position, in which case no restore-seek is needed.
    if (seekTo) {
      player.currentTime(seekTo);
    }
  };
  /**
   * Set the source on the player element, play, and seek if necessary
   *
   * @param {Object} sourceObj An object specifying the source url and mime-type to play
   * @private
   */


  var setSource = function setSource(sourceObj) {
    if (sourceObj === null || sourceObj === undefined) {
      return;
    }

    // Only remember the position for non-live content; for live streams the
    // expression short-circuits to 0 and no seek is performed after reload.
    seekTo = player.duration() !== Infinity && player.currentTime() || 0;
    player.one('loadedmetadata', loadedMetadataHandler);
    player.src(sourceObj);
    player.trigger({
      type: 'usage',
      name: 'vhs-error-reload'
    });
    player.trigger({
      type: 'usage',
      name: 'hls-error-reload'
    });
    player.play();
  };
  /**
   * Attempt to get a source from either the built-in getSource function
   * or a custom function provided via the options
   *
   * @private
   */


  var errorHandler = function errorHandler() {
    // Do not attempt to reload the source if a source-reload occurred before
    // 'errorInterval' time has elapsed since the last source-reload
    if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
      player.trigger({
        type: 'usage',
        name: 'vhs-error-reload-canceled'
      });
      player.trigger({
        type: 'usage',
        name: 'hls-error-reload-canceled'
      });
      return;
    }

    if (!localOptions.getSource || typeof localOptions.getSource !== 'function') {
      videojs__default["default"].log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
      return;
    }

    lastCalled = Date.now();
    return localOptions.getSource.call(player, setSource);
  };
  /**
   * Unbind any event handlers that were bound by the plugin
   *
   * @private
   */


  var cleanupEvents = function cleanupEvents() {
    player.off('loadedmetadata', loadedMetadataHandler);
    player.off('error', errorHandler);
    player.off('dispose', cleanupEvents);
  };
  /**
   * Cleanup before re-initializing the plugin
   *
   * @param {Object} [newOptions] an object with plugin options
   * @private
   */


  var reinitPlugin = function reinitPlugin(newOptions) {
    cleanupEvents();
    initPlugin(player, newOptions);
  };

  player.on('error', errorHandler);
  player.on('dispose', cleanupEvents); // Overwrite the plugin function so that we can correctly cleanup before
  // initializing the plugin

  player.reloadSourceOnError = reinitPlugin;
};
24908/**
24909 * Reload the source when an error is detected as long as there
24910 * wasn't an error previously within the last 30 seconds
24911 *
24912 * @param {Object} [options] an object with plugin options
24913 */
24914
24915
var reloadSourceOnError = function reloadSourceOnError(options) {
  // `this` is the player the plugin was registered on.
  initPlugin(this, options);
};
24919
// Bundled library versions. version$4 matches the @videojs/http-streaming
// version in the banner at the top of this file. NOTE(review): the remaining
// minified names lost their labels during bundling — presumably they are the
// versions of the bundled dependencies (mux.js, the manifest parsers, the
// decrypter); confirm against the package manifests before relying on any
// specific mapping.
var version$4 = "2.11.1";

var version$3 = "5.14.1";

var version$2 = "0.19.2";

var version$1 = "4.7.0";

var version = "3.1.2";
24929
/**
 * The public Vhs namespace: exposes the playlist loader, playlist utilities,
 * the rendition selectors, and a shared xhr factory under a single object.
 */
var Vhs = {
  PlaylistLoader: PlaylistLoader,
  Playlist: Playlist,
  utils: utils,
  // Default selectors used when a consumer does not supply its own.
  STANDARD_PLAYLIST_SELECTOR: lastBandwidthSelector,
  INITIAL_PLAYLIST_SELECTOR: lowestBitrateCompatibleVariantSelector,
  lastBandwidthSelector: lastBandwidthSelector,
  movingAverageBandwidthSelector: movingAverageBandwidthSelector,
  comparePlaylistBandwidth: comparePlaylistBandwidth,
  comparePlaylistResolution: comparePlaylistResolution,
  xhr: xhrFactory()
}; // Define getter/setters for config properties
24942
// Mirror every Config property onto the Vhs namespace behind warning
// getter/setters: these are low-level tuning knobs, so every access logs
// an "UNSAFE" warning and only non-negative numbers are accepted on write.
Object.keys(Config).forEach(function (prop) {
  var unsafeWarning = "using Vhs." + prop + " is UNSAFE be sure you know what you are doing";

  Object.defineProperty(Vhs, prop, {
    get: function get() {
      videojs__default["default"].log.warn(unsafeWarning);
      return Config[prop];
    },
    set: function set(value) {
      videojs__default["default"].log.warn(unsafeWarning);

      // Reject anything that is not a non-negative number.
      var isValid = typeof value === 'number' && value >= 0;

      if (!isValid) {
        videojs__default["default"].log.warn("value of Vhs." + prop + " must be greater than or equal to 0");
        return;
      }

      Config[prop] = value;
    }
  });
});
// localStorage key under which measured bandwidth/throughput are persisted.
var LOCAL_STORAGE_KEY = 'videojs-vhs';
24962/**
24963 * Updates the selectedIndex of the QualityLevelList when a mediachange happens in vhs.
24964 *
24965 * @param {QualityLevelList} qualityLevels The QualityLevelList to update.
24966 * @param {PlaylistLoader} playlistLoader PlaylistLoader containing the new media info.
24967 * @function handleVhsMediaChange
24968 */
24969
var handleVhsMediaChange = function handleVhsMediaChange(qualityLevels, playlistLoader) {
  var activePlaylist = playlistLoader.media();
  var matchedIndex = -1;

  // Find the quality level whose id matches the newly-active playlist.
  for (var idx = 0; idx < qualityLevels.length; idx++) {
    if (qualityLevels[idx].id === activePlaylist.id) {
      matchedIndex = idx;
      break;
    }
  }

  // -1 (no matching level) is a valid selectedIndex and is forwarded as-is.
  qualityLevels.selectedIndex_ = matchedIndex;
  qualityLevels.trigger({
    selectedIndex: matchedIndex,
    type: 'change'
  });
};
24987/**
24988 * Adds quality levels to list once playlist metadata is available
24989 *
24990 * @param {QualityLevelList} qualityLevels The QualityLevelList to attach events to.
24991 * @param {Object} vhs Vhs object to listen to for media events.
24992 * @function handleVhsLoadedMetadata
24993 */
24994
24995
var handleVhsLoadedMetadata = function handleVhsLoadedMetadata(qualityLevels, vhs) {
  // Register a quality level for every available representation, then sync
  // the selected index with the currently-active playlist.
  var reps = vhs.representations();

  for (var i = 0; i < reps.length; i++) {
    qualityLevels.addQualityLevel(reps[i]);
  }

  handleVhsMediaChange(qualityLevels, vhs.playlists);
}; // HLS is a source handler, not a tech. Make sure attempts to use it
// as one do not cause exceptions.
25003
25004
Vhs.canPlaySource = function () {
  // Stub kept for backwards compatibility: always warns instead of probing,
  // since VHS is a source handler and no longer a tech.
  return videojs__default["default"].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
};
25008
/**
 * Builds the keySystems configuration for videojs-contrib-eme by merging the
 * caller-provided keySystems options with content types derived from the
 * selected playlists' CODECS attributes and any PSSH found in the main
 * playlist's contentProtection.
 *
 * @param {Object|null|undefined} keySystemOptions
 *        keySystems from the source; returned unchanged when falsy
 * @param {Object} [mainPlaylist] the selected (video) playlist
 * @param {Object} [audioPlaylist] the selected alternate audio playlist
 * @return {Object} merged keySystems configuration
 */
var emeKeySystems = function emeKeySystems(keySystemOptions, mainPlaylist, audioPlaylist) {
  if (!keySystemOptions) {
    return keySystemOptions;
  }

  var codecs = {};

  if (mainPlaylist && mainPlaylist.attributes && mainPlaylist.attributes.CODECS) {
    codecs = unwrapCodecList(codecs_js.parseCodecs(mainPlaylist.attributes.CODECS));
  }

  if (audioPlaylist && audioPlaylist.attributes && audioPlaylist.attributes.CODECS) {
    codecs.audio = audioPlaylist.attributes.CODECS;
  }

  var videoContentType = codecs_js.getMimeForCodec(codecs.video);
  var audioContentType = codecs_js.getMimeForCodec(codecs.audio); // upsert the content types based on the selected playlist

  var keySystemContentTypes = {};

  for (var keySystem in keySystemOptions) {
    keySystemContentTypes[keySystem] = {};

    if (audioContentType) {
      keySystemContentTypes[keySystem].audioContentType = audioContentType;
    }

    if (videoContentType) {
      keySystemContentTypes[keySystem].videoContentType = videoContentType;
    } // Default to using the video playlist's PSSH even though they may be different, as
    // videojs-contrib-eme will only accept one in the options.
    //
    // This shouldn't be an issue for most cases as early intialization will handle all
    // unique PSSH values, and if they aren't, then encrypted events should have the
    // specific information needed for the unique license.
    //
    // BUGFIX: also guard `mainPlaylist` itself — the CODECS check above already
    // treats it as optional, so dereferencing it unconditionally here could throw.


    if (mainPlaylist && mainPlaylist.contentProtection && mainPlaylist.contentProtection[keySystem] && mainPlaylist.contentProtection[keySystem].pssh) {
      keySystemContentTypes[keySystem].pssh = mainPlaylist.contentProtection[keySystem].pssh;
    } // videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
    // so we need to prevent overwriting the URL entirely


    if (typeof keySystemOptions[keySystem] === 'string') {
      keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
    }
  }

  return videojs__default["default"].mergeOptions(keySystemOptions, keySystemContentTypes);
};
25059/**
25060 * @typedef {Object} KeySystems
25061 *
25062 * keySystems configuration for https://github.com/videojs/videojs-contrib-eme
25063 * Note: not all options are listed here.
25064 *
25065 * @property {Uint8Array} [pssh]
25066 * Protection System Specific Header
25067 */
25068
25069/**
25070 * Goes through all the playlists and collects an array of KeySystems options objects
25071 * containing each playlist's keySystems and their pssh values, if available.
25072 *
25073 * @param {Object[]} playlists
25074 * The playlists to look through
25075 * @param {string[]} keySystems
25076 * The keySystems to collect pssh values for
25077 *
25078 * @return {KeySystems[]}
25079 * An array of KeySystems objects containing available key systems and their
25080 * pssh values
25081 */
25082
25083
var getAllPsshKeySystemsOptions = function getAllPsshKeySystemsOptions(playlists, keySystems) {
  var collected = [];

  playlists.forEach(function (playlist) {
    // Playlists without content protection contribute nothing.
    if (!playlist.contentProtection) {
      return;
    }

    var optionsForPlaylist = {};

    keySystems.forEach(function (keySystem) {
      var protection = playlist.contentProtection[keySystem];

      // Only key systems that carry a PSSH value are collected.
      if (protection && protection.pssh) {
        optionsForPlaylist[keySystem] = {
          pssh: protection.pssh
        };
      }
    });

    if (Object.keys(optionsForPlaylist).length) {
      collected.push(optionsForPlaylist);
    }
  });

  return collected;
};
25109/**
25110 * Returns a promise that waits for the
25111 * [eme plugin](https://github.com/videojs/videojs-contrib-eme) to create a key session.
25112 *
25113 * Works around https://bugs.chromium.org/p/chromium/issues/detail?id=895449 in non-IE11
25114 * browsers.
25115 *
25116 * As per the above ticket, this is particularly important for Chrome, where, if
25117 * unencrypted content is appended before encrypted content and the key session has not
25118 * been created, a MEDIA_ERR_DECODE will be thrown once the encrypted content is reached
25119 * during playback.
25120 *
25121 * @param {Object} player
25122 * The player instance
25123 * @param {Object[]} sourceKeySystems
25124 * The key systems options from the player source
25125 * @param {Object} [audioMedia]
25126 * The active audio media playlist (optional)
25127 * @param {Object[]} mainPlaylists
25128 * The playlists found on the master playlist object
25129 *
25130 * @return {Object}
25131 * Promise that resolves when the key session has been created
25132 */
25133
25134
var waitForKeySessionCreation = function waitForKeySessionCreation(_ref) {
  var player = _ref.player,
      sourceKeySystems = _ref.sourceKeySystems,
      audioMedia = _ref.audioMedia,
      mainPlaylists = _ref.mainPlaylists;

  // Without initializeMediaKeys there is nothing to wait on.
  if (!player.eme.initializeMediaKeys) {
    return Promise.resolve();
  } // TODO should all audio PSSH values be initialized for DRM?
  //
  // Every unique video rendition PSSH is initialized, but only the initial
  // audio playlist license is. In theory an encrypted event fires when the
  // user switches to an alternate audio playlist requiring its own license,
  // though that path is untested — and many alternate audio playlists (e.g.
  // other languages) may never be used anyway.


  var playlists = mainPlaylists;

  if (audioMedia) {
    playlists = mainPlaylists.concat([audioMedia]);
  }

  var keySystemsOptionsArr = getAllPsshKeySystemsOptions(playlists, Object.keys(sourceKeySystems));
  var initializationFinishedPromises = [];
  var keySessionCreatedPromises = []; // PSSH values are interpreted as initData, so EME dedupes duplicates. The
  // only APIs that would not dedupe are the ms-prefixed ones, but the IE11
  // early return above plus modern EME support on Edge keep that from being
  // a concern; initializeMediaKeys also won't use the webkit-prefixed APIs.

  // Resolves once the tech reports any key session was created.
  var makeKeySessionPromise = function makeKeySessionPromise() {
    return new Promise(function (resolve) {
      player.tech_.one('keysessioncreated', resolve);
    });
  };

  // Resolves when eme finishes initializing media keys for one options set.
  var makeInitPromise = function makeInitPromise(keySystemsOptions) {
    return new Promise(function (resolve, reject) {
      player.eme.initializeMediaKeys({
        keySystems: keySystemsOptions
      }, function (err) {
        if (err) {
          reject(err);
        } else {
          resolve();
        }
      });
    });
  };

  for (var i = 0; i < keySystemsOptionsArr.length; i++) {
    keySessionCreatedPromises.push(makeKeySessionPromise());
    initializationFinishedPromises.push(makeInitPromise(keySystemsOptionsArr[i]));
  } // Promise.race is used instead of Promise.any because:
  //
  // * Promise.any is only available in Safari 14+.
  // * None of these promises are expected to reject; if one does, surfacing
  //   the rejection via race beats masking it with any.

  return Promise.race([
    // If a session already exists these all resolve without creating a new
    // one; otherwise they only settle after every license request, which is
    // why the key-session check below exists (it makes setup much faster).
    Promise.all(initializationFinishedPromises),
    // A single created session is enough to know the browser will use DRM.
    Promise.race(keySessionCreatedPromises)
  ]);
};
25191/**
25192 * If the [eme](https://github.com/videojs/videojs-contrib-eme) plugin is available, and
25193 * there are keySystems on the source, sets up source options to prepare the source for
25194 * eme.
25195 *
25196 * @param {Object} player
25197 * The player instance
25198 * @param {Object[]} sourceKeySystems
25199 * The key systems options from the player source
25200 * @param {Object} media
25201 * The active media playlist
25202 * @param {Object} [audioMedia]
25203 * The active audio media playlist (optional)
25204 *
25205 * @return {boolean}
25206 * Whether or not options were configured and EME is available
25207 */
25208
var setupEmeOptions = function setupEmeOptions(_ref2) {
  var player = _ref2.player,
      sourceKeySystems = _ref2.sourceKeySystems,
      media = _ref2.media,
      audioMedia = _ref2.audioMedia;

  // Derive the full keySystems config (content types, pssh, license urls).
  var sourceOptions = emeKeySystems(sourceKeySystems, media, audioMedia);

  if (!sourceOptions) {
    return false;
  }

  // Attach the options to the current source first; the eme plugin (when
  // present) drives the remaining setup from there.
  player.currentSource().keySystems = sourceOptions;

  if (!player.eme) {
    videojs__default["default"].log.warn('DRM encrypted source cannot be decrypted without a DRM plugin');
    return false;
  }

  return true;
};
25230
var getVhsLocalStorage = function getVhsLocalStorage() {
  // localStorage may be missing or disabled in some environments.
  if (!window__default["default"].localStorage) {
    return null;
  }

  var raw = window__default["default"].localStorage.getItem(LOCAL_STORAGE_KEY);

  if (!raw) {
    return null;
  }

  try {
    return JSON.parse(raw);
  } catch (e) {
    // someone may have tampered with the value
    return null;
  }
};
25249
var updateVhsLocalStorage = function updateVhsLocalStorage(options) {
  if (!window__default["default"].localStorage) {
    return false;
  }

  // Merge the new values onto whatever was previously persisted.
  var existing = getVhsLocalStorage();
  var objectToStore = existing ? videojs__default["default"].mergeOptions(existing, options) : options;

  try {
    window__default["default"].localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(objectToStore));
  } catch (e) {
    // setItem throws when storage is full (e.g., always on iOS 5+ Safari
    // private mode, where the quota is 0):
    // https://developer.mozilla.org/en-US/docs/Web/API/Storage/setItem#Exceptions
    // Nothing to do in that case.
    return false;
  }

  return objectToStore;
};
25270/**
25271 * Parses VHS-supported media types from data URIs. See
25272 * https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
25273 * for information on data URIs.
25274 *
25275 * @param {string} dataUri
25276 * The data URI
25277 *
25278 * @return {string|Object}
25279 * The parsed object/string, or the original string if no supported media type
25280 * was found
25281 */
25282
25283
var expandDataUri = function expandDataUri(dataUri) {
  var vhsJsonPrefix = 'data:application/vnd.videojs.vhs+json,';
  var isVhsJson = dataUri.toLowerCase().indexOf(vhsJsonPrefix) === 0;

  if (!isVhsJson) {
    // no known case for this data URI, return the string as-is
    return dataUri;
  }

  // Parse the JSON payload that follows the first comma of the URI.
  return JSON.parse(dataUri.substring(dataUri.indexOf(',') + 1));
};
25292/**
25293 * Whether the browser has built-in HLS support.
25294 */
25295
25296
// Whether the browser has built-in HLS support (evaluated once at load).
Vhs.supportsNativeHls = function () {
  if (!document__default["default"] || !document__default["default"].createElement) {
    return false;
  }

  var video = document__default["default"].createElement('video'); // native HLS is definitely not supported if HTML5 video isn't

  if (!videojs__default["default"].getTech('Html5').isSupported()) {
    return false;
  } // HLS manifests can go by many mime-types


  var hlsMimeTypes = [
    'application/vnd.apple.mpegurl', // Apple sanctioned
    'audio/mpegurl', // Apple sanctioned for backwards compatibility
    'audio/x-mpegurl', // Very common
    'application/x-mpegurl', // Very common
    'video/x-mpegurl', // Included for completeness
    'video/mpegurl',
    'application/mpegurl'
  ];

  for (var i = 0; i < hlsMimeTypes.length; i++) {
    if (/maybe|probably/i.test(video.canPlayType(hlsMimeTypes[i]))) {
      return true;
    }
  }

  return false;
}();
25319
// Whether the browser has built-in DASH support (evaluated once at load).
Vhs.supportsNativeDash = function () {
  var doc = document__default["default"];

  if (!doc || !doc.createElement || !videojs__default["default"].getTech('Html5').isSupported()) {
    return false;
  }

  var answer = doc.createElement('video').canPlayType('application/dash+xml');
  return /maybe|probably/i.test(answer);
}();
25327
// Reports native playback support for a given streaming type.
Vhs.supportsTypeNatively = function (type) {
  switch (type) {
    case 'hls':
      return Vhs.supportsNativeHls;

    case 'dash':
      return Vhs.supportsNativeDash;

    default:
      return false;
  }
};
25339/**
25340 * HLS is a source handler, not a tech. Make sure attempts to use it
25341 * as one do not cause exceptions.
25342 */
25343
25344
Vhs.isSupported = function () {
  // Stub kept for backwards compatibility with the removed HLS tech.
  return videojs__default["default"].log.warn('HLS is no longer a tech. Please remove it from ' + 'your player\'s techOrder.');
};
25348
// Base video.js Component that VhsHandler extends below.
var Component = videojs__default["default"].getComponent('Component');
25350/**
25351 * The Vhs Handler object, where we orchestrate all of the parts
25352 * of HLS to interact with video.js
25353 *
25354 * @class VhsHandler
25355 * @extends videojs.Component
25356 * @param {Object} source the soruce object
25357 * @param {Tech} tech the parent tech object
25358 * @param {Object} options optional and required options
25359 */
25360
25361var VhsHandler = /*#__PURE__*/function (_Component) {
25362 _inheritsLoose__default["default"](VhsHandler, _Component);
25363
  /**
   * Builds the handler: merges hls/vhs options, exposes deprecated
   * player.hls/vhs/dash accessors, applies option defaults via setOptions_,
   * validates native-track overriding, and wires fullscreen/seeking/error/play
   * listeners.
   *
   * @param {Object} source the source object
   * @param {Tech} tech the parent tech object
   * @param {Object} options optional and required options
   */
  function VhsHandler(source, tech, options) {
    var _this;

    // Merge legacy `hls` options with the preferred `vhs` options
    // (vhs keys win in mergeOptions).
    _this = _Component.call(this, tech, videojs__default["default"].mergeOptions(options.hls, options.vhs)) || this;

    if (options.hls && Object.keys(options.hls).length) {
      videojs__default["default"].log.warn('Using hls options is deprecated. Use vhs instead.');
    } // if a tech level `initialBandwidth` option was passed
    // use that over the VHS level `bandwidth` option


    if (typeof options.initialBandwidth === 'number') {
      _this.options_.bandwidth = options.initialBandwidth;
    }

    _this.logger_ = logger('VhsHandler'); // tech.player() is deprecated but setup a reference to HLS for
    // backwards-compatibility

    if (tech.options_ && tech.options_.playerId) {
      var _player = videojs__default["default"](tech.options_.playerId);

      // Deprecated player.hls accessor: warns and reports usage on access.
      if (!_player.hasOwnProperty('hls')) {
        Object.defineProperty(_player, 'hls', {
          get: function get() {
            videojs__default["default"].log.warn('player.hls is deprecated. Use player.tech().vhs instead.');
            tech.trigger({
              type: 'usage',
              name: 'hls-player-access'
            });
            return _assertThisInitialized__default["default"](_this);
          },
          configurable: true
        });
      }

      // Deprecated player.vhs accessor.
      if (!_player.hasOwnProperty('vhs')) {
        Object.defineProperty(_player, 'vhs', {
          get: function get() {
            videojs__default["default"].log.warn('player.vhs is deprecated. Use player.tech().vhs instead.');
            tech.trigger({
              type: 'usage',
              name: 'vhs-player-access'
            });
            return _assertThisInitialized__default["default"](_this);
          },
          configurable: true
        });
      }

      // Deprecated player.dash accessor (no usage event for this one).
      if (!_player.hasOwnProperty('dash')) {
        Object.defineProperty(_player, 'dash', {
          get: function get() {
            videojs__default["default"].log.warn('player.dash is deprecated. Use player.tech().vhs instead.');
            return _assertThisInitialized__default["default"](_this);
          },
          configurable: true
        });
      }

      _this.player_ = _player;
    }

    _this.tech_ = tech;
    _this.source_ = source;
    _this.stats = {};
    // Set when VHS seeks internally so the next 'seeking' event is not
    // re-processed as a user seek.
    _this.ignoreNextSeekingEvent_ = false;

    _this.setOptions_();

    if (_this.options_.overrideNative && tech.overrideNativeAudioTracks && tech.overrideNativeVideoTracks) {
      tech.overrideNativeAudioTracks(true);
      tech.overrideNativeVideoTracks(true);
    } else if (_this.options_.overrideNative && (tech.featuresNativeVideoTracks || tech.featuresNativeAudioTracks)) {
      // overriding native HLS only works if audio tracks have been emulated
      // error early if we're misconfigured
      throw new Error('Overriding native HLS requires emulated tracks. ' + 'See https://git.io/vMpjB');
    } // listen for fullscreenchange events for this player so that we
    // can adjust our quality selection quickly


    _this.on(document__default["default"], ['fullscreenchange', 'webkitfullscreenchange', 'mozfullscreenchange', 'MSFullscreenChange'], function (event) {
      var fullscreenElement = document__default["default"].fullscreenElement || document__default["default"].webkitFullscreenElement || document__default["default"].mozFullScreenElement || document__default["default"].msFullscreenElement;

      if (fullscreenElement && fullscreenElement.contains(_this.tech_.el())) {
        _this.masterPlaylistController_.fastQualityChange_();
      } else {
        // When leaving fullscreen, since the in page pixel dimensions should be smaller
        // than full screen, see if there should be a rendition switch down to preserve
        // bandwidth.
        _this.masterPlaylistController_.checkABR_();
      }
    });

    _this.on(_this.tech_, 'seeking', function () {
      if (this.ignoreNextSeekingEvent_) {
        this.ignoreNextSeekingEvent_ = false;
        return;
      }

      this.setCurrentTime(this.tech_.currentTime());
    });

    _this.on(_this.tech_, 'error', function () {
      // verify that the error was real and we are loaded
      // enough to have mpc loaded.
      if (this.tech_.error() && this.masterPlaylistController_) {
        this.masterPlaylistController_.pauseLoading();
      }
    });

    _this.on(_this.tech_, 'play', _this.play);

    return _this;
  }
25478
  // Transpiled-class idiom: instance methods below attach to the prototype.
  var _proto = VhsHandler.prototype;
25480
  /**
   * Applies option defaults, restores bandwidth/throughput from localStorage
   * when enabled, and copies recognized options from the source object onto
   * this.options_. Note the two bandwidth checks are order-dependent: the
   * localStorage value is only consulted when no explicit bandwidth was set,
   * and the Config default only when localStorage supplied nothing either.
   */
  _proto.setOptions_ = function setOptions_() {
    var _this2 = this;

    // defaults
    this.options_.withCredentials = this.options_.withCredentials || false;
    this.options_.handleManifestRedirects = this.options_.handleManifestRedirects === false ? false : true;
    this.options_.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions === false ? false : true;
    this.options_.useDevicePixelRatio = this.options_.useDevicePixelRatio || false;
    this.options_.smoothQualityChange = this.options_.smoothQualityChange || false;
    // A source-level setting takes precedence over the handler-level one.
    this.options_.useBandwidthFromLocalStorage = typeof this.source_.useBandwidthFromLocalStorage !== 'undefined' ? this.source_.useBandwidthFromLocalStorage : this.options_.useBandwidthFromLocalStorage || false;
    this.options_.customTagParsers = this.options_.customTagParsers || [];
    this.options_.customTagMappers = this.options_.customTagMappers || [];
    this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;

    // Default blacklist duration: 5 minutes (in seconds).
    if (typeof this.options_.blacklistDuration !== 'number') {
      this.options_.blacklistDuration = 5 * 60;
    }

    // No explicit bandwidth option: try previously-measured values persisted
    // in localStorage (fires usage events when values are restored).
    if (typeof this.options_.bandwidth !== 'number') {
      if (this.options_.useBandwidthFromLocalStorage) {
        var storedObject = getVhsLocalStorage();

        if (storedObject && storedObject.bandwidth) {
          this.options_.bandwidth = storedObject.bandwidth;
          this.tech_.trigger({
            type: 'usage',
            name: 'vhs-bandwidth-from-local-storage'
          });
          this.tech_.trigger({
            type: 'usage',
            name: 'hls-bandwidth-from-local-storage'
          });
        }

        if (storedObject && storedObject.throughput) {
          this.options_.throughput = storedObject.throughput;
          this.tech_.trigger({
            type: 'usage',
            name: 'vhs-throughput-from-local-storage'
          });
          this.tech_.trigger({
            type: 'usage',
            name: 'hls-throughput-from-local-storage'
          });
        }
      }
    } // if bandwidth was not set by options or pulled from local storage, start playlist
    // selection at a reasonable bandwidth


    if (typeof this.options_.bandwidth !== 'number') {
      this.options_.bandwidth = Config.INITIAL_BANDWIDTH;
    } // If the bandwidth number is unchanged from the initial setting
    // then this takes precedence over the enableLowInitialPlaylist option


    this.options_.enableLowInitialPlaylist = this.options_.enableLowInitialPlaylist && this.options_.bandwidth === Config.INITIAL_BANDWIDTH; // grab options passed to player.src

    ['withCredentials', 'useDevicePixelRatio', 'limitRenditionByPlayerDimensions', 'bandwidth', 'smoothQualityChange', 'customTagParsers', 'customTagMappers', 'handleManifestRedirects', 'cacheEncryptionKeys', 'playlistSelector', 'initialPlaylistSelector', 'experimentalBufferBasedABR', 'liveRangeSafeTimeDelta', 'experimentalLLHLS', 'experimentalExactManifestTimings', 'experimentalLeastPixelDiffSelector'].forEach(function (option) {
      if (typeof _this2.source_[option] !== 'undefined') {
        _this2.options_[option] = _this2.source_[option];
      }
    });
    this.limitRenditionByPlayerDimensions = this.options_.limitRenditionByPlayerDimensions;
    this.useDevicePixelRatio = this.options_.useDevicePixelRatio;
  }
25547 /**
25548 * called when player.src gets called, handle a new source
25549 *
25550 * @param {Object} src the source object to handle
25551 */
25552 ;
25553
25554 _proto.src = function src(_src, type) {
25555 var _this3 = this;
25556
25557 // do nothing if the src is falsey
25558 if (!_src) {
25559 return;
25560 }
25561
25562 this.setOptions_(); // add master playlist controller options
25563
25564 this.options_.src = expandDataUri(this.source_.src);
25565 this.options_.tech = this.tech_;
25566 this.options_.externVhs = Vhs;
25567 this.options_.sourceType = mediaTypes_js.simpleTypeFromSourceType(type); // Whenever we seek internally, we should update the tech
25568
25569 this.options_.seekTo = function (time) {
25570 _this3.tech_.setCurrentTime(time);
25571 };
25572
25573 if (this.options_.smoothQualityChange) {
25574 videojs__default["default"].log.warn('smoothQualityChange is deprecated and will be removed in the next major version');
25575 }
25576
25577 this.masterPlaylistController_ = new MasterPlaylistController(this.options_);
25578 var playbackWatcherOptions = videojs__default["default"].mergeOptions({
25579 liveRangeSafeTimeDelta: SAFE_TIME_DELTA
25580 }, this.options_, {
25581 seekable: function seekable() {
25582 return _this3.seekable();
25583 },
25584 media: function media() {
25585 return _this3.masterPlaylistController_.media();
25586 },
25587 masterPlaylistController: this.masterPlaylistController_
25588 });
25589 this.playbackWatcher_ = new PlaybackWatcher(playbackWatcherOptions);
25590 this.masterPlaylistController_.on('error', function () {
25591 var player = videojs__default["default"].players[_this3.tech_.options_.playerId];
25592 var error = _this3.masterPlaylistController_.error;
25593
25594 if (typeof error === 'object' && !error.code) {
25595 error.code = 3;
25596 } else if (typeof error === 'string') {
25597 error = {
25598 message: error,
25599 code: 3
25600 };
25601 }
25602
25603 player.error(error);
25604 });
25605 var defaultSelector = this.options_.experimentalBufferBasedABR ? Vhs.movingAverageBandwidthSelector(0.55) : Vhs.STANDARD_PLAYLIST_SELECTOR; // `this` in selectPlaylist should be the VhsHandler for backwards
25606 // compatibility with < v2
25607
25608 this.masterPlaylistController_.selectPlaylist = this.selectPlaylist ? this.selectPlaylist.bind(this) : defaultSelector.bind(this);
25609 this.masterPlaylistController_.selectInitialPlaylist = Vhs.INITIAL_PLAYLIST_SELECTOR.bind(this); // re-expose some internal objects for backwards compatibility with < v2
25610
25611 this.playlists = this.masterPlaylistController_.masterPlaylistLoader_;
25612 this.mediaSource = this.masterPlaylistController_.mediaSource; // Proxy assignment of some properties to the master playlist
25613 // controller. Using a custom property for backwards compatibility
25614 // with < v2
25615
25616 Object.defineProperties(this, {
25617 selectPlaylist: {
25618 get: function get() {
25619 return this.masterPlaylistController_.selectPlaylist;
25620 },
25621 set: function set(selectPlaylist) {
25622 this.masterPlaylistController_.selectPlaylist = selectPlaylist.bind(this);
25623 }
25624 },
25625 throughput: {
25626 get: function get() {
25627 return this.masterPlaylistController_.mainSegmentLoader_.throughput.rate;
25628 },
25629 set: function set(throughput) {
25630 this.masterPlaylistController_.mainSegmentLoader_.throughput.rate = throughput; // By setting `count` to 1 the throughput value becomes the starting value
25631 // for the cumulative average
25632
25633 this.masterPlaylistController_.mainSegmentLoader_.throughput.count = 1;
25634 }
25635 },
25636 bandwidth: {
25637 get: function get() {
25638 return this.masterPlaylistController_.mainSegmentLoader_.bandwidth;
25639 },
25640 set: function set(bandwidth) {
25641 this.masterPlaylistController_.mainSegmentLoader_.bandwidth = bandwidth; // setting the bandwidth manually resets the throughput counter
25642 // `count` is set to zero that current value of `rate` isn't included
25643 // in the cumulative average
25644
25645 this.masterPlaylistController_.mainSegmentLoader_.throughput = {
25646 rate: 0,
25647 count: 0
25648 };
25649 }
25650 },
25651
25652 /**
25653 * `systemBandwidth` is a combination of two serial processes bit-rates. The first
25654 * is the network bitrate provided by `bandwidth` and the second is the bitrate of
25655 * the entire process after that - decryption, transmuxing, and appending - provided
25656 * by `throughput`.
25657 *
     * Since the two processes are serial, the overall system bandwidth is given by:
25659 * sysBandwidth = 1 / (1 / bandwidth + 1 / throughput)
25660 */
25661 systemBandwidth: {
25662 get: function get() {
25663 var invBandwidth = 1 / (this.bandwidth || 1);
25664 var invThroughput;
25665
25666 if (this.throughput > 0) {
25667 invThroughput = 1 / this.throughput;
25668 } else {
25669 invThroughput = 0;
25670 }
25671
25672 var systemBitrate = Math.floor(1 / (invBandwidth + invThroughput));
25673 return systemBitrate;
25674 },
25675 set: function set() {
25676 videojs__default["default"].log.error('The "systemBandwidth" property is read-only');
25677 }
25678 }
25679 });
25680
25681 if (this.options_.bandwidth) {
25682 this.bandwidth = this.options_.bandwidth;
25683 }
25684
25685 if (this.options_.throughput) {
25686 this.throughput = this.options_.throughput;
25687 }
25688
25689 Object.defineProperties(this.stats, {
25690 bandwidth: {
25691 get: function get() {
25692 return _this3.bandwidth || 0;
25693 },
25694 enumerable: true
25695 },
25696 mediaRequests: {
25697 get: function get() {
25698 return _this3.masterPlaylistController_.mediaRequests_() || 0;
25699 },
25700 enumerable: true
25701 },
25702 mediaRequestsAborted: {
25703 get: function get() {
25704 return _this3.masterPlaylistController_.mediaRequestsAborted_() || 0;
25705 },
25706 enumerable: true
25707 },
25708 mediaRequestsTimedout: {
25709 get: function get() {
25710 return _this3.masterPlaylistController_.mediaRequestsTimedout_() || 0;
25711 },
25712 enumerable: true
25713 },
25714 mediaRequestsErrored: {
25715 get: function get() {
25716 return _this3.masterPlaylistController_.mediaRequestsErrored_() || 0;
25717 },
25718 enumerable: true
25719 },
25720 mediaTransferDuration: {
25721 get: function get() {
25722 return _this3.masterPlaylistController_.mediaTransferDuration_() || 0;
25723 },
25724 enumerable: true
25725 },
25726 mediaBytesTransferred: {
25727 get: function get() {
25728 return _this3.masterPlaylistController_.mediaBytesTransferred_() || 0;
25729 },
25730 enumerable: true
25731 },
25732 mediaSecondsLoaded: {
25733 get: function get() {
25734 return _this3.masterPlaylistController_.mediaSecondsLoaded_() || 0;
25735 },
25736 enumerable: true
25737 },
25738 mediaAppends: {
25739 get: function get() {
25740 return _this3.masterPlaylistController_.mediaAppends_() || 0;
25741 },
25742 enumerable: true
25743 },
25744 mainAppendsToLoadedData: {
25745 get: function get() {
25746 return _this3.masterPlaylistController_.mainAppendsToLoadedData_() || 0;
25747 },
25748 enumerable: true
25749 },
25750 audioAppendsToLoadedData: {
25751 get: function get() {
25752 return _this3.masterPlaylistController_.audioAppendsToLoadedData_() || 0;
25753 },
25754 enumerable: true
25755 },
25756 appendsToLoadedData: {
25757 get: function get() {
25758 return _this3.masterPlaylistController_.appendsToLoadedData_() || 0;
25759 },
25760 enumerable: true
25761 },
25762 timeToLoadedData: {
25763 get: function get() {
25764 return _this3.masterPlaylistController_.timeToLoadedData_() || 0;
25765 },
25766 enumerable: true
25767 },
25768 buffered: {
25769 get: function get() {
25770 return timeRangesToArray(_this3.tech_.buffered());
25771 },
25772 enumerable: true
25773 },
25774 currentTime: {
25775 get: function get() {
25776 return _this3.tech_.currentTime();
25777 },
25778 enumerable: true
25779 },
25780 currentSource: {
25781 get: function get() {
25782 return _this3.tech_.currentSource_;
25783 },
25784 enumerable: true
25785 },
25786 currentTech: {
25787 get: function get() {
25788 return _this3.tech_.name_;
25789 },
25790 enumerable: true
25791 },
25792 duration: {
25793 get: function get() {
25794 return _this3.tech_.duration();
25795 },
25796 enumerable: true
25797 },
25798 master: {
25799 get: function get() {
25800 return _this3.playlists.master;
25801 },
25802 enumerable: true
25803 },
25804 playerDimensions: {
25805 get: function get() {
25806 return _this3.tech_.currentDimensions();
25807 },
25808 enumerable: true
25809 },
25810 seekable: {
25811 get: function get() {
25812 return timeRangesToArray(_this3.tech_.seekable());
25813 },
25814 enumerable: true
25815 },
25816 timestamp: {
25817 get: function get() {
25818 return Date.now();
25819 },
25820 enumerable: true
25821 },
25822 videoPlaybackQuality: {
25823 get: function get() {
25824 return _this3.tech_.getVideoPlaybackQuality();
25825 },
25826 enumerable: true
25827 }
25828 });
25829 this.tech_.one('canplay', this.masterPlaylistController_.setupFirstPlay.bind(this.masterPlaylistController_));
25830 this.tech_.on('bandwidthupdate', function () {
25831 if (_this3.options_.useBandwidthFromLocalStorage) {
25832 updateVhsLocalStorage({
25833 bandwidth: _this3.bandwidth,
25834 throughput: Math.round(_this3.throughput)
25835 });
25836 }
25837 });
25838 this.masterPlaylistController_.on('selectedinitialmedia', function () {
25839 // Add the manual rendition mix-in to VhsHandler
25840 renditionSelectionMixin(_this3);
25841 });
25842 this.masterPlaylistController_.sourceUpdater_.on('createdsourcebuffers', function () {
25843 _this3.setupEme_();
25844 }); // the bandwidth of the primary segment loader is our best
25845 // estimate of overall bandwidth
25846
25847 this.on(this.masterPlaylistController_, 'progress', function () {
25848 this.tech_.trigger('progress');
25849 }); // In the live case, we need to ignore the very first `seeking` event since
25850 // that will be the result of the seek-to-live behavior
25851
25852 this.on(this.masterPlaylistController_, 'firstplay', function () {
25853 this.ignoreNextSeekingEvent_ = true;
25854 });
25855 this.setupQualityLevels_(); // do nothing if the tech has been disposed already
25856 // this can occur if someone sets the src in player.ready(), for instance
25857
25858 if (!this.tech_.el()) {
25859 return;
25860 }
25861
25862 this.mediaSourceUrl_ = window__default["default"].URL.createObjectURL(this.masterPlaylistController_.mediaSource);
25863 this.tech_.src(this.mediaSourceUrl_);
25864 }
25865 /**
25866 * If necessary and EME is available, sets up EME options and waits for key session
25867 * creation.
25868 *
   * This function also updates the source updater so that it can be used, as for some
25870 * browsers, EME must be configured before content is appended (if appending unencrypted
25871 * content before encrypted content).
25872 */
25873 ;
25874
25875 _proto.setupEme_ = function setupEme_() {
25876 var _this4 = this;
25877
25878 var audioPlaylistLoader = this.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader;
25879 var didSetupEmeOptions = setupEmeOptions({
25880 player: this.player_,
25881 sourceKeySystems: this.source_.keySystems,
25882 media: this.playlists.media(),
25883 audioMedia: audioPlaylistLoader && audioPlaylistLoader.media()
25884 });
25885 this.player_.tech_.on('keystatuschange', function (e) {
25886 if (e.status === 'output-restricted') {
25887 _this4.masterPlaylistController_.blacklistCurrentPlaylist({
25888 playlist: _this4.masterPlaylistController_.media(),
25889 message: "DRM keystatus changed to " + e.status + ". Playlist will fail to play. Check for HDCP content.",
25890 blacklistDuration: Infinity
25891 });
25892 }
25893 }); // In IE11 this is too early to initialize media keys, and IE11 does not support
25894 // promises.
25895
25896 if (videojs__default["default"].browser.IE_VERSION === 11 || !didSetupEmeOptions) {
25897 // If EME options were not set up, we've done all we could to initialize EME.
25898 this.masterPlaylistController_.sourceUpdater_.initializedEme();
25899 return;
25900 }
25901
25902 this.logger_('waiting for EME key session creation');
25903 waitForKeySessionCreation({
25904 player: this.player_,
25905 sourceKeySystems: this.source_.keySystems,
25906 audioMedia: audioPlaylistLoader && audioPlaylistLoader.media(),
25907 mainPlaylists: this.playlists.master.playlists
25908 }).then(function () {
25909 _this4.logger_('created EME key session');
25910
25911 _this4.masterPlaylistController_.sourceUpdater_.initializedEme();
25912 }).catch(function (err) {
25913 _this4.logger_('error while creating EME key session', err);
25914
25915 _this4.player_.error({
25916 message: 'Failed to initialize media keys for EME',
25917 code: 3
25918 });
25919 });
25920 }
25921 /**
25922 * Initializes the quality levels and sets listeners to update them.
25923 *
25924 * @method setupQualityLevels_
25925 * @private
25926 */
25927 ;
25928
25929 _proto.setupQualityLevels_ = function setupQualityLevels_() {
25930 var _this5 = this;
25931
25932 var player = videojs__default["default"].players[this.tech_.options_.playerId]; // if there isn't a player or there isn't a qualityLevels plugin
25933 // or qualityLevels_ listeners have already been setup, do nothing.
25934
25935 if (!player || !player.qualityLevels || this.qualityLevels_) {
25936 return;
25937 }
25938
25939 this.qualityLevels_ = player.qualityLevels();
25940 this.masterPlaylistController_.on('selectedinitialmedia', function () {
25941 handleVhsLoadedMetadata(_this5.qualityLevels_, _this5);
25942 });
25943 this.playlists.on('mediachange', function () {
25944 handleVhsMediaChange(_this5.qualityLevels_, _this5.playlists);
25945 });
25946 }
25947 /**
25948 * return the version
25949 */
25950 ;
25951
25952 VhsHandler.version = function version$5() {
25953 return {
25954 '@videojs/http-streaming': version$4,
25955 'mux.js': version$3,
25956 'mpd-parser': version$2,
25957 'm3u8-parser': version$1,
25958 'aes-decrypter': version
25959 };
25960 }
25961 /**
25962 * return the version
25963 */
25964 ;
25965
25966 _proto.version = function version() {
25967 return this.constructor.version();
25968 };
25969
25970 _proto.canChangeType = function canChangeType() {
25971 return SourceUpdater.canChangeType();
25972 }
25973 /**
25974 * Begin playing the video.
25975 */
25976 ;
25977
25978 _proto.play = function play() {
25979 this.masterPlaylistController_.play();
25980 }
25981 /**
25982 * a wrapper around the function in MasterPlaylistController
25983 */
25984 ;
25985
25986 _proto.setCurrentTime = function setCurrentTime(currentTime) {
25987 this.masterPlaylistController_.setCurrentTime(currentTime);
25988 }
25989 /**
25990 * a wrapper around the function in MasterPlaylistController
25991 */
25992 ;
25993
25994 _proto.duration = function duration() {
25995 return this.masterPlaylistController_.duration();
25996 }
25997 /**
25998 * a wrapper around the function in MasterPlaylistController
25999 */
26000 ;
26001
26002 _proto.seekable = function seekable() {
26003 return this.masterPlaylistController_.seekable();
26004 }
26005 /**
26006 * Abort all outstanding work and cleanup.
26007 */
26008 ;
26009
26010 _proto.dispose = function dispose() {
26011 if (this.playbackWatcher_) {
26012 this.playbackWatcher_.dispose();
26013 }
26014
26015 if (this.masterPlaylistController_) {
26016 this.masterPlaylistController_.dispose();
26017 }
26018
26019 if (this.qualityLevels_) {
26020 this.qualityLevels_.dispose();
26021 }
26022
26023 if (this.player_) {
26024 delete this.player_.vhs;
26025 delete this.player_.dash;
26026 delete this.player_.hls;
26027 }
26028
26029 if (this.tech_ && this.tech_.vhs) {
26030 delete this.tech_.vhs;
26031 } // don't check this.tech_.hls as it will log a deprecated warning
26032
26033
26034 if (this.tech_) {
26035 delete this.tech_.hls;
26036 }
26037
26038 if (this.mediaSourceUrl_ && window__default["default"].URL.revokeObjectURL) {
26039 window__default["default"].URL.revokeObjectURL(this.mediaSourceUrl_);
26040 this.mediaSourceUrl_ = null;
26041 }
26042
26043 _Component.prototype.dispose.call(this);
26044 };
26045
26046 _proto.convertToProgramTime = function convertToProgramTime(time, callback) {
26047 return getProgramTime({
26048 playlist: this.masterPlaylistController_.media(),
26049 time: time,
26050 callback: callback
26051 });
26052 } // the player must be playing before calling this
26053 ;
26054
26055 _proto.seekToProgramTime = function seekToProgramTime$1(programTime, callback, pauseAfterSeek, retryCount) {
26056 if (pauseAfterSeek === void 0) {
26057 pauseAfterSeek = true;
26058 }
26059
26060 if (retryCount === void 0) {
26061 retryCount = 2;
26062 }
26063
26064 return seekToProgramTime({
26065 programTime: programTime,
26066 playlist: this.masterPlaylistController_.media(),
26067 retryCount: retryCount,
26068 pauseAfterSeek: pauseAfterSeek,
26069 seekTo: this.options_.seekTo,
26070 tech: this.options_.tech,
26071 callback: callback
26072 });
26073 };
26074
26075 return VhsHandler;
26076}(Component);
26077/**
26078 * The Source Handler object, which informs video.js what additional
26079 * MIME types are supported and sets up playback. It is registered
26080 * automatically to the appropriate tech based on the capabilities of
26081 * the browser it is running in. It is not necessary to use or modify
26082 * this object in normal usage.
26083 */
26084
26085
var VhsSourceHandler = {
  name: 'videojs-http-streaming',
  VERSION: version$4,
  canHandleSource: function canHandleSource(srcObj, options) {
    if (options === void 0) {
      options = {};
    } // Merge the global videojs options with the per-call options before
    // deciding whether the source type is playable.


    var merged = videojs__default["default"].mergeOptions(videojs__default["default"].options, options);
    return VhsSourceHandler.canPlayType(srcObj.type, merged);
  },
  handleSource: function handleSource(source, tech, options) {
    if (options === void 0) {
      options = {};
    }

    var merged = videojs__default["default"].mergeOptions(videojs__default["default"].options, options);
    tech.vhs = new VhsHandler(source, tech, merged); // Keep the deprecated `tech.hls` alias around, warning on access.

    if (!videojs__default["default"].hasOwnProperty('hls')) {
      Object.defineProperty(tech, 'hls', {
        get: function get() {
          videojs__default["default"].log.warn('player.tech().hls is deprecated. Use player.tech().vhs instead.');
          return tech.vhs;
        },
        configurable: true
      });
    }

    tech.vhs.xhr = xhrFactory();
    tech.vhs.src(source.src, source.type);
    return tech.vhs;
  },
  canPlayType: function canPlayType(type, options) {
    if (options === void 0) {
      options = {};
    }

    var merged = videojs__default["default"].mergeOptions(videojs__default["default"].options, options);
    var overrideNative = merged.vhs.overrideNative;

    if (overrideNative === void 0) {
      // Outside of Safari, prefer MSE playback over the native player.
      overrideNative = !videojs__default["default"].browser.IS_ANY_SAFARI;
    }

    var supportedType = mediaTypes_js.simpleTypeFromSourceType(type);

    if (supportedType && (!Vhs.supportsTypeNatively(supportedType) || overrideNative)) {
      return 'maybe';
    }

    return '';
  }
};
26133/**
26134 * Check to see if the native MediaSource object exists and supports
26135 * an MP4 container with both H.264 video and AAC-LC audio.
26136 *
26137 * @return {boolean} if native media sources are supported
26138 */
26139
var supportsNativeMediaSources = function supportsNativeMediaSources() {
  // Probe MSE support for an MP4 container with H.264 video and AAC-LC audio.
  var supported = codecs_js.browserSupportsCodec('avc1.4d400d,mp4a.40.2');
  return supported;
}; // register source handlers with the appropriate techs
26143
26144
// Only register the MSE-based source handler when the browser can actually
// play the codecs it would produce; priority 0 puts it ahead of native HLS.
if (supportsNativeMediaSources()) {
  videojs__default["default"].getTech('Html5').registerSourceHandler(VhsSourceHandler, 0);
}

// Expose the handler, source handler, and Vhs object on the videojs global.
// The old Hls-prefixed names remain available as deprecated aliases that log
// a warning on first access.
videojs__default["default"].VhsHandler = VhsHandler;
Object.defineProperty(videojs__default["default"], 'HlsHandler', {
  get: function get() {
    videojs__default["default"].log.warn('videojs.HlsHandler is deprecated. Use videojs.VhsHandler instead.');
    return VhsHandler;
  },
  configurable: true
});
videojs__default["default"].VhsSourceHandler = VhsSourceHandler;
Object.defineProperty(videojs__default["default"], 'HlsSourceHandler', {
  get: function get() {
    videojs__default["default"].log.warn('videojs.HlsSourceHandler is deprecated. ' + 'Use videojs.VhsSourceHandler instead.');
    return VhsSourceHandler;
  },
  configurable: true
});
videojs__default["default"].Vhs = Vhs;
Object.defineProperty(videojs__default["default"], 'Hls', {
  get: function get() {
    videojs__default["default"].log.warn('videojs.Hls is deprecated. Use videojs.Vhs instead.');
    return Vhs;
  },
  configurable: true
});

// For very old video.js versions without the middleware API (`videojs.use`),
// fall back to registering Vhs as a component under both names.
if (!videojs__default["default"].use) {
  videojs__default["default"].registerComponent('Hls', Vhs);
  videojs__default["default"].registerComponent('Vhs', Vhs);
}

// Ensure the option bags exist so downstream code can read/write them freely.
videojs__default["default"].options.vhs = videojs__default["default"].options.vhs || {};
videojs__default["default"].options.hls = videojs__default["default"].options.hls || {};

// Register the reloadSourceOnError plugin unless one is already registered
// (e.g., by another bundled copy of this library).
if (!videojs__default["default"].getPlugin || !videojs__default["default"].getPlugin('reloadSourceOnError')) {
  var registerPlugin = videojs__default["default"].registerPlugin || videojs__default["default"].plugin;
  registerPlugin('reloadSourceOnError', reloadSourceOnError);
}

// Public CommonJS exports of this bundle.
Object.defineProperty(exports, 'simpleTypeFromSourceType', {
  enumerable: true,
  get: function () { return mediaTypes_js.simpleTypeFromSourceType; }
});
exports.LOCAL_STORAGE_KEY = LOCAL_STORAGE_KEY;
exports.Vhs = Vhs;
exports.VhsHandler = VhsHandler;
exports.VhsSourceHandler = VhsSourceHandler;
exports.emeKeySystems = emeKeySystems;
exports.expandDataUri = expandDataUri;
exports.getAllPsshKeySystemsOptions = getAllPsshKeySystemsOptions;
exports.setupEmeOptions = setupEmeOptions;
exports.waitForKeySessionCreation = waitForKeySessionCreation;