// Retrieved from the UNPKG raw view (33.5 kB, JavaScript).
1import videojs from 'video.js';
2import { createTransferableMessage } from './bin-utils';
3import { stringToArrayBuffer } from './util/string-to-array-buffer';
4import { transmux } from './segment-transmuxer';
5import { segmentXhrHeaders } from './xhr';
6import {workerCallback} from './util/worker-callback.js';
7import {
8 detectContainerForBytes,
9 isLikelyFmp4MediaSegment
10} from '@videojs/vhs-utils/es/containers';
11
/**
 * Error codes attached to request-error objects (see handleErrors) so that
 * callers can distinguish between failure modes.
 *
 * @type {Object}
 */
export const REQUEST_ERRORS = {
  FAILURE: 2,
  TIMEOUT: -101,
  ABORTED: -102
};
17
/**
 * Aborts every outstanding request in the given collection.
 *
 * @param {Object[]} activeXhrs - the XHR requests to abort
 */
const abortAll = (activeXhrs) => {
  for (const xhr of activeXhrs) {
    xhr.abort();
  }
};
28
/**
 * Gather important bandwidth stats once a request has completed.
 *
 * @param {Object} request - the XHR request from which to gather stats
 * @return {Object} bandwidth, bytesReceived and roundTripTime values
 */
const getRequestStats = (request) => {
  const { bandwidth, bytesReceived, roundTripTime } = request;

  return {
    bandwidth,
    bytesReceived: bytesReceived || 0,
    roundTripTime: roundTripTime || 0
  };
};
41
/**
 * If possible, gather bandwidth stats while a request is still in progress.
 *
 * @param {Event} progressEvent - an event object from an XHR's progress event
 * @return {Object} bandwidth, bytesReceived and roundTripTime so far
 */
const getProgressStats = (progressEvent) => {
  const request = progressEvent.target;
  const roundTripTime = (Date.now() - request.requestTime) || 0;
  const bytesReceived = progressEvent.loaded;

  // This can produce Infinity when roundTripTime is 0, which is acceptable:
  // bandwidth measured on progress is only used to decide whether to abort a
  // request early due to insufficient bandwidth.
  const bandwidth = Math.floor((bytesReceived / roundTripTime) * 8 * 1000);

  return { bandwidth, bytesReceived, roundTripTime };
};
65
/**
 * Normalize all request error conditions into a single error object, or
 * return null when the request completed successfully.
 *
 * @param {Error|null} error - if non-null signals an error occurred with the XHR
 * @param {Object} request - the XHR request that possibly generated the error
 * @return {Object|null} an error description, or null when there was no error
 */
const handleErrors = (error, request) => {
  // all error objects share the same shape; only message and code vary
  const errorObject = (message, code) => ({
    status: request.status,
    message: message + request.uri,
    code,
    xhr: request
  });

  if (request.timedout) {
    return errorObject('HLS request timed-out at URL: ', REQUEST_ERRORS.TIMEOUT);
  }

  if (request.aborted) {
    return errorObject('HLS request aborted at URL: ', REQUEST_ERRORS.ABORTED);
  }

  if (error) {
    return errorObject('HLS request errored at URL: ', REQUEST_ERRORS.FAILURE);
  }

  if (request.responseType === 'arraybuffer' && request.response.byteLength === 0) {
    return errorObject('Empty HLS response at URL: ', REQUEST_ERRORS.FAILURE);
  }

  return null;
};
112
/**
 * Handle responses for key data and convert the key data to the format
 * expected by the decryption step later.
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 *                           from SegmentLoader
 * @param {Array} objects - objects to add the key bytes to.
 * @param {Function} finishProcessingFn - a callback to execute to continue processing
 *                                        this request
 */
const handleKeyResponse = (segment, objects, finishProcessingFn) => (error, request) => {
  const response = request.response;
  const errorObj = handleErrors(error, request);

  if (errorObj) {
    return finishProcessingFn(errorObj, segment);
  }

  // an AES-128 key must be exactly 16 bytes
  if (response.byteLength !== 16) {
    return finishProcessingFn({
      status: request.status,
      message: 'Invalid HLS key at URL: ' + request.uri,
      code: REQUEST_ERRORS.FAILURE,
      xhr: request
    }, segment);
  }

  // repackage the key as four 32-bit big-endian words
  const view = new DataView(response);
  const bytes = new Uint32Array(4);

  for (let word = 0; word < 4; word++) {
    bytes[word] = view.getUint32(word * 4);
  }

  objects.forEach((object) => {
    object.bytes = bytes;
  });

  return finishProcessingFn(null, segment);
};
154
/**
 * Probe an mp4 init segment for its tracks and timescales, recording the
 * results on segment.map. Calls back with an error for non-mp4 containers.
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 *                           from SegmentLoader
 * @param {Function} callback - invoked with an error object, or null on success
 */
const parseInitSegment = (segment, callback) => {
  const type = detectContainerForBytes(segment.map.bytes);

  // TODO: We should also handle ts init segments here, but we
  // only know how to parse mp4 init segments at the moment
  if (type !== 'mp4') {
    const uri = segment.map.resolvedUri || segment.map.uri;

    return callback({
      internal: true,
      message: `Found unsupported ${type || 'unknown'} container for initialization segment at URL: ${uri}`,
      code: REQUEST_ERRORS.FAILURE
    });
  }

  workerCallback({
    action: 'probeMp4Tracks',
    data: segment.map.bytes,
    transmuxer: segment.transmuxer,
    callback: ({tracks, data}) => {
      // transfer bytes back to us
      segment.map.bytes = data;

      tracks.forEach((track) => {
        segment.map.tracks = segment.map.tracks || {};

        // only support one track of each type for now
        if (!segment.map.tracks[track.type]) {
          segment.map.tracks[track.type] = track;

          if (typeof track.id === 'number' && track.timescale) {
            segment.map.timescales = segment.map.timescales || {};
            segment.map.timescales[track.id] = track.timescale;
          }
        }
      });

      return callback(null);
    }
  });
};
199
/**
 * Handle init-segment responses.
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 *                           from SegmentLoader
 * @param {Function} finishProcessingFn - a callback to execute to continue processing
 *                                        this request
 */
const handleInitSegmentResponse =
({segment, finishProcessingFn}) => (error, request) => {
  const errorObj = handleErrors(error, request);

  if (errorObj) {
    return finishProcessingFn(errorObj, segment);
  }

  const bytes = new Uint8Array(request.response);

  // the init segment is encrypted, so parsing has to wait until the key
  // request finishes and the bytes can be decrypted
  if (segment.map.key) {
    segment.map.encryptedBytes = bytes;
    return finishProcessingFn(null, segment);
  }

  segment.map.bytes = bytes;

  parseInitSegment(segment, (parseError) => {
    if (!parseError) {
      return finishProcessingFn(null, segment);
    }

    parseError.xhr = request;
    parseError.status = request.status;
    finishProcessingFn(parseError, segment);
  });
};
237
/**
 * Response handler for segment-requests, being sure to set the correct
 * property depending on whether the segment is encrypted or not.
 * Also records and keeps track of stats that are used for ABR purposes.
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 *                           from SegmentLoader
 * @param {Function} finishProcessingFn - a callback to execute to continue processing
 *                                        this request
 * @param {string} responseType - the XHR responseType used for this request
 */
const handleSegmentResponse = ({
  segment,
  finishProcessingFn,
  responseType
}) => (error, request) => {
  const errorObj = handleErrors(error, request);

  if (errorObj) {
    return finishProcessingFn(errorObj, segment);
  }

  // although responseText "should" exist, this guard serves to prevent an error being
  // thrown for two primary cases:
  // 1. the mime type override stops working, or is not implemented for a specific
  //    browser
  // 2. when using mock XHR libraries like sinon that do not allow the override behavior
  let newBytes;

  if (responseType === 'arraybuffer' || !request.responseText) {
    newBytes = request.response;
  } else {
    newBytes = stringToArrayBuffer(request.responseText.substring(segment.lastReachedChar || 0));
  }

  segment.stats = getRequestStats(request);

  if (segment.key) {
    segment.encryptedBytes = new Uint8Array(newBytes);
  } else {
    segment.bytes = new Uint8Array(newBytes);
  }

  return finishProcessingFn(null, segment);
};
279
/**
 * Transmux ts/aac segment bytes and relay each transmuxer event to the
 * corresponding caller-provided callback. The bytes are probed first so
 * that start times come from the probe rather than the transmuxer.
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 *                           from SegmentLoader
 * @param {Uint8Array|ArrayBuffer} bytes - the segment bytes to transmux
 * @param {Function} trackInfoFn - a callback that receives track info
 * @param {Function} timingInfoFn - a callback that receives timing info
 * @param {Function} videoSegmentTimingInfoFn - receives video timing info from
 *                                              the transmuxer
 * @param {Function} audioSegmentTimingInfoFn - receives audio timing info from
 *                                              the transmuxer
 * @param {Function} id3Fn - a callback that receives ID3 metadata
 * @param {Function} captionsFn - a callback that receives captions
 * @param {boolean} isEndOfTimeline - true if this segment represents the last
 *                                    segment in a timeline
 * @param {Function} endedTimelineFn - called when a timeline is ended; only
 *                                     called if isEndOfTimeline is true
 * @param {Function} dataFn - receives transmuxed data as it becomes available
 * @param {Function} doneFn - called once transmuxing has completed
 * @param {Function} onTransmuxerLog - receives log messages from the transmuxer
 */
const transmuxAndNotify = ({
  segment,
  bytes,
  trackInfoFn,
  timingInfoFn,
  videoSegmentTimingInfoFn,
  audioSegmentTimingInfoFn,
  id3Fn,
  captionsFn,
  isEndOfTimeline,
  endedTimelineFn,
  dataFn,
  doneFn,
  onTransmuxerLog
}) => {
  const fmp4Tracks = segment.map && segment.map.tracks || {};
  const isMuxed = Boolean(fmp4Tracks.audio && fmp4Tracks.video);

  // Keep references to each function so we can null them out after we're done with them.
  // One reason for this is that in the case of full segments, we want to trust start
  // times from the probe, rather than the transmuxer.
  let audioStartFn = timingInfoFn.bind(null, segment, 'audio', 'start');
  const audioEndFn = timingInfoFn.bind(null, segment, 'audio', 'end');
  let videoStartFn = timingInfoFn.bind(null, segment, 'video', 'start');
  const videoEndFn = timingInfoFn.bind(null, segment, 'video', 'end');

  const finish = () => transmux({
    bytes,
    transmuxer: segment.transmuxer,
    audioAppendStart: segment.audioAppendStart,
    gopsToAlignWith: segment.gopsToAlignWith,
    remux: isMuxed,
    onData: (result) => {
      // 'combined' (muxed) results are treated as video downstream
      result.type = result.type === 'combined' ? 'video' : result.type;
      dataFn(segment, result);
    },
    onTrackInfo: (trackInfo) => {
      if (trackInfoFn) {
        if (isMuxed) {
          trackInfo.isMuxed = true;
        }
        trackInfoFn(segment, trackInfo);
      }
    },
    onAudioTimingInfo: (audioTimingInfo) => {
      // we only want the first start value we encounter
      if (audioStartFn && typeof audioTimingInfo.start !== 'undefined') {
        audioStartFn(audioTimingInfo.start);
        audioStartFn = null;
      }
      // we want to continually update the end time
      if (audioEndFn && typeof audioTimingInfo.end !== 'undefined') {
        audioEndFn(audioTimingInfo.end);
      }
    },
    onVideoTimingInfo: (videoTimingInfo) => {
      // we only want the first start value we encounter
      if (videoStartFn && typeof videoTimingInfo.start !== 'undefined') {
        videoStartFn(videoTimingInfo.start);
        videoStartFn = null;
      }
      // we want to continually update the end time
      if (videoEndFn && typeof videoTimingInfo.end !== 'undefined') {
        videoEndFn(videoTimingInfo.end);
      }
    },
    onVideoSegmentTimingInfo: (videoSegmentTimingInfo) => {
      videoSegmentTimingInfoFn(videoSegmentTimingInfo);
    },
    onAudioSegmentTimingInfo: (audioSegmentTimingInfo) => {
      audioSegmentTimingInfoFn(audioSegmentTimingInfo);
    },
    onId3: (id3Frames, dispatchType) => {
      id3Fn(segment, id3Frames, dispatchType);
    },
    onCaptions: (captions) => {
      captionsFn(segment, [captions]);
    },
    isEndOfTimeline,
    onEndedTimeline: () => {
      endedTimelineFn();
    },
    onTransmuxerLog,
    onDone: (result) => {
      if (!doneFn) {
        return;
      }
      result.type = result.type === 'combined' ? 'video' : result.type;
      doneFn(null, segment, result);
    }
  });

  // In the transmuxer, we don't yet have the ability to extract a "proper" start time.
  // Meaning cached frame data may corrupt our notion of where this segment
  // really starts. To get around this, probe for the info needed.
  workerCallback({
    action: 'probeTs',
    transmuxer: segment.transmuxer,
    data: bytes,
    baseStartTime: segment.baseStartTime,
    callback: (data) => {
      // the worker transfers the bytes back to us; keep both references current
      segment.bytes = bytes = data.data;

      const probeResult = data.result;

      if (probeResult) {
        trackInfoFn(segment, {
          hasAudio: probeResult.hasAudio,
          hasVideo: probeResult.hasVideo,
          isMuxed
        });
        trackInfoFn = null;

        // report probe-derived start times, then null the start callbacks so
        // the transmuxer's start values are ignored (see comment above)
        if (probeResult.hasAudio && !isMuxed) {
          audioStartFn(probeResult.audioStart);
        }
        if (probeResult.hasVideo) {
          videoStartFn(probeResult.videoStart);
        }
        audioStartFn = null;
        videoStartFn = null;
      }

      finish();
    }
  });
};
407
/**
 * Process downloaded (and, if needed, already-decrypted) segment bytes:
 * fmp4 segments are probed and passed through without transmuxing, ts/aac
 * segments are transmuxed, and anything else (e.g. VTT) is completed
 * directly via doneFn.
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 *                           from SegmentLoader
 * @param {Uint8Array|ArrayBuffer} bytes - the downloaded segment bytes
 * @param {Function} trackInfoFn - a callback that receives track info
 * @param {Function} timingInfoFn - a callback that receives timing info
 * @param {Function} videoSegmentTimingInfoFn - receives video timing info from
 *                                              the transmuxer
 * @param {Function} audioSegmentTimingInfoFn - receives audio timing info from
 *                                              the transmuxer
 * @param {Function} id3Fn - a callback that receives ID3 metadata
 * @param {Function} captionsFn - a callback that receives captions
 * @param {boolean} isEndOfTimeline - true if this segment represents the last
 *                                    segment in a timeline
 * @param {Function} endedTimelineFn - called when a timeline is ended; only
 *                                     called if isEndOfTimeline is true
 * @param {Function} dataFn - a callback that is executed when segment bytes
 *                            are available and ready to use
 * @param {Function} doneFn - a callback that is executed once processing ends
 * @param {Function} onTransmuxerLog - receives log messages from the transmuxer
 */
const handleSegmentBytes = ({
  segment,
  bytes,
  trackInfoFn,
  timingInfoFn,
  videoSegmentTimingInfoFn,
  audioSegmentTimingInfoFn,
  id3Fn,
  captionsFn,
  isEndOfTimeline,
  endedTimelineFn,
  dataFn,
  doneFn,
  onTransmuxerLog
}) => {
  let bytesAsUint8Array = new Uint8Array(bytes);

  // TODO:
  // We should have a handler that fetches the number of bytes required
  // to check if something is fmp4. This will allow us to save bandwidth
  // because we can only blacklist a playlist and abort requests
  // by codec after trackinfo triggers.
  if (isLikelyFmp4MediaSegment(bytesAsUint8Array)) {
    segment.isFmp4 = true;
    const {tracks} = segment.map;

    const trackInfo = {
      isFmp4: true,
      hasVideo: !!tracks.video,
      hasAudio: !!tracks.audio
    };

    // if we have a audio track, with a codec that is not set to
    // encrypted audio
    if (tracks.audio && tracks.audio.codec && tracks.audio.codec !== 'enca') {
      trackInfo.audioCodec = tracks.audio.codec;
    }

    // if we have a video track, with a codec that is not set to
    // encrypted video
    if (tracks.video && tracks.video.codec && tracks.video.codec !== 'encv') {
      trackInfo.videoCodec = tracks.video.codec;
    }

    if (tracks.video && tracks.audio) {
      trackInfo.isMuxed = true;
    }

    // since we don't support appending fmp4 data on progress, we know we have the full
    // segment here
    trackInfoFn(segment, trackInfo);
    // The probe doesn't provide the segment end time, so only callback with the start
    // time. The end time can be roughly calculated by the receiver using the duration.
    //
    // Note that the start time returned by the probe reflects the baseMediaDecodeTime, as
    // that is the true start of the segment (where the playback engine should begin
    // decoding).
    const finishLoading = (captions) => {
      // if the track still has audio at this point it is only possible
      // for it to be audio only. See `tracks.video && tracks.audio` if statement
      // above.
      // we use bytesAsUint8Array here since it may have been reassigned after
      // a transfer back from the worker
      dataFn(segment, {
        data: bytesAsUint8Array,
        type: trackInfo.hasAudio && !trackInfo.isMuxed ? 'audio' : 'video'
      });
      if (captions && captions.length) {
        captionsFn(segment, captions);
      }
      doneFn(null, segment, {});
    };

    workerCallback({
      action: 'probeMp4StartTime',
      timescales: segment.map.timescales,
      data: bytesAsUint8Array,
      transmuxer: segment.transmuxer,
      callback: ({data, startTime}) => {
        // transfer bytes back to us
        bytes = data.buffer;
        segment.bytes = bytesAsUint8Array = data;

        if (trackInfo.hasAudio && !trackInfo.isMuxed) {
          timingInfoFn(segment, 'audio', 'start', startTime);
        }

        if (trackInfo.hasVideo) {
          timingInfoFn(segment, 'video', 'start', startTime);
        }

        // Run through the CaptionParser in case there are captions.
        // Initialize CaptionParser if it hasn't been yet
        if (!tracks.video || !data.byteLength || !segment.transmuxer) {
          finishLoading();
          return;
        }

        workerCallback({
          action: 'pushMp4Captions',
          endAction: 'mp4Captions',
          transmuxer: segment.transmuxer,
          data: bytesAsUint8Array,
          timescales: segment.map.timescales,
          trackIds: [tracks.video.id],
          callback: (message) => {
            // transfer bytes back to us
            bytes = message.data.buffer;
            segment.bytes = bytesAsUint8Array = message.data;
            message.logs.forEach(function(log) {
              onTransmuxerLog(videojs.mergeOptions(log, {stream: 'mp4CaptionParser'}));
            });
            finishLoading(message.captions);
          }
        });
      }
    });
    return;
  }

  // VTT or other segments that don't need processing
  if (!segment.transmuxer) {
    doneFn(null, segment, {});
    return;
  }

  if (typeof segment.container === 'undefined') {
    segment.container = detectContainerForBytes(bytesAsUint8Array);
  }

  // only ts and aac containers can be transmuxed
  if (segment.container !== 'ts' && segment.container !== 'aac') {
    trackInfoFn(segment, {hasAudio: false, hasVideo: false});
    doneFn(null, segment, {});
    return;
  }

  // ts or aac
  transmuxAndNotify({
    segment,
    bytes,
    trackInfoFn,
    timingInfoFn,
    videoSegmentTimingInfoFn,
    audioSegmentTimingInfoFn,
    id3Fn,
    captionsFn,
    isEndOfTimeline,
    endedTimelineFn,
    dataFn,
    doneFn,
    onTransmuxerLog
  });
};
560
/**
 * Decrypt bytes via the decryption web worker, invoking the callback with
 * the decrypted result once the worker responds for this request id.
 *
 * @param {Object} options
 * @param {string} options.id - identifier used to match the worker's response
 * @param {Object} options.key - key bytes and iv used for decryption
 * @param {Uint8Array} options.encryptedBytes - the bytes to decrypt
 * @param {WebWorker} options.decryptionWorker - worker exposing AES-128 routines
 * @param {Function} callback - receives the decrypted bytes as a Uint8Array
 */
const decrypt = function({id, key, encryptedBytes, decryptionWorker}, callback) {
  const decryptionHandler = (event) => {
    // ignore messages intended for other in-flight decryption requests
    if (event.data.source !== id) {
      return;
    }

    decryptionWorker.removeEventListener('message', decryptionHandler);

    const decrypted = event.data.decrypted;

    callback(new Uint8Array(decrypted.bytes, decrypted.byteOffset, decrypted.byteLength));
  };

  decryptionWorker.addEventListener('message', decryptionHandler);

  // copy the key, since its buffer is transferred to the worker below
  const keyBytes = key.bytes.slice ?
    key.bytes.slice() :
    new Uint32Array(Array.prototype.slice.call(key.bytes));

  // incrementally decrypt the bytes
  decryptionWorker.postMessage(createTransferableMessage({
    source: id,
    encrypted: encryptedBytes,
    key: keyBytes,
    iv: key.iv
  }), [
    encryptedBytes.buffer,
    keyBytes.buffer
  ]);
};
596
/**
 * Decrypt the segment via the decryption web worker
 *
 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
 *                                       routines
 * @param {Object} segment - a simplified copy of the segmentInfo object
 *                           from SegmentLoader
 * @param {Function} trackInfoFn - a callback that receives track info
 * @param {Function} timingInfoFn - a callback that receives timing info
 * @param {Function} videoSegmentTimingInfoFn
 *        a callback that receives video timing info based on media times and
 *        any adjustments made by the transmuxer
 * @param {Function} audioSegmentTimingInfoFn
 *        a callback that receives audio timing info based on media times and
 *        any adjustments made by the transmuxer
 * @param {boolean} isEndOfTimeline
 *        true if this segment represents the last segment in a timeline
 * @param {Function} endedTimelineFn
 *        a callback made when a timeline is ended, will only be called if
 *        isEndOfTimeline is true
 * @param {Function} dataFn - a callback that is executed when segment bytes are available
 *                            and ready to use
 * @param {Function} doneFn - a callback that is executed after decryption has completed
 */
const decryptSegment = ({
  decryptionWorker,
  segment,
  trackInfoFn,
  timingInfoFn,
  videoSegmentTimingInfoFn,
  audioSegmentTimingInfoFn,
  id3Fn,
  captionsFn,
  isEndOfTimeline,
  endedTimelineFn,
  dataFn,
  doneFn,
  onTransmuxerLog
}) => {
  // once decrypted, hand the bytes off for normal segment processing
  const onDecrypted = (decryptedBytes) => {
    segment.bytes = decryptedBytes;

    handleSegmentBytes({
      segment,
      bytes: segment.bytes,
      trackInfoFn,
      timingInfoFn,
      videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn,
      id3Fn,
      captionsFn,
      isEndOfTimeline,
      endedTimelineFn,
      dataFn,
      doneFn,
      onTransmuxerLog
    });
  };

  decrypt({
    id: segment.requestId,
    key: segment.key,
    encryptedBytes: segment.encryptedBytes,
    decryptionWorker
  }, onDecrypted);
};
661
/**
 * This function waits for all XHRs to finish (with either success or failure)
 * before continuing processing via its callback. The function gathers errors
 * from each request into a single errors array so that the error status for
 * each request can be examined later.
 *
 * @param {Object} activeXhrs - an object that tracks all XHR requests
 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128 decryption
 *                                       routines
 * @param {Function} trackInfoFn - a callback that receives track info
 * @param {Function} timingInfoFn - a callback that receives timing info
 * @param {Function} videoSegmentTimingInfoFn
 *        a callback that receives video timing info based on media times and
 *        any adjustments made by the transmuxer
 * @param {Function} audioSegmentTimingInfoFn
 *        a callback that receives audio timing info based on media times and
 *        any adjustments made by the transmuxer
 * @param {Function} id3Fn - a callback that receives ID3 metadata
 * @param {Function} captionsFn - a callback that receives captions
 * @param {boolean} isEndOfTimeline
 *        true if this segment represents the last segment in a timeline
 * @param {Function} endedTimelineFn
 *        a callback made when a timeline is ended, will only be called if
 *        isEndOfTimeline is true
 * @param {Function} dataFn - a callback that is executed when segment bytes are available
 *                            and ready to use
 * @param {Function} doneFn - a callback that is executed after all resources have been
 *                            downloaded and any decryption completed
 * @return {Function} a per-request completion callback of the form (error, segment)
 */
const waitForCompletion = ({
  activeXhrs,
  decryptionWorker,
  trackInfoFn,
  timingInfoFn,
  videoSegmentTimingInfoFn,
  audioSegmentTimingInfoFn,
  id3Fn,
  captionsFn,
  isEndOfTimeline,
  endedTimelineFn,
  dataFn,
  doneFn,
  onTransmuxerLog
}) => {
  // number of requests that have completed without error
  let count = 0;
  // whether an error has already been reported for this batch of requests
  let didError = false;

  return (error, segment) => {
    // report at most one error per batch of requests
    if (didError) {
      return;
    }

    if (error) {
      didError = true;
      // If there are errors, we have to abort any outstanding requests
      abortAll(activeXhrs);

      // Even though the requests above are aborted, and in theory we could wait until we
      // handle the aborted events from those requests, there are some cases where we may
      // never get an aborted event. For instance, if the network connection is lost and
      // there were two requests, the first may have triggered an error immediately, while
      // the second request remains unsent. In that case, the aborted algorithm will not
      // trigger an abort: see https://xhr.spec.whatwg.org/#the-abort()-method
      //
      // We also can't rely on the ready state of the XHR, since the request that
      // triggered the connection error may also show as a ready state of 0 (unsent).
      // Therefore, we have to finish this group of requests immediately after the first
      // seen error.
      return doneFn(error, segment);
    }

    count += 1;

    if (count === activeXhrs.length) {
      const segmentFinish = function() {
        if (segment.encryptedBytes) {
          return decryptSegment({
            decryptionWorker,
            segment,
            trackInfoFn,
            timingInfoFn,
            videoSegmentTimingInfoFn,
            audioSegmentTimingInfoFn,
            id3Fn,
            captionsFn,
            isEndOfTimeline,
            endedTimelineFn,
            dataFn,
            doneFn,
            onTransmuxerLog
          });
        }
        // Otherwise, everything is ready just continue
        handleSegmentBytes({
          segment,
          bytes: segment.bytes,
          trackInfoFn,
          timingInfoFn,
          videoSegmentTimingInfoFn,
          audioSegmentTimingInfoFn,
          id3Fn,
          captionsFn,
          isEndOfTimeline,
          endedTimelineFn,
          dataFn,
          doneFn,
          onTransmuxerLog
        });
      };

      // Keep track of when *all* of the requests have completed
      segment.endOfAllRequests = Date.now();
      // an encrypted init segment must be decrypted and parsed before the
      // media segment itself can be processed
      if (segment.map && segment.map.encryptedBytes && !segment.map.bytes) {
        return decrypt({
          decryptionWorker,
          // add -init to the "id" to differentiate between segment
          // and init segment decryption, just in case they happen
          // at the same time at some point in the future.
          id: segment.requestId + '-init',
          encryptedBytes: segment.map.encryptedBytes,
          key: segment.map.key
        }, (decryptedBytes) => {
          segment.map.bytes = decryptedBytes;

          parseInitSegment(segment, (parseError) => {
            if (parseError) {
              abortAll(activeXhrs);
              return doneFn(parseError, segment);
            }

            segmentFinish();
          });
        });
      }

      segmentFinish();
    }
  };
};
802
/**
 * Calls the abort callback if any request within the batch was aborted. Will only call
 * the callback once per batch of requests, even if multiple were aborted.
 *
 * @param {Object} loadendState - state to check to see if the abort function was called
 * @param {Function} abortFn - callback to call for abort
 */
const handleLoadEnd = ({ loadendState, abortFn }) => (event) => {
  const wasAborted = event.target.aborted;

  if (!wasAborted || !abortFn || loadendState.calledAbortFn) {
    return;
  }

  abortFn();
  loadendState.calledAbortFn = true;
};
818
/**
 * Simple progress event callback handler that gathers some stats before
 * executing a provided callback with the `segment` object.
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 *                           from SegmentLoader
 * @param {Function} progressFn - a callback that is executed each time a progress event
 *                                is received
 * @param {Function} trackInfoFn - a callback that receives track info
 * @param {Function} timingInfoFn - a callback that receives timing info
 * @param {Function} videoSegmentTimingInfoFn
 *        a callback that receives video timing info based on media times and
 *        any adjustments made by the transmuxer
 * @param {Function} audioSegmentTimingInfoFn
 *        a callback that receives audio timing info based on media times and
 *        any adjustments made by the transmuxer
 * @param {boolean} isEndOfTimeline
 *        true if this segment represents the last segment in a timeline
 * @param {Function} endedTimelineFn
 *        a callback made when a timeline is ended, will only be called if
 *        isEndOfTimeline is true
 * @param {Function} dataFn - a callback that is executed when segment bytes are available
 *                            and ready to use
 * @return {Function} a handler for XHR progress events
 */
const handleProgress = ({
  segment,
  progressFn,
  trackInfoFn,
  timingInfoFn,
  videoSegmentTimingInfoFn,
  audioSegmentTimingInfoFn,
  id3Fn,
  captionsFn,
  isEndOfTimeline,
  endedTimelineFn,
  dataFn
}) => (event) => {
  const request = event.target;

  // aborted requests should not report progress
  if (request.aborted) {
    return;
  }

  segment.stats = videojs.mergeOptions(segment.stats, getProgressStats(event));

  // record the time that we receive the first byte of data
  const { stats } = segment;

  if (stats.bytesReceived && !stats.firstBytesReceivedAt) {
    stats.firstBytesReceivedAt = Date.now();
  }

  return progressFn(event, segment);
};
872
/**
 * Load all resources and does any processing necessary for a media-segment
 *
 * Features:
 *   decrypts the media-segment if it has a key uri and an iv
 *   aborts *all* requests if *any* one request fails
 *
 * The segment object, at minimum, has the following format:
 * {
 *   resolvedUri: String,
 *   [transmuxer]: Object,
 *   [byterange]: {
 *     offset: Number,
 *     length: Number
 *   },
 *   [key]: {
 *     resolvedUri: String
 *     [byterange]: {
 *       offset: Number,
 *       length: Number
 *     },
 *     iv: {
 *       bytes: Uint32Array
 *     }
 *   },
 *   [map]: {
 *     resolvedUri: String,
 *     [byterange]: {
 *       offset: Number,
 *       length: Number
 *     },
 *     [bytes]: Uint8Array
 *   }
 * }
 * ...where [name] denotes optional properties
 *
 * @param {Function} xhr - an instance of the xhr wrapper in xhr.js
 * @param {Object} xhrOptions - the base options to provide to all xhr requests
 * @param {WebWorker} decryptionWorker - a WebWorker interface to AES-128
 *                                       decryption routines
 * @param {Object} segment - a simplified copy of the segmentInfo object
 *                           from SegmentLoader
 * @param {Function} abortFn - a callback called (only once) if any piece of a request was
 *                             aborted
 * @param {Function} progressFn - a callback that receives progress events from the main
 *                                segment's xhr request
 * @param {Function} trackInfoFn - a callback that receives track info
 * @param {Function} timingInfoFn - a callback that receives timing info
 * @param {Function} videoSegmentTimingInfoFn
 *        a callback that receives video timing info based on media times and
 *        any adjustments made by the transmuxer
 * @param {Function} audioSegmentTimingInfoFn
 *        a callback that receives audio timing info based on media times and
 *        any adjustments made by the transmuxer
 * @param {Function} id3Fn - a callback that receives ID3 metadata
 * @param {Function} captionsFn - a callback that receives captions
 * @param {boolean} isEndOfTimeline
 *        true if this segment represents the last segment in a timeline
 * @param {Function} endedTimelineFn
 *        a callback made when a timeline is ended, will only be called if
 *        isEndOfTimeline is true
 * @param {Function} dataFn - a callback that receives data from the main segment's xhr
 *                            request, transmuxed if needed
 * @param {Function} doneFn - a callback that is executed only once all requests have
 *                            succeeded or failed
 * @return {Function} a function that, when invoked, immediately aborts all
 *                    outstanding requests
 */
export const mediaSegmentRequest = ({
  xhr,
  xhrOptions,
  decryptionWorker,
  segment,
  abortFn,
  progressFn,
  trackInfoFn,
  timingInfoFn,
  videoSegmentTimingInfoFn,
  audioSegmentTimingInfoFn,
  id3Fn,
  captionsFn,
  isEndOfTimeline,
  endedTimelineFn,
  dataFn,
  doneFn,
  onTransmuxerLog
}) => {
  const activeXhrs = [];
  // shared per-request completion callback; invokes doneFn once every request
  // in activeXhrs has finished (or immediately on the first error)
  const finishProcessingFn = waitForCompletion({
    activeXhrs,
    decryptionWorker,
    trackInfoFn,
    timingInfoFn,
    videoSegmentTimingInfoFn,
    audioSegmentTimingInfoFn,
    id3Fn,
    captionsFn,
    isEndOfTimeline,
    endedTimelineFn,
    dataFn,
    doneFn,
    onTransmuxerLog
  });

  // optionally, request the decryption key
  if (segment.key && !segment.key.bytes) {
    const objects = [segment.key];

    // when the init segment shares the media segment's key, a single key
    // request can populate both
    if (segment.map && !segment.map.bytes && segment.map.key && segment.map.key.resolvedUri === segment.key.resolvedUri) {
      objects.push(segment.map.key);
    }
    const keyRequestOptions = videojs.mergeOptions(xhrOptions, {
      uri: segment.key.resolvedUri,
      responseType: 'arraybuffer'
    });
    const keyRequestCallback = handleKeyResponse(segment, objects, finishProcessingFn);
    const keyXhr = xhr(keyRequestOptions, keyRequestCallback);

    activeXhrs.push(keyXhr);
  }

  // optionally, request the associated media init segment
  if (segment.map && !segment.map.bytes) {
    // the init segment's key may differ from the media segment's key and thus
    // require its own request
    const differentMapKey = segment.map.key && (!segment.key || segment.key.resolvedUri !== segment.map.key.resolvedUri);

    if (differentMapKey) {
      const mapKeyRequestOptions = videojs.mergeOptions(xhrOptions, {
        uri: segment.map.key.resolvedUri,
        responseType: 'arraybuffer'
      });
      const mapKeyRequestCallback = handleKeyResponse(segment, [segment.map.key], finishProcessingFn);
      const mapKeyXhr = xhr(mapKeyRequestOptions, mapKeyRequestCallback);

      activeXhrs.push(mapKeyXhr);
    }
    const initSegmentOptions = videojs.mergeOptions(xhrOptions, {
      uri: segment.map.resolvedUri,
      responseType: 'arraybuffer',
      headers: segmentXhrHeaders(segment.map)
    });
    const initSegmentRequestCallback = handleInitSegmentResponse({segment, finishProcessingFn});
    const initSegmentXhr = xhr(initSegmentOptions, initSegmentRequestCallback);

    activeXhrs.push(initSegmentXhr);
  }

  // request the media segment itself; a part's uri takes precedence when
  // requesting a partial segment
  const segmentRequestOptions = videojs.mergeOptions(xhrOptions, {
    uri: segment.part && segment.part.resolvedUri || segment.resolvedUri,
    responseType: 'arraybuffer',
    headers: segmentXhrHeaders(segment)
  });

  const segmentRequestCallback = handleSegmentResponse({
    segment,
    finishProcessingFn,
    responseType: segmentRequestOptions.responseType
  });
  const segmentXhr = xhr(segmentRequestOptions, segmentRequestCallback);

  segmentXhr.addEventListener(
    'progress',
    handleProgress({
      segment,
      progressFn,
      trackInfoFn,
      timingInfoFn,
      videoSegmentTimingInfoFn,
      audioSegmentTimingInfoFn,
      id3Fn,
      captionsFn,
      isEndOfTimeline,
      endedTimelineFn,
      dataFn
    })
  );
  activeXhrs.push(segmentXhr);

  // since all parts of the request must be considered, but should not make callbacks
  // multiple times, provide a shared state object
  const loadendState = {};

  activeXhrs.forEach((activeXhr) => {
    activeXhr.addEventListener(
      'loadend',
      handleLoadEnd({ loadendState, abortFn })
    );
  });

  return () => abortAll(activeXhrs);
};
1062};