import { SyntheticPlatformEmitter } from '@unimodules/core';

import { AVPlaybackNativeSource, AVPlaybackStatus, AVPlaybackStatusToSet } from './AV';

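// Builds an AVPlaybackStatus snapshot from an HTMLMediaElement. When no element is
// given, the status reports as unloaded.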
function getStatusFromMedia(media?: HTMLMediaElement): AVPlaybackStatus {
  if (!media) {
    return {
      isLoaded: false,
      error: undefined,
    };
  }

  const isPlaying = !!(
    media.currentTime > 0 &&
    !media.paused &&
    !media.ended &&
    media.readyState > 2
  );

  const status: AVPlaybackStatus = {
    isLoaded: true,
    uri: media.src,
    progressUpdateIntervalMillis: 100, // TODO: Bacon: Add interval between calls
    durationMillis: media.duration * 1000,
    positionMillis: media.currentTime * 1000,
    // playableDurationMillis: media.buffered * 1000,
    // seekMillisToleranceBefore?: number
    // seekMillisToleranceAfter?: number
    shouldPlay: media.autoplay,
    isPlaying,
    isBuffering: false, // media.waiting,
    rate: media.playbackRate,
    // TODO: Bacon: This seems too complicated right now: https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-frequency
    shouldCorrectPitch: false,
    volume: media.volume,
    isMuted: media.muted,
    isLooping: media.loop,
    didJustFinish: media.ended,
  };

  return status;
}

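// Applies the supported fields of an AVPlaybackStatusToSet to the media element and
// returns the resulting status. Fields with no HTMLMediaElement equivalent are left
// commented out below.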
function setStatusForMedia(
  media: HTMLMediaElement,
  status: AVPlaybackStatusToSet
): AVPlaybackStatus {
  if (status.positionMillis !== undefined) {
    media.currentTime = status.positionMillis / 1000;
  }
  // if (status.progressUpdateIntervalMillis !== undefined) {
  //   media.progressUpdateIntervalMillis = status.progressUpdateIntervalMillis;
  // }
  // if (status.seekMillisToleranceBefore !== undefined) {
  //   media.seekMillisToleranceBefore = status.seekMillisToleranceBefore;
  // }
  // if (status.seekMillisToleranceAfter !== undefined) {
  //   media.seekMillisToleranceAfter = status.seekMillisToleranceAfter;
  // }
  // if (status.shouldCorrectPitch !== undefined) {
  //   media.shouldCorrectPitch = status.shouldCorrectPitch;
  // }
  if (status.shouldPlay !== undefined) {
    if (status.shouldPlay) {
      media.play();
    } else {
      media.pause();
    }
  }
  if (status.rate !== undefined) {
    media.playbackRate = status.rate;
  }
  if (status.volume !== undefined) {
    media.volume = status.volume;
  }
  if (status.isMuted !== undefined) {
    media.muted = status.isMuted;
  }
  if (status.isLooping !== undefined) {
    media.loop = status.isLooping;
  }

  return getStatusFromMedia(media);
}

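// Web implementation of the ExponentAV module, backed by HTMLMediaElement / Audio.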
export default {
  get name(): string {
    return 'ExponentAV';
  },
  async getStatusForVideo(element: HTMLMediaElement): Promise<AVPlaybackStatus> {
    return getStatusFromMedia(element);
  },
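  // Note: `nativeSource` and `fullInitialStatus` are unused here; on web, loading and
  // unloading a video only report the element's current status.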
  async loadForVideo(
    element: HTMLMediaElement,
    nativeSource: AVPlaybackNativeSource,
    fullInitialStatus: AVPlaybackStatusToSet
  ): Promise<AVPlaybackStatus> {
    return getStatusFromMedia(element);
  },
  async unloadForVideo(element: HTMLMediaElement): Promise<AVPlaybackStatus> {
    return getStatusFromMedia(element);
  },
  async setStatusForVideo(
    element: HTMLMediaElement,
    status: AVPlaybackStatusToSet
  ): Promise<AVPlaybackStatus> {
    return setStatusForMedia(element, status);
  },
  async replayVideo(
    element: HTMLMediaElement,
    status: AVPlaybackStatusToSet
  ): Promise<AVPlaybackStatus> {
    return setStatusForMedia(element, status);
  },
  /* Audio */
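  // Audio mode and global audio enablement are no-ops on web.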
  async setAudioMode() {},
  async setAudioIsEnabled() {},
  async getStatusForSound(element: HTMLMediaElement) {
    return getStatusFromMedia(element);
  },
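  // Creates an Audio element for the given source and forwards `timeupdate` and `error`
  // events through SyntheticPlatformEmitter as playback-status updates.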
  async loadForSound(
    nativeSource: string | { uri: string; [key: string]: any },
    fullInitialStatus: AVPlaybackStatusToSet
  ): Promise<[HTMLMediaElement, AVPlaybackStatus]> {
    const source = typeof nativeSource === 'string' ? nativeSource : nativeSource.uri;
    const media = new Audio(source);

    media.ontimeupdate = () => {
      SyntheticPlatformEmitter.emit('didUpdatePlaybackStatus', {
        key: media,
        status: getStatusFromMedia(media),
      });
    };

    media.onerror = () => {
      SyntheticPlatformEmitter.emit('ExponentAV.onError', {
        key: media,
        error: media.error!.message,
      });
    };

    const status = setStatusForMedia(media, fullInitialStatus);

    return [media, status];
  },
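  // Pauses playback, removes the src attribute, and reloads the element so it releases
  // the underlying media resource.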
  async unloadForSound(element: HTMLMediaElement) {
    element.pause();
    element.removeAttribute('src');
    element.load();
    return getStatusFromMedia(element);
  },
  async setStatusForSound(
    element: HTMLMediaElement,
    status: AVPlaybackStatusToSet
  ): Promise<AVPlaybackStatus> {
    return setStatusForMedia(element, status);
  },
  async replaySound(
    element: HTMLMediaElement,
    status: AVPlaybackStatusToSet
  ): Promise<AVPlaybackStatus> {
    return setStatusForMedia(element, status);
  },

  /* Recording */
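  // Audio recording is not supported in this web implementation; the methods below are
  // no-op stubs.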
  // async setUnloadedCallbackForAndroidRecording() {},
  async getAudioRecordingStatus() {},
  async prepareAudioRecorder() {},
  async startAudioRecording() {},
  async pauseAudioRecording() {},
  async stopAudioRecording() {},
  async unloadAudioRecorder() {},
};