1 | import { SyntheticPlatformEmitter } from '@unimodules/core';
|
/**
 * Derives an expo-av playback-status object from an HTMLMediaElement.
 *
 * @param {HTMLMediaElement | null | undefined} media - The media element to
 *   inspect; falsy means nothing is loaded.
 * @returns {object} `{ isLoaded: false, error: undefined }` when `media` is
 *   falsy, otherwise a full status snapshot of the element.
 */
function getStatusFromMedia(media) {
  if (!media) {
    return {
      isLoaded: false,
      error: undefined,
    };
  }
  // HTMLMediaElement exposes no single "is playing" flag; derive it from the
  // element's position, paused/ended flags, and readiness (> HAVE_CURRENT_DATA).
  const isPlaying = Boolean(
    media.currentTime > 0 && !media.paused && !media.ended && media.readyState > 2
  );
  return {
    isLoaded: true,
    uri: media.src,
    progressUpdateIntervalMillis: 100,
    // Media element times are in seconds; the expo-av API uses milliseconds.
    durationMillis: media.duration * 1000,
    positionMillis: media.currentTime * 1000,
    shouldPlay: media.autoplay,
    isPlaying,
    isBuffering: false,
    rate: media.playbackRate,
    // Pitch correction is not controllable through this API on the web.
    shouldCorrectPitch: false,
    volume: media.volume,
    isMuted: media.muted,
    isLooping: media.loop,
    didJustFinish: media.ended,
  };
}
|
/**
 * Applies the given (partial) playback status to an HTMLMediaElement.
 * Only the fields that are present on `status` are applied; all others are
 * left untouched.
 *
 * @param {HTMLMediaElement} media - The media element to mutate.
 * @param {object} status - Partial expo-av status: `positionMillis`,
 *   `shouldPlay`, `rate`, `volume`, `isMuted`, `isLooping` are honored here.
 * @returns {object} A fresh status snapshot of the element after the update.
 */
function setStatusForMedia(media, status) {
  if (status.positionMillis !== undefined) {
    // expo-av positions are milliseconds; the element expects seconds.
    media.currentTime = status.positionMillis / 1000;
  }
  if (status.shouldPlay !== undefined) {
    if (status.shouldPlay) {
      // In modern browsers play() returns a Promise that rejects when playback
      // is blocked (e.g. by the autoplay policy). The original code left it
      // floating, turning such rejections into unhandled promise rejections.
      // Older browsers return undefined, hence the guard.
      const playPromise = media.play();
      if (playPromise !== undefined) {
        playPromise.catch(() => {});
      }
    } else {
      media.pause();
    }
  }
  if (status.rate !== undefined) {
    media.playbackRate = status.rate;
  }
  if (status.volume !== undefined) {
    media.volume = status.volume;
  }
  if (status.isMuted !== undefined) {
    media.muted = status.isMuted;
  }
  if (status.isLooping !== undefined) {
    media.loop = status.isLooping;
  }
  return getStatusFromMedia(media);
}
|
/**
 * Web implementation of the `ExponentAV` native module. Video methods operate
 * on an existing media element supplied by the caller; sound methods manage
 * their own `Audio` element. Audio-mode and recording methods are no-ops on
 * the web.
 */
export default {
  get name() {
    return 'ExponentAV';
  },
  async getStatusForVideo(element) {
    return getStatusFromMedia(element);
  },
  async loadForVideo(element, nativeSource, fullInitialStatus) {
    // The <video> element handles loading itself; just report its status.
    return getStatusFromMedia(element);
  },
  async unloadForVideo(element) {
    return getStatusFromMedia(element);
  },
  async setStatusForVideo(element, status) {
    return setStatusForMedia(element, status);
  },
  async replayVideo(element, status) {
    return setStatusForMedia(element, status);
  },
  async setAudioMode() {},
  async setAudioIsEnabled() {},
  async getStatusForSound(element) {
    return getStatusFromMedia(element);
  },
  async loadForSound(nativeSource, fullInitialStatus) {
    const uri = typeof nativeSource === 'string' ? nativeSource : nativeSource.uri;
    const media = new Audio(uri);
    // Forward progress updates and errors to JS listeners, keyed by element.
    media.ontimeupdate = () => {
      SyntheticPlatformEmitter.emit('didUpdatePlaybackStatus', {
        key: media,
        status: getStatusFromMedia(media),
      });
    };
    media.onerror = () => {
      SyntheticPlatformEmitter.emit('ExponentAV.onError', {
        key: media,
        error: media.error.message,
      });
    };
    return [media, setStatusForMedia(media, fullInitialStatus)];
  },
  async unloadForSound(element) {
    // Standard teardown for an Audio element: stop, drop the source, and
    // reload so the browser releases the underlying resource.
    element.pause();
    element.removeAttribute('src');
    element.load();
    return getStatusFromMedia(element);
  },
  async setStatusForSound(element, status) {
    return setStatusForMedia(element, status);
  },
  async replaySound(element, status) {
    return setStatusForMedia(element, status);
  },
  // Recording is not supported on the web; these resolve to undefined.
  async getAudioRecordingStatus() {},
  async prepareAudioRecorder() {},
  async startAudioRecording() {},
  async pauseAudioRecording() {},
  async stopAudioRecording() {},
  async unloadAudioRecorder() {},
};
|
137 |
|
\ | No newline at end of file |