import { TickerClockSource } from "../clock/Ticker.js";
import { Seconds } from "../type/Units.js";
import { AnyAudioContext } from "./AudioContext.js";
import { BaseContext, ContextLatencyHint } from "./BaseContext.js";
import type { DrawClass as Draw } from "../util/Draw.js";
import type { DestinationClass as Destination } from "./Destination.js";
import type { TransportClass as Transport } from "../clock/Transport.js";
import type { ListenerClass as Listener } from "./Listener.js";

export interface ContextOptions {
    clockSource: TickerClockSource;
    latencyHint: ContextLatencyHint;
    lookAhead: Seconds;
    updateInterval: Seconds;
    context: AnyAudioContext;
}

export interface ContextTimeoutEvent {
    callback: (...args: any[]) => void;
    id: number;
    time: Seconds;
}

export declare class Context extends BaseContext {
    readonly name: string;
    protected readonly _context: AnyAudioContext;
    private readonly _ticker;
    private _latencyHint;
    private _constants;
    private _timeouts;
    private _timeoutIds;
    private _transport;
    private _listener;
    private _destination;
    private _draw;
    private _initialized;
    private _closeStarted;
    readonly isOffline: boolean;
    constructor(context?: AnyAudioContext);
    constructor(options?: Partial<ContextOptions>);
    static getDefaults(): ContextOptions;
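    /*
     * Construction sketch (illustrative, not part of the declaration): either wrap an
     * existing AudioContext or pass a partial options object; unspecified options fall
     * back to the values from getDefaults().
     *
     *     const context = new Context({
     *         latencyHint: "playback",
     *         lookAhead: 0.2,
     *     });
     */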
    /**
     * Finish setting up the context. **You usually do not need to do this manually.**
     */
    private initialize;
    createAnalyser(): AnalyserNode;
    createOscillator(): OscillatorNode;
    createBufferSource(): AudioBufferSourceNode;
    createBiquadFilter(): BiquadFilterNode;
    createBuffer(numberOfChannels: number, length: number, sampleRate: number): AudioBuffer;
    createChannelMerger(numberOfInputs?: number | undefined): ChannelMergerNode;
    createChannelSplitter(numberOfOutputs?: number | undefined): ChannelSplitterNode;
    createConstantSource(): ConstantSourceNode;
    createConvolver(): ConvolverNode;
    createDelay(maxDelayTime?: number | undefined): DelayNode;
    createDynamicsCompressor(): DynamicsCompressorNode;
    createGain(): GainNode;
    createIIRFilter(feedForward: number[] | Float32Array, feedback: number[] | Float32Array): IIRFilterNode;
    createPanner(): PannerNode;
    createPeriodicWave(real: number[] | Float32Array, imag: number[] | Float32Array, constraints?: PeriodicWaveConstraints | undefined): PeriodicWave;
    createStereoPanner(): StereoPannerNode;
    createWaveShaper(): WaveShaperNode;
    createMediaStreamSource(stream: MediaStream): MediaStreamAudioSourceNode;
    createMediaElementSource(element: HTMLMediaElement): MediaElementAudioSourceNode;
    createMediaStreamDestination(): MediaStreamAudioDestinationNode;
    decodeAudioData(audioData: ArrayBuffer): Promise<AudioBuffer>;
    /**
     * The current time in seconds of the AudioContext.
     */
    get currentTime(): Seconds;
    /**
     * The current state of the AudioContext.
     */
    get state(): AudioContextState;
    /**
     * The sample rate of the AudioContext, in samples per second.
     */
    get sampleRate(): number;
    /**
     * The audio listener for the Context, used for spatialization.
     */
    get listener(): Listener;
    set listener(l: Listener);
    /**
     * There is only one Transport per Context. It is created on initialization.
     */
    get transport(): Transport;
    set transport(t: Transport);
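    /*
     * Usage sketch (illustrative; assumes the TransportClass exposes a `bpm`
     * parameter and a `start()` method, as in the Tone.js Transport):
     *
     *     context.transport.bpm.value = 120;
     *     context.transport.start();
     */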
    /**
     * This is the Draw object for the context, which is useful for synchronizing the draw frame with the Tone.js clock.
     */
    get draw(): Draw;
    set draw(d: Draw);
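    /*
     * Usage sketch (illustrative; assumes DrawClass exposes `schedule(callback, time)`
     * as in Tone.js): defer a visual update so it lines up with an audio event.
     *
     *     context.draw.schedule(() => {
     *         // update the UI here, close to when the event is heard
     *     }, context.now());
     */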
    /**
     * A reference to the Context's destination node.
     */
    get destination(): Destination;
    set destination(d: Destination);
    /**
     * Maps a module name to the promise returned by the addModule method.
     */
    private _workletPromise;
    /**
     * Create an audio worklet node from a name and options. The module
     * must first be loaded using {@link addAudioWorkletModule}.
     */
    createAudioWorkletNode(name: string, options?: Partial<AudioWorkletNodeOptions>): AudioWorkletNode;
    /**
     * Add an AudioWorkletProcessor module.
     * @param url The url of the module
     */
    addAudioWorkletModule(url: string): Promise<void>;
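    /*
     * Usage sketch (illustrative; "./bit-crusher.worklet.js" and "bit-crusher" are
     * hypothetical names): the module must be added before a node can be created from it.
     *
     *     await context.addAudioWorkletModule("./bit-crusher.worklet.js");
     *     const crusher = context.createAudioWorkletNode("bit-crusher");
     */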
    /**
     * Returns a promise which resolves when all of the worklets have been loaded on this context.
     */
    protected workletsAreReady(): Promise<void>;
    /**
     * How often the interval callback is invoked. This number corresponds to
     * how responsive the scheduling can be. Setting to 0 will result in the
     * lowest practical interval based on context properties.
     * context.updateInterval + context.lookAhead gives you the total latency
     * between scheduling an event and hearing it.
     */
    get updateInterval(): Seconds;
    set updateInterval(interval: Seconds);
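    /*
     * Sketch of the total scheduling latency described above (illustrative):
     *
     *     const totalLatency = context.updateInterval + context.lookAhead;
     *     console.log(`events are heard ~${totalLatency}s after they are scheduled`);
     */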
    /**
     * What the source of the clock is, either "worker" (default),
     * "timeout", or "offline" (none).
     */
    get clockSource(): TickerClockSource;
    set clockSource(type: TickerClockSource);
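    /*
     * Usage sketch (illustrative): fall back to the "timeout" clock source in
     * environments without Web Worker support.
     *
     *     if (typeof Worker === "undefined") {
     *         context.clockSource = "timeout";
     *     }
     */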
    /**
     * The amount of time into the future events are scheduled. Giving Web Audio
     * a short amount of time into the future to schedule events can reduce clicks and
     * improve performance. This value can be set to 0 to get the lowest latency.
     * Adjusting this value also affects the {@link updateInterval}.
     */
    get lookAhead(): Seconds;
    set lookAhead(time: Seconds);
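    /*
     * Usage sketch (illustrative): trade safety for responsiveness by shrinking the
     * lookAhead, or remove it entirely for the lowest latency.
     *
     *     context.lookAhead = 0;   // lowest latency, may click under load
     *     context.lookAhead = 0.1; // schedule 100ms ahead for smoother playback
     */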
    private _lookAhead;
    get latencyHint(): ContextLatencyHint | Seconds;
    get rawContext(): AnyAudioContext;
    now(): Seconds;
    immediate(): Seconds;
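    /*
     * Usage sketch (illustrative; assumes, as in Tone.js, that now() includes the
     * lookAhead while immediate() does not):
     *
     *     const osc = context.createOscillator();
     *     osc.connect(context.rawContext.destination);
     *     osc.start(context.now());       // scheduled slightly in the future
     *     osc.stop(context.now() + 0.5);
     */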
    resume(): Promise<void>;
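    /*
     * Usage sketch (illustrative): browsers only allow audio to start after a user
     * gesture, so resume the context from an event handler.
     *
     *     document.querySelector("button")?.addEventListener("click", async () => {
     *         await context.resume();
     *     });
     */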
    close(): Promise<void>;
    getConstant(val: number): AudioBufferSourceNode;
    dispose(): this;
    private _timeoutLoop;
    setTimeout(fn: (...args: any[]) => void, timeout: Seconds): number;
    clearTimeout(id: number): this;
    clearInterval(id: number): this;
    setInterval(fn: (...args: any[]) => void, interval: Seconds): number;
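    /*
     * Usage sketch (illustrative; assumes these helpers run on the context's clock
     * with times in seconds, as in Tone.js):
     *
     *     const id = context.setInterval(() => console.log("tick"), 0.25);
     *     context.setTimeout(() => context.clearInterval(id), 2);
     */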
}