import * as React from 'react';
import { ComponentOrHandle, SurfaceCreateEvent, GLSnapshot, ExpoWebGLRenderingContext, SnapshotOptions, BaseGLViewProps } from './GLView.types';
export declare type GLViewProps = {
    /**
     * Called when the OpenGL context is created, with the context object as a parameter. The context
     * object has an API mirroring WebGL's WebGLRenderingContext.
     */
    onContextCreate(gl: ExpoWebGLRenderingContext): void;
    /**
     * [iOS only] Number of samples for Apple's built-in multisampling.
     */
    msaaSamples: number;
    /**
     * A ref callback for the native GLView.
     */
    nativeRef_EXPERIMENTAL?(callback: ComponentOrHandle | null): any;
} & BaseGLViewProps;
/**
 * A component that acts as an OpenGL render target.
 */
export declare class GLView extends React.Component<GLViewProps> {
    static NativeView: any;
    static defaultProps: {
        msaaSamples: number;
    };
    /**
     * Creates a headless GL context that is not associated with any view.
     */
    static createContextAsync(): Promise<ExpoWebGLRenderingContext>;
    /**
     * Destroys the given GL context.
     */
    static destroyContextAsync(exgl?: ExpoWebGLRenderingContext | number): Promise<boolean>;
    /**
     * Takes a snapshot of the framebuffer of the given GL context.
     */
    static takeSnapshotAsync(exgl?: ExpoWebGLRenderingContext | number, options?: SnapshotOptions): Promise<GLSnapshot>;
    nativeRef: ComponentOrHandle;
    exglCtxId?: number;
    render(): JSX.Element;
    _setNativeRef: (nativeRef: ComponentOrHandle) => void;
    _onSurfaceCreate: ({ nativeEvent: { exglCtxId } }: SurfaceCreateEvent) => void;
    startARSessionAsync(): Promise<any>;
    /**
     * Creates a GL texture backed by the given camera component.
     */
    createCameraTextureAsync(cameraRefOrHandle: ComponentOrHandle): Promise<WebGLTexture>;
    /**
     * Destroys a GL object that was created on this view's context.
     */
    destroyObjectAsync(glObject: WebGLObject): Promise<boolean>;
    /**
     * Takes a snapshot of this view's framebuffer.
     */
    takeSnapshotAsync(options?: SnapshotOptions): Promise<GLSnapshot>;
}
declare type WebGLObjectId = any;
export declare class WebGLObject {
    id: WebGLObjectId;
    constructor(id: WebGLObjectId);
    toString(): string;
}
declare class WebGLTexture extends WebGLObject {
}
export {};
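/*
 * Usage sketch (not part of the generated declarations): a minimal, hedged example of
 * consuming this API from application code. It assumes the package entry point is
 * 'expo-gl', that it re-exports `ExpoWebGLRenderingContext`, and that the context
 * exposes `endFrameEXP()` to present a frame, as in expo-gl releases. The component
 * and callback names below are illustrative only.
 *
 *   import * as React from 'react';
 *   import { GLView, ExpoWebGLRenderingContext } from 'expo-gl';
 *
 *   function handleContextCreate(gl: ExpoWebGLRenderingContext) {
 *     // The context mirrors WebGLRenderingContext, so standard GL calls apply.
 *     gl.clearColor(0, 0.5, 0, 1);
 *     gl.clear(gl.COLOR_BUFFER_BIT);
 *     // expo-gl needs an explicit end-of-frame call to flush the frame to the view.
 *     gl.endFrameEXP();
 *   }
 *
 *   export function GreenView() {
 *     return <GLView style={{ flex: 1 }} msaaSamples={4} onContextCreate={handleContextCreate} />;
 *   }
 */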