/**
 * Logs a frame number.
 *
 * @category Debug
 */
declare const TRACEID_RENDER_FRAME: "RenderFrame";
/**
 * Logs a frame time.
 *
 * @category Debug
 */
declare const TRACEID_RENDER_FRAME_TIME: "RenderFrameTime";
/**
 * Logs basic information about generated render passes.
 *
 * @category Debug
 */
declare const TRACEID_RENDER_PASS: "RenderPass";
/**
 * Logs additional detail for render passes.
 *
 * @category Debug
 */
declare const TRACEID_RENDER_PASS_DETAIL: "RenderPassDetail";
/**
 * Logs render actions created by the layer composition. Only executes when the
 * layer composition changes.
 *
 * @category Debug
 */
declare const TRACEID_RENDER_ACTION: "RenderAction";
/**
 * Logs the allocation of render targets.
 *
 * @category Debug
 */
declare const TRACEID_RENDER_TARGET_ALLOC: "RenderTargetAlloc";
/**
 * Logs the allocation of textures.
 *
 * @category Debug
 */
declare const TRACEID_TEXTURE_ALLOC: "TextureAlloc";
/**
 * Logs the creation of shaders.
 *
 * @category Debug
 */
declare const TRACEID_SHADER_ALLOC: "ShaderAlloc";
/**
 * Logs the compilation time of shaders.
 *
 * @category Debug
 */
declare const TRACEID_SHADER_COMPILE: "ShaderCompile";
/**
 * Logs the vram use by the textures.
 *
 * @category Debug
 */
declare const TRACEID_VRAM_TEXTURE: "VRAM.Texture";
/**
 * Logs the vram use by the vertex buffers.
 *
 * @category Debug
 */
declare const TRACEID_VRAM_VB: "VRAM.Vb";
/**
 * Logs the vram use by the index buffers.
 *
 * @category Debug
 */
declare const TRACEID_VRAM_IB: "VRAM.Ib";
/**
 * Logs the vram use by the storage buffers.
 *
 * @category Debug
 */
declare const TRACEID_VRAM_SB: "VRAM.Sb";
/**
 * Logs the creation of bind groups.
 *
 * @category Debug
 */
declare const TRACEID_BINDGROUP_ALLOC: "BindGroupAlloc";
/**
 * Logs the creation of bind group formats.
 *
 * @category Debug
 */
declare const TRACEID_BINDGROUPFORMAT_ALLOC: "BindGroupFormatAlloc";
/**
 * Logs the creation of render pipelines. WebGPU only.
 *
 * @category Debug
 */
declare const TRACEID_RENDERPIPELINE_ALLOC: "RenderPipelineAlloc";
/**
 * Logs the creation of compute pipelines. WebGPU only.
 *
 * @category Debug
 */
declare const TRACEID_COMPUTEPIPELINE_ALLOC: "ComputePipelineAlloc";
/**
 * Logs the creation of pipeline layouts. WebGPU only.
 *
 * @category Debug
 */
declare const TRACEID_PIPELINELAYOUT_ALLOC: "PipelineLayoutAlloc";
/**
 * Logs the internal debug information for Elements.
 *
 * @category Debug
 */
declare const TRACEID_ELEMENT: "Element";
/**
 * Logs the vram use by all textures in memory.
 *
 * @category Debug
 */
declare const TRACEID_TEXTURES: "Textures";
/**
 * Logs all assets in the asset registry.
 *
 * @category Debug
 */
declare const TRACEID_ASSETS: "Assets";
/**
 * Logs the render queue commands.
 *
 * @category Debug
 */
declare const TRACEID_RENDER_QUEUE: "RenderQueue";
/**
 * Logs the loaded GSplat resources for individual LOD levels of an octree.
 *
 * @category Debug
 */
declare const TRACEID_OCTREE_RESOURCES: "OctreeResources";
/**
 * Logs the GPU timings.
 *
 * @category Debug
 */
declare const TRACEID_GPU_TIMINGS: "GpuTimings";

/**
 * A linear interpolation scheme.
 *
 * @category Math
 */
declare const CURVE_LINEAR: 0;
/**
 * A smooth step interpolation scheme.
 *
 * @category Math
 */
declare const CURVE_SMOOTHSTEP: 1;
/**
 * Cardinal spline interpolation scheme. For a Catmull-Rom spline, specify a curve tension of 0.5.
 *
 * @category Math
 */
declare const CURVE_SPLINE: 4;
/**
 * A stepped interpolator that does not perform any blending.
 *
 * @category Math
 */
declare const CURVE_STEP: 5;

/**
 * Ignores the integer part of texture coordinates, using only the fractional part.
 *
 * @category Graphics
 */
declare const ADDRESS_REPEAT: 0;
/**
 * Clamps texture coordinate to the range 0 to 1.
 *
 * @category Graphics
 */
declare const ADDRESS_CLAMP_TO_EDGE: 1;
/**
 * Texture coordinate to be set to the fractional part if the integer part is even. If the integer
 * part is odd, then the texture coordinate is set to 1 minus the fractional part.
 *
 * @category Graphics
 */
declare const ADDRESS_MIRRORED_REPEAT: 2;
/**
 * Multiply all fragment components by zero.
 *
 * @category Graphics
 */
declare const BLENDMODE_ZERO: 0;
/**
 * Multiply all fragment components by one.
 *
 * @category Graphics
 */
declare const BLENDMODE_ONE: 1;
/**
 * Multiply all fragment components by the components of the source fragment.
 *
 * @category Graphics
 */
declare const BLENDMODE_SRC_COLOR: 2;
/**
 * Multiply all fragment components by one minus the components of the source fragment.
 *
 * @category Graphics
 */
declare const BLENDMODE_ONE_MINUS_SRC_COLOR: 3;
/**
 * Multiply all fragment components by the components of the destination fragment.
 *
 * @category Graphics
 */
declare const BLENDMODE_DST_COLOR: 4;
/**
 * Multiply all fragment components by one minus the components of the destination fragment.
 *
 * @category Graphics
 */
declare const BLENDMODE_ONE_MINUS_DST_COLOR: 5;
/**
 * Multiply all fragment components by the alpha value of the source fragment.
 *
 * @category Graphics
 */
declare const BLENDMODE_SRC_ALPHA: 6;
/**
 * Multiply all fragment components by the minimum of the alpha value of the source fragment and
 * one minus the alpha value of the destination fragment.
 *
 * @category Graphics
 */
declare const BLENDMODE_SRC_ALPHA_SATURATE: 7;
/**
 * Multiply all fragment components by one minus the alpha value of the source fragment.
 *
 * @category Graphics
 */
declare const BLENDMODE_ONE_MINUS_SRC_ALPHA: 8;
/**
 * Multiply all fragment components by the alpha value of the destination fragment.
 *
 * @category Graphics
 */
declare const BLENDMODE_DST_ALPHA: 9;
/**
 * Multiply all fragment components by one minus the alpha value of the destination fragment.
 *
 * @category Graphics
 */
declare const BLENDMODE_ONE_MINUS_DST_ALPHA: 10;
/**
 * Multiplies all fragment components by a constant.
 *
 * @category Graphics
 */
declare const BLENDMODE_CONSTANT: 11;
/**
 * Multiplies all fragment components by 1 minus a constant.
 *
 * @category Graphics
 */
declare const BLENDMODE_ONE_MINUS_CONSTANT: 12;
/**
 * Add the results of the source and destination fragment multiplies.
 *
 * @category Graphics
 */
declare const BLENDEQUATION_ADD: 0;
/**
 * Subtract the results of the source and destination fragment multiplies.
 *
 * @category Graphics
 */
declare const BLENDEQUATION_SUBTRACT: 1;
/**
 * Reverse and subtract the results of the source and destination fragment multiplies.
 *
 * @category Graphics
 */
declare const BLENDEQUATION_REVERSE_SUBTRACT: 2;
/**
 * Use the smallest value.
 *
 * @category Graphics
 */
declare const BLENDEQUATION_MIN: 3;
/**
 * Use the largest value.
 *
 * @category Graphics
 */
declare const BLENDEQUATION_MAX: 4;
/**
 * A flag utilized during the construction of a {@link StorageBuffer} to make it available for read
 * access by CPU.
 *
 * @category Graphics
 */
declare const BUFFERUSAGE_READ: 1;
/**
 * A flag utilized during the construction of a {@link StorageBuffer} to make it available for write
 * access by CPU.
 *
 * @category Graphics
 */
declare const BUFFERUSAGE_WRITE: 2;
/**
 * A flag utilized during the construction of a {@link StorageBuffer} to ensure its compatibility
 * when used as a source of a copy operation.
 *
 * @category Graphics
 */
declare const BUFFERUSAGE_COPY_SRC: 4;
/**
 * A flag utilized during the construction of a {@link StorageBuffer} to ensure its compatibility
 * when used as a destination of a copy operation, or as a target of a write operation.
 *
 * @category Graphics
 */
declare const BUFFERUSAGE_COPY_DST: 8;
/**
 * A flag utilized during the construction of a {@link StorageBuffer} to ensure its compatibility
 * when used as an index buffer.
 *
 * @category Graphics
 */
declare const BUFFERUSAGE_INDEX: 16;
/**
 * A flag utilized during the construction of a {@link StorageBuffer} to ensure its compatibility
 * when used as a vertex buffer.
 *
 * @category Graphics
 */
declare const BUFFERUSAGE_VERTEX: 32;
/**
 * A flag utilized during the construction of a {@link StorageBuffer} to ensure its compatibility
 * when used as a uniform buffer.
 *
 * @category Graphics
 */
declare const BUFFERUSAGE_UNIFORM: 64;
/**
 * An internal flag utilized during the construction of a {@link StorageBuffer} to ensure its
 * compatibility when used as a storage buffer.
 * This flag is hidden as it's automatically used by the StorageBuffer constructor.
 *
 * @category Graphics
 * @ignore
 */
declare const BUFFERUSAGE_STORAGE: 128;
/**
 * A flag utilized during the construction of a {@link StorageBuffer} to allow it to store indirect
 * command arguments.
 * TODO: This flag is hidden till the feature is implemented.
 *
 * @category Graphics
 * @ignore
 */
declare const BUFFERUSAGE_INDIRECT: 256;
/**
 * The data store contents will be modified once and used many times.
 *
 * @category Graphics
 */
declare const BUFFER_STATIC: 0;
/**
 * The data store contents will be modified repeatedly and used many times.
 *
 * @category Graphics
 */
declare const BUFFER_DYNAMIC: 1;
/**
 * The data store contents will be modified once and used at most a few times.
 *
 * @category Graphics
 */
declare const BUFFER_STREAM: 2;
/**
 * The data store contents will be modified repeatedly on the GPU and used many times. Optimal for
 * transform feedback usage.
 *
 * @category Graphics
 */
declare const BUFFER_GPUDYNAMIC: 3;
/**
 * Clear the color buffer.
 *
 * @category Graphics
 */
declare const CLEARFLAG_COLOR: 1;
/**
 * Clear the depth buffer.
 *
 * @category Graphics
 */
declare const CLEARFLAG_DEPTH: 2;
/**
 * Clear the stencil buffer.
 *
 * @category Graphics
 */
declare const CLEARFLAG_STENCIL: 4;
/**
 * The positive X face of a cubemap.
 *
 * @category Graphics
 */
declare const CUBEFACE_POSX: 0;
/**
 * The negative X face of a cubemap.
 *
 * @category Graphics
 */
declare const CUBEFACE_NEGX: 1;
/**
 * The positive Y face of a cubemap.
 *
 * @category Graphics
 */
declare const CUBEFACE_POSY: 2;
/**
 * The negative Y face of a cubemap.
 *
 * @category Graphics
 */
declare const CUBEFACE_NEGY: 3;
/**
 * The positive Z face of a cubemap.
 *
 * @category Graphics
 */
declare const CUBEFACE_POSZ: 4;
/**
 * The negative Z face of a cubemap.
 *
 * @category Graphics
 */
declare const CUBEFACE_NEGZ: 5;
/**
 * No triangles are culled.
 *
 * @category Graphics
 */
declare const CULLFACE_NONE: 0;
/**
 * Triangles facing away from the view direction are culled.
 *
 * @category Graphics
 */
declare const CULLFACE_BACK: 1;
/**
 * Triangles facing the view direction are culled.
 *
 * @category Graphics
 */
declare const CULLFACE_FRONT: 2;
/**
 * Triangles are culled regardless of their orientation with respect to the view direction. Note
 * that point or line primitives are unaffected by this render state.
 *
 * @ignore
 * @category Graphics
 */
declare const CULLFACE_FRONTANDBACK: 3;
/**
 * The counterclockwise winding. Specifies whether polygons are front- or back-facing by setting a winding orientation.
 *
 * @category Graphics
 */
declare const FRONTFACE_CCW: 0;
/**
 * The clockwise winding. Specifies whether polygons are front- or back-facing by setting a winding orientation.
 *
 * @category Graphics
 */
declare const FRONTFACE_CW: 1;
/**
 * Point sample filtering.
 *
 * @category Graphics
 */
declare const FILTER_NEAREST: 0;
/**
 * Bilinear filtering.
 *
 * @category Graphics
 */
declare const FILTER_LINEAR: 1;
/**
 * Use the nearest neighbor in the nearest mipmap level.
 *
 * @category Graphics
 */
declare const FILTER_NEAREST_MIPMAP_NEAREST: 2;
/**
 * Linearly interpolate in the nearest mipmap level.
 *
 * @category Graphics
 */
declare const FILTER_NEAREST_MIPMAP_LINEAR: 3;
/**
 * Use the nearest neighbor after linearly interpolating between mipmap levels.
 *
 * @category Graphics
 */
declare const FILTER_LINEAR_MIPMAP_NEAREST: 4;
/**
 * Linearly interpolate both the mipmap levels and between texels.
 *
 * @category Graphics
 */
declare const FILTER_LINEAR_MIPMAP_LINEAR: 5;
/**
 * Never pass.
 *
 * @category Graphics
 */
declare const FUNC_NEVER: 0;
/**
 * Pass if (ref & mask) < (stencil & mask).
 *
 * @category Graphics
 */
declare const FUNC_LESS: 1;
/**
 * Pass if (ref & mask) == (stencil & mask).
 *
 * @category Graphics
 */
declare const FUNC_EQUAL: 2;
/**
 * Pass if (ref & mask) <= (stencil & mask).
 *
 * @category Graphics
 */
declare const FUNC_LESSEQUAL: 3;
/**
 * Pass if (ref & mask) > (stencil & mask).
 *
 * @category Graphics
 */
declare const FUNC_GREATER: 4;
/**
 * Pass if (ref & mask) != (stencil & mask).
 *
 * @category Graphics
 */
declare const FUNC_NOTEQUAL: 5;
/**
 * Pass if (ref & mask) >= (stencil & mask).
 *
 * @category Graphics
 */
declare const FUNC_GREATEREQUAL: 6;
/**
 * Always pass.
 *
 * @category Graphics
 */
declare const FUNC_ALWAYS: 7;
/**
 * 8-bit unsigned vertex indices (0 to 255).
 *
 * @category Graphics
 */
declare const INDEXFORMAT_UINT8: 0;
/**
 * 16-bit unsigned vertex indices (0 to 65,535).
 *
 * @category Graphics
 */
declare const INDEXFORMAT_UINT16: 1;
/**
 * 32-bit unsigned vertex indices (0 to 4,294,967,295).
 *
 * @category Graphics
 */
declare const INDEXFORMAT_UINT32: 2;
/**
 * Byte size of index formats.
 *
 * @category Graphics
 * @ignore
 */
declare const indexFormatByteSize: number[];
/**
 * 8-bit alpha.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_A8: 0;
/**
 * 8-bit luminance.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_L8: 1;
/**
 * 8-bit luminance with 8-bit alpha.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_LA8: 2;
/**
 * 16-bit RGB (5-bits for red channel, 6 for green and 5 for blue).
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGB565: 3;
/**
 * 16-bit RGBA (5-bits for red channel, 5 for green, 5 for blue with 1-bit alpha).
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGBA5551: 4;
/**
 * 16-bit RGBA (4-bits for red channel, 4 for green, 4 for blue with 4-bit alpha).
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGBA4: 5;
/**
 * 24-bit RGB (8-bits for red channel, 8 for green and 8 for blue).
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGB8: 6;
/**
 * 32-bit RGBA (8-bits for red channel, 8 for green, 8 for blue with 8-bit alpha).
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGBA8: 7;
/**
 * Block compressed format storing 16 input pixels in 64 bits of output, consisting of two 16-bit
 * RGB 5:6:5 color values and a 4x4 two bit lookup table.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_DXT1: 8;
/**
 * Block compressed format storing 16 input pixels (corresponding to a 4x4 pixel block) into 128
 * bits of output, consisting of 64 bits of alpha channel data (4 bits for each pixel) followed by
 * 64 bits of color data; encoded the same way as DXT1.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_DXT3: 9;
/**
 * Block compressed format storing 16 input pixels into 128 bits of output, consisting of 64 bits
 * of alpha channel data (two 8 bit alpha values and a 4x4 3 bit lookup table) followed by 64 bits
 * of color data (encoded the same way as DXT1).
 *
 * @category Graphics
 */
declare const PIXELFORMAT_DXT5: 10;
/**
 * 16-bit floating point RGB (16-bit float for each red, green and blue channels).
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGB16F: 11;
/**
 * 16-bit floating point RGBA (16-bit float for each red, green, blue and alpha channels).
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGBA16F: 12;
/**
 * 32-bit floating point RGB (32-bit float for each red, green and blue channels).
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGB32F: 13;
/**
 * 32-bit floating point RGBA (32-bit float for each red, green, blue and alpha channels).
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGBA32F: 14;
/**
 * 32-bit floating point single channel format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_R32F: 15;
/**
 * A readable depth buffer format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_DEPTH: 16;
/**
 * A readable depth/stencil buffer format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_DEPTHSTENCIL: 17;
/**
 * A floating-point color-only format with 11 bits for red and green channels and 10 bits for the
 * blue channel.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_111110F: 18;
/**
 * Color-only sRGB format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_SRGB8: 19;
/**
 * Color sRGB format with additional alpha channel.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_SRGBA8: 20;
/**
 * ETC1 compressed format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_ETC1: 21;
/**
 * ETC2 (RGB) compressed format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_ETC2_RGB: 22;
/**
 * ETC2 (RGBA) compressed format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_ETC2_RGBA: 23;
/**
 * PVRTC (2BPP RGB) compressed format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_PVRTC_2BPP_RGB_1: 24;
/**
 * PVRTC (2BPP RGBA) compressed format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_PVRTC_2BPP_RGBA_1: 25;
/**
 * PVRTC (4BPP RGB) compressed format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_PVRTC_4BPP_RGB_1: 26;
/**
 * PVRTC (4BPP RGBA) compressed format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_PVRTC_4BPP_RGBA_1: 27;
/**
 * ASTC (Adaptive Scalable Texture Compression) format, storing RGBA data in blocks of 4x4 texels.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_ASTC_4x4: 28;
/**
 * ATC compressed format with no alpha channel.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_ATC_RGB: 29;
/**
 * ATC compressed format with alpha channel.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_ATC_RGBA: 30;
/**
 * 32-bit BGRA (8-bits for blue channel, 8 for green, 8 for red with 8-bit alpha). This is an
 * internal format used by the WebGPU's backbuffer only.
 *
 * @ignore
 * @category Graphics
 */
declare const PIXELFORMAT_BGRA8: 31;
/**
 * 8-bit signed integer single-channel (R) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_R8I: 32;
/**
 * 8-bit unsigned integer single-channel (R) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_R8U: 33;
/**
 * 16-bit signed integer single-channel (R) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_R16I: 34;
/**
 * 16-bit unsigned integer single-channel (R) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_R16U: 35;
/**
 * 32-bit signed integer single-channel (R) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_R32I: 36;
/**
 * 32-bit unsigned integer single-channel (R) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_R32U: 37;
/**
 * 8-bit per-channel signed integer (RG) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RG8I: 38;
/**
 * 8-bit per-channel unsigned integer (RG) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RG8U: 39;
/**
 * 16-bit per-channel signed integer (RG) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RG16I: 40;
/**
 * 16-bit per-channel unsigned integer (RG) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RG16U: 41;
/**
 * 32-bit per-channel signed integer (RG) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RG32I: 42;
/**
 * 32-bit per-channel unsigned integer (RG) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RG32U: 43;
/**
 * 8-bit per-channel signed integer (RGBA) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGBA8I: 44;
/**
 * 8-bit per-channel unsigned integer (RGBA) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGBA8U: 45;
/**
 * 16-bit per-channel signed integer (RGBA) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGBA16I: 46;
/**
 * 16-bit per-channel unsigned integer (RGBA) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGBA16U: 47;
/**
 * 32-bit per-channel signed integer (RGBA) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGBA32I: 48;
/**
 * 32-bit per-channel unsigned integer (RGBA) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGBA32U: 49;
/**
 * 16-bit floating point R (16-bit float for red channel).
 *
 * @category Graphics
 */
declare const PIXELFORMAT_R16F: 50;
/**
 * 16-bit floating point RG (16-bit float for each red and green channels).
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RG16F: 51;
/**
 * 8-bit per-channel (R) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_R8: 52;
/**
 * 8-bit per-channel (RG) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RG8: 53;
/**
 * Format equivalent to {@link PIXELFORMAT_DXT1} but sampled in linear color space.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_DXT1_SRGB: 54;
/**
 * Format equivalent to {@link PIXELFORMAT_DXT3} but sampled in linear color space.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_DXT3_SRGBA: 55;
/**
 * Format equivalent to {@link PIXELFORMAT_DXT5} but sampled in linear color space.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_DXT5_SRGBA: 56;
/**
 * Format equivalent to {@link PIXELFORMAT_ETC2_RGB} but sampled in linear color space.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_ETC2_SRGB: 61;
/**
 * Format equivalent to {@link PIXELFORMAT_ETC2_RGBA} but sampled in linear color space.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_ETC2_SRGBA: 62;
/**
 * Format equivalent to {@link PIXELFORMAT_ASTC_4x4} but sampled in linear color space.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_ASTC_4x4_SRGB: 63;
/**
 * 32-bit BGRA sRGB format. This is an internal format used by the WebGPU's backbuffer only.
 *
 * @ignore
 * @category Graphics
 */
declare const PIXELFORMAT_SBGRA8: 64;
/**
 * Compressed high dynamic range signed floating point format storing RGB values.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_BC6F: 65;
/**
 * Compressed high dynamic range unsigned floating point format storing RGB values.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_BC6UF: 66;
/**
 * Compressed 8-bit fixed-point data. Each 4x4 block of texels consists of 128 bits of RGBA data.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_BC7: 67;
/**
 * Compressed 8-bit fixed-point data. Each 4x4 block of texels consists of 128 bits of SRGB_ALPHA
 * data.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_BC7_SRGBA: 68;
/**
 * A 16-bit depth buffer format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_DEPTH16: 69;
/**
 * 32-bit floating point RG (32-bit float for each red and green channels). WebGPU only.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RG32F: 70;
/**
 * 32-bit RGB format with shared 5-bit exponent (9 bits each for RGB mantissa). HDR format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGB9E5: 71;
/**
 * 8-bit per-channel signed normalized (RG) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RG8S: 72;
/**
 * 8-bit per-channel signed normalized (RGBA) format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGBA8S: 73;
/**
 * 10-bit RGB with 2-bit alpha unsigned normalized format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGB10A2: 74;
/**
 * 10-bit RGB with 2-bit alpha unsigned integer format.
 *
 * @category Graphics
 */
declare const PIXELFORMAT_RGB10A2U: 75;
/**
 * Information about pixel formats.
 *
 * ldr: whether the format is low dynamic range (LDR), which typically means it's not HDR, and uses
 * sRGB color space to store the color values
 * srgbFormat: the corresponding sRGB format (which automatically converts the sRGB value to linear)
 *
 * @type {Map<number, { name: string, size?: number, blockSize?: number, ldr?: boolean, srgb?: boolean, srgbFormat?: number, isInt?: boolean, isUint?: boolean }>}
 * @ignore
 */
declare const pixelFormatInfo: Map<number, {
    name: string;
    size?: number;
    blockSize?: number;
    ldr?: boolean;
    srgb?: boolean;
    srgbFormat?: number;
    isInt?: boolean;
    isUint?: boolean;
}>;
declare function isCompressedPixelFormat(format: any): boolean;
declare function isSrgbPixelFormat(format: any): boolean;
declare function isIntegerPixelFormat(format: any): boolean;
declare function getGlslShaderType(format: number): {
    sampler: string;
    returnType: string;
};
declare function getWgslShaderType(format: number): {
    textureType: string;
    returnType: string;
};
declare function pixelFormatLinearToGamma(format: number): number;
declare function pixelFormatGammaToLinear(format: number): number;
declare function requiresManualGamma(format: number): boolean;
declare function getPixelFormatArrayType(format: any): Int8ArrayConstructor | Uint8ArrayConstructor | Int16ArrayConstructor | Uint16ArrayConstructor | Int32ArrayConstructor | Uint32ArrayConstructor | Float32ArrayConstructor;
/**
 * List of distinct points.
 *
 * @category Graphics
 */
declare const PRIMITIVE_POINTS: 0;
/**
 * Discrete list of line segments.
 *
 * @category Graphics
 */
declare const PRIMITIVE_LINES: 1;
/**
 * List of points that are linked sequentially by line segments, with a closing line segment
 * between the last and first points.
 *
 * @category Graphics
 */
declare const PRIMITIVE_LINELOOP: 2;
/**
 * List of points that are linked sequentially by line segments.
 *
 * @category Graphics
 */
declare const PRIMITIVE_LINESTRIP: 3;
/**
 * Discrete list of triangles.
 *
 * @category Graphics
 */
declare const PRIMITIVE_TRIANGLES: 4;
/**
 * Connected strip of triangles where a specified vertex forms a triangle using the previous two.
 *
 * @category Graphics
 */
declare const PRIMITIVE_TRISTRIP: 5;
/**
 * Connected fan of triangles where the first vertex forms triangles with the following pairs of vertices.
 *
 * @category Graphics
 */
declare const PRIMITIVE_TRIFAN: 6;
/**
 * Vertex attribute to be treated as a position.
 *
 * @category Graphics
 */
declare const SEMANTIC_POSITION: "POSITION";
/**
 * Vertex attribute to be treated as a normal.
 *
 * @category Graphics
 */
declare const SEMANTIC_NORMAL: "NORMAL";
/**
 * Vertex attribute to be treated as a tangent.
 *
 * @category Graphics
 */
declare const SEMANTIC_TANGENT: "TANGENT";
/**
 * Vertex attribute to be treated as skin blend weights.
 *
 * @category Graphics
 */
declare const SEMANTIC_BLENDWEIGHT: "BLENDWEIGHT";
/**
 * Vertex attribute to be treated as skin blend indices.
 *
 * @category Graphics
 */
declare const SEMANTIC_BLENDINDICES: "BLENDINDICES";
/**
 * Vertex attribute to be treated as a color.
 *
 * @category Graphics
 */
declare const SEMANTIC_COLOR: "COLOR";
declare const SEMANTIC_TEXCOORD: "TEXCOORD";
/**
 * Vertex attribute to be treated as a texture coordinate (set 0).
 *
 * @category Graphics
 */
declare const SEMANTIC_TEXCOORD0: "TEXCOORD0";
/**
 * Vertex attribute to be treated as a texture coordinate (set 1).
 *
 * @category Graphics
 */
declare const SEMANTIC_TEXCOORD1: "TEXCOORD1";
/**
 * Vertex attribute to be treated as a texture coordinate (set 2).
 *
 * @category Graphics
 */
declare const SEMANTIC_TEXCOORD2: "TEXCOORD2";
/**
 * Vertex attribute to be treated as a texture coordinate (set 3).
 *
 * @category Graphics
 */
declare const SEMANTIC_TEXCOORD3: "TEXCOORD3";
/**
 * Vertex attribute to be treated as a texture coordinate (set 4).
 *
 * @category Graphics
 */
declare const SEMANTIC_TEXCOORD4: "TEXCOORD4";
/**
 * Vertex attribute to be treated as a texture coordinate (set 5).
 *
 * @category Graphics
 */
declare const SEMANTIC_TEXCOORD5: "TEXCOORD5";
/**
 * Vertex attribute to be treated as a texture coordinate (set 6).
 *
 * @category Graphics
 */
declare const SEMANTIC_TEXCOORD6: "TEXCOORD6";
/**
 * Vertex attribute to be treated as a texture coordinate (set 7).
 *
 * @category Graphics
 */
declare const SEMANTIC_TEXCOORD7: "TEXCOORD7";
/**
 * Vertex attribute with a user defined semantic.
 *
 * @category Graphics
 */
declare const SEMANTIC_ATTR0: "ATTR0";
/**
 * Vertex attribute with a user defined semantic.
 *
 * @category Graphics
 */
declare const SEMANTIC_ATTR1: "ATTR1";
/**
 * Vertex attribute with a user defined semantic.
 *
 * @category Graphics
 */
declare const SEMANTIC_ATTR2: "ATTR2";
/**
 * Vertex attribute with a user defined semantic.
 *
 * @category Graphics
 */
declare const SEMANTIC_ATTR3: "ATTR3";
/**
 * Vertex attribute with a user defined semantic.
 *
 * @category Graphics
 */
declare const SEMANTIC_ATTR4: "ATTR4";
/**
 * Vertex attribute with a user defined semantic.
 *
 * @category Graphics
 */
declare const SEMANTIC_ATTR5: "ATTR5";
/**
 * Vertex attribute with a user defined semantic.
 *
 * @category Graphics
 */
declare const SEMANTIC_ATTR6: "ATTR6";
/**
 * Vertex attribute with a user defined semantic.
 *
 * @category Graphics
 */
declare const SEMANTIC_ATTR7: "ATTR7";
/**
 * Vertex attribute with a user defined semantic.
 *
 * @category Graphics
 */
declare const SEMANTIC_ATTR8: "ATTR8";
/**
 * Vertex attribute with a user defined semantic.
 *
 * @category Graphics
 */
declare const SEMANTIC_ATTR9: "ATTR9";
/**
 * Vertex attribute with a user defined semantic.
 *
 * @category Graphics
 */
declare const SEMANTIC_ATTR10: "ATTR10";
/**
 * Vertex attribute with a user defined semantic.
 *
 * @category Graphics
 */
declare const SEMANTIC_ATTR11: "ATTR11";
/**
 * Vertex attribute with a user defined semantic.
 *
 * @category Graphics
 */
declare const SEMANTIC_ATTR12: "ATTR12";
/**
 * Vertex attribute with a user defined semantic.
 *
 * @category Graphics
 */
declare const SEMANTIC_ATTR13: "ATTR13";
/**
 * Vertex attribute with a user defined semantic.
 *
 * @category Graphics
 */
declare const SEMANTIC_ATTR14: "ATTR14";
/**
 * Vertex attribute with a user defined semantic.
 *
 * @category Graphics
 */
declare const SEMANTIC_ATTR15: "ATTR15";
declare const SHADERTAG_MATERIAL: 1;
/**
 * Don't change the stencil buffer value.
 *
 * @category Graphics
 */
declare const STENCILOP_KEEP: 0;
/**
 * Set value to zero.
 *
 * @category Graphics
 */
declare const STENCILOP_ZERO: 1;
/**
 * Replace value with the reference value (see {@link StencilParameters}).
 *
 * @category Graphics
 */
declare const STENCILOP_REPLACE: 2;
/**
 * Increment the value.
 *
 * @category Graphics
 */
declare const STENCILOP_INCREMENT: 3;
/**
 * Increment the value but wrap it to zero when it's larger than a maximum representable value.
 *
 * @category Graphics
 */
declare const STENCILOP_INCREMENTWRAP: 4;
/**
 * Decrement the value.
 *
 * @category Graphics
 */
declare const STENCILOP_DECREMENT: 5;
/**
 * Decrement the value but wrap it to a maximum representable value if the current value is 0.
 *
 * @category Graphics
 */
declare const STENCILOP_DECREMENTWRAP: 6;
/**
 * Invert the value bitwise.
 *
 * @category Graphics
 */
declare const STENCILOP_INVERT: 7;
/**
 * The texture is not in a locked state.
 *
 * @category Graphics
 */
declare const TEXTURELOCK_NONE: 0;
/**
 * Read only. Any changes to the locked mip level's pixels will not update the texture.
 *
 * @category Graphics
 */
declare const TEXTURELOCK_READ: 1;
/**
 * Write only. The contents of the specified mip level will be entirely replaced.
 *
 * @category Graphics
 */
declare const TEXTURELOCK_WRITE: 2;
/**
 * Texture is a default type.
 *
 * @category Graphics
 */
declare const TEXTURETYPE_DEFAULT: "default";
/**
 * Texture stores high dynamic range data in RGBM format.
 *
 * @category Graphics
 */
declare const TEXTURETYPE_RGBM: "rgbm";
/**
 * Texture stores high dynamic range data in RGBE format.
 *
 * @category Graphics
 */
declare const TEXTURETYPE_RGBE: "rgbe";
/**
 * Texture stores high dynamic range data in RGBP encoding.
 *
 * @category Graphics
 */
declare const TEXTURETYPE_RGBP: "rgbp";
/**
 * Texture stores normalmap data swizzled in GGGR format. This is used for tangent space normal
 * maps. The R component is stored in alpha and G is stored in RGB. This packing can result in
 * higher quality when the texture data is compressed.
 *
 * @category Graphics
 */
declare const TEXTURETYPE_SWIZZLEGGGR: "swizzleGGGR";
// Internal texture allocation hints. NOTE(review): names suggest they categorize textures by
// purpose (none / shadow map / asset / lightmap), presumably for VRAM profiling — usage is not
// visible from this file; confirm against the texture implementation.
declare const TEXHINT_NONE: 0;
declare const TEXHINT_SHADOWMAP: 1;
declare const TEXHINT_ASSET: 2;
declare const TEXHINT_LIGHTMAP: 3;
/**
 * Texture data is stored in a 1-dimensional texture.
 *
 * @category Graphics
 */
declare const TEXTUREDIMENSION_1D: "1d";
/**
 * Texture data is stored in a 2-dimensional texture.
 *
 * @category Graphics
 */
declare const TEXTUREDIMENSION_2D: "2d";
/**
 * Texture data is stored in an array of 2-dimensional textures.
 *
 * @category Graphics
 */
declare const TEXTUREDIMENSION_2D_ARRAY: "2d-array";
/**
 * Texture data is stored in a cube texture.
 *
 * @category Graphics
 */
declare const TEXTUREDIMENSION_CUBE: "cube";
/**
 * Texture data is stored in an array of cube textures.
 *
 * @category Graphics
 */
declare const TEXTUREDIMENSION_CUBE_ARRAY: "cube-array";
/**
 * Texture data is stored in a 3-dimensional texture.
 *
 * @category Graphics
 */
declare const TEXTUREDIMENSION_3D: "3d";
/**
 * A sampler type of a texture that contains floating-point data. Typically stored for color
 * textures, where data can be filtered.
 *
 * @category Graphics
 */
declare const SAMPLETYPE_FLOAT: 0;
/**
 * A sampler type of a texture that contains floating-point data, but cannot be filtered. Typically
 * used for textures storing data that cannot be interpolated.
 *
 * @category Graphics
 */
declare const SAMPLETYPE_UNFILTERABLE_FLOAT: 1;
/**
 * A sampler type of a texture that contains depth data. Typically used for depth textures.
 *
 * @category Graphics
 */
declare const SAMPLETYPE_DEPTH: 2;
/**
 * A sampler type of a texture that contains signed integer data.
 *
 * @category Graphics
 */
declare const SAMPLETYPE_INT: 3;
/**
 * A sampler type of a texture that contains unsigned integer data.
 *
 * @category Graphics
 */
declare const SAMPLETYPE_UINT: 4;
/**
 * Texture data is not stored in a specific projection format.
 *
 * @category Graphics
 */
declare const TEXTUREPROJECTION_NONE: "none";
/**
 * Texture data is stored in cubemap projection format.
 *
 * @category Graphics
 */
declare const TEXTUREPROJECTION_CUBE: "cube";
/**
 * Texture data is stored in equirectangular projection format.
 *
 * @category Graphics
 */
declare const TEXTUREPROJECTION_EQUIRECT: "equirect";
/**
 * Texture data is stored in octahedral projection format.
 *
 * @category Graphics
 */
declare const TEXTUREPROJECTION_OCTAHEDRAL: "octahedral";
/**
 * Shader source code uses GLSL language.
 *
 * @category Graphics
 */
declare const SHADERLANGUAGE_GLSL: "glsl";
/**
 * Shader source code uses WGSL language.
 *
 * @category Graphics
 */
declare const SHADERLANGUAGE_WGSL: "wgsl";
/**
 * Signed byte vertex element type.
 *
 * @category Graphics
 */
declare const TYPE_INT8: 0;
/**
 * Unsigned byte vertex element type.
 *
 * @category Graphics
 */
declare const TYPE_UINT8: 1;
/**
 * Signed short vertex element type.
 *
 * @category Graphics
 */
declare const TYPE_INT16: 2;
/**
 * Unsigned short vertex element type.
 *
 * @category Graphics
 */
declare const TYPE_UINT16: 3;
/**
 * Signed integer vertex element type.
 *
 * @category Graphics
 */
declare const TYPE_INT32: 4;
/**
 * Unsigned integer vertex element type.
 *
 * @category Graphics
 */
declare const TYPE_UINT32: 5;
/**
 * Floating point vertex element type.
 *
 * @category Graphics
 */
declare const TYPE_FLOAT32: 6;
/**
 * 16-bit floating point vertex element type.
 *
 * @category Graphics
 */
declare const TYPE_FLOAT16: 7;
/**
 * Boolean uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_BOOL: 0;
/**
 * Integer uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_INT: 1;
/**
 * Float uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_FLOAT: 2;
/**
 * 2 x Float uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_VEC2: 3;
/**
 * 3 x Float uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_VEC3: 4;
/**
 * 4 x Float uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_VEC4: 5;
/**
 * 2 x Integer uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_IVEC2: 6;
/**
 * 3 x Integer uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_IVEC3: 7;
/**
 * 4 x Integer uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_IVEC4: 8;
/**
 * 2 x Boolean uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_BVEC2: 9;
/**
 * 3 x Boolean uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_BVEC3: 10;
/**
 * 4 x Boolean uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_BVEC4: 11;
/**
 * 2 x 2 x Float uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_MAT2: 12;
/**
 * 3 x 3 x Float uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_MAT3: 13;
/**
 * 4 x 4 x Float uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_MAT4: 14;
/** 2D texture uniform type. @ignore */
declare const UNIFORMTYPE_TEXTURE2D: 15;
/** Cube texture uniform type. @ignore */
declare const UNIFORMTYPE_TEXTURECUBE: 16;
/** Float array uniform type. @ignore */
declare const UNIFORMTYPE_FLOATARRAY: 17;
/** 2D shadow-comparison texture uniform type. @ignore */
declare const UNIFORMTYPE_TEXTURE2D_SHADOW: 18;
/** Cube shadow-comparison texture uniform type. @ignore */
declare const UNIFORMTYPE_TEXTURECUBE_SHADOW: 19;
/** 3D texture uniform type. @ignore */
declare const UNIFORMTYPE_TEXTURE3D: 20;
/** Array of 2 x Float (vec2) uniform type. @ignore */
declare const UNIFORMTYPE_VEC2ARRAY: 21;
/** Array of 3 x Float (vec3) uniform type. @ignore */
declare const UNIFORMTYPE_VEC3ARRAY: 22;
/** Array of 4 x Float (vec4) uniform type. @ignore */
declare const UNIFORMTYPE_VEC4ARRAY: 23;
/** Array of 4 x 4 x Float (mat4) uniform type. @ignore */
declare const UNIFORMTYPE_MAT4ARRAY: 24;
/** 2D texture array uniform type. @ignore */
declare const UNIFORMTYPE_TEXTURE2D_ARRAY: 25;
/**
 * Unsigned integer uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_UINT: 26;
/**
 * 2 x Unsigned integer uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_UVEC2: 27;
/**
 * 3 x Unsigned integer uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_UVEC3: 28;
/**
 * 4 x Unsigned integer uniform type.
 *
 * @category Graphics
 */
declare const UNIFORMTYPE_UVEC4: 29;
/** Array of integer uniform type. @ignore */
declare const UNIFORMTYPE_INTARRAY: 30;
/** Array of unsigned integer uniform type. @ignore */
declare const UNIFORMTYPE_UINTARRAY: 31;
/** Array of boolean uniform type. @ignore */
declare const UNIFORMTYPE_BOOLARRAY: 32;
/** Array of 2 x Integer (ivec2) uniform type. @ignore */
declare const UNIFORMTYPE_IVEC2ARRAY: 33;
/** Array of 2 x Unsigned integer (uvec2) uniform type. @ignore */
declare const UNIFORMTYPE_UVEC2ARRAY: 34;
/** Array of 2 x Boolean (bvec2) uniform type. @ignore */
declare const UNIFORMTYPE_BVEC2ARRAY: 35;
/** Array of 3 x Integer (ivec3) uniform type. @ignore */
declare const UNIFORMTYPE_IVEC3ARRAY: 36;
/** Array of 3 x Unsigned integer (uvec3) uniform type. @ignore */
declare const UNIFORMTYPE_UVEC3ARRAY: 37;
/** Array of 3 x Boolean (bvec3) uniform type. @ignore */
declare const UNIFORMTYPE_BVEC3ARRAY: 38;
/** Array of 4 x Integer (ivec4) uniform type. @ignore */
declare const UNIFORMTYPE_IVEC4ARRAY: 39;
/** Array of 4 x Unsigned integer (uvec4) uniform type. @ignore */
declare const UNIFORMTYPE_UVEC4ARRAY: 40;
/** Array of 4 x Boolean (bvec4) uniform type. @ignore */
declare const UNIFORMTYPE_BVEC4ARRAY: 41;
/** Signed integer 2D texture uniform type. @ignore */
declare const UNIFORMTYPE_ITEXTURE2D: 42;
/** Unsigned integer 2D texture uniform type. @ignore */
declare const UNIFORMTYPE_UTEXTURE2D: 43;
/** Signed integer cube texture uniform type. @ignore */
declare const UNIFORMTYPE_ITEXTURECUBE: 44;
/** Unsigned integer cube texture uniform type. @ignore */
declare const UNIFORMTYPE_UTEXTURECUBE: 45;
/** Signed integer 3D texture uniform type. @ignore */
declare const UNIFORMTYPE_ITEXTURE3D: 46;
/** Unsigned integer 3D texture uniform type. @ignore */
declare const UNIFORMTYPE_UTEXTURE3D: 47;
/** Signed integer 2D texture array uniform type. @ignore */
declare const UNIFORMTYPE_ITEXTURE2D_ARRAY: 48;
/** Unsigned integer 2D texture array uniform type. @ignore */
declare const UNIFORMTYPE_UTEXTURE2D_ARRAY: 49;
/**
 * Lookup of uniform type constant (UNIFORMTYPE_*) to its type name, indexed by the
 * constant's numeric value. Presumably the GLSL names, given the separate WGSL
 * tables below — TODO confirm against the implementation.
 *
 * @ignore
 */
declare const uniformTypeToName: string[];
/**
 * Lookup of uniform type constant to WGSL name data, indexed by the constant's
 * numeric value.
 *
 * @ignore
 */
declare const uniformTypeToNameWGSL: string[][];
/**
 * Map used for WGSL uniform type name lookups.
 * NOTE(review): key/value types are untyped (any) here — the exact mapping is not
 * visible in this file.
 *
 * @ignore
 */
declare const uniformTypeToNameMapWGSL: Map<any, any>;
/**
 * Per-uniform-type storage metadata, indexed by the constant's numeric value.
 * NOTE(review): what each byte encodes is not visible in this file — confirm
 * against the implementation.
 *
 * @ignore
 */
declare const uniformTypeToStorage: Uint8Array<ArrayBuffer>;
/**
 * A WebGL 2 device type.
 *
 * @category Graphics
 */
declare const DEVICETYPE_WEBGL2: "webgl2";
/**
 * A WebGPU device type.
 *
 * @category Graphics
 */
declare const DEVICETYPE_WEBGPU: "webgpu";
/**
 * A WebGPU device type with no optional features requested and default spec limits. Useful for
 * testing engine behavior on the most constrained WebGPU devices (e.g. no compressed textures, no
 * float32-filterable, no timestamp-query).
 *
 * @category Graphics
 */
declare const DEVICETYPE_WEBGPU_BARE: "webgpu:bare";
/**
 * A Null device type.
 *
 * @category Graphics
 */
declare const DEVICETYPE_NULL: "null";
/**
 * The resource is visible to the vertex shader.
 *
 * @category Graphics
 */
declare const SHADERSTAGE_VERTEX: 1;
/**
 * The resource is visible to the fragment shader.
 *
 * @category Graphics
 */
declare const SHADERSTAGE_FRAGMENT: 2;
/**
 * The resource is visible to the compute shader.
 *
 * @category Graphics
 */
declare const SHADERSTAGE_COMPUTE: 4;
/**
 * Display format for low dynamic range data. This is always supported; however, due to the cost, it
 * does not implement linear alpha blending on the main framebuffer. Instead, alpha blending occurs
 * in sRGB space.
 *
 * @category Graphics
 */
declare const DISPLAYFORMAT_LDR: "ldr";
/**
 * Display format for low dynamic range data in the sRGB color space. This format correctly
 * implements linear alpha blending on the main framebuffer, with the alpha blending occurring in
 * linear space. This is currently supported on WebGPU platform only. On unsupported platforms, it
 * silently falls back to {@link DISPLAYFORMAT_LDR}.
 *
 * @category Graphics
 */
declare const DISPLAYFORMAT_LDR_SRGB: "ldr_srgb";
/**
 * Display format for high dynamic range data, using 16bit floating point values.
 * Note: This is supported on WebGPU platform only, and ignored on other platforms. On displays
 * without HDR support, it silently falls back to {@link DISPLAYFORMAT_LDR}. Use
 * {@link GraphicsDevice.isHdr} to see if the HDR format is used. When it is, it's recommended to
 * use {@link TONEMAP_NONE} for the tonemapping mode, to avoid it clipping the high dynamic range.
 *
 * @category Graphics
 */
declare const DISPLAYFORMAT_HDR: "hdr";
/**
 * Bit flag identifying the texture minification filter property. The TEXPROPERTY_*
 * values are powers of two so they can be combined into a mask; TEXPROPERTY_ALL
 * (255) is the union of all eight flags.
 *
 * @ignore
 */
declare const TEXPROPERTY_MIN_FILTER: 1;
/** Magnification filter property bit. @ignore */
declare const TEXPROPERTY_MAG_FILTER: 2;
/** U-coordinate address mode property bit. @ignore */
declare const TEXPROPERTY_ADDRESS_U: 4;
/** V-coordinate address mode property bit. @ignore */
declare const TEXPROPERTY_ADDRESS_V: 8;
/** W-coordinate address mode property bit. @ignore */
declare const TEXPROPERTY_ADDRESS_W: 16;
/** Compare-on-read property bit. @ignore */
declare const TEXPROPERTY_COMPARE_ON_READ: 32;
/** Compare function property bit. @ignore */
declare const TEXPROPERTY_COMPARE_FUNC: 64;
/** Anisotropy property bit. @ignore */
declare const TEXPROPERTY_ANISOTROPY: 128;
/** Union of all TEXPROPERTY_* bits. @ignore */
declare const TEXPROPERTY_ALL: 255;
/** Bind group index for view-level resources. @ignore */
declare const BINDGROUP_VIEW: 0;
/** Bind group index for mesh-level resources. @ignore */
declare const BINDGROUP_MESH: 1;
/** Bind group index for the mesh uniform buffer. @ignore */
declare const BINDGROUP_MESH_UB: 2;
/** Names of the bind groups, indexed by the BINDGROUP_* constant values. @ignore */
declare const bindGroupNames: string[];
/** Name of the default uniform buffer slot. @ignore */
declare const UNIFORM_BUFFER_DEFAULT_SLOT_NAME: "default";
/** Name given to a placeholder uniform that is never used. @ignore */
declare const UNUSED_UNIFORM_NAME: "_unused_float_uniform";
/**
 * Typed array constructors indexed by the TYPE_* vertex element type constants
 * (typedArrayToType provides the inverse mapping).
 *
 * @ignore
 */
declare const typedArrayTypes: (Int8ArrayConstructor | Uint8ArrayConstructor | Int16ArrayConstructor | Uint16ArrayConstructor | Int32ArrayConstructor | Uint32ArrayConstructor | Float32ArrayConstructor)[];
/** Byte sizes of the typed array element types, indexed by TYPE_*. @ignore */
declare const typedArrayTypesByteSize: number[];
/** Names of the vertex element types, indexed by TYPE_*. @ignore */
declare const vertexTypesNames: string[];
/**
 * Maps typed array constructor names (e.g. "Float32Array") back to the
 * corresponding TYPE_* vertex element type constant.
 *
 * @ignore
 */
declare namespace typedArrayToType {
    export { TYPE_INT8 as Int8Array };
    export { TYPE_UINT8 as Uint8Array };
    export { TYPE_INT16 as Int16Array };
    export { TYPE_UINT16 as Uint16Array };
    export { TYPE_INT32 as Int32Array };
    export { TYPE_UINT32 as Uint32Array };
    export { TYPE_FLOAT32 as Float32Array };
}
/** Typed array constructors usable for index buffer formats. @ignore */
declare const typedArrayIndexFormats: (Uint8ArrayConstructor | Uint16ArrayConstructor | Uint32ArrayConstructor)[];
/** Byte sizes of the index buffer formats. @ignore */
declare const typedArrayIndexFormatsByteSize: number[];
/** Maps primitive GLSL type names to their WGSL equivalents. @ignore */
declare const primitiveGlslToWgslTypeMap: Map<string, string>;
/**
 * Map of engine semantics into location on device in range 0..15 (note - semantics mapping to the
 * same location cannot be used at the same time) organized in a way that ATTR0-ATTR7 do not
 * overlap with common important semantics.
 *
 * @type {object}
 * @ignore
 * @category Graphics
 */
declare const semanticToLocation: object;

/**
 * Mouse action type, identifying actions bound to mouse input.
 *
 * @category Input
 */
declare const ACTION_MOUSE: "mouse";
/**
 * Keyboard action type, identifying actions bound to keyboard input.
 *
 * @category Input
 */
declare const ACTION_KEYBOARD: "keyboard";
/**
 * Gamepad action type, identifying actions bound to gamepad input.
 *
 * @category Input
 */
declare const ACTION_GAMEPAD: "gamepad";
/**
 * Horizontal mouse movement axis.
 *
 * @category Input
 */
declare const AXIS_MOUSE_X: "mousex";
/**
 * Vertical mouse movement axis.
 *
 * @category Input
 */
declare const AXIS_MOUSE_Y: "mousey";
/**
 * Horizontal axis of the left gamepad stick.
 *
 * @category Input
 */
declare const AXIS_PAD_L_X: "padlx";
/**
 * Vertical axis of the left gamepad stick.
 *
 * @category Input
 */
declare const AXIS_PAD_L_Y: "padly";
/**
 * Horizontal axis of the right gamepad stick.
 *
 * @category Input
 */
declare const AXIS_PAD_R_X: "padrx";
/**
 * Vertical axis of the right gamepad stick.
 *
 * @category Input
 */
declare const AXIS_PAD_R_Y: "padry";
/**
 * Axis driven by keyboard keys.
 * NOTE(review): exact semantics are not visible in this file — confirm against the
 * controller implementation.
 *
 * @category Input
 */
declare const AXIS_KEY: "key";
/**
 * @type {number}
 * @category Input
 */
declare const KEY_BACKSPACE: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_TAB: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_RETURN: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_ENTER: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_SHIFT: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_CONTROL: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_ALT: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_PAUSE: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_CAPS_LOCK: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_ESCAPE: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_SPACE: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_PAGE_UP: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_PAGE_DOWN: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_END: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_HOME: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_LEFT: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_UP: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_RIGHT: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_DOWN: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_PRINT_SCREEN: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_INSERT: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_DELETE: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_0: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_1: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_2: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_3: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_4: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_5: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_6: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_7: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_8: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_9: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_SEMICOLON: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_EQUAL: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_A: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_B: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_C: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_D: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_E: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_F: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_G: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_H: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_I: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_J: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_K: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_L: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_M: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_N: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_O: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_P: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_Q: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_R: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_S: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_T: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_U: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_V: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_W: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_X: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_Y: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_Z: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_WINDOWS: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_CONTEXT_MENU: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_NUMPAD_0: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_NUMPAD_1: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_NUMPAD_2: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_NUMPAD_3: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_NUMPAD_4: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_NUMPAD_5: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_NUMPAD_6: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_NUMPAD_7: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_NUMPAD_8: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_NUMPAD_9: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_MULTIPLY: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_ADD: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_SEPARATOR: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_SUBTRACT: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_DECIMAL: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_DIVIDE: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_F1: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_F2: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_F3: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_F4: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_F5: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_F6: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_F7: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_F8: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_F9: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_F10: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_F11: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_F12: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_COMMA: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_PERIOD: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_SLASH: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_OPEN_BRACKET: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_BACK_SLASH: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_CLOSE_BRACKET: number;
/**
 * @type {number}
 * @category Input
 */
declare const KEY_META: number;
/**
 * No mouse buttons pressed.
 *
 * @category Input
 */
declare const MOUSEBUTTON_NONE: -1;
/**
 * The left mouse button.
 *
 * @category Input
 */
declare const MOUSEBUTTON_LEFT: 0;
/**
 * The middle mouse button.
 *
 * @category Input
 */
declare const MOUSEBUTTON_MIDDLE: 1;
/**
 * The right mouse button.
 *
 * @category Input
 */
declare const MOUSEBUTTON_RIGHT: 2;
/**
 * Index for pad 1.
 *
 * @category Input
 */
declare const PAD_1: 0;
/**
 * Index for pad 2.
 *
 * @category Input
 */
declare const PAD_2: 1;
/**
 * Index for pad 3.
 *
 * @category Input
 */
declare const PAD_3: 2;
/**
 * Index for pad 4.
 *
 * @category Input
 */
declare const PAD_4: 3;
/**
 * The first face button, from bottom going clockwise.
 *
 * @category Input
 */
declare const PAD_FACE_1: 0;
/**
 * The second face button, from bottom going clockwise.
 *
 * @category Input
 */
declare const PAD_FACE_2: 1;
/**
 * The third face button, from bottom going clockwise.
 *
 * @category Input
 */
declare const PAD_FACE_3: 2;
/**
 * The fourth face button, from bottom going clockwise.
 *
 * @category Input
 */
declare const PAD_FACE_4: 3;
/**
 * The first shoulder button on the left.
 *
 * @category Input
 */
declare const PAD_L_SHOULDER_1: 4;
/**
 * The first shoulder button on the right.
 *
 * @category Input
 */
declare const PAD_R_SHOULDER_1: 5;
/**
 * The second shoulder button on the left.
 *
 * @category Input
 */
declare const PAD_L_SHOULDER_2: 6;
/**
 * The second shoulder button on the right.
 *
 * @category Input
 */
declare const PAD_R_SHOULDER_2: 7;
/**
 * The select button.
 *
 * @category Input
 */
declare const PAD_SELECT: 8;
/**
 * The start button.
 *
 * @category Input
 */
declare const PAD_START: 9;
/**
 * The button when depressing the left analogue stick.
 *
 * @category Input
 */
declare const PAD_L_STICK_BUTTON: 10;
/**
 * The button when depressing the right analogue stick.
 *
 * @category Input
 */
declare const PAD_R_STICK_BUTTON: 11;
/**
 * Direction pad up.
 *
 * @category Input
 */
declare const PAD_UP: 12;
/**
 * Direction pad down.
 *
 * @category Input
 */
declare const PAD_DOWN: 13;
/**
 * Direction pad left.
 *
 * @category Input
 */
declare const PAD_LEFT: 14;
/**
 * Direction pad right.
 *
 * @category Input
 */
declare const PAD_RIGHT: 15;
/**
 * Vendor specific button.
 *
 * @category Input
 */
declare const PAD_VENDOR: 16;
/**
 * Horizontal axis on the left analogue stick.
 *
 * @category Input
 */
declare const PAD_L_STICK_X: 0;
/**
 * Vertical axis on the left analogue stick.
 *
 * @category Input
 */
declare const PAD_L_STICK_Y: 1;
/**
 * Horizontal axis on the right analogue stick.
 *
 * @category Input
 */
declare const PAD_R_STICK_X: 2;
/**
 * Vertical axis on the right analogue stick.
 *
 * @category Input
 */
declare const PAD_R_STICK_Y: 3;
/**
 * Horizontal axis on the touchpad of a XR pad.
 *
 * @category Input
 */
declare const XRPAD_TOUCHPAD_X: 0;
/**
 * Vertical axis on the touchpad of a XR pad.
 *
 * @category Input
 */
declare const XRPAD_TOUCHPAD_Y: 1;
/**
 * Horizontal axis on the stick of a XR pad.
 *
 * @category Input
 */
declare const XRPAD_STICK_X: 2;
/**
 * Vertical axis on the stick of a XR pad.
 *
 * @category Input
 */
declare const XRPAD_STICK_Y: 3;
/**
 * The button when pressing the XR pad's touchpad.
 *
 * @category Input
 */
declare const XRPAD_TOUCHPAD_BUTTON: 2;
/**
 * The trigger button from XR pad.
 *
 * @category Input
 */
declare const XRPAD_TRIGGER: 0;
/**
 * The squeeze button from XR pad.
 *
 * @category Input
 */
declare const XRPAD_SQUEEZE: 1;
/**
 * The button when pressing the XR pad's stick.
 *
 * @category Input
 */
declare const XRPAD_STICK_BUTTON: 3;
/**
 * The A button from XR pad.
 *
 * @category Input
 */
declare const XRPAD_A: 4;
/**
 * The B button from XR pad.
 *
 * @category Input
 */
declare const XRPAD_B: 5;

/**
 * Linear distance model.
 *
 * @category Sound
 */
declare const DISTANCE_LINEAR: "linear";
/**
 * Inverse distance model.
 *
 * @category Sound
 */
declare const DISTANCE_INVERSE: "inverse";
/**
 * Exponential distance model.
 *
 * @category Sound
 */
declare const DISTANCE_EXPONENTIAL: "exponential";

/**
 * Subtract the color of the source fragment from the destination fragment and write the result to
 * the frame buffer.
 *
 * @category Graphics
 */
declare const BLEND_SUBTRACTIVE: 0;
/**
 * Add the color of the source fragment to the destination fragment and write the result to the
 * frame buffer.
 *
 * @category Graphics
 */
declare const BLEND_ADDITIVE: 1;
/**
 * Enable simple translucency for materials such as glass. This is equivalent to enabling a source
 * blend mode of {@link BLENDMODE_SRC_ALPHA} and a destination blend mode of
 * {@link BLENDMODE_ONE_MINUS_SRC_ALPHA}.
 *
 * @category Graphics
 */
declare const BLEND_NORMAL: 2;
/**
 * Disable blending.
 *
 * @category Graphics
 */
declare const BLEND_NONE: 3;
/**
 * Similar to {@link BLEND_NORMAL} except the source fragment is assumed to have already been
 * multiplied by the source alpha value.
 *
 * @category Graphics
 */
declare const BLEND_PREMULTIPLIED: 4;
/**
 * Multiply the color of the source fragment by the color of the destination fragment and write the
 * result to the frame buffer.
 *
 * @category Graphics
 */
declare const BLEND_MULTIPLICATIVE: 5;
/**
 * Same as {@link BLEND_ADDITIVE} except the source RGB is multiplied by the source alpha.
 *
 * @category Graphics
 */
declare const BLEND_ADDITIVEALPHA: 6;
/**
 * Multiplies colors and doubles the result.
 *
 * @category Graphics
 */
declare const BLEND_MULTIPLICATIVE2X: 7;
/**
 * Softer version of additive.
 *
 * @category Graphics
 */
declare const BLEND_SCREEN: 8;
/**
 * Minimum color.
 *
 * @category Graphics
 */
declare const BLEND_MIN: 9;
/**
 * Maximum color.
 *
 * @category Graphics
 */
declare const BLEND_MAX: 10;
/**
 * Names of the blend modes, keyed by the BLEND_* constant values (0-10).
 *
 * @ignore
 */
declare const blendNames: {
    0: string;
    1: string;
    2: string;
    3: string;
    4: string;
    5: string;
    6: string;
    7: string;
    8: string;
    9: string;
    10: string;
};
/**
 * No fog is applied to the scene.
 *
 * @category Graphics
 */
declare const FOG_NONE: "none";
/**
 * Fog rises linearly from zero to 1 between a start and end depth.
 *
 * @category Graphics
 */
declare const FOG_LINEAR: "linear";
/**
 * Fog rises according to an exponential curve controlled by a density value.
 *
 * @category Graphics
 */
declare const FOG_EXP: "exp";
/**
 * Fog rises according to an exponential curve controlled by a density value.
 *
 * @category Graphics
 */
declare const FOG_EXP2: "exp2";
/**
 * No Fresnel.
 *
 * @category Graphics
 */
declare const FRESNEL_NONE: 0;
/**
 * Schlick's approximation of Fresnel.
 *
 * @category Graphics
 */
declare const FRESNEL_SCHLICK: 2;
/**
 * Names of the Fresnel models, keyed by the FRESNEL_* constant values.
 *
 * @ignore
 */
declare const fresnelNames: {
    0: string;
    2: string;
};
/**
 * HUD layer index.
 * NOTE(review): the LAYER_* constants are distinct from the LAYERID_* constants
 * below — presumably legacy; confirm intended use before relying on them.
 *
 * @ignore
 */
declare const LAYER_HUD: 0;
/** Gizmo layer index. @ignore */
declare const LAYER_GIZMO: 1;
/** World layer index. @ignore */
declare const LAYER_WORLD: 15;
/**
 * The world layer.
 *
 * @category Graphics
 */
declare const LAYERID_WORLD: 0;
/**
 * The depth layer.
 *
 * @category Graphics
 */
declare const LAYERID_DEPTH: 1;
/**
 * The skybox layer.
 *
 * @category Graphics
 */
declare const LAYERID_SKYBOX: 2;
/**
 * The immediate layer.
 *
 * @category Graphics
 */
declare const LAYERID_IMMEDIATE: 3;
/**
 * The UI layer.
 *
 * @category Graphics
 */
declare const LAYERID_UI: 4;
/**
 * Directional (global) light source.
 *
 * @category Graphics
 */
declare const LIGHTTYPE_DIRECTIONAL: 0;
/**
 * Omni-directional (local) light source.
 *
 * @category Graphics
 */
declare const LIGHTTYPE_OMNI: 1;
/**
 * Point (local) light source.
 *
 * @ignore
 * @category Graphics
 */
declare const LIGHTTYPE_POINT: 1;
/**
 * Spot (local) light source.
 *
 * @category Graphics
 */
declare const LIGHTTYPE_SPOT: 2;
/** Number of distinct light types (directional, omni, spot). @ignore */
declare const LIGHTTYPE_COUNT: 3;
/**
 * Names of the light types, keyed by the LIGHTTYPE_* constant values.
 *
 * @ignore
 */
declare const lightTypeNames: {
    0: string;
    1: string;
    2: string;
};
/**
 * Divider applied to light color values.
 * NOTE(review): exact semantics are not visible in this file — confirm against the
 * light implementation.
 *
 * @ignore
 */
declare const LIGHT_COLOR_DIVIDER: 100;
/**
 * Infinitesimally small point light source shape.
 *
 * @category Graphics
 */
declare const LIGHTSHAPE_PUNCTUAL: 0;
/**
 * Rectangle shape of light source.
 *
 * @category Graphics
 */
declare const LIGHTSHAPE_RECT: 1;
/**
 * Disk shape of light source.
 *
 * @category Graphics
 */
declare const LIGHTSHAPE_DISK: 2;
/**
 * Sphere shape of light source.
 *
 * @category Graphics
 */
declare const LIGHTSHAPE_SPHERE: 3;
/**
 * Names of the light shapes, keyed by the LIGHTSHAPE_* constant values.
 *
 * @ignore
 */
declare const lightShapeNames: {
    0: string;
    1: string;
    2: string;
    3: string;
};
/**
 * Linear distance falloff model for light attenuation.
 *
 * @category Graphics
 */
declare const LIGHTFALLOFF_LINEAR: 0;
/**
 * Inverse squared distance falloff model for light attenuation.
 *
 * @category Graphics
 */
declare const LIGHTFALLOFF_INVERSESQUARED: 1;
/**
 * Names of the light falloff models, keyed by the LIGHTFALLOFF_* constant values.
 *
 * @ignore
 */
declare const lightFalloffNames: {
    0: string;
    1: string;
};
/**
 * A shadow sampling technique using 32bit shadow map that averages depth comparisons from a 3x3
 * grid of texels for softened shadow edges.
 *
 * @category Graphics
 */
declare const SHADOW_PCF3_32F: 0;
/**
 * @deprecated
 * @ignore
 */
declare const SHADOW_PCF3: 0;
/**
 * A shadow sampling technique using a 16-bit exponential variance shadow map that leverages
 * variance to approximate shadow boundaries, enabling soft shadows. Only supported when
 * {@link GraphicsDevice#textureHalfFloatRenderable} is true. Falls back to {@link SHADOW_PCF3_32F},
 * if not supported.
 *
 * @category Graphics
 */
declare const SHADOW_VSM_16F: 2;
/**
 * @deprecated
 * @ignore
 */
declare const SHADOW_VSM16: 2;
/**
 * A shadow sampling technique using a 32-bit exponential variance shadow map that leverages
 * variance to approximate shadow boundaries, enabling soft shadows. Only supported when
 * {@link GraphicsDevice#textureFloatRenderable} is true. Falls back to {@link SHADOW_VSM_16F}, if
 * not supported.
 *
 * @category Graphics
 */
declare const SHADOW_VSM_32F: 3;
/**
 * @deprecated
 * @ignore
 */
declare const SHADOW_VSM32: 3;
/**
 * A shadow sampling technique using 32bit shadow map that averages depth comparisons from a 5x5
 * grid of texels for softened shadow edges.
 *
 * @category Graphics
 */
declare const SHADOW_PCF5_32F: 4;
/**
 * @deprecated
 * @ignore
 */
declare const SHADOW_PCF5: 4;
/**
 * A shadow sampling technique using a 32-bit shadow map that performs a single depth comparison for
 * sharp shadow edges.
 *
 * @category Graphics
 */
declare const SHADOW_PCF1_32F: 5;
/**
 * @deprecated
 * @ignore
 */
declare const SHADOW_PCF1: 5;
/**
 * A shadow sampling technique using a 32-bit shadow map that adjusts filter size based on blocker
 * distance, producing realistic, soft shadow edges that vary with the light's occlusion. Note that
 * this technique requires both {@link GraphicsDevice#textureFloatRenderable} and
 * {@link GraphicsDevice#textureFloatFilterable} to be true, and falls back to
 * {@link SHADOW_PCF3_32F} otherwise.
 *
 * @category Graphics
 */
declare const SHADOW_PCSS_32F: 6;
/**
 * A shadow sampling technique using a 16-bit shadow map that performs a single depth comparison for
 * sharp shadow edges.
 *
 * @category Graphics
 */
declare const SHADOW_PCF1_16F: 7;
/**
 * A shadow sampling technique using 16-bit shadow map that averages depth comparisons from a 3x3
 * grid of texels for softened shadow edges.
 *
 * @category Graphics
 */
declare const SHADOW_PCF3_16F: 8;
/**
 * A shadow sampling technique using 16-bit shadow map that averages depth comparisons from a 5x5
 * grid of texels for softened shadow edges.
 *
 * @category Graphics
 */
declare const SHADOW_PCF5_16F: 9;
/**
 * Information about shadow types.
 *
 * @type {Map<number, { name: string, format: number, pcf?: boolean, vsm?: boolean }>}
 * @ignore
 */
declare const shadowTypeInfo: Map<number, {
    name: string;
    format: number;
    pcf?: boolean;
    vsm?: boolean;
}>;
/**
 * The flag that controls shadow rendering for the 0 cascade
 *
 * @category Graphics
 */
declare const SHADOW_CASCADE_0: 1;
/**
 * The flag that controls shadow rendering for the 1 cascade
 *
 * @category Graphics
 */
declare const SHADOW_CASCADE_1: 2;
/**
 * The flag that controls shadow rendering for the 2 cascade
 *
 * @category Graphics
 */
declare const SHADOW_CASCADE_2: 4;
/**
 * The flag that controls shadow rendering for the 3 cascade
 *
 * @category Graphics
 */
declare const SHADOW_CASCADE_3: 8;
/**
 * The flag that controls shadow rendering for the all cascades
 *
 * @category Graphics
 */
declare const SHADOW_CASCADE_ALL: 255;
/**
 * Box filter.
 *
 * @category Graphics
 */
declare const BLUR_BOX: 0;
/**
 * Gaussian filter. May look smoother than box, but requires more samples.
 *
 * @category Graphics
 */
declare const BLUR_GAUSSIAN: 1;
/**
 * No sorting, particles are drawn in arbitrary order. Can be simulated on GPU.
 *
 * @category Graphics
 */
declare const PARTICLESORT_NONE: 0;
/**
 * Sorting based on distance to the camera. CPU only.
 *
 * @category Graphics
 */
declare const PARTICLESORT_DISTANCE: 1;
/**
 * Newer particles are drawn first. CPU only.
 *
 * @category Graphics
 */
declare const PARTICLESORT_NEWER_FIRST: 2;
/**
 * Older particles are drawn first. CPU only.
 *
 * @category Graphics
 */
declare const PARTICLESORT_OLDER_FIRST: 3;
/** Particle simulation runs on the GPU. @ignore */
declare const PARTICLEMODE_GPU: 0;
/** Particle simulation runs on the CPU. @ignore */
declare const PARTICLEMODE_CPU: 1;
/**
 * Box shape parameterized by emitterExtents. Initial velocity is directed towards local Z axis.
 *
 * @category Graphics
 */
declare const EMITTERSHAPE_BOX: 0;
/**
 * Sphere shape parameterized by emitterRadius. Initial velocity is directed outwards from the
 * center.
 *
 * @category Graphics
 */
declare const EMITTERSHAPE_SPHERE: 1;
/**
 * Particles are facing camera.
 *
 * @category Graphics
 */
declare const PARTICLEORIENTATION_SCREEN: 0;
/**
 * User defines world space normal (particleNormal) to set planes orientation.
 *
 * @category Graphics
 */
declare const PARTICLEORIENTATION_WORLD: 1;
/**
 * Similar to {@link PARTICLEORIENTATION_WORLD}, but the normal is affected by the emitter
 * (entity) transformation.
 *
 * @category Graphics
 */
declare const PARTICLEORIENTATION_EMITTER: 2;
/**
 * A perspective camera projection where the frustum shape is essentially pyramidal.
 *
 * @category Graphics
 */
declare const PROJECTION_PERSPECTIVE: 0;
/**
 * An orthographic camera projection where the frustum shape is essentially a cuboid.
 *
 * @category Graphics
 */
declare const PROJECTION_ORTHOGRAPHIC: 1;
/**
 * Render mesh instance as solid geometry.
 *
 * @category Graphics
 */
declare const RENDERSTYLE_SOLID: 0;
/**
 * Render mesh instance as wireframe.
 *
 * @category Graphics
 */
declare const RENDERSTYLE_WIREFRAME: 1;
/**
 * Render mesh instance as points.
 *
 * @category Graphics
 */
declare const RENDERSTYLE_POINTS: 2;
/**
 * The cube map is treated as if it is infinitely far away.
 *
 * @category Graphics
 */
declare const CUBEPROJ_NONE: 0;
/**
 * The cube map is box-projected based on a world space axis-aligned bounding box.
 *
 * @category Graphics
 */
declare const CUBEPROJ_BOX: 1;
/**
 * Name lookup for the cube map projection constants ({@link CUBEPROJ_NONE},
 * {@link CUBEPROJ_BOX}), keyed by their numeric values.
 * NOTE(review): the identifier appears to be missing a 'p' (`cubemapProjectionNames`);
 * renaming would change the declared API, so it is left as-is.
 */
declare const cubemaProjectionNames: {
    0: string;
    1: string;
};
/**
 * Multiply together the primary and secondary colors.
 *
 * @category Graphics
 */
declare const DETAILMODE_MUL: "mul";
/**
 * Add together the primary and secondary colors.
 *
 * @category Graphics
 */
declare const DETAILMODE_ADD: "add";
/**
 * Softer version of {@link DETAILMODE_ADD}.
 *
 * @category Graphics
 */
declare const DETAILMODE_SCREEN: "screen";
/**
 * Multiplies or screens the colors, depending on the primary color.
 *
 * @category Graphics
 */
declare const DETAILMODE_OVERLAY: "overlay";
/**
 * Select whichever of the primary and secondary colors is darker, component-wise.
 *
 * @category Graphics
 */
declare const DETAILMODE_MIN: "min";
/**
 * Select whichever of the primary and secondary colors is lighter, component-wise.
 *
 * @category Graphics
 */
declare const DETAILMODE_MAX: "max";
/**
 * No gamma correction.
 *
 * @category Graphics
 */
declare const GAMMA_NONE: 0;
/**
 * Apply sRGB gamma correction.
 *
 * @category Graphics
 */
declare const GAMMA_SRGB: 1;
/**
 * Name lookup for the gamma correction constants ({@link GAMMA_NONE}, {@link GAMMA_SRGB}),
 * keyed by their numeric values.
 */
declare const gammaNames: {
    0: string;
    1: string;
};
/**
 * Linear tonemapping. The colors are preserved, but the exposure is applied.
 *
 * @category Graphics
 */
declare const TONEMAP_LINEAR: 0;
/**
 * Filmic tonemapping curve.
 *
 * @category Graphics
 */
declare const TONEMAP_FILMIC: 1;
/**
 * Hejl filmic tonemapping curve.
 *
 * @category Graphics
 */
declare const TONEMAP_HEJL: 2;
/**
 * ACES filmic tonemapping curve.
 *
 * @category Graphics
 */
declare const TONEMAP_ACES: 3;
/**
 * ACES v2 filmic tonemapping curve.
 *
 * @category Graphics
 */
declare const TONEMAP_ACES2: 4;
/**
 * Khronos PBR Neutral tonemapping curve.
 *
 * @category Graphics
 */
declare const TONEMAP_NEUTRAL: 5;
/**
 * No tonemapping or exposure is applied. Used for HDR rendering.
 *
 * @category Graphics
 */
declare const TONEMAP_NONE: 6;
/**
 * String names for the TONEMAP_* constants — presumably indexed by tonemap value; verify
 * against the engine source.
 */
declare const tonemapNames: string[];
/**
 * No specular occlusion.
 *
 * @category Graphics
 */
declare const SPECOCC_NONE: 0;
/**
 * Use AO directly to occlude specular.
 *
 * @category Graphics
 */
declare const SPECOCC_AO: 1;
/**
 * Modify AO based on material glossiness/view angle to occlude specular.
 *
 * @category Graphics
 */
declare const SPECOCC_GLOSSDEPENDENT: 2;
/**
 * Name lookup for the specular occlusion constants ({@link SPECOCC_NONE}, {@link SPECOCC_AO},
 * {@link SPECOCC_GLOSSDEPENDENT}), keyed by their numeric values.
 */
declare const specularOcclusionNames: {
    0: string;
    1: string;
    2: string;
};
// Internal identifiers for the source of reflection data (semantics are name-based — confirm
// against the engine source).
declare const REFLECTIONSRC_NONE: "none";
declare const REFLECTIONSRC_ENVATLAS: "envAtlas";
declare const REFLECTIONSRC_ENVATLASHQ: "envAtlasHQ";
declare const REFLECTIONSRC_CUBEMAP: "cubeMap";
declare const REFLECTIONSRC_SPHEREMAP: "sphereMap";
// Name lookup for the REFLECTIONSRC_* values; the member keys mirror the constant values.
declare namespace reflectionSrcNames {
    let none: string;
    let envAtlas: string;
    let envAtlasHQ: string;
    let cubeMap: string;
    let sphereMap: string;
}
// Internal identifiers for the source of ambient lighting data (semantics are name-based —
// confirm against the engine source).
// NOTE(review): `AMBIENTSRC_ENVALATLAS` looks like a typo for `AMBIENTSRC_ENVATLAS`;
// renaming would change the declared API, so it is left as-is.
declare const AMBIENTSRC_AMBIENTSH: "ambientSH";
declare const AMBIENTSRC_ENVALATLAS: "envAtlas";
declare const AMBIENTSRC_CONSTANT: "constant";
// Name lookup for the AMBIENTSRC_* values; `envAtlas` is re-exported via an aliased binding.
declare namespace ambientSrcNames {
    export let ambientSH: string;
    let envAtlas_1: string;
    export { envAtlas_1 as envAtlas };
    export let constant: string;
}
// Internal shader-definition bit flags — each value is a distinct power of two, so they are
// intended to be combined into a bitmask of mesh/shader features. Individual flag semantics
// are inferred from the names only; verify against the engine source.
declare const SHADERDEF_NOSHADOW: 1;
declare const SHADERDEF_SKIN: 2;
declare const SHADERDEF_UV0: 4;
declare const SHADERDEF_UV1: 8;
declare const SHADERDEF_VCOLOR: 16;
declare const SHADERDEF_INSTANCING: 32;
declare const SHADERDEF_LM: 64;
declare const SHADERDEF_DIRLM: 128;
declare const SHADERDEF_SCREENSPACE: 256;
declare const SHADERDEF_TANGENTS: 512;
declare const SHADERDEF_MORPH_POSITION: 1024;
declare const SHADERDEF_MORPH_NORMAL: 2048;
declare const SHADERDEF_LMAMBIENT: 4096;
declare const SHADERDEF_MORPH_TEXTURE_BASED_INT: 8192;
declare const SHADERDEF_BATCH: 16384;
/**
 * The shadow map is not to be updated.
 *
 * @category Graphics
 */
declare const SHADOWUPDATE_NONE: 0;
/**
 * The shadow map is regenerated this frame and not on subsequent frames.
 *
 * @category Graphics
 */
declare const SHADOWUPDATE_THISFRAME: 1;
/**
 * The shadow map is regenerated every frame.
 *
 * @category Graphics
 */
declare const SHADOWUPDATE_REALTIME: 2;
// Internal lighting/bake mask bit flags (distinct powers of two; semantics are name-based —
// confirm against the engine source).
declare const MASK_AFFECT_DYNAMIC: 1;
declare const MASK_AFFECT_LIGHTMAPPED: 2;
declare const MASK_BAKE: 4;
/**
 * Render shaded materials using forward rendering.
 *
 * @category Graphics
 */
declare const SHADER_FORWARD: 0;
// Internal shader pass types complementing {@link SHADER_FORWARD} (pass semantics are
// name-based — confirm against the engine source).
declare const SHADER_PREPASS: 1;
declare const SHADER_SHADOW: 2;
declare const SHADER_PICK: 3;
declare const SHADER_DEPTH_PICK: 4;
/**
 * Shader that performs forward rendering.
 *
 * @category Graphics
 */
declare const SHADERPASS_FORWARD: "forward";
/**
 * Shader used for debug rendering of albedo.
 *
 * @category Graphics
 */
declare const SHADERPASS_ALBEDO: "debug_albedo";
/**
 * Shader used for debug rendering of world normal.
 *
 * @category Graphics
 */
declare const SHADERPASS_WORLDNORMAL: "debug_world_normal";
/**
 * Shader used for debug rendering of opacity.
 *
 * @category Graphics
 */
declare const SHADERPASS_OPACITY: "debug_opacity";
/**
 * Shader used for debug rendering of specularity.
 *
 * @category Graphics
 */
declare const SHADERPASS_SPECULARITY: "debug_specularity";
/**
 * Shader used for debug rendering of gloss.
 *
 * @category Graphics
 */
declare const SHADERPASS_GLOSS: "debug_gloss";
/**
 * Shader used for debug rendering of metalness.
 *
 * @category Graphics
 */
declare const SHADERPASS_METALNESS: "debug_metalness";
/**
 * Shader used for debug rendering of ao.
 *
 * @category Graphics
 */
declare const SHADERPASS_AO: "debug_ao";
/**
 * Shader used for debug rendering of emission.
 *
 * @category Graphics
 */
declare const SHADERPASS_EMISSION: "debug_emission";
/**
 * Shader used for debug rendering of lighting.
 *
 * @category Graphics
 */
declare const SHADERPASS_LIGHTING: "debug_lighting";
/**
 * Shader used for debug rendering of UV0 texture coordinates.
 *
 * @category Graphics
 */
declare const SHADERPASS_UV0: "debug_uv0";
/**
 * This mode renders a sprite as a simple quad.
 *
 * @category Graphics
 */
declare const SPRITE_RENDERMODE_SIMPLE: 0;
/**
 * This mode renders a sprite using 9-slicing in 'sliced' mode. Sliced mode stretches the top and
 * bottom regions of the sprite horizontally, the left and right regions vertically and the middle
 * region both horizontally and vertically.
 *
 * @category Graphics
 */
declare const SPRITE_RENDERMODE_SLICED: 1;
/**
 * This mode renders a sprite using 9-slicing in 'tiled' mode. Tiled mode tiles the top and bottom
 * regions of the sprite horizontally, the left and right regions vertically and the middle region
 * both horizontally and vertically.
 *
 * @category Graphics
 */
declare const SPRITE_RENDERMODE_TILED: 2;
/**
 * Name lookup for the sprite render mode constants ({@link SPRITE_RENDERMODE_SIMPLE},
 * {@link SPRITE_RENDERMODE_SLICED}, {@link SPRITE_RENDERMODE_TILED}), keyed by value.
 */
declare const spriteRenderModeNames: {
    0: string;
    1: string;
    2: string;
};
/**
 * Single color lightmap.
 *
 * @category Graphics
 */
declare const BAKE_COLOR: 0;
/**
 * Single color lightmap + dominant light direction (used for bump/specular).
 *
 * @category Graphics
 */
declare const BAKE_COLORDIR: 1;
/**
 * Center of view.
 *
 * @category Graphics
 */
declare const VIEW_CENTER: 0;
/**
 * Left of view. Only used in stereo rendering.
 *
 * @category Graphics
 */
declare const VIEW_LEFT: 1;
/**
 * Right of view. Only used in stereo rendering.
 *
 * @category Graphics
 */
declare const VIEW_RIGHT: 2;
/**
 * No sorting is applied. Mesh instances are rendered in the same order they were added to a layer.
 *
 * @category Graphics
 */
declare const SORTMODE_NONE: 0;
/**
 * Mesh instances are sorted based on {@link MeshInstance#drawOrder}.
 *
 * @category Graphics
 */
declare const SORTMODE_MANUAL: 1;
/**
 * Mesh instances are sorted to minimize switching between materials and meshes to improve
 * rendering performance.
 *
 * @category Graphics
 */
declare const SORTMODE_MATERIALMESH: 2;
/**
 * Mesh instances are sorted back to front. This is the way to properly render many
 * semi-transparent objects on different depth, one is blended on top of another.
 *
 * @category Graphics
 */
declare const SORTMODE_BACK2FRONT: 3;
/**
 * Mesh instances are sorted front to back. Depending on GPU and the scene, this option may give
 * better performance than {@link SORTMODE_MATERIALMESH} due to reduced overdraw.
 *
 * @category Graphics
 */
declare const SORTMODE_FRONT2BACK: 4;
/**
 * Provide custom functions for sorting drawcalls and calculating distance.
 *
 * @ignore
 * @category Graphics
 */
declare const SORTMODE_CUSTOM: 5;
/**
 * Automatically set aspect ratio to current render target's width divided by height.
 *
 * @category Graphics
 */
declare const ASPECT_AUTO: 0;
/**
 * Use the manual aspect ratio value.
 *
 * @category Graphics
 */
declare const ASPECT_MANUAL: 1;
/**
 * Horizontal orientation.
 *
 * @category Graphics
 */
declare const ORIENTATION_HORIZONTAL: 0;
/**
 * Vertical orientation.
 *
 * @category Graphics
 */
declare const ORIENTATION_VERTICAL: 1;
/**
 * A sky texture is rendered using an infinite projection.
 *
 * @category Graphics
 */
declare const SKYTYPE_INFINITE: "infinite";
/**
 * A sky texture is rendered using a box projection. This is generally suitable for interior
 * environments.
 *
 * @category Graphics
 */
declare const SKYTYPE_BOX: "box";
/**
 * A sky texture is rendered using a dome projection. This is generally suitable for exterior
 * environments.
 *
 * @category Graphics
 */
declare const SKYTYPE_DOME: "dome";
/**
 * Opacity dithering is disabled.
 *
 * @category Graphics
 */
declare const DITHER_NONE: "none";
/**
 * Opacity is dithered using a Bayer 8 matrix.
 *
 * @category Graphics
 */
declare const DITHER_BAYER8: "bayer8";
/**
 * Opacity is dithered using a blue noise.
 *
 * @category Graphics
 */
declare const DITHER_BLUENOISE: "bluenoise";
/**
 * Opacity is dithered using an interleaved gradient noise.
 *
 * @category Graphics
 */
declare const DITHER_IGNNOISE: "ignnoise";
// Name lookup for the DITHER_* values; `none` is re-exported via an aliased binding.
declare namespace ditherNames {
    let none_1: string;
    export { none_1 as none };
    export let bayer8: string;
    export let bluenoise: string;
    export let ignnoise: string;
}
/**
 * Name of event fired before the camera renders the scene.
 *
 * @ignore
 */
declare const EVENT_PRERENDER: "prerender";
/**
 * Name of event fired after the camera renders the scene.
 *
 * @ignore
 */
declare const EVENT_POSTRENDER: "postrender";
/**
 * Name of event fired before a layer is rendered by a camera.
 *
 * @ignore
 */
declare const EVENT_PRERENDER_LAYER: "prerender:layer";
/**
 * Name of event fired after a layer is rendered by a camera.
 *
 * @ignore
 */
declare const EVENT_POSTRENDER_LAYER: "postrender:layer";
/**
 * Name of event fired before visibility culling is performed for the camera.
 *
 * @ignore
 */
declare const EVENT_PRECULL: "precull";
/**
 * Name of event fired after visibility culling is performed for the camera.
 *
 * @ignore
 */
declare const EVENT_POSTCULL: "postcull";
/**
 * Name of event fired after the engine has finished culling all cameras.
 *
 * @ignore
 */
declare const EVENT_CULL_END: "cull:end";
/**
 * @ignore
 */
declare const GSPLAT_FORWARD: 1;
/**
 * @ignore
 */
declare const GSPLAT_SHADOW: 2;
/**
 * @ignore
 */
declare const SHADOWCAMERA_NAME: "pcShadowCamera";
/**
 * Work buffer is updated only when needed (transform, format, LOD changes, new gsplat etc).
 *
 * @type {number}
 * @category Graphics
 */
declare const WORKBUFFER_UPDATE_AUTO: number;
/**
 * Work buffer is updated once on the next frame, then automatically switches to
 * {@link WORKBUFFER_UPDATE_AUTO}.
 *
 * @type {number}
 * @category Graphics
 */
declare const WORKBUFFER_UPDATE_ONCE: number;
/**
 * Work buffer is updated every frame. Useful for custom shader code via
 * {@link GSplatComponent#setWorkBufferModifier} that depends on time or animated uniforms.
 *
 * @type {number}
 * @category Graphics
 */
declare const WORKBUFFER_UPDATE_ALWAYS: number;
/**
 * Stream texture is stored at resource level, shared across all component instances.
 *
 * @type {number}
 * @category Graphics
 */
declare const GSPLAT_STREAM_RESOURCE: number;
/**
 * Stream texture is stored per gsplat component instance.
 *
 * @type {number}
 * @category Graphics
 */
declare const GSPLAT_STREAM_INSTANCE: number;
/**
 * Large work buffer data format with full precision. Uses RGBA16F color, float16
 * rotation and float16 scale. 32 bytes per splat.
 *
 * @type {string}
 * @category Graphics
 */
declare const GSPLATDATA_LARGE: string;
/**
 * Compact work buffer data format optimized for reduced memory and bandwidth. Uses 11+11+10 bit
 * RGB color, half-angle quaternion rotation and log-encoded scale. 20 bytes per splat.
 *
 * @type {string}
 * @category Graphics
 */
declare const GSPLATDATA_COMPACT: string;
/**
 * Automatically selects the best rendering pipeline for the current platform.
 *
 * @type {number}
 * @category Graphics
 */
declare const GSPLAT_RENDERER_AUTO: number;
/**
 * Rasterization-based rendering with CPU-side sorting.
 *
 * @type {number}
 * @category Graphics
 */
declare const GSPLAT_RENDERER_RASTER_CPU_SORT: number;
/**
 * Rasterization-based rendering with compute shader sorting. WebGPU only. Experimental with
 * limited functionality.
 *
 * @type {number}
 * @category Graphics
 */
declare const GSPLAT_RENDERER_RASTER_GPU_SORT: number;
/**
 * Full compute pipeline for rendering. WebGPU only. Experimental with limited functionality.
 *
 * @type {number}
 * @category Graphics
 */
declare const GSPLAT_RENDERER_COMPUTE: number;
/**
 * No debug rendering for Gaussian splats. Normal rendering mode.
 *
 * @type {number}
 * @category Graphics
 */
declare const GSPLAT_DEBUG_NONE: number;
/**
 * Debug rendering that colorizes Gaussian splats by their selected LOD level.
 *
 * @type {number}
 * @category Graphics
 */
declare const GSPLAT_DEBUG_LOD: number;
/**
 * Debug rendering that assigns a random color per spherical harmonics update pass,
 * visualizing when SH color updates occur.
 *
 * @type {number}
 * @category Graphics
 */
declare const GSPLAT_DEBUG_SH_UPDATE: number;
/**
 * Debug heatmap rendering for the compute rasterizer. Visualizes the average number of splats
 * processed per pixel in each tile as a blue-to-red color ramp. Only supported with
 * {@link GSPLAT_RENDERER_COMPUTE}.
 *
 * @type {number}
 * @category Graphics
 */
declare const GSPLAT_DEBUG_HEATMAP: number;

/**
 * When resizing the window the size of the canvas will not change.
 */
declare const FILLMODE_NONE: "NONE";
/**
 * When resizing the window the size of the canvas will change to fill the window exactly.
 */
declare const FILLMODE_FILL_WINDOW: "FILL_WINDOW";
/**
 * When resizing the window the size of the canvas will change to fill the window as best it can,
 * while maintaining the same aspect ratio.
 */
declare const FILLMODE_KEEP_ASPECT: "KEEP_ASPECT";
/**
 * When the canvas is resized the resolution of the canvas will change to match the size of the
 * canvas.
 */
declare const RESOLUTION_AUTO: "AUTO";
/**
 * When the canvas is resized the resolution of the canvas will remain at the same value and the
 * output will just be scaled to fit the canvas.
 */
declare const RESOLUTION_FIXED: "FIXED";

/**
 * Specifies different color tints for the hover, pressed and inactive states.
 *
 * @category User Interface
 */
declare const BUTTON_TRANSITION_MODE_TINT: 0;
/**
 * Specifies different sprites for the hover, pressed and inactive states.
 *
 * @category User Interface
 */
declare const BUTTON_TRANSITION_MODE_SPRITE_CHANGE: 1;

/**
 * A {@link ElementComponent} that contains child {@link ElementComponent}s.
 *
 * @category User Interface
 */
declare const ELEMENTTYPE_GROUP: "group";
/**
 * A {@link ElementComponent} that displays an image.
 *
 * @category User Interface
 */
declare const ELEMENTTYPE_IMAGE: "image";
/**
 * A {@link ElementComponent} that displays text.
 *
 * @category User Interface
 */
declare const ELEMENTTYPE_TEXT: "text";
/**
 * Fit the content exactly to Element's bounding box.
 *
 * @category User Interface
 */
declare const FITMODE_STRETCH: "stretch";
/**
 * Fit the content within the Element's bounding box while preserving its aspect ratio.
 *
 * @category User Interface
 */
declare const FITMODE_CONTAIN: "contain";
/**
 * Fit the content to cover the entire Element's bounding box while preserving its aspect ratio.
 *
 * @category User Interface
 */
declare const FITMODE_COVER: "cover";

/**
 * Specified degree of freedom has free movement.
 *
 * @ignore
 */
declare const MOTION_FREE: "free";
/**
 * Specified degree of freedom has limited movement.
 *
 * @ignore
 */
declare const MOTION_LIMITED: "limited";
/**
 * Specified degree of freedom is locked and allows no movement.
 *
 * @ignore
 */
declare const MOTION_LOCKED: "locked";

/**
 * Disable all fitting logic.
 *
 * @category User Interface
 */
declare const FITTING_NONE: 0;
/**
 * Stretch child elements to fit the parent container.
 *
 * @category User Interface
 */
declare const FITTING_STRETCH: 1;
/**
 * Shrink child elements to fit the parent container.
 *
 * @category User Interface
 */
declare const FITTING_SHRINK: 2;
/**
 * Apply both STRETCH and SHRINK fitting logic where applicable.
 *
 * @category User Interface
 */
declare const FITTING_BOTH: 3;

/**
 * Rigid body has infinite mass and cannot move.
 *
 * @category Physics
 */
declare const BODYTYPE_STATIC: "static";
/**
 * Rigid body is simulated according to applied forces.
 *
 * @category Physics
 */
declare const BODYTYPE_DYNAMIC: "dynamic";
/**
 * Rigid body has infinite mass and does not respond to forces but can still be moved by setting
 * its velocity or position.
 *
 * @category Physics
 */
declare const BODYTYPE_KINEMATIC: "kinematic";
// Internal rigid body collision-object flags (bit flags; the names and values suggest they
// mirror the underlying physics engine's collision flags — verify against the engine source).
declare const BODYFLAG_STATIC_OBJECT: 1;
declare const BODYFLAG_KINEMATIC_OBJECT: 2;
declare const BODYFLAG_NORESPONSE_OBJECT: 4;
// Internal rigid body activation states. These are sequential ids (1..5), not bit flags.
declare const BODYSTATE_ACTIVE_TAG: 1;
declare const BODYSTATE_ISLAND_SLEEPING: 2;
declare const BODYSTATE_WANTS_DEACTIVATION: 3;
declare const BODYSTATE_DISABLE_DEACTIVATION: 4;
declare const BODYSTATE_DISABLE_SIMULATION: 5;
// Collision group bit flags. Note that BODYGROUP_DEFAULT and BODYGROUP_DYNAMIC share the
// value 1, so dynamic is the default group.
declare const BODYGROUP_NONE: 0;
declare const BODYGROUP_DEFAULT: 1;
declare const BODYGROUP_DYNAMIC: 1;
declare const BODYGROUP_STATIC: 2;
declare const BODYGROUP_KINEMATIC: 4;
declare const BODYGROUP_ENGINE_1: 8;
declare const BODYGROUP_TRIGGER: 16;
declare const BODYGROUP_ENGINE_2: 32;
declare const BODYGROUP_ENGINE_3: 64;
declare const BODYGROUP_USER_1: 128;
declare const BODYGROUP_USER_2: 256;
declare const BODYGROUP_USER_3: 512;
declare const BODYGROUP_USER_4: 1024;
declare const BODYGROUP_USER_5: 2048;
declare const BODYGROUP_USER_6: 4096;
declare const BODYGROUP_USER_7: 8192;
declare const BODYGROUP_USER_8: 16384;
// Collision masks. BODYMASK_NOT_STATIC and BODYMASK_NOT_STATIC_KINEMATIC are computed at
// runtime, so only their widened `number` type is visible in this declaration file.
declare const BODYMASK_NONE: 0;
declare const BODYMASK_ALL: 65535;
declare const BODYMASK_STATIC: 2;
declare const BODYMASK_NOT_STATIC: number;
declare const BODYMASK_NOT_STATIC_KINEMATIC: number;

/**
 * Always use the application's resolution as the resolution for the {@link ScreenComponent}.
 *
 * @category User Interface
 */
declare const SCALEMODE_NONE: "none";
/**
 * Scale the {@link ScreenComponent} when the application's resolution is different than the
 * ScreenComponent's referenceResolution.
 *
 * @category User Interface
 */
declare const SCALEMODE_BLEND: "blend";

/**
 * Content does not scroll any further than its bounds.
 *
 * @category User Interface
 */
declare const SCROLL_MODE_CLAMP: 0;
/**
 * Content scrolls past its bounds and then gently bounces back.
 *
 * @category User Interface
 */
declare const SCROLL_MODE_BOUNCE: 1;
/**
 * Content can scroll forever.
 *
 * @category User Interface
 */
declare const SCROLL_MODE_INFINITE: 2;
/**
 * The scrollbar will be visible all the time.
 *
 * @category User Interface
 */
declare const SCROLLBAR_VISIBILITY_SHOW_ALWAYS: 0;
/**
 * The scrollbar will be visible only when content exceeds the size of the viewport.
 *
 * @category User Interface
 */
declare const SCROLLBAR_VISIBILITY_SHOW_WHEN_REQUIRED: 1;

/**
 * A {@link SpriteComponent} that displays a single frame from a sprite asset.
 *
 * @category Graphics
 */
declare const SPRITETYPE_SIMPLE: "simple";
/**
 * A {@link SpriteComponent} that renders sprite animations.
 *
 * @category Graphics
 */
declare const SPRITETYPE_ANIMATED: "animated";

/**
 * A stepped interpolation scheme.
 *
 * @category Animation
 */
declare const INTERPOLATION_STEP: 0;
/**
 * A linear interpolation scheme.
 *
 * @category Animation
 */
declare const INTERPOLATION_LINEAR: 1;
/**
 * A cubic spline interpolation scheme.
 *
 * @category Animation
 */
declare const INTERPOLATION_CUBIC: 2;

/**
 * Used to set the anim state graph transition interruption source to no state.
 *
 * @category Animation
 */
declare const ANIM_INTERRUPTION_NONE: "NONE";
/**
 * Used to set the anim state graph transition interruption source as the previous state only.
 *
 * @category Animation
 */
declare const ANIM_INTERRUPTION_PREV: "PREV_STATE";
/**
 * Used to set the anim state graph transition interruption source as the next state only.
 *
 * @category Animation
 */
declare const ANIM_INTERRUPTION_NEXT: "NEXT_STATE";
/**
 * Used to set the anim state graph transition interruption sources as the previous state followed
 * by the next state.
 *
 * @category Animation
 */
declare const ANIM_INTERRUPTION_PREV_NEXT: "PREV_STATE_NEXT_STATE";
/**
 * Used to set the anim state graph transition interruption sources as the next state followed by
 * the previous state.
 *
 * @category Animation
 */
declare const ANIM_INTERRUPTION_NEXT_PREV: "NEXT_STATE_PREV_STATE";
/**
 * Used to set an anim state graph transition condition predicate as '>'.
 *
 * @category Animation
 */
declare const ANIM_GREATER_THAN: "GREATER_THAN";
/**
 * Used to set an anim state graph transition condition predicate as '<'.
 *
 * @category Animation
 */
declare const ANIM_LESS_THAN: "LESS_THAN";
/**
 * Used to set an anim state graph transition condition predicate as '>='.
 *
 * @category Animation
 */
declare const ANIM_GREATER_THAN_EQUAL_TO: "GREATER_THAN_EQUAL_TO";
/**
 * Used to set an anim state graph transition condition predicate as '<='.
 *
 * @category Animation
 */
declare const ANIM_LESS_THAN_EQUAL_TO: "LESS_THAN_EQUAL_TO";
/**
 * Used to set an anim state graph transition condition predicate as '==='.
 *
 * @category Animation
 */
declare const ANIM_EQUAL_TO: "EQUAL_TO";
/**
 * Used to set an anim state graph transition condition predicate as '!=='.
 *
 * @category Animation
 */
declare const ANIM_NOT_EQUAL_TO: "NOT_EQUAL_TO";
/**
 * Used to set an anim state graph parameter as type integer.
 *
 * @category Animation
 */
declare const ANIM_PARAMETER_INTEGER: "INTEGER";
/**
 * Used to set an anim state graph parameter as type float.
 *
 * @category Animation
 */
declare const ANIM_PARAMETER_FLOAT: "FLOAT";
/**
 * Used to set an anim state graph parameter as type boolean.
 *
 * @category Animation
 */
declare const ANIM_PARAMETER_BOOLEAN: "BOOLEAN";
/**
 * Used to set an anim state graph parameter as type trigger.
 *
 * @category Animation
 */
declare const ANIM_PARAMETER_TRIGGER: "TRIGGER";
/**
 * @type {string}
 * @category Animation
 */
declare const ANIM_BLEND_1D: string;
/**
 * @type {string}
 * @category Animation
 */
declare const ANIM_BLEND_2D_DIRECTIONAL: string;
/**
 * @type {string}
 * @category Animation
 */
declare const ANIM_BLEND_2D_CARTESIAN: string;
/**
 * @type {string}
 * @category Animation
 */
declare const ANIM_BLEND_DIRECT: string;
/**
 * The starting state in an anim state graph layer.
 *
 * @category Animation
 */
declare const ANIM_STATE_START: "START";
/**
 * The ending state in an anim state graph layer.
 *
 * @category Animation
 */
declare const ANIM_STATE_END: "END";
/**
 * Used to indicate any state in an anim state graph layer.
 *
 * @category Animation
 */
declare const ANIM_STATE_ANY: "ANY";
/**
 * The anim state graph control state names — presumably {@link ANIM_STATE_START},
 * {@link ANIM_STATE_END} and {@link ANIM_STATE_ANY}; verify against the engine source.
 */
declare const ANIM_CONTROL_STATES: string[];
/**
 * Used to indicate that a layer's animations should overwrite all previous layers.
 *
 * @category Animation
 */
declare const ANIM_LAYER_OVERWRITE: "OVERWRITE";
/**
 * Used to indicate that a layer's animations should blend additively with previous layers.
 *
 * @category Animation
 */
declare const ANIM_LAYER_ADDITIVE: "ADDITIVE";

// Regular expression — presumably used to detect absolute URLs (name-based; the pattern
// itself is not visible in this declaration file — verify against the engine source).
declare const ABSOLUTE_URL: RegExp;
/**
 * Asset type name for animation.
 *
 * @category Asset
 */
declare const ASSET_ANIMATION: "animation";
/**
 * Asset type name for audio.
 *
 * @category Asset
 */
declare const ASSET_AUDIO: "audio";
/**
 * Asset type name for image.
 *
 * @category Asset
 */
declare const ASSET_IMAGE: "image";
/**
 * Asset type name for json.
 *
 * @category Asset
 */
declare const ASSET_JSON: "json";
/**
 * Asset type name for model.
 *
 * @category Asset
 */
declare const ASSET_MODEL: "model";
/**
 * Asset type name for material.
 *
 * @category Asset
 */
declare const ASSET_MATERIAL: "material";
/**
 * Asset type name for text.
 *
 * @category Asset
 */
declare const ASSET_TEXT: "text";
/**
 * Asset type name for texture.
 *
 * @category Asset
 */
declare const ASSET_TEXTURE: "texture";
/**
 * Asset type name for textureatlas.
 *
 * @category Asset
 */
declare const ASSET_TEXTUREATLAS: "textureatlas";
/**
 * Asset type name for cubemap.
 *
 * @category Asset
 */
declare const ASSET_CUBEMAP: "cubemap";
/**
 * Asset type name for shader.
 *
 * @category Asset
 */
declare const ASSET_SHADER: "shader";
/**
 * Asset type name for CSS.
 *
 * @category Asset
 */
declare const ASSET_CSS: "css";
/**
 * Asset type name for HTML.
 *
 * @category Asset
 */
declare const ASSET_HTML: "html";
/**
 * Asset type name for script.
 *
 * @category Asset
 */
declare const ASSET_SCRIPT: "script";
/**
 * Asset type name for a container.
 *
 * @category Asset
 */
declare const ASSET_CONTAINER: "container";

// Font format identifiers: "msdf" (presumably multi-channel signed distance field) and
// "bitmap" (name-based; verify against the engine source).
declare const FONT_MSDF: "msdf";
declare const FONT_BITMAP: "bitmap";

/**
 * Inline - always available type of session. It has limited features availability and is rendered
 * into HTML element.
 *
 * @category XR
 */
declare const XRTYPE_INLINE: "inline";
/**
 * Immersive VR - session that provides exclusive access to VR device with best available tracking
 * features.
 *
 * @category XR
 */
declare const XRTYPE_VR: "immersive-vr";
/**
 * Immersive AR - session that provides exclusive access to VR/AR device that is intended to be
 * blended with real-world environment.
 *
 * @category XR
 */
declare const XRTYPE_AR: "immersive-ar";
/**
 * Viewer - always supported space with some basic tracking capabilities.
 *
 * @category XR
 */
declare const XRSPACE_VIEWER: "viewer";
/**
 * Local - represents a tracking space with a native origin near the viewer at the time of
 * creation. The exact position and orientation will be initialized based on the conventions of the
 * underlying platform. When using this reference space the user is not expected to move beyond
 * their initial position much, if at all, and tracking is optimized for that purpose. For devices
 * with 6DoF tracking, local reference spaces should emphasize keeping the origin stable relative
 * to the user's environment.
 *
 * @category XR
 */
declare const XRSPACE_LOCAL: "local";
/**
 * Local Floor - represents a tracking space with a native origin at the floor in a safe position
 * for the user to stand. The y axis equals 0 at floor level, with the x and z position and
 * orientation initialized based on the conventions of the underlying platform. Floor level value
 * might be estimated by the underlying platform. When using this reference space, the user is not
 * expected to move beyond their initial position much, if at all, and tracking is optimized for
 * that purpose. For devices with 6DoF tracking, local-floor reference spaces should emphasize
 * keeping the origin stable relative to the user's environment.
 *
 * @category XR
 */
declare const XRSPACE_LOCALFLOOR: "local-floor";
/**
 * Bounded Floor - represents a tracking space with its native origin at the floor, where the user
 * is expected to move within a pre-established boundary. Tracking in a bounded-floor reference
 * space is optimized for keeping the native origin and bounds geometry stable relative to the
 * user's environment.
 *
 * @category XR
 */
declare const XRSPACE_BOUNDEDFLOOR: "bounded-floor";
/**
 * Unbounded - represents a tracking space where the user is expected to move freely around their
 * environment, potentially even long distances from their starting point. Tracking in an unbounded
 * reference space is optimized for stability around the user's current position, and as such the
 * native origin may drift over time.
 *
 * @category XR
 */
declare const XRSPACE_UNBOUNDED: "unbounded";
/**
 * Gaze - indicates the target ray will originate at the viewer and follow the direction it is
 * facing. This is commonly referred to as a "gaze input" device in the context of head-mounted
 * displays.
 *
 * @category XR
 */
declare const XRTARGETRAY_GAZE: "gaze";
/**
 * Screen - indicates that the input source was an interaction with the canvas element associated
 * with an inline session's output context, such as a mouse click or touch event.
 *
 * @category XR
 */
declare const XRTARGETRAY_SCREEN: "screen";
/**
 * Tracked Pointer - indicates that the target ray originates from either a handheld device or
 * other hand-tracking mechanism and represents that the user is using their hands or the held
 * device for pointing.
 *
 * @category XR
 */
declare const XRTARGETRAY_POINTER: "tracked-pointer";
/**
 * None - view associated with a monoscopic screen, such as mobile phone screens.
 *
 * @category XR
 */
declare const XREYE_NONE: "none";
/**
 * Left - view associated with left eye.
 *
 * @category XR
 */
declare const XREYE_LEFT: "left";
/**
 * Right - view associated with right eye.
 *
 * @category XR
 */
declare const XREYE_RIGHT: "right";
/**
 * None - input source is not meant to be held in hands.
 *
 * @category XR
 */
declare const XRHAND_NONE: "none";
/**
 * Left - indicates that input source is meant to be held in left hand.
 *
 * @category XR
 */
declare const XRHAND_LEFT: "left";
/**
 * Right - indicates that input source is meant to be held in right hand.
 *
 * @category XR
 */
declare const XRHAND_RIGHT: "right";
/**
 * Point - indicates that the hit test results will be computed based on the feature points
 * detected by the underlying Augmented Reality system.
 *
 * @category XR
 */
declare const XRTRACKABLE_POINT: "point";
/**
 * Plane - indicates that the hit test results will be computed based on the planes detected by the
 * underlying Augmented Reality system.
 *
 * @category XR
 */
declare const XRTRACKABLE_PLANE: "plane";
/**
 * Mesh - indicates that the hit test results will be computed based on the meshes detected by the
 * underlying Augmented Reality system.
 *
 * @category XR
 */
declare const XRTRACKABLE_MESH: "mesh";
/**
 * CPU - indicates that depth sensing preferred usage is CPU. This usage path is guaranteed to be
 * supported.
 *
 * @category XR
 */
declare const XRDEPTHSENSINGUSAGE_CPU: "cpu-optimized";
/**
 * GPU - indicates that depth sensing preferred usage is GPU. Unlike
 * {@link XRDEPTHSENSINGUSAGE_CPU}, this usage path is not guaranteed to be supported.
 *
 * @category XR
 */
declare const XRDEPTHSENSINGUSAGE_GPU: "gpu-optimized";
/**
 * Luminance Alpha - indicates that depth sensing preferred raw data format is Luminance Alpha
 * (8-bit luminance + 8-bit alpha). This format is guaranteed to be supported.
 *
 * @category XR
 */
declare const XRDEPTHSENSINGFORMAT_L8A8: "luminance-alpha";
/**
 * Unsigned Short - indicates that depth sensing preferred raw data format is Unsigned Short (16-bit).
 *
 * @category XR
 */
declare const XRDEPTHSENSINGFORMAT_R16U: "unsigned-short";
/**
 * Float 32 - indicates that depth sensing preferred raw data format is Float (32-bit).
 *
 * @category XR
 */
declare const XRDEPTHSENSINGFORMAT_F32: "float32";

/**
 * Base class that implements reference counting for objects.
 */
declare class RefCountedObject {
    /**
     * The current number of references held to this object.
     *
     * @type {number}
     * @private
     */
    private _refCount;
    /**
     * Increments the reference counter.
     */
    incRefCount(): void;
    /**
     * Decrements the reference counter. NOTE(review): reaching zero does not appear to
     * release the object from this declaration alone — confirm disposal behavior in the
     * implementation.
     */
    decRefCount(): void;
    /**
     * Gets the current reference count.
     *
     * @type {number}
     */
    get refCount(): number;
}

/**
 * Internal version-tracking record pairing a global identifier with a revision
 * number. Used by {@link VersionedObject} to detect changes.
 */
declare class Version {
    /**
     * Global identifier of the tracked object.
     *
     * @type {number}
     */
    globalId: number;
    /**
     * Revision counter of the tracked object.
     *
     * @type {number}
     */
    revision: number;
    /**
     * Tests this version against another for equality.
     *
     * @param {Version} other - The version to compare against.
     * @returns {boolean} True if the two versions are equal.
     */
    equals(other: Version): boolean;
    /**
     * Copies the state of another version into this one.
     *
     * @param {Version} other - The version to copy from.
     */
    copy(other: Version): void;
    /**
     * Resets this version to its initial state.
     */
    reset(): void;
}

/**
 * Internal base for objects whose changes are tracked via a {@link Version}.
 */
declare class VersionedObject {
    /**
     * The current version of this object.
     *
     * @type {Version}
     */
    version: Version;
    /**
     * Increments the version to mark this object as changed — presumably bumps
     * {@link Version#revision}; confirm in implementation.
     */
    increment(): void;
}

/**
 * The scope for a variable.
 *
 * @category Graphics
 */
declare class ScopeId {
    /**
     * Create a new ScopeId instance.
     *
     * @param {string} name - The variable name.
     */
    constructor(name: string);
    /**
     * The variable name.
     *
     * @type {string}
     */
    name: string;
    /**
     * The current value of the variable. Read and written via {@link getValue} and
     * {@link setValue}.
     *
     * @type {*}
     */
    value: any;
    /**
     * Version tracking for this scope — presumably incremented when the value changes;
     * confirm in implementation.
     *
     * @type {VersionedObject}
     */
    versionObject: VersionedObject;
    /**
     * Custom JSON serialization hook (called by JSON.stringify).
     */
    toJSON(key: any): any;
    /**
     * Set variable value.
     *
     * @param {*} value - The value.
     */
    setValue(value: any): void;
    /**
     * Get variable value.
     *
     * @returns {*} The value.
     */
    getValue(): any;
}

/**
 * @import { EventHandler } from './event-handler.js'
 * @import { HandleEventCallback } from './event-handler.js'
 */
/**
 * Event Handle that is created by {@link EventHandler} and can be used for easier event removal
 * and management.
 *
 * @example
 * const evt = obj.on('test', (a, b) => {
 *     console.log(a + b);
 * });
 * obj.fire('test');
 *
 * evt.off(); // easy way to remove this event
 * obj.fire('test'); // this will not trigger an event
 * @example
 * // store an array of event handles
 * let events = [];
 *
 * events.push(objA.on('testA', () => {}));
 * events.push(objB.on('testB', () => {}));
 *
 * // when needed, remove all events
 * events.forEach((evt) => {
 *     evt.off();
 * });
 * events = [];
 */
declare class EventHandle {
    /**
     * @param {EventHandler} handler - source object of the event.
     * @param {string} name - Name of the event.
     * @param {HandleEventCallback} callback - Function that is called when event is fired.
     * @param {object} scope - Object that is used as `this` when event is fired.
     * @param {boolean} [once] - If this is a single event and will be removed after event is fired.
     */
    constructor(handler: EventHandler, name: string, callback: HandleEventCallback, scope: object, once?: boolean);
    /**
     * @type {EventHandler}
     * @private
     */
    private handler;
    /**
     * @type {string}
     * @ignore
     */
    name: string;
    /**
     * @type {HandleEventCallback}
     * @ignore
     */
    callback: HandleEventCallback;
    /**
     * @type {object}
     * @ignore
     */
    scope: object;
    /**
     * @type {boolean}
     * @ignore
     */
    _once: boolean;
    /**
     * True if event has been removed.
     * @type {boolean}
     * @private
     */
    private _removed;
    /**
     * Remove this event from its handler.
     */
    off(): void;
    /**
     * Attach an event handler to an event of the handler this handle belongs to.
     *
     * @param {string} name - Name of the event to bind the callback to.
     * @param {HandleEventCallback} callback - Function that is called when the event is fired.
     * @param {object} [scope] - Object to use as `this` when the event is fired. Presumably
     * defaults to this handle — confirm in implementation.
     * @returns {EventHandle} Can be used for removing the event in the future.
     */
    on(name: string, callback: HandleEventCallback, scope?: object): EventHandle;
    /**
     * Attach an event handler to an event of the handler this handle belongs to. The
     * handler is removed after being fired once.
     *
     * @param {string} name - Name of the event to bind the callback to.
     * @param {HandleEventCallback} callback - Function that is called when the event is fired.
     * @param {object} [scope] - Object to use as `this` when the event is fired. Presumably
     * defaults to this handle — confirm in implementation.
     * @returns {EventHandle} Can be used for removing the event in the future.
     */
    once(name: string, callback: HandleEventCallback, scope?: object): EventHandle;
    /**
     * Mark if event has been removed.
     *
     * @type {boolean}
     * @ignore
     */
    set removed(value: boolean);
    /**
     * True if event has been removed.
     *
     * @type {boolean}
     * @ignore
     */
    get removed(): boolean;
    /**
     * Custom JSON serialization hook (called by JSON.stringify).
     */
    toJSON(key: any): any;
}

/**
 * Callback used by {@link EventHandler} functions. Note the callback is limited to 8 arguments,
 * all optional and untyped; they are passed through from {@link EventHandler#fire}.
 */
type HandleEventCallback = (arg1?: any, arg2?: any, arg3?: any, arg4?: any, arg5?: any, arg6?: any, arg7?: any, arg8?: any) => void;
/**
 * @callback HandleEventCallback
 * Callback used by {@link EventHandler} functions. Note the callback is limited to 8 arguments.
 * @param {any} [arg1] - First argument that is passed from caller.
 * @param {any} [arg2] - Second argument that is passed from caller.
 * @param {any} [arg3] - Third argument that is passed from caller.
 * @param {any} [arg4] - Fourth argument that is passed from caller.
 * @param {any} [arg5] - Fifth argument that is passed from caller.
 * @param {any} [arg6] - Sixth argument that is passed from caller.
 * @param {any} [arg7] - Seventh argument that is passed from caller.
 * @param {any} [arg8] - Eighth argument that is passed from caller.
 * @returns {void}
 */
/**
 * Abstract base class that implements functionality for event handling.
 *
 * ```javascript
 * const obj = new EventHandlerSubclass();
 *
 * // subscribe to an event
 * obj.on('hello', (str) => {
 *     console.log('event hello is fired', str);
 * });
 *
 * // fire event
 * obj.fire('hello', 'world');
 * ```
 */
declare class EventHandler {
    /**
     * Registered event handles, keyed by event name.
     *
     * @type {Map<string,Array<EventHandle>>}
     * @private
     */
    private _callbacks;
    /**
     * Event handles tracked while events are being fired — presumably so that removal
     * during dispatch is safe; confirm in implementation.
     *
     * @type {Map<string,Array<EventHandle>>}
     * @private
     */
    private _callbackActive;
    /**
     * Reinitialize the event handler.
     * @ignore
     */
    initEventHandler(): void;
    /**
     * Registers a new event handler.
     *
     * @param {string} name - Name of the event to bind the callback to.
     * @param {HandleEventCallback} callback - Function that is called when event is fired. Note
     * the callback is limited to 8 arguments.
     * @param {object} scope - Object to use as 'this' when the event is fired.
     * @param {boolean} once - If true, the callback will be unbound after being fired once.
     * @returns {EventHandle} Created {@link EventHandle}.
     * @ignore
     */
    _addCallback(name: string, callback: HandleEventCallback, scope: object, once: boolean): EventHandle;
    /**
     * Attach an event handler to an event.
     *
     * @param {string} name - Name of the event to bind the callback to.
     * @param {HandleEventCallback} callback - Function that is called when event is fired. Note
     * the callback is limited to 8 arguments.
     * @param {object} [scope] - Object to use as 'this' when the event is fired, defaults to
     * current this.
     * @returns {EventHandle} Can be used for removing event in the future.
     * @example
     * obj.on('test', (a, b) => {
     *     console.log(a + b);
     * });
     * obj.fire('test', 1, 2); // prints 3 to the console
     * @example
     * const evt = obj.on('test', (a, b) => {
     *     console.log(a + b);
     * });
     * // some time later
     * evt.off();
     */
    on(name: string, callback: HandleEventCallback, scope?: object): EventHandle;
    /**
     * Attach an event handler to an event. This handler will be removed after being fired once.
     *
     * @param {string} name - Name of the event to bind the callback to.
     * @param {HandleEventCallback} callback - Function that is called when event is fired. Note
     * the callback is limited to 8 arguments.
     * @param {object} [scope] - Object to use as 'this' when the event is fired, defaults to
     * current this.
     * @returns {EventHandle} Can be used for removing event in the future.
     * @example
     * obj.once('test', (a, b) => {
     *     console.log(a + b);
     * });
     * obj.fire('test', 1, 2); // prints 3 to the console
     * obj.fire('test', 1, 2); // not going to get handled
     */
    once(name: string, callback: HandleEventCallback, scope?: object): EventHandle;
    /**
     * Detach an event handler from an event. If callback is not provided then all callbacks are
     * unbound from the event, if scope is not provided then all events with the callback will be
     * unbound.
     *
     * @param {string} [name] - Name of the event to unbind.
     * @param {HandleEventCallback} [callback] - Function to be unbound.
     * @param {object} [scope] - Scope that was used as the this when the event is fired.
     * @returns {EventHandler} Self for chaining.
     * @example
     * const handler = () => {};
     * obj.on('test', handler);
     *
     * obj.off(); // Removes all events
     * obj.off('test'); // Removes all events called 'test'
     * obj.off('test', handler); // Removes all handler functions, called 'test'
     * obj.off('test', handler, this); // Removes all handler functions, called 'test' with scope this
     */
    off(name?: string, callback?: HandleEventCallback, scope?: object): EventHandler;
    /**
     * Detach an event handler from an event using EventHandle instance. More optimal remove
     * as it does not have to scan callbacks array.
     *
     * @param {EventHandle} handle - Handle of event.
     * @returns {EventHandler} Self for chaining.
     * @ignore
     */
    offByHandle(handle: EventHandle): this;
    /**
     * Fire an event, all additional arguments are passed on to the event listener.
     *
     * @param {string} name - Name of event to fire.
     * @param {any} [arg1] - First argument that is passed to the event handler.
     * @param {any} [arg2] - Second argument that is passed to the event handler.
     * @param {any} [arg3] - Third argument that is passed to the event handler.
     * @param {any} [arg4] - Fourth argument that is passed to the event handler.
     * @param {any} [arg5] - Fifth argument that is passed to the event handler.
     * @param {any} [arg6] - Sixth argument that is passed to the event handler.
     * @param {any} [arg7] - Seventh argument that is passed to the event handler.
     * @param {any} [arg8] - Eighth argument that is passed to the event handler.
     * @returns {EventHandler} Self for chaining.
     * @example
     * obj.fire('test', 'This is the message');
     */
    fire(name: string, arg1?: any, arg2?: any, arg3?: any, arg4?: any, arg5?: any, arg6?: any, arg7?: any, arg8?: any): EventHandler;
    /**
     * Test if there are any handlers bound to an event name.
     *
     * @param {string} name - The name of the event to test.
     * @returns {boolean} True if the object has handlers bound to the specified event name.
     * @example
     * obj.on('test', () => {}); // bind an event to 'test'
     * obj.hasEvent('test'); // returns true
     * obj.hasEvent('hello'); // returns false
     */
    hasEvent(name: string): boolean;
}

/**
 * A TextureView specifies a texture and a subset of its mip levels and array layers. It is used
 * when binding textures to compute shaders to specify which portion of the texture should be
 * accessed. Create a TextureView using {@link Texture#getView}.
 *
 * Note: TextureView is only supported on WebGPU. On WebGL, the full texture is always bound and
 * this class has no effect.
 *
 * @category Graphics
 */
declare class TextureView {
    /**
     * Create a new TextureView instance. Use {@link Texture#getView} instead of calling this
     * constructor directly.
     *
     * @param {Texture} texture - The texture this view references.
     * @param {number} [baseMipLevel] - The first mip level accessible to the view. Defaults to 0.
     * @param {number} [mipLevelCount] - The number of mip levels accessible to the view. Defaults
     * to 1.
     * @param {number} [baseArrayLayer] - The first array layer accessible to the view. Defaults to
     * 0.
     * @param {number} [arrayLayerCount] - The number of array layers accessible to the view.
     * Defaults to 1.
     *
     * Note: on WebGL these parameters have no effect — the full texture is always bound.
     * @ignore
     */
    constructor(texture: Texture, baseMipLevel?: number, mipLevelCount?: number, baseArrayLayer?: number, arrayLayerCount?: number);
    /**
     * The texture this view references.
     *
     * @type {Texture}
     * @readonly
     */
    readonly texture: Texture;
    /**
     * The first mip level accessible to the view.
     *
     * @type {number}
     * @readonly
     */
    readonly baseMipLevel: number;
    /**
     * The number of mip levels accessible to the view.
     *
     * @type {number}
     * @readonly
     */
    readonly mipLevelCount: number;
    /**
     * The first array layer accessible to the view.
     *
     * @type {number}
     * @readonly
     */
    readonly baseArrayLayer: number;
    /**
     * The number of array layers accessible to the view.
     *
     * @type {number}
     * @readonly
     */
    readonly arrayLayerCount: number;
    /**
     * A unique numeric key for this view configuration, used for caching.
     *
     * @type {number}
     * @ignore
     */
    key: number;
}

/**
 * Represents a texture, which is typically an image composed of pixels (texels). Textures are
 * fundamental resources for rendering graphical objects. They are commonly used by
 * {@link Material}s and sampled in {@link Shader}s (usually fragment shaders) to define the visual
 * appearance of a 3D model's surface. Beyond storing color images, textures can hold various data
 * types like normal maps, environment maps (cubemaps), or custom data for shader computations. Key
 * properties control how the texture data is sampled, including filtering modes and coordinate
 * wrapping.
 *
 * Note on **HDR texture format** support:
 * 1. **As textures**:
 *     - float (i.e. {@link PIXELFORMAT_RGBA32F}), half-float (i.e. {@link PIXELFORMAT_RGBA16F}) and
 * small-float ({@link PIXELFORMAT_111110F}) formats are always supported on both WebGL2 and WebGPU
 * with point sampling.
 *     - half-float and small-float formats are always supported on WebGL2 and WebGPU with linear
 * sampling.
 *     - float formats are supported on WebGL2 and WebGPU with linear sampling only if
 * {@link GraphicsDevice#textureFloatFilterable} is true.
 *     - {@link PIXELFORMAT_RGB9E5} is a compact HDR format with shared exponent, supported for
 * sampling on both WebGL2 and WebGPU, but cannot be used as a render target.
 *
 * 2. **As renderable textures** that can be used as color buffers in a {@link RenderTarget}:
 *     - on WebGPU, rendering to float and half-float formats is always supported.
 *     - on WebGPU, rendering to small-float format is supported only if
 * {@link GraphicsDevice#textureRG11B10Renderable} is true.
 *     - on WebGL2, rendering to these 3 formats is supported only if
 * {@link GraphicsDevice#textureFloatRenderable} is true.
 *     - on WebGL2, if {@link GraphicsDevice#textureFloatRenderable} is false, but
 * {@link GraphicsDevice#textureHalfFloatRenderable} is true, rendering to half-float formats only
 * is supported. This is the case of many mobile iOS devices.
 *     - you can determine available renderable HDR format using
 * {@link GraphicsDevice#getRenderableHdrFormat}.
 *     - {@link PIXELFORMAT_RGB10A2} provides 10 bits per RGB channel with 2-bit alpha, offering
 * higher precision than {@link PIXELFORMAT_RGBA8} at the same memory cost. It is renderable on
 * both WebGL2 and WebGPU. {@link PIXELFORMAT_RGB10A2U} is the unsigned integer variant.
 * @category Graphics
 */
declare class Texture {
    /**
     * Creates a 2D data texture with nearest filtering, clamp-to-edge addressing and no mipmaps.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device used to manage this texture.
     * @param {string} name - The name of the texture.
     * @param {number} width - The width of the texture in pixels.
     * @param {number} height - The height of the texture in pixels.
     * @param {number} format - The pixel format of the texture.
     * @param {Uint8Array[]|Uint16Array[]|Uint32Array[]|Float32Array[]|HTMLCanvasElement[]|HTMLImageElement[]|HTMLVideoElement[]|Uint8Array[][]} [levels]
     * - Optional initial mip level data.
     * @returns {Texture} The created texture.
     * @ignore
     */
    static createDataTexture2D(graphicsDevice: GraphicsDevice, name: string, width: number, height: number, format: number, levels?: Uint8Array[] | Uint16Array[] | Uint32Array[] | Float32Array[] | HTMLCanvasElement[] | HTMLImageElement[] | HTMLVideoElement[] | Uint8Array[][]): Texture;
    /**
     * Create a new Texture instance.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device used to manage this texture.
     * @param {object} [options] - Object for passing optional arguments.
     * @param {string} [options.name] - The name of the texture. Defaults to null.
     * @param {number} [options.width] - The width of the texture in pixels. Defaults to 4.
     * @param {number} [options.height] - The height of the texture in pixels. Defaults to 4.
     * @param {number} [options.depth] - The number of depth slices in a 3D texture.
     * @param {number} [options.format] - The pixel format of the texture. Can be:
     *
     * - {@link PIXELFORMAT_R8}
     * - {@link PIXELFORMAT_RG8}
     * - {@link PIXELFORMAT_RGB565}
     * - {@link PIXELFORMAT_RGBA5551}
     * - {@link PIXELFORMAT_RGBA4}
     * - {@link PIXELFORMAT_RGB8}
     * - {@link PIXELFORMAT_RGBA8}
     * - {@link PIXELFORMAT_DXT1}
     * - {@link PIXELFORMAT_DXT3}
     * - {@link PIXELFORMAT_DXT5}
     * - {@link PIXELFORMAT_RGB16F}
     * - {@link PIXELFORMAT_RGBA16F}
     * - {@link PIXELFORMAT_RGB32F}
     * - {@link PIXELFORMAT_RGBA32F}
     * - {@link PIXELFORMAT_ETC1}
     * - {@link PIXELFORMAT_PVRTC_2BPP_RGB_1}
     * - {@link PIXELFORMAT_PVRTC_2BPP_RGBA_1}
     * - {@link PIXELFORMAT_PVRTC_4BPP_RGB_1}
     * - {@link PIXELFORMAT_PVRTC_4BPP_RGBA_1}
     * - {@link PIXELFORMAT_111110F}
     * - {@link PIXELFORMAT_ASTC_4x4}
     * - {@link PIXELFORMAT_ATC_RGB}
     * - {@link PIXELFORMAT_ATC_RGBA}
     *
     * Defaults to {@link PIXELFORMAT_RGBA8}.
     * @param {string} [options.projection] - The projection type of the texture, used when the
     * texture represents an environment. Can be:
     *
     * - {@link TEXTUREPROJECTION_NONE}
     * - {@link TEXTUREPROJECTION_CUBE}
     * - {@link TEXTUREPROJECTION_EQUIRECT}
     * - {@link TEXTUREPROJECTION_OCTAHEDRAL}
     *
     * Defaults to {@link TEXTUREPROJECTION_CUBE} if options.cubemap is true, otherwise
     * {@link TEXTUREPROJECTION_NONE}.
     * @param {number} [options.minFilter] - The minification filter type to use. Defaults to
     * {@link FILTER_LINEAR_MIPMAP_LINEAR}.
     * @param {number} [options.magFilter] - The magnification filter type to use. Defaults to
     * {@link FILTER_LINEAR}.
     * @param {number} [options.anisotropy] - The level of anisotropic filtering to use. Defaults
     * to 1.
     * @param {number} [options.addressU] - The repeat mode to use in the U direction. Defaults to
     * {@link ADDRESS_REPEAT}.
     * @param {number} [options.addressV] - The repeat mode to use in the V direction. Defaults to
     * {@link ADDRESS_REPEAT}.
     * @param {number} [options.addressW] - The repeat mode to use in the W direction. Defaults to
     * {@link ADDRESS_REPEAT}.
     * @param {boolean} [options.mipmaps] - When enabled try to generate or use mipmaps for this
     * texture. Default is true.
     * @param {number} [options.numLevels] - Specifies the number of mip levels to generate. If not
     * specified, the number is calculated based on the texture size. When this property is set,
     * the mipmaps property is ignored.
     * @param {boolean} [options.cubemap] - Specifies whether the texture is to be a cubemap.
     * Defaults to false.
     * @param {number} [options.arrayLength] - Specifies whether the texture is to be a 2D texture array.
     * When passed in as undefined or < 1, this is not an array texture. If >= 1, this is an array texture.
     * Defaults to undefined.
     * @param {boolean} [options.volume] - Specifies whether the texture is to be a 3D volume.
     * Defaults to false.
     * @param {string} [options.type] - Specifies the texture type.  Can be:
     *
     * - {@link TEXTURETYPE_DEFAULT}
     * - {@link TEXTURETYPE_RGBM}
     * - {@link TEXTURETYPE_RGBE}
     * - {@link TEXTURETYPE_RGBP}
     * - {@link TEXTURETYPE_SWIZZLEGGGR}
     *
     * Defaults to {@link TEXTURETYPE_DEFAULT}.
     * @param {boolean} [options.flipY] - Specifies whether the texture should be flipped in the
     * Y-direction. Only affects textures with a source that is an image, canvas or video element.
     * Does not affect cubemaps, compressed textures or textures set from raw pixel data. Defaults
     * to false.
     * @param {boolean} [options.premultiplyAlpha] - If true, the alpha channel of the texture (if
     * present) is multiplied into the color channels. Defaults to false.
     * @param {boolean} [options.compareOnRead] - When enabled, and if texture format is
     * {@link PIXELFORMAT_DEPTH} or {@link PIXELFORMAT_DEPTHSTENCIL}, hardware PCF is enabled for
     * this texture, and you can get filtered results of comparison using texture() in your shader.
     * Defaults to false.
     * @param {number} [options.compareFunc] - Comparison function when compareOnRead is enabled.
     * Can be:
     *
     * - {@link FUNC_LESS}
     * - {@link FUNC_LESSEQUAL}
     * - {@link FUNC_GREATER}
     * - {@link FUNC_GREATEREQUAL}
     * - {@link FUNC_EQUAL}
     * - {@link FUNC_NOTEQUAL}
     *
     * Defaults to {@link FUNC_LESS}.
     * @param {Uint8Array[]|Uint16Array[]|Uint32Array[]|Float32Array[]|HTMLCanvasElement[]|HTMLImageElement[]|HTMLVideoElement[]|Uint8Array[][]} [options.levels]
     * - Array of Uint8Array or other supported browser interface; or a two-dimensional array
     * of Uint8Array if options.arrayLength is defined and greater than zero.
     * @param {boolean} [options.storage] - Defines if texture can be used as a storage texture by
     * a compute shader. Defaults to false.
     * @example
     * // Create a 8x8x24-bit texture
     * const texture = new pc.Texture(graphicsDevice, {
     *     width: 8,
     *     height: 8,
     *     format: pc.PIXELFORMAT_RGB8
     * });
     *
     * // Fill the texture with a gradient
     * const pixels = texture.lock();
     * let count = 0;
     * for (let i = 0; i < 8; i++) {
     *     for (let j = 0; j < 8; j++) {
     *         pixels[count++] = i * 32;
     *         pixels[count++] = j * 32;
     *         pixels[count++] = 255;
     *     }
     * }
     * texture.unlock();
     */
    constructor(graphicsDevice: GraphicsDevice, options?: {
        name?: string;
        width?: number;
        height?: number;
        depth?: number;
        format?: number;
        projection?: string;
        minFilter?: number;
        magFilter?: number;
        anisotropy?: number;
        addressU?: number;
        addressV?: number;
        addressW?: number;
        mipmaps?: boolean;
        numLevels?: number;
        cubemap?: boolean;
        arrayLength?: number;
        volume?: boolean;
        type?: string;
        flipY?: boolean;
        premultiplyAlpha?: boolean;
        compareOnRead?: boolean;
        compareFunc?: number;
        levels?: Uint8Array[] | Uint16Array[] | Uint32Array[] | Float32Array[] | HTMLCanvasElement[] | HTMLImageElement[] | HTMLVideoElement[] | Uint8Array[][];
        storage?: boolean;
    });
    /**
     * The name of the texture.
     *
     * @type {string}
     */
    name: string;
    /** @ignore */
    _gpuSize: number;
    /** @protected */
    protected id: number;
    /** @protected */
    protected _invalid: boolean;
    /** @protected */
    protected _lockedLevel: number;
    /** @protected */
    protected _lockedMode: number;
    /**
     * A render version used to track the last time the texture properties requiring bind group
     * to be updated were changed.
     *
     * @type {number}
     * @ignore
     */
    renderVersionDirty: number;
    /** @protected */
    protected _storage: boolean;
    /** @protected */
    protected _numLevels: number;
    /** @protected */
    protected _numLevelsRequested: number;
    device: GraphicsDevice;
    _width: number;
    _height: number;
    _format: number;
    _compressed: boolean;
    _integerFormat: boolean;
    _volume: boolean;
    _depth: number;
    _arrayLength: number;
    _cubemap: boolean;
    _flipY: boolean;
    _premultiplyAlpha: boolean;
    _mipmaps: boolean;
    _minFilter: number;
    _magFilter: number;
    _anisotropy: number;
    _addressU: number;
    _addressV: number;
    _addressW: number;
    _compareOnRead: boolean;
    _compareFunc: number;
    _type: string;
    projection: string;
    profilerHint: any;
    _levels: Uint8Array<ArrayBufferLike>[] | Uint16Array<ArrayBufferLike>[] | Uint32Array<ArrayBufferLike>[] | Float32Array<ArrayBufferLike>[] | HTMLCanvasElement[] | HTMLImageElement[] | HTMLVideoElement[] | Uint8Array<ArrayBufferLike>[][];
    /**
     * Frees resources associated with this texture.
     */
    destroy(): void;
    recreateImpl(upload?: boolean): void;
    impl: any;
    _clearLevels(): void;
    /**
     * Resizes the texture. This operation is supported for render target textures, and it resizes
     * the allocated buffer used for rendering, not the existing content of the texture.
     *
     * It is also supported for textures with data provided via the {@link lock} method. After
     * resizing, the appropriately sized data must be assigned by calling {@link lock} again.
     *
     * @param {number} width - The new width of the texture.
     * @param {number} height - The new height of the texture.
     * @param {number} [depth] - The new depth of the texture. Defaults to 1.
     * @ignore
     */
    resize(width: number, height: number, depth?: number): void;
    /**
     * Called when the rendering context was lost. It releases all context related resources.
     *
     * @ignore
     */
    loseContext(): void;
    /**
     * Updates vram size tracking for the texture, size can be positive to add or negative to subtract
     *
     * @ignore
     */
    adjustVramSizeTracking(vram: any, size: any): void;
    propertyChanged(flag: any): void;
    _updateNumLevels(): void;
    /**
     * Returns the current lock mode. One of:
     *
     * - {@link TEXTURELOCK_NONE}
     * - {@link TEXTURELOCK_READ}
     * - {@link TEXTURELOCK_WRITE}
     *
     * @ignore
     * @type {number}
     */
    get lockedMode(): number;
    /**
     * Sets the minification filter to be applied to the texture. Can be:
     *
     * - {@link FILTER_NEAREST}
     * - {@link FILTER_LINEAR}
     * - {@link FILTER_NEAREST_MIPMAP_NEAREST}
     * - {@link FILTER_NEAREST_MIPMAP_LINEAR}
     * - {@link FILTER_LINEAR_MIPMAP_NEAREST}
     * - {@link FILTER_LINEAR_MIPMAP_LINEAR}
     *
     * @type {number}
     */
    set minFilter(v: number);
    /**
     * Gets the minification filter to be applied to the texture.
     *
     * @type {number}
     */
    get minFilter(): number;
    /**
     * Sets the magnification filter to be applied to the texture. Can be:
     *
     * - {@link FILTER_NEAREST}
     * - {@link FILTER_LINEAR}
     *
     * @type {number}
     */
    set magFilter(v: number);
    /**
     * Gets the magnification filter to be applied to the texture.
     *
     * @type {number}
     */
    get magFilter(): number;
    /**
     * Sets the addressing mode to be applied to the texture horizontally. Can be:
     *
     * - {@link ADDRESS_REPEAT}
     * - {@link ADDRESS_CLAMP_TO_EDGE}
     * - {@link ADDRESS_MIRRORED_REPEAT}
     *
     * @type {number}
     */
    set addressU(v: number);
    /**
     * Gets the addressing mode to be applied to the texture horizontally.
     *
     * @type {number}
     */
    get addressU(): number;
    /**
     * Sets the addressing mode to be applied to the texture vertically. Can be:
     *
     * - {@link ADDRESS_REPEAT}
     * - {@link ADDRESS_CLAMP_TO_EDGE}
     * - {@link ADDRESS_MIRRORED_REPEAT}
     *
     * @type {number}
     */
    set addressV(v: number);
    /**
     * Gets the addressing mode to be applied to the texture vertically.
     *
     * @type {number}
     */
    get addressV(): number;
    /**
     * Sets the addressing mode to be applied to the 3D texture depth. Can be:
     *
     * - {@link ADDRESS_REPEAT}
     * - {@link ADDRESS_CLAMP_TO_EDGE}
     * - {@link ADDRESS_MIRRORED_REPEAT}
     *
     * @type {number}
     */
    set addressW(addressW: number);
    /**
     * Gets the addressing mode to be applied to the 3D texture depth.
     *
     * @type {number}
     */
    get addressW(): number;
    /**
     * When enabled, and if texture format is {@link PIXELFORMAT_DEPTH} or
     * {@link PIXELFORMAT_DEPTHSTENCIL}, hardware PCF is enabled for this texture, and you can get
     * filtered results of comparison using texture() in your shader.
     *
     * @type {boolean}
     */
    set compareOnRead(v: boolean);
    /**
     * Gets whether you can get filtered results of comparison using texture() in your shader.
     *
     * @type {boolean}
     */
    get compareOnRead(): boolean;
    /**
     * Sets the comparison function when {@link compareOnRead} is enabled. Possible values:
     *
     * - {@link FUNC_LESS}
     * - {@link FUNC_LESSEQUAL}
     * - {@link FUNC_GREATER}
     * - {@link FUNC_GREATEREQUAL}
     * - {@link FUNC_EQUAL}
     * - {@link FUNC_NOTEQUAL}
     *
     * @type {number}
     */
    set compareFunc(v: number);
    /**
     * Gets the comparison function when {@link compareOnRead} is enabled.
     *
     * @type {number}
     */
    get compareFunc(): number;
    /**
     * Sets the integer value specifying the level of anisotropy to apply to the texture. The value
     * ranges from 1 (no anisotropic filtering) to the maximum anisotropy supported by the graphics
     * device (see {@link GraphicsDevice#maxAnisotropy}).
     *
     * @type {number}
     */
    set anisotropy(v: number);
    /**
     * Gets the integer value specifying the level of anisotropy to apply to the texture.
     *
     * @type {number}
     */
    get anisotropy(): number;
    /**
     * Sets whether the texture should generate/upload mipmaps.
     *
     * @type {boolean}
     */
    set mipmaps(v: boolean);
    /**
     * Gets whether the texture should generate/upload mipmaps.
     *
     * @type {boolean}
     */
    get mipmaps(): boolean;
    /**
     * Internal flag — presumably marks the mipmap data as needing (re)upload to the GPU.
     * NOTE(review): inferred from the name; confirm against the implementation.
     *
     * @ignore
     */
    _needsMipmapsUpload: boolean;
    /**
     * Gets the number of mip levels.
     *
     * @type {number}
     */
    get numLevels(): number;
    /**
     * Defines if texture can be used as a storage texture by a compute shader.
     *
     * @type {boolean}
     */
    get storage(): boolean;
    /**
     * The width of the texture in pixels.
     *
     * @type {number}
     */
    get width(): number;
    /**
     * The height of the texture in pixels.
     *
     * @type {number}
     */
    get height(): number;
    /**
     * The number of depth slices in a 3D texture.
     *
     * @type {number}
     */
    get depth(): number;
    /**
     * The pixel format of the texture. Can be:
     *
     * - {@link PIXELFORMAT_R8}
     * - {@link PIXELFORMAT_RG8}
     * - {@link PIXELFORMAT_RGB565}
     * - {@link PIXELFORMAT_RGBA5551}
     * - {@link PIXELFORMAT_RGBA4}
     * - {@link PIXELFORMAT_RGB8}
     * - {@link PIXELFORMAT_RGBA8}
     * - {@link PIXELFORMAT_DXT1}
     * - {@link PIXELFORMAT_DXT3}
     * - {@link PIXELFORMAT_DXT5}
     * - {@link PIXELFORMAT_RGB16F}
     * - {@link PIXELFORMAT_RGBA16F}
     * - {@link PIXELFORMAT_RGB32F}
     * - {@link PIXELFORMAT_RGBA32F}
     * - {@link PIXELFORMAT_ETC1}
     * - {@link PIXELFORMAT_PVRTC_2BPP_RGB_1}
     * - {@link PIXELFORMAT_PVRTC_2BPP_RGBA_1}
     * - {@link PIXELFORMAT_PVRTC_4BPP_RGB_1}
     * - {@link PIXELFORMAT_PVRTC_4BPP_RGBA_1}
     * - {@link PIXELFORMAT_111110F}
     * - {@link PIXELFORMAT_ASTC_4x4}
     * - {@link PIXELFORMAT_ATC_RGB}
     * - {@link PIXELFORMAT_ATC_RGBA}
     *
     * @type {number}
     */
    get format(): number;
    /**
     * Returns true if this texture is a cube map and false otherwise.
     *
     * @type {boolean}
     */
    get cubemap(): boolean;
    /**
     * The size of the texture in GPU memory. NOTE(review): units (presumably bytes) inferred
     * from the name — confirm against the implementation.
     *
     * @type {number}
     */
    get gpuSize(): number;
    /**
     * Returns true if this texture is a 2D texture array and false otherwise.
     *
     * @type {boolean}
     */
    get array(): boolean;
    /**
     * Returns the number of textures inside this texture if this is a 2D array texture or 0 otherwise.
     *
     * @type {number}
     */
    get arrayLength(): number;
    /**
     * Returns true if this texture is a 3D volume and false otherwise.
     *
     * @type {boolean}
     */
    get volume(): boolean;
    /**
     * Sets the texture type.
     *
     * @type {string}
     * @ignore
     */
    set type(value: string);
    /**
     * Gets the texture type.
     *
     * @type {string}
     * @ignore
     */
    get type(): string;
    /**
     * Sets the texture's internal format to an sRGB or linear equivalent of its current format.
     * When set to true, the texture is stored in sRGB format and automatically converted to linear
     * space when sampled. When set to false, the texture remains in a linear format. Changing this
     * property recreates the texture on the GPU, which is an expensive operation, so it is
     * preferable to create the texture with the correct format from the start. If the texture
     * format has no sRGB variant, this operation is ignored.
     * This is not a public API and is used by Editor only to update rendering when the sRGB
     * property is changed in the inspector. The higher cost is acceptable in this case.
     *
     * @type {boolean}
     * @ignore
     */
    set srgb(value: boolean);
    /**
     * Returns true if the texture is stored in an sRGB format, meaning it will be converted to
     * linear space when sampled. Returns false if the texture is stored in a linear format.
     *
     * @type {boolean}
     */
    get srgb(): boolean;
    /**
     * Sets whether the texture should be flipped in the Y-direction. Only affects textures
     * with a source that is an image, canvas or video element. Does not affect cubemaps,
     * compressed textures or textures set from raw pixel data. Defaults to true.
     *
     * @type {boolean}
     */
    set flipY(flipY: boolean);
    /**
     * Gets whether the texture should be flipped in the Y-direction.
     *
     * @type {boolean}
     */
    get flipY(): boolean;
    /**
     * Sets whether the texture's pixel data is treated as premultiplied by alpha.
     * NOTE(review): semantics inferred from the name — confirm against the implementation.
     *
     * @type {boolean}
     */
    set premultiplyAlpha(premultiplyAlpha: boolean);
    /**
     * Gets whether the texture's pixel data is treated as premultiplied by alpha.
     *
     * @type {boolean}
     */
    get premultiplyAlpha(): boolean;
    /**
     * Returns true if all dimensions of the texture are power of two, and false otherwise.
     *
     * @type {boolean}
     */
    get pot(): boolean;
    /**
     * The encoding of the texture. Possible values are given by the declared return type.
     * NOTE(review): how the value is derived is not visible here — see the implementation.
     *
     * @type {string}
     */
    get encoding(): "srgb" | "linear" | "rgbm" | "rgbe" | "rgbp";
    /**
     * Marks all texture state as dirty. NOTE(review): inferred from the name — presumably
     * forces re-upload of all mip levels; confirm against the implementation.
     *
     * @ignore
     */
    dirtyAll(): void;
    /**
     * Per-level upload flags (nested per face for cubemaps, judging by the declared type).
     * NOTE(review): inferred from the declared type — confirm against the implementation.
     *
     * @ignore
     */
    _levelsUpdated: boolean[] | boolean[][];
    /**
     * Internal flag tracking whether mipmaps have been uploaded. NOTE(review): inferred from
     * the name — confirm against the implementation.
     *
     * @ignore
     */
    _mipmapsUploaded: boolean;
    /**
     * Locks a miplevel of the texture, returning a typed array to be filled with pixel data.
     *
     * @param {object} [options] - Optional options object. Valid properties are as follows:
     * @param {number} [options.level] - The mip level to lock with 0 being the top level. Defaults
     * to 0.
     * @param {number} [options.face] - If the texture is a cubemap, this is the index of the face
     * to lock.
     * @param {number} [options.mode] - The lock mode. Can be:
     * - {@link TEXTURELOCK_READ}
     * - {@link TEXTURELOCK_WRITE}
     * Defaults to {@link TEXTURELOCK_WRITE}.
     * @returns {Uint8Array|Uint16Array|Uint32Array|Float32Array} A typed array containing the pixel data of
     * the locked mip level.
     */
    lock(options?: {
        level?: number;
        face?: number;
        mode?: number;
    }): Uint8Array | Uint16Array | Uint32Array | Float32Array;
    /**
     * Set the pixel data of the texture from a canvas, image, video, or HTML DOM element. If the
     * texture is a cubemap, the supplied source must be an array of 6 canvases, images or videos.
     *
     * Note: using an HTML element (e.g. `<div>`) as a source requires
     * {@link GraphicsDevice#supportsHtmlTextures} to be true.
     *
     * @param {HTMLCanvasElement|HTMLImageElement|HTMLVideoElement|HTMLElement|HTMLCanvasElement[]|HTMLImageElement[]|HTMLVideoElement[]|HTMLElement[]} source - A
     * canvas, image, video, or HTML element, or an array of 6 canvas, image, video, or HTML
     * elements.
     * @param {number} [mipLevel] - A non-negative integer specifying the image level of detail.
     * Defaults to 0, which represents the base image source. A level value of N, that is greater
     * than 0, represents the image source for the Nth mipmap reduction level.
     */
    setSource(source: HTMLCanvasElement | HTMLImageElement | HTMLVideoElement | HTMLElement | HTMLCanvasElement[] | HTMLImageElement[] | HTMLVideoElement[] | HTMLElement[], mipLevel?: number): void;
    /**
     * Get the pixel data of the texture. If this is a cubemap then an array of 6 images will be
     * returned otherwise a single image.
     *
     * @param {number} [mipLevel] - A non-negative integer specifying the image level of detail.
     * Defaults to 0, which represents the base image source. A level value of N, that is greater
     * than 0, represents the image source for the Nth mipmap reduction level.
     * @returns {HTMLImageElement} The source image of this texture. Can be null if source not
     * assigned for the specific image level.
     */
    getSource(mipLevel?: number): HTMLImageElement;
    /**
     * Unlocks the currently locked mip level and uploads it to VRAM.
     */
    unlock(): void;
    /**
     * Mark this texture as needing upload to the GPU.
     *
     * @ignore
     */
    markForUpload(): void;
    /**
     * Internal flag — true when the texture's pixel data must be uploaded to the GPU.
     * NOTE(review): inferred from the name and {@link markForUpload} — confirm against the
     * implementation.
     *
     * @ignore
     */
    _needsUpload: boolean;
    /**
     * Forces a reupload of the texture's pixel data to graphics memory. Ordinarily, this function
     * is called internally by {@link setSource} and {@link unlock}. However, it still needs to
     * be called explicitly in the case where an HTMLVideoElement is set as the source of the
     * texture. Normally, this is done once every frame before video textured geometry is
     * rendered.
     */
    upload(): void;
    /**
     * Downloads the texture's data from the graphics memory to the local memory.
     *
     * @param {number} x - The left edge of the rectangle.
     * @param {number} y - The top edge of the rectangle.
     * @param {number} width - The width of the rectangle.
     * @param {number} height - The height of the rectangle.
     * @param {object} [options] - Object for passing optional arguments.
     * @param {RenderTarget} [options.renderTarget] - The render target using the texture as a color
     * buffer. Provide as an optimization to avoid creating a new render target. Important especially
     * when this function is called with high frequency (per frame). Note that this is only utilized
     * on the WebGL platform, and ignored on WebGPU.
     * @param {number} [options.mipLevel] - The mip level to download. Defaults to 0.
     * @param {number} [options.face] - The face to download. Defaults to 0.
     * @param {Uint8Array|Uint16Array|Uint32Array|Float32Array} [options.data] - The data buffer to
     * write the pixel data to. If not provided, a new buffer will be created. The type of the buffer
     * must match the texture's format.
     * @param {boolean} [options.immediate] - If true, the read operation will be executed as soon as
     * possible. This has a performance impact, so it should be used only when necessary. Defaults
     * to false.
     * @returns {Promise<Uint8Array|Uint16Array|Uint32Array|Float32Array>} A promise that resolves
     * with the pixel data of the texture.
     */
    read(x: number, y: number, width: number, height: number, options?: {
        renderTarget?: RenderTarget;
        mipLevel?: number;
        face?: number;
        data?: Uint8Array | Uint16Array | Uint32Array | Float32Array;
        immediate?: boolean;
    }): Promise<Uint8Array | Uint16Array | Uint32Array | Float32Array>;
    /**
     * Upload texture data asynchronously to the GPU.
     *
     * @param {number} x - The left edge of the rectangle.
     * @param {number} y - The top edge of the rectangle.
     * @param {number} width - The width of the rectangle.
     * @param {number} height - The height of the rectangle.
     * @param {Uint8Array|Uint16Array|Uint32Array|Float32Array} data - The pixel data to upload. This should be a typed array.
     *
     * @returns {Promise<void>} A promise that resolves when the upload is complete.
     * @ignore
     */
    write(x: number, y: number, width: number, height: number, data: Uint8Array | Uint16Array | Uint32Array | Float32Array): Promise<void>;
    /**
     * Creates a TextureView for this texture, specifying a subset of mip levels and array layers.
     * TextureViews can be used with compute shaders to access specific portions of a texture.
     *
     * Note: TextureView is only supported on WebGPU. On WebGL, the full texture is always bound.
     *
     * @param {number} [baseMipLevel] - The first mip level accessible to the view. Defaults to 0.
     * @param {number} [mipLevelCount] - The number of mip levels accessible to the view. Defaults
     * to 1.
     * @param {number} [baseArrayLayer] - The first array layer accessible to the view. Defaults to
     * 0.
     * @param {number} [arrayLayerCount] - The number of array layers accessible to the view.
     * Defaults to 1.
     * @returns {TextureView} A new TextureView for this texture.
     * @example
     * // Create a view for mip level 1
     * const mip1View = texture.getView(1);
     *
     * // Use with compute shader
     * compute.setParameter('outputTexture', mip1View);
     */
    getView(baseMipLevel?: number, mipLevelCount?: number, baseArrayLayer?: number, arrayLayerCount?: number): TextureView;
}

/**
 * A render target is a rectangular rendering surface.
 *
 * @category Graphics
 */
declare class RenderTarget {
    /**
     * Creates a new RenderTarget instance. A color buffer or a depth buffer must be set.
     *
     * @param {object} [options] - Object for passing optional arguments.
     * @param {boolean} [options.autoResolve] - If samples > 1, enables or disables automatic MSAA
     * resolve after rendering to this RT (see {@link RenderTarget#resolve}). Defaults to true.
     * @param {Texture} [options.colorBuffer] - The texture that this render target will treat as a
     * rendering surface.
     * @param {Texture[]} [options.colorBuffers] - The textures that this render target will treat
     * as rendering surfaces. If this option is set, the colorBuffer option is ignored.
     * @param {boolean} [options.depth] - If set to true, depth buffer will be created. Defaults to
     * true. Ignored if depthBuffer is defined.
     * @param {Texture} [options.depthBuffer] - The texture that this render target will treat as a
     * depth/stencil surface (WebGL2 only). If set, the 'depth' and 'stencil' properties are
     * ignored. Texture must have {@link PIXELFORMAT_DEPTH} or {@link PIXELFORMAT_DEPTHSTENCIL}
     * format.
     * @param {number} [options.mipLevel] - If set to a number greater than 0, the render target
     * will render to the specified mip level of the color buffer. Defaults to 0.
     * @param {number} [options.face] - If the colorBuffer parameter is a cubemap, use this option
     * to specify the face of the cubemap to render to. Can be:
     *
     * - {@link CUBEFACE_POSX}
     * - {@link CUBEFACE_NEGX}
     * - {@link CUBEFACE_POSY}
     * - {@link CUBEFACE_NEGY}
     * - {@link CUBEFACE_POSZ}
     * - {@link CUBEFACE_NEGZ}
     *
     * Defaults to {@link CUBEFACE_POSX}.
     * @param {boolean} [options.flipY] - When set to true the image will be flipped in Y. Default
     * is false.
     * @param {string} [options.name] - The name of the render target.
     * @param {number} [options.samples] - Number of hardware anti-aliasing samples. Default is 1.
     * @param {boolean} [options.stencil] - If set to true, depth buffer will include stencil.
     * Defaults to false. Ignored if depthBuffer is defined or depth is false.
     * @example
     * // Create a 512x512x24-bit render target with a depth buffer
     * const colorBuffer = new pc.Texture(graphicsDevice, {
     *     width: 512,
     *     height: 512,
     *     format: pc.PIXELFORMAT_RGB8
     * });
     * const renderTarget = new pc.RenderTarget({
     *     colorBuffer: colorBuffer,
     *     depth: true
     * });
     *
     * // Set the render target on a camera component
     * camera.renderTarget = renderTarget;
     *
     * // Destroy render target at a later stage. Note that the color buffer needs
     * // to be destroyed separately.
     * renderTarget.colorBuffer.destroy();
     * renderTarget.destroy();
     * camera.renderTarget = null;
     */
    constructor(options?: {
        autoResolve?: boolean;
        colorBuffer?: Texture;
        colorBuffers?: Texture[];
        depth?: boolean;
        depthBuffer?: Texture;
        mipLevel?: number;
        face?: number;
        flipY?: boolean;
        name?: string;
        samples?: number;
        stencil?: boolean;
    });
    /**
     * The name of the render target.
     *
     * @type {string}
     */
    name: string;
    /**
     * @type {GraphicsDevice}
     * @private
     */
    private _device;
    /**
     * @type {Texture}
     * @private
     */
    private _colorBuffer;
    /**
     * @type {Texture[]}
     * @private
     */
    private _colorBuffers;
    /**
     * @type {Texture}
     * @private
     */
    private _depthBuffer;
    /**
     * @type {boolean}
     * @private
     */
    private _depth;
    /**
     * @type {boolean}
     * @private
     */
    private _stencil;
    /**
     * @type {number}
     * @private
     */
    private _samples;
    /** @type {boolean} */
    autoResolve: boolean;
    /**
     * @type {number}
     * @private
     */
    private _face;
    /**
     * @type {number}
     * @private
     */
    private _mipLevel;
    /**
     * True if the mipmaps should be automatically generated for the color buffer(s) if it contains
     * a mip chain.
     *
     * @type {boolean}
     * @private
     */
    private _mipmaps;
    /**
     * @type {number | undefined}
     * @private
     */
    private _width;
    /**
     * @type {number | undefined}
     * @private
     */
    private _height;
    /** @type {boolean} */
    flipY: boolean;
    /**
     * Unique identifier of the render target. NOTE(review): inferred from the name — confirm
     * against the implementation.
     *
     * @ignore
     */
    id: number;
    /**
     * Device-specific implementation object backing this render target (e.g. a WebGL or WebGPU
     * backend object). NOTE(review): inferred from the name — confirm against the implementation.
     *
     * @ignore
     */
    impl: any;
    /**
     * Frees resources associated with this render target.
     */
    destroy(): void;
    /**
     * Free device resources associated with this render target.
     *
     * @ignore
     */
    destroyFrameBuffers(): void;
    /**
     * Free textures associated with this render target.
     *
     * @ignore
     */
    destroyTextureBuffers(): void;
    /**
     * Resizes the render target to the specified width and height. Internally this resizes all the
     * assigned texture color and depth buffers.
     *
     * @param {number} width - The width of the render target in pixels.
     * @param {number} height - The height of the render target in pixels.
     */
    resize(width: number, height: number): void;
    /**
     * Validates the multiple-render-target (MRT) configuration. NOTE(review): inferred from the
     * name — confirm against the implementation.
     *
     * @ignore
     */
    validateMrt(): void;
    /**
     * Evaluates and stores the width and height of the render target based on the color/depth
     * buffers and mip level.
     *
     * @private
     */
    private evaluateDimensions;
    /**
     * Initializes the resources associated with this render target.
     *
     * @ignore
     */
    init(): void;
    /** @ignore */
    get initialized(): any;
    /** @ignore */
    get device(): GraphicsDevice;
    /**
     * Called when the device context was lost. It releases all context related resources.
     *
     * @ignore
     */
    loseContext(): void;
    /**
     * If samples > 1, resolves the anti-aliased render target (WebGL2 only). When you're rendering
     * to an anti-aliased render target, pixels aren't written directly to the readable texture.
     * Instead, they're first written to a MSAA buffer, where each sample for each pixel is stored
     * independently. In order to read the results, you first need to 'resolve' the buffer - to
     * average all samples and create a simple texture with one color per pixel. This function
     * performs this averaging and updates the colorBuffer and the depthBuffer. If autoResolve is
     * set to true, the resolve will happen after every rendering to this render target, otherwise
     * you can do it manually, during the app update or similar.
     *
     * @param {boolean} [color] - Resolve color buffer. Defaults to true.
     * @param {boolean} [depth] - Resolve depth buffer. Defaults to true if the render target has a
     * depth buffer.
     */
    resolve(color?: boolean, depth?: boolean): void;
    /**
     * Copies color and/or depth contents of source render target to this one. Formats, sizes and
     * anti-aliasing samples must match. Depth buffer can only be copied on WebGL 2.0.
     *
     * @param {RenderTarget} source - Source render target to copy from.
     * @param {boolean} [color] - If true, will copy the color buffer. Defaults to false.
     * @param {boolean} [depth] - If true, will copy the depth buffer. Defaults to false.
     * @returns {boolean} True if the copy was successful, false otherwise.
     */
    copy(source: RenderTarget, color?: boolean, depth?: boolean): boolean;
    /**
     * Number of antialiasing samples the render target uses.
     *
     * @type {number}
     */
    get samples(): number;
    /**
     * True if the render target contains the depth attachment.
     *
     * @type {boolean}
     */
    get depth(): boolean;
    /**
     * True if the render target contains the stencil attachment.
     *
     * @type {boolean}
     */
    get stencil(): boolean;
    /**
     * Color buffer set up on the render target.
     *
     * @type {Texture}
     */
    get colorBuffer(): Texture;
    /**
     * Accessor for multiple render target color buffers.
     *
     * @param {*} index - Index of the color buffer to get.
     * @returns {Texture} - Color buffer at the specified index.
     */
    getColorBuffer(index: any): Texture;
    /**
     * Depth buffer set up on the render target. Only available, if depthBuffer was set in
     * constructor. Not available if depth property was used instead.
     *
     * @type {Texture}
     */
    get depthBuffer(): Texture;
    /**
     * If the render target is bound to a cubemap, this property specifies which face of the
     * cubemap is rendered to. Can be:
     *
     * - {@link CUBEFACE_POSX}
     * - {@link CUBEFACE_NEGX}
     * - {@link CUBEFACE_POSY}
     * - {@link CUBEFACE_NEGY}
     * - {@link CUBEFACE_POSZ}
     * - {@link CUBEFACE_NEGZ}
     *
     * @type {number}
     */
    get face(): number;
    /**
     * Mip level of the render target.
     *
     * @type {number}
     */
    get mipLevel(): number;
    /**
     * True if the mipmaps are automatically generated for the color buffer(s) if it contains
     * a mip chain.
     *
     * @type {boolean}
     */
    get mipmaps(): boolean;
    /**
     * Width of the render target in pixels.
     *
     * @type {number}
     */
    get width(): number;
    /**
     * Height of the render target in pixels.
     *
     * @type {number}
     */
    get height(): number;
    /**
     * Gets whether the format of the specified color buffer is sRGB.
     *
     * @param {number} index - The index of the color buffer.
     * @returns {boolean} True if the color buffer is sRGB, false otherwise.
     * @ignore
     */
    isColorBufferSrgb(index?: number): boolean;
}

/**
 * A 2-dimensional vector. Vec2 is commonly used to represent 2D positions, directions, texture
 * coordinates (UVs) or any pair of related numeric values.
 *
 * @category Math
 */
declare class Vec2 {
    /**
     * Calculates the angle between two Vec2 instances in radians.
     *
     * @param {Vec2} lhs - The first vector operand for the calculation.
     * @param {Vec2} rhs - The second vector operand for the calculation.
     * @returns {number} The calculated angle in radians.
     * @ignore
     */
    static angleRad(lhs: Vec2, rhs: Vec2): number;
    /**
     * A constant vector set to [0, 0].
     *
     * @type {Vec2}
     * @readonly
     */
    static readonly ZERO: Vec2;
    /**
     * A constant vector set to [0.5, 0.5].
     *
     * @type {Vec2}
     * @readonly
     */
    static readonly HALF: Vec2;
    /**
     * A constant vector set to [1, 1].
     *
     * @type {Vec2}
     * @readonly
     */
    static readonly ONE: Vec2;
    /**
     * A constant vector set to [0, 1].
     *
     * @type {Vec2}
     * @readonly
     */
    static readonly UP: Vec2;
    /**
     * A constant vector set to [0, -1].
     *
     * @type {Vec2}
     * @readonly
     */
    static readonly DOWN: Vec2;
    /**
     * A constant vector set to [1, 0].
     *
     * @type {Vec2}
     * @readonly
     */
    static readonly RIGHT: Vec2;
    /**
     * A constant vector set to [-1, 0].
     *
     * @type {Vec2}
     * @readonly
     */
    static readonly LEFT: Vec2;
    /**
     * Creates a new Vec2 instance.
     *
     * @overload
     * @param {number} [x] - The x value. Defaults to 0.
     * @param {number} [y] - The y value. Defaults to 0.
     * @example
     * const v1 = new pc.Vec2(); // defaults to 0, 0
     * const v2 = new pc.Vec2(1, 2);
     */
    constructor(x?: number, y?: number);
    /**
     * Creates a new Vec2 instance.
     *
     * @overload
     * @param {number[]} arr - The array to set the vector values from.
     * @example
     * const v = new pc.Vec2([1, 2]);
     */
    constructor(arr: number[]);
    /**
     * The first component of the vector.
     *
     * @type {number}
     */
    x: number;
    /**
     * The second component of the vector.
     *
     * @type {number}
     */
    y: number;
    /**
     * Adds a 2-dimensional vector to another in place.
     *
     * @param {Vec2} rhs - The vector to add to the specified vector.
     * @returns {Vec2} Self for chaining.
     * @example
     * const a = new pc.Vec2(10, 10);
     * const b = new pc.Vec2(20, 20);
     *
     * a.add(b);
     *
     * // Outputs [30, 30]
     * console.log("The result of the addition is: " + a.toString());
     */
    add(rhs: Vec2): Vec2;
    /**
     * Adds two 2-dimensional vectors together and returns the result.
     *
     * @param {Vec2} lhs - The first vector operand for the addition.
     * @param {Vec2} rhs - The second vector operand for the addition.
     * @returns {Vec2} Self for chaining.
     * @example
     * const a = new pc.Vec2(10, 10);
     * const b = new pc.Vec2(20, 20);
     * const r = new pc.Vec2();
     *
     * r.add2(a, b);
     * // Outputs [30, 30]
     *
     * console.log("The result of the addition is: " + r.toString());
     */
    add2(lhs: Vec2, rhs: Vec2): Vec2;
    /**
     * Adds a number to each element of a vector.
     *
     * @param {number} scalar - The number to add.
     * @returns {Vec2} Self for chaining.
     * @example
     * const vec = new pc.Vec2(3, 4);
     *
     * vec.addScalar(2);
     *
     * // Outputs [5, 6]
     * console.log("The result of the addition is: " + vec.toString());
     */
    addScalar(scalar: number): Vec2;
    /**
     * Adds a 2-dimensional vector scaled by scalar value. Does not modify the vector being added.
     *
     * @param {Vec2} rhs - The vector to add to the specified vector.
     * @param {number} scalar - The number to multiply the added vector with.
     * @returns {Vec2} Self for chaining.
     * @example
     * const vec = new pc.Vec2(1, 2);
     *
     * vec.addScaled(pc.Vec2.UP, 2);
     *
     * // Outputs [1, 4]
     * console.log("The result of the addition is: " + vec.toString());
     */
    addScaled(rhs: Vec2, scalar: number): Vec2;
    /**
     * Returns an identical copy of the specified 2-dimensional vector.
     *
     * @returns {this} A 2-dimensional vector containing the result of the cloning.
     * @example
     * const v = new pc.Vec2(10, 20);
     * const vclone = v.clone();
     * console.log("The result of the cloning is: " + vclone.toString());
     */
    clone(): this;
    /**
     * Copies the contents of a source 2-dimensional vector to a destination 2-dimensional vector.
     *
     * @param {Vec2} rhs - A vector to copy to the specified vector.
     * @returns {Vec2} Self for chaining.
     * @example
     * const src = new pc.Vec2(10, 20);
     * const dst = new pc.Vec2();
     *
     * dst.copy(src);
     *
     * console.log("The two vectors are " + (dst.equals(src) ? "equal" : "different"));
     */
    copy(rhs: Vec2): Vec2;
    /**
     * Returns the result of a cross product operation performed on this vector and the specified
     * 2-dimensional vector.
     *
     * @param {Vec2} rhs - The second 2-dimensional vector operand of the cross product.
     * @returns {number} The cross product of the two vectors.
     * @example
     * const right = new pc.Vec2(1, 0);
     * const up = new pc.Vec2(0, 1);
     * const crossProduct = right.cross(up);
     *
     * // Prints 1
     * console.log("The result of the cross product is: " + crossProduct);
     */
    cross(rhs: Vec2): number;
    /**
     * Returns the distance between this vector and the specified 2-dimensional vector.
     *
     * @param {Vec2} rhs - The second 2-dimensional vector to test.
     * @returns {number} The distance between the two vectors.
     * @example
     * const v1 = new pc.Vec2(5, 10);
     * const v2 = new pc.Vec2(10, 20);
     * const d = v1.distance(v2);
     * console.log("The distance between v1 and v2 is: " + d);
     */
    distance(rhs: Vec2): number;
    /**
     * Divides a 2-dimensional vector by another in place.
     *
     * @param {Vec2} rhs - The vector to divide the specified vector by.
     * @returns {Vec2} Self for chaining.
     * @example
     * const a = new pc.Vec2(4, 9);
     * const b = new pc.Vec2(2, 3);
     *
     * a.div(b);
     *
     * // Outputs [2, 3]
     * console.log("The result of the division is: " + a.toString());
     */
    div(rhs: Vec2): Vec2;
    /**
     * Divides one 2-dimensional vector by another and writes the result to the specified vector.
     *
     * @param {Vec2} lhs - The dividend vector (the vector being divided).
     * @param {Vec2} rhs - The divisor vector (the vector dividing the dividend).
     * @returns {Vec2} Self for chaining.
     * @example
     * const a = new pc.Vec2(4, 9);
     * const b = new pc.Vec2(2, 3);
     * const r = new pc.Vec2();
     *
     * r.div2(a, b);
     *
     * // Outputs [2, 3]
     * console.log("The result of the division is: " + r.toString());
     */
    div2(lhs: Vec2, rhs: Vec2): Vec2;
    /**
     * Divides each element of a vector by a number.
     *
     * @param {number} scalar - The number to divide by.
     * @returns {Vec2} Self for chaining.
     * @example
     * const vec = new pc.Vec2(3, 6);
     *
     * vec.divScalar(3);
     *
     * // Outputs [1, 2]
     * console.log("The result of the division is: " + vec.toString());
     */
    divScalar(scalar: number): Vec2;
    /**
     * Returns the result of a dot product operation performed on this vector and the specified
     * 2-dimensional vector.
     *
     * @param {Vec2} rhs - The second 2-dimensional vector operand of the dot product.
     * @returns {number} The result of the dot product operation.
     * @example
     * const v1 = new pc.Vec2(5, 10);
     * const v2 = new pc.Vec2(10, 20);
     * const v1dotv2 = v1.dot(v2);
     * console.log("The result of the dot product is: " + v1dotv2);
     */
    dot(rhs: Vec2): number;
    /**
     * Reports whether two vectors are equal.
     *
     * @param {Vec2} rhs - The vector to compare to the specified vector.
     * @returns {boolean} True if the vectors are equal and false otherwise.
     * @example
     * const a = new pc.Vec2(1, 2);
     * const b = new pc.Vec2(4, 5);
     * console.log("The two vectors are " + (a.equals(b) ? "equal" : "different"));
     */
    equals(rhs: Vec2): boolean;
    /**
     * Reports whether two vectors are equal using an absolute error tolerance.
     *
     * @param {Vec2} rhs - The vector to be compared against.
     * @param {number} [epsilon] - The maximum difference between each component of the two
     * vectors. Defaults to 1e-6.
     * @returns {boolean} True if the vectors are equal and false otherwise.
     * @example
     * const a = new pc.Vec2();
     * const b = new pc.Vec2();
     * console.log("The two vectors are approximately " + (a.equalsApprox(b, 1e-9) ? "equal" : "different"));
     */
    equalsApprox(rhs: Vec2, epsilon?: number): boolean;
    /**
     * Returns the magnitude of the specified 2-dimensional vector.
     *
     * @returns {number} The magnitude of the specified 2-dimensional vector.
     * @example
     * const vec = new pc.Vec2(3, 4);
     * const len = vec.length();
     * // Outputs 5
     * console.log("The length of the vector is: " + len);
     */
    length(): number;
    /**
     * Returns the magnitude squared of the specified 2-dimensional vector.
     *
     * @returns {number} The magnitude squared of the specified 2-dimensional vector.
     * @example
     * const vec = new pc.Vec2(3, 4);
     * const len = vec.lengthSq();
     * // Outputs 25
     * console.log("The length squared of the vector is: " + len);
     */
    lengthSq(): number;
    /**
     * Returns the result of a linear interpolation between two specified 2-dimensional vectors.
     *
     * @param {Vec2} lhs - The 2-dimensional vector to interpolate from.
     * @param {Vec2} rhs - The 2-dimensional vector to interpolate to.
     * @param {number} alpha - The value controlling the point of interpolation. Between 0 and 1,
     * the linear interpolant will occur on a straight line between lhs and rhs. Outside of this
     * range, the linear interpolant will occur on a ray extrapolated from this line.
     * @returns {Vec2} Self for chaining.
     * @example
     * const a = new pc.Vec2(0, 0);
     * const b = new pc.Vec2(10, 10);
     * const r = new pc.Vec2();
     *
     * r.lerp(a, b, 0);   // r is equal to a
     * r.lerp(a, b, 0.5); // r is 5, 5
     * r.lerp(a, b, 1);   // r is equal to b
     */
    lerp(lhs: Vec2, rhs: Vec2, alpha: number): Vec2;
    /**
     * Multiplies a 2-dimensional vector to another in place.
     *
     * @param {Vec2} rhs - The 2-dimensional vector used as the second multiplicand of the operation.
     * @returns {Vec2} Self for chaining.
     * @example
     * const a = new pc.Vec2(2, 3);
     * const b = new pc.Vec2(4, 5);
     *
     * a.mul(b);
     *
     * // Outputs 8, 15
     * console.log("The result of the multiplication is: " + a.toString());
     */
    mul(rhs: Vec2): Vec2;
    /**
     * Returns the result of multiplying the specified 2-dimensional vectors together.
     *
     * @param {Vec2} lhs - The 2-dimensional vector used as the first multiplicand of the operation.
     * @param {Vec2} rhs - The 2-dimensional vector used as the second multiplicand of the operation.
     * @returns {Vec2} Self for chaining.
     * @example
     * const a = new pc.Vec2(2, 3);
     * const b = new pc.Vec2(4, 5);
     * const r = new pc.Vec2();
     *
     * r.mul2(a, b);
     *
     * // Outputs 8, 15
     * console.log("The result of the multiplication is: " + r.toString());
     */
    mul2(lhs: Vec2, rhs: Vec2): Vec2;
    /**
     * Multiplies each element of a vector by a number.
     *
     * @param {number} scalar - The number to multiply by.
     * @returns {Vec2} Self for chaining.
     * @example
     * const vec = new pc.Vec2(3, 6);
     *
     * vec.mulScalar(3);
     *
     * // Outputs [9, 18]
     * console.log("The result of the multiplication is: " + vec.toString());
     */
    mulScalar(scalar: number): Vec2;
    /**
     * Returns this 2-dimensional vector converted to a unit vector in place. If the vector has a
     * length of zero, the vector's elements will be set to zero.
     *
     * @param {Vec2} [src] - The vector to normalize. If not set, the operation is done in place.
     * @returns {Vec2} Self for chaining.
     * @example
     * const v = new pc.Vec2(25, 0);
     *
     * v.normalize();
     *
     * // Outputs 1, 0
     * console.log("The result of the vector normalization is: " + v.toString());
     */
    normalize(src?: Vec2): Vec2;
    /**
     * Rotate a vector by an angle in degrees.
     *
     * @param {number} degrees - The number of degrees to rotate the vector by.
     * @returns {Vec2} Self for chaining.
     * @example
     * const v = new pc.Vec2(0, 10);
     *
     * v.rotate(45); // rotates by 45 degrees
     *
     * // Outputs [7.071068.., 7.071068..]
     * console.log("Vector after rotation is: " + v.toString());
     */
    rotate(degrees: number): Vec2;
    /**
     * Returns the angle in degrees of the specified 2-dimensional vector.
     *
     * @returns {number} The angle in degrees of the specified 2-dimensional vector.
     * @example
     * const v = new pc.Vec2(6, 0);
     * const angle = v.angle();
     * // Outputs 90..
     * console.log("The angle of the vector is: " + angle);
     */
    angle(): number;
    /**
     * Returns the shortest Euler angle between two 2-dimensional vectors.
     *
     * @param {Vec2} rhs - The 2-dimensional vector to calculate angle to.
     * @returns {number} The shortest angle in degrees between two 2-dimensional vectors.
     * @example
     * const a = new pc.Vec2(0, 10); // up
     * const b = new pc.Vec2(1, -1); // down-right
     * const angle = a.angleTo(b);
     * // Outputs 135..
     * console.log("The angle between vectors a and b: " + angle);
     */
    angleTo(rhs: Vec2): number;
    /**
     * Each element is set to the largest integer less than or equal to its value.
     *
     * @param {Vec2} [src] - The vector to floor. If not set, the operation is done in place.
     * @returns {Vec2} Self for chaining.
     * @example
     * const v = new pc.Vec2(1.2, 3.9);
     * v.floor();
     * // v is now [1, 3]
     */
    floor(src?: Vec2): Vec2;
    /**
     * Each element is rounded up to the next largest integer.
     *
     * @param {Vec2} [src] - The vector to ceil. If not set, the operation is done in place.
     * @returns {Vec2} Self for chaining.
     * @example
     * const v = new pc.Vec2(1.2, 3.1);
     * v.ceil();
     * // v is now [2, 4]
     */
    ceil(src?: Vec2): Vec2;
    /**
     * Each element is rounded up or down to the nearest integer.
     *
     * @param {Vec2} [src] - The vector to round. If not set, the operation is done in place.
     * @returns {Vec2} Self for chaining.
     * @example
     * const v = new pc.Vec2(1.4, 3.6);
     * v.round();
     * // v is now [1, 4]
     */
    round(src?: Vec2): Vec2;
    /**
     * Each element is assigned a value from rhs parameter if it is smaller.
     *
     * @param {Vec2} rhs - The 2-dimensional vector used as the source of elements to compare to.
     * @returns {Vec2} Self for chaining.
     * @example
     * const a = new pc.Vec2(5, 1);
     * const b = new pc.Vec2(2, 8);
     * a.min(b);
     * // a is now [2, 1]
     */
    min(rhs: Vec2): Vec2;
    /**
     * Each element is assigned a value from rhs parameter if it is larger.
     *
     * @param {Vec2} rhs - The 2-dimensional vector used as the source of elements to compare to.
     * @returns {Vec2} Self for chaining.
     * @example
     * const a = new pc.Vec2(5, 1);
     * const b = new pc.Vec2(2, 8);
     * a.max(b);
     * // a is now [5, 8]
     */
    max(rhs: Vec2): Vec2;
    /**
     * Sets the specified 2-dimensional vector to the supplied numerical values.
     *
     * @param {number} x - The value to set on the first component of the vector.
     * @param {number} y - The value to set on the second component of the vector.
     * @returns {Vec2} Self for chaining.
     * @example
     * const v = new pc.Vec2();
     * v.set(5, 10);
     *
     * // Outputs 5, 10
     * console.log("The result of the vector set is: " + v.toString());
     */
    set(x: number, y: number): Vec2;
    /**
     * Subtracts a 2-dimensional vector from another in place.
     *
     * @param {Vec2} rhs - The vector to subtract from the specified vector.
     * @returns {Vec2} Self for chaining.
     * @example
     * const a = new pc.Vec2(10, 10);
     * const b = new pc.Vec2(20, 20);
     *
     * a.sub(b);
     *
     * // Outputs [-10, -10]
     * console.log("The result of the subtraction is: " + a.toString());
     */
    sub(rhs: Vec2): Vec2;
    /**
     * Subtracts two 2-dimensional vectors from one another and returns the result.
     *
     * @param {Vec2} lhs - The first vector operand for the subtraction.
     * @param {Vec2} rhs - The second vector operand for the subtraction.
     * @returns {Vec2} Self for chaining.
     * @example
     * const a = new pc.Vec2(10, 10);
     * const b = new pc.Vec2(20, 20);
     * const r = new pc.Vec2();
     *
     * r.sub2(a, b);
     *
     * // Outputs [-10, -10]
     * console.log("The result of the subtraction is: " + r.toString());
     */
    sub2(lhs: Vec2, rhs: Vec2): Vec2;
    /**
     * Subtracts a number from each element of a vector.
     *
     * @param {number} scalar - The number to subtract.
     * @returns {Vec2} Self for chaining.
     * @example
     * const vec = new pc.Vec2(3, 4);
     *
     * vec.subScalar(2);
     *
     * // Outputs [1, 2]
     * console.log("The result of the subtraction is: " + vec.toString());
     */
    subScalar(scalar: number): Vec2;
    /**
     * Set the values of the vector from an array.
     *
     * @param {number[]|ArrayBufferView} arr - The array to set the vector values from.
     * @param {number} [offset] - The zero-based index at which to start copying elements from the
     * array. Default is 0.
     * @returns {Vec2} Self for chaining.
     * @example
     * const v = new pc.Vec2();
     * v.fromArray([20, 10]);
     * // v is set to [20, 10]
     */
    fromArray(arr: number[] | ArrayBufferView, offset?: number): Vec2;
    /**
     * Converts the vector to string form.
     *
     * @returns {string} The vector in string form.
     * @example
     * const v = new pc.Vec2(20, 10);
     * // Outputs [20, 10]
     * console.log(v.toString());
     */
    toString(): string;
    /**
     * @overload
     * @param {number[]} [arr] - The array to populate with the vector's number
     * components. If not specified, a new array is created.
     * @param {number} [offset] - The zero-based index at which to start copying elements to the
     * array. Default is 0.
     * @returns {number[]} The vector as an array.
     */
    toArray(arr?: number[], offset?: number): number[];
    /**
     * @overload
     * @param {ArrayBufferView} arr - The typed array to populate with the vector's number
     * components. Unlike the number[] overload, this argument is required.
     * @param {number} [offset] - The zero-based index at which to start copying elements to the
     * array. Default is 0.
     * @returns {ArrayBufferView} The vector as an array.
     */
    toArray(arr: ArrayBufferView, offset?: number): ArrayBufferView;
}

/**
 * The scope for variables.
 *
 * @category Graphics
 */
declare class ScopeSpace {
    /**
     * Create a new ScopeSpace instance.
     *
     * @param {string} name - The scope name.
     */
    constructor(name: string);
    /**
     * The scope name.
     *
     * @type {string}
     */
    name: string;
    /**
     * The variables stored in this scope. Presumably keyed by variable name with {@link ScopeId}
     * values (cf. {@link ScopeSpace#resolve}), though declared loosely as Map<any, any> — confirm
     * against the implementation.
     *
     * @ignore
     */
    variables: Map<any, any>;
    /**
     * Get (or create, if it doesn't already exist) a variable in the scope.
     *
     * @param {string} name - The variable name.
     * @returns {ScopeId} The variable instance.
     */
    resolve(name: string): ScopeId;
    /**
     * Clears value for any uniform with matching value (used to remove deleted textures).
     *
     * @param {*} value - The value to clear.
     * @ignore
     */
    removeValue(value: any): void;
}

/**
 * A class storing description of an individual uniform, stored inside a uniform buffer.
 *
 * @category Graphics
 */
declare class UniformFormat {
    /**
     * Create a new UniformFormat instance.
     *
     * @param {string} name - The name of the uniform.
     * @param {number} type - The type of the uniform. One of the UNIFORMTYPE_*** constants.
     * @param {number} count - The number of elements in the array. Defaults to 0, which represents
     * a single element (not an array).
     */
    constructor(name: string, type: number, count?: number);
    /**
     * @type {string}
     * @ignore
     */
    name: string;
    /**
     * @type {number}
     * @ignore
     */
    type: number;
    /**
     * @type {number}
     * @ignore
     */
    byteSize: number;
    /**
     * Index of the uniform in an array of 32bit values (Float32Array and similar)
     *
     * @type {number}
     * @ignore
     */
    offset: number;
    /**
     * @type {ScopeId}
     * @ignore
     */
    scopeId: ScopeId;
    /**
     * Count of elements for arrays, otherwise 0.
     *
     * @type {number}
     * @ignore
     */
    count: number;
    /**
     * Number of components in each element (e.g. vec2 has 2 components, mat4 has 16 components)
     *
     * @type {number}
     * @ignore
     */
    numComponents: number;
    /**
     * True if this is an array of elements (i.e. count > 0)
     *
     * @type {boolean}
     */
    get isArrayType(): boolean;
    /**
     * NOTE(review): presumably a shortened form of {@link UniformFormat#name} — confirm against
     * the implementation.
     *
     * @type {string}
     * @ignore
     */
    shortName: string;
    /**
     * @type {number}
     * @ignore
     */
    updateType: number;
    /**
     * NOTE(review): presumably true when the uniform failed validation — confirm exact semantics
     * in the implementation.
     *
     * @type {boolean}
     * @ignore
     */
    invalid: boolean;
    /**
     * Calculates this uniform's offset within the buffer, given a base offset. Implementation not
     * visible here — presumably applies layout alignment rules; confirm.
     *
     * @param {*} offset - The base offset to calculate from.
     * @ignore
     */
    calculateOffset(offset: any): void;
}
/**
 * A descriptor that defines the layout of data inside the uniform buffer.
 *
 * @category Graphics
 */
declare class UniformBufferFormat {
    /**
     * Create a new UniformBufferFormat instance.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device.
     * @param {UniformFormat[]} uniforms - An array of uniforms to be stored in the buffer.
     */
    constructor(graphicsDevice: GraphicsDevice, uniforms: UniformFormat[]);
    /**
     * @type {number}
     * @ignore
     */
    byteSize: number;
    /**
     * @type {Map<string,UniformFormat>}
     * @ignore
     */
    map: Map<string, UniformFormat>;
    /**
     * @type {ScopeSpace}
     * @ignore
     */
    scope: ScopeSpace;
    /** @type {UniformFormat[]} */
    uniforms: UniformFormat[];
    /**
     * Returns format of a uniform with specified name. Returns undefined if the uniform is not found.
     *
     * @param {string} name - The name of the uniform.
     * @returns {UniformFormat|undefined} - The format of the uniform.
     */
    get(name: string): UniformFormat | undefined;
}

/**
 * A class to describe the format of the uniform buffer for {@link BindGroupFormat}.
 *
 * @category Graphics
 */
declare class BindUniformBufferFormat extends BindBaseFormat {
    // No members beyond those inherited from BindBaseFormat (name, visibility, slot, scopeId).
}
/**
 * A class to describe the format of the texture for {@link BindGroupFormat}.
 *
 * @category Graphics
 */
declare class BindTextureFormat extends BindBaseFormat {
    /**
     * Create a new instance.
     *
     * @param {string} name - The name of the texture.
     * @param {number} visibility - A bit-flag that specifies the shader stages in which the
     * texture is visible. Can be:
     *
     * - {@link SHADERSTAGE_VERTEX}
     * - {@link SHADERSTAGE_FRAGMENT}
     * - {@link SHADERSTAGE_COMPUTE}
     *
     * @param {string} [textureDimension] - The dimension of the texture. Defaults to
     * {@link TEXTUREDIMENSION_2D}. Can be:
     *
     * - {@link TEXTUREDIMENSION_1D}
     * - {@link TEXTUREDIMENSION_2D}
     * - {@link TEXTUREDIMENSION_2D_ARRAY}
     * - {@link TEXTUREDIMENSION_CUBE}
     * - {@link TEXTUREDIMENSION_CUBE_ARRAY}
     * - {@link TEXTUREDIMENSION_3D}
     *
     * @param {number} [sampleType] - The type of the texture samples. Defaults to
     * {@link SAMPLETYPE_FLOAT}. Can be:
     *
     * - {@link SAMPLETYPE_FLOAT}
     * - {@link SAMPLETYPE_UNFILTERABLE_FLOAT}
     * - {@link SAMPLETYPE_DEPTH}
     * - {@link SAMPLETYPE_INT}
     * - {@link SAMPLETYPE_UINT}
     *
     * @param {boolean} [hasSampler] - True if the sampler for the texture is needed. Note that if the
     * sampler is used, it will take up an additional slot, directly following the texture slot.
     * Defaults to true.
     * @param {string|null} [samplerName] - Optional name of the sampler. Defaults to null.
     */
    constructor(name: string, visibility: number, textureDimension?: string, sampleType?: number, hasSampler?: boolean, samplerName?: string | null);
    // The dimension of the texture (one of the TEXTUREDIMENSION_* constants).
    textureDimension: string;
    // The type of the texture samples (one of the SAMPLETYPE_* constants).
    sampleType: number;
    // True if a sampler slot directly follows the texture slot.
    hasSampler: boolean;
    // NOTE(review): the constructor accepts string|null for samplerName, but the field is declared
    // string — it may hold null at runtime; confirm.
    samplerName: string;
}
/**
 * BindGroupFormat is a data structure that defines the layout of resources (buffers, textures,
 * samplers) used by rendering or compute shaders. It describes the binding points for each
 * resource type, and the visibility of these resources in the shader stages.
 * Currently this class is only used on WebGPU platform to specify the input and output resources
 * for vertex, fragment and compute shaders written in {@link SHADERLANGUAGE_WGSL} language.
 *
 * @category Graphics
 */
declare class BindGroupFormat {
    /**
     * Create a new instance.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device used to manage this vertex format.
     * @param {(BindTextureFormat|BindStorageTextureFormat|BindUniformBufferFormat|BindStorageBufferFormat)[]} formats -
     * An array of bind formats. Note that each entry in the array uses up one slot. The exception
     * is a texture format that has a sampler, which uses up two slots. The slots are allocated
     * sequentially, starting from 0.
     */
    constructor(graphicsDevice: GraphicsDevice, formats: (BindTextureFormat | BindStorageTextureFormat | BindUniformBufferFormat | BindStorageBufferFormat)[]);
    /**
     * @type {BindUniformBufferFormat[]}
     * @private
     */
    private uniformBufferFormats;
    /**
     * @type {BindTextureFormat[]}
     * @private
     */
    private textureFormats;
    /**
     * @type {BindStorageTextureFormat[]}
     * @private
     */
    private storageTextureFormats;
    /**
     * @type {BindStorageBufferFormat[]}
     * @private
     */
    private storageBufferFormats;
    // Numeric id of this format.
    id: number;
    /** @type {GraphicsDevice} */
    device: GraphicsDevice;
    /** @type {Map<string, number>} */
    bufferFormatsMap: Map<string, number>;
    /** @type {Map<string, number>} */
    textureFormatsMap: Map<string, number>;
    /** @type {Map<string, number>} */
    storageTextureFormatsMap: Map<string, number>;
    /** @type {Map<string, number>} */
    storageBufferFormatsMap: Map<string, number>;
    // NOTE(review): presumably the device-specific (WebGPU) implementation object — confirm.
    impl: any;
    /**
     * Frees resources associated with this bind group format.
     */
    destroy(): void;
    /**
     * Returns format of texture with specified name.
     *
     * @param {string} name - The name of the texture slot.
     * @returns {BindTextureFormat|null} - The format.
     * @ignore
     */
    getTexture(name: string): BindTextureFormat | null;
    /**
     * Returns format of storage texture with specified name.
     *
     * @param {string} name - The name of the texture slot.
     * @returns {BindStorageTextureFormat|null} - The format.
     * @ignore
     */
    getStorageTexture(name: string): BindStorageTextureFormat | null;
    loseContext(): void;
}
/**
 * A class to describe the format of the storage texture for {@link BindGroupFormat}. Storage
 * texture is a texture created with the storage flag set to true, which allows it to be used as an
 * output of a compute shader.
 *
 * Note: At the current time, storage textures are only supported in compute shaders in a
 * write-only mode.
 *
 * @category Graphics
 */
declare class BindStorageTextureFormat extends BindBaseFormat {
    /**
     * Create a new instance.
     *
     * @param {string} name - The name of the storage texture.
     * @param {number} [format] - The pixel format of the texture. Note that not all formats can be
     * used. Defaults to {@link PIXELFORMAT_RGBA8}.
     * @param {string} [textureDimension] - The dimension of the texture. Defaults to
     * {@link TEXTUREDIMENSION_2D}. Can be:
     *
     * - {@link TEXTUREDIMENSION_1D}
     * - {@link TEXTUREDIMENSION_2D}
     * - {@link TEXTUREDIMENSION_2D_ARRAY}
     * - {@link TEXTUREDIMENSION_3D}
     *
     * @param {boolean} [write] - Whether the storage texture is writeable. Defaults to true.
     * @param {boolean} [read] - Whether the storage texture is readable. Defaults to false. Note
     * that storage texture reads are only supported if
     * {@link GraphicsDevice#supportsStorageTextureRead} is true. Also note that only a subset of
     * pixel formats can be used for storage texture reads - as an example, PIXELFORMAT_RGBA8 is not
     * compatible, but PIXELFORMAT_R32U is.
     */
    constructor(name: string, format?: number, textureDimension?: string, write?: boolean, read?: boolean);
    // The pixel format of the texture (one of the PIXELFORMAT_* constants).
    format: number;
    // The dimension of the texture (one of the TEXTUREDIMENSION_* constants).
    textureDimension: string;
    // True if the storage texture is writeable.
    write: boolean;
    // True if the storage texture is readable.
    read: boolean;
}
/**
 * A class to describe the format of the storage buffer for {@link BindGroupFormat}.
 *
 * @category Graphics
 */
declare class BindStorageBufferFormat extends BindBaseFormat {
    /**
     * Create a new instance.
     *
     * @param {string} name - The name of the storage buffer.
     * @param {number} visibility - A bit-flag that specifies the shader stages in which the storage
     * buffer is visible. Can be:
     *
     * - {@link SHADERSTAGE_VERTEX}
     * - {@link SHADERSTAGE_FRAGMENT}
     * - {@link SHADERSTAGE_COMPUTE}
     *
     * @param {boolean} [readOnly] - Whether the storage buffer is read-only, or read-write. Defaults
     * to false. This has to be true for the storage buffer used in the vertex shader.
     */
    constructor(name: string, visibility: number, readOnly?: boolean);
    /**
     * Format, extracted from vertex and fragment shader.
     *
     * @type {string}
     * @ignore
     */
    format: string;
    // True if the storage buffer is read-only; must be true when used in the vertex shader.
    readOnly: boolean;
}
/**
 * A base class to describe the format of the resource for {@link BindGroupFormat}.
 *
 * @category Graphics
 */
declare class BindBaseFormat {
    /**
     * Create a new instance.
     *
     * @param {string} name - The name of the resource.
     * @param {number} visibility - A bit-flag that specifies the shader stages in which the resource
     * is visible. Can be:
     *
     * - {@link SHADERSTAGE_VERTEX}
     * - {@link SHADERSTAGE_FRAGMENT}
     * - {@link SHADERSTAGE_COMPUTE}
     */
    constructor(name: string, visibility: number);
    /**
     * @type {number}
     * @ignore
     */
    slot: number;
    /**
     * @type {ScopeId|null}
     * @ignore
     */
    scopeId: ScopeId | null;
    /** @type {string} */
    name: string;
    // Bit-flag of SHADERSTAGE_* values specifying the shader stages in which the resource is visible.
    visibility: number;
}

/**
 * A shader is a program that is responsible for rendering graphical primitives on a device's
 * graphics processor. The shader is generated from a shader definition. This shader definition
 * specifies the code for processing vertices and fragments processed by the GPU. The language of
 * the code is GLSL (or more specifically ESSL, the OpenGL ES Shading Language). The shader
 * definition also describes how the PlayCanvas engine should map vertex buffer elements onto the
 * attributes specified in the vertex shader code.
 *
 * @category Graphics
 */
declare class Shader {
    /**
     * Creates a new Shader instance.
     *
     * Consider {@link ShaderUtils#createShader} as a simpler and more powerful way to create
     * a shader.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device used to manage this shader.
     * @param {object} definition - The shader definition from which to build the shader.
     * @param {string} [definition.name] - The name of the shader.
     * @param {Object<string, string>} [definition.attributes] - Object detailing the mapping of
     * vertex shader attribute names to semantics SEMANTIC_*. This enables the engine to match
     * vertex buffer data as inputs to the shader. When not specified, rendering without vertex
     * buffer is assumed.
     * @param {string[]} [definition.feedbackVaryings] - A list of shader output variable
     * names that will be captured when using transform feedback. This setting is only effective
     * if the useTransformFeedback property is enabled.
     * @param {string} [definition.vshader] - Vertex shader source (GLSL code). Optional when
     * compute shader is specified.
     * @param {string} [definition.fshader] - Fragment shader source (GLSL code). Optional when
     * useTransformFeedback or compute shader is specified.
     * @param {string} [definition.cshader] - Compute shader source (WGSL code). Only supported on
     * WebGPU platform.
     * @param {string} [definition.computeEntryPoint] - The entry point function name for the compute
     * shader. Defaults to 'main'.
     * @param {Map<string, string>} [definition.vincludes] - A map containing key-value pairs of
     * include names and their content. These are used for resolving #include directives in the
     * vertex shader source.
     * @param {Map<string, string>} [definition.fincludes] - A map containing key-value pairs
     * of include names and their content. These are used for resolving #include directives in the
     * fragment shader source.
     * @param {Map<string, string>} [definition.cincludes] - A map containing key-value pairs
     * of include names and their content. These are used for resolving #include directives in the
     * compute shader source.
     * @param {Map<string, string>} [definition.cdefines] - A map containing key-value pairs of
     * define names and their values. These are used for resolving defines in the compute shader.
     * @param {boolean} [definition.useTransformFeedback] - Specifies that this shader outputs
     * post-VS data to a buffer.
     * @param {string | string[]} [definition.fragmentOutputTypes] - Fragment shader output types,
     * which default to vec4. Passing a string will set the output type for all color attachments.
     * Passing an array will set the output type for each color attachment.
     * @param {string} [definition.shaderLanguage] - Specifies the shader language of vertex and
     * fragment shaders. Defaults to {@link SHADERLANGUAGE_GLSL}.
     * @example
     * // Create a shader that renders primitives with a solid red color
     *
     * // Vertex shader
     * const vshader = `
     * attribute vec3 aPosition;
     *
     * void main(void) {
     *     gl_Position = vec4(aPosition, 1.0);
     * }
     * `;
     *
     * // Fragment shader
     * const fshader = `
     * precision ${graphicsDevice.precision} float;
     *
     * void main(void) {
     *     gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
     * }
     * `;
     *
     * const shaderDefinition = {
     *     attributes: {
     *         aPosition: pc.SEMANTIC_POSITION
     *     },
     *     vshader,
     *     fshader
     * };
     *
     * const shader = new pc.Shader(graphicsDevice, shaderDefinition);
     */
    constructor(graphicsDevice: GraphicsDevice, definition: {
        name?: string;
        attributes?: {
            [x: string]: string;
        };
        feedbackVaryings?: string[];
        vshader?: string;
        fshader?: string;
        cshader?: string;
        computeEntryPoint?: string;
        vincludes?: Map<string, string>;
        fincludes?: Map<string, string>;
        cincludes?: Map<string, string>;
        cdefines?: Map<string, string>;
        useTransformFeedback?: boolean;
        fragmentOutputTypes?: string | string[];
        shaderLanguage?: string;
    });
    /**
     * Format of the uniform buffer for mesh bind group.
     *
     * @type {UniformBufferFormat}
     * @ignore
     */
    meshUniformBufferFormat: UniformBufferFormat;
    /**
     * Format of the bind group for the mesh bind group.
     *
     * @type {BindGroupFormat}
     * @ignore
     */
    meshBindGroupFormat: BindGroupFormat;
    /**
     * The attributes that this shader code uses. The location is the key, the value is the name.
     * These attributes are queried / extracted from the final shader.
     *
     * @type {Map<number, string>}
     * @ignore
     */
    attributes: Map<number, string>;
    // Numeric id of this shader.
    id: number;
    // The graphics device used to manage this shader.
    device: GraphicsDevice;
    // The shader definition this shader was created from (same shape as the constructor argument).
    definition: {
        name?: string;
        attributes?: {
            [x: string]: string;
        };
        feedbackVaryings?: string[];
        vshader?: string;
        fshader?: string;
        cshader?: string;
        computeEntryPoint?: string;
        vincludes?: Map<string, string>;
        fincludes?: Map<string, string>;
        cincludes?: Map<string, string>;
        cdefines?: Map<string, string>;
        useTransformFeedback?: boolean;
        fragmentOutputTypes?: string | string[];
        shaderLanguage?: string;
    };
    // The name of the shader — presumably taken from definition.name; confirm default behavior.
    name: string;
    // NOTE(review): c/v/fUnmodified presumably hold the compute/vertex/fragment sources as
    // originally supplied, before include/define processing — confirm against the implementation.
    cUnmodified: string;
    vUnmodified: string;
    fUnmodified: string;
    // NOTE(review): presumably true when shader creation/compilation failed — confirm.
    failed: boolean;
    // NOTE(review): presumably the device-specific (WebGL/WebGPU) implementation object — confirm.
    impl: any;
    /**
     * Initialize a shader back to its default state.
     *
     * @private
     */
    private init;
    // NOTE(review): presumably true once the shader is ready for use — confirm exact semantics.
    ready: boolean;
    /** @ignore */
    get label(): string;
    /**
     * Frees resources associated with this shader.
     */
    destroy(): void;
    /**
     * Called when the WebGL context was lost. It releases all context related resources.
     *
     * @ignore
     */
    loseContext(): void;
    /** @ignore */
    restoreContext(): void;
}

/**
 * A vertex format is a descriptor that defines the layout of vertex data inside a
 * {@link VertexBuffer}.
 *
 * @property {object[]} elements The vertex attribute elements.
 * @property {string} elements[].name The meaning of the vertex element. This is used to link the
 * vertex data to a shader input. Can be:
 *
 * - {@link SEMANTIC_POSITION}
 * - {@link SEMANTIC_NORMAL}
 * - {@link SEMANTIC_TANGENT}
 * - {@link SEMANTIC_BLENDWEIGHT}
 * - {@link SEMANTIC_BLENDINDICES}
 * - {@link SEMANTIC_COLOR}
 * - {@link SEMANTIC_TEXCOORD0}
 * - {@link SEMANTIC_TEXCOORD1}
 * - {@link SEMANTIC_TEXCOORD2}
 * - {@link SEMANTIC_TEXCOORD3}
 * - {@link SEMANTIC_TEXCOORD4}
 * - {@link SEMANTIC_TEXCOORD5}
 * - {@link SEMANTIC_TEXCOORD6}
 * - {@link SEMANTIC_TEXCOORD7}
 *
 * If vertex data has a meaning other than one of those listed above, use the user-defined
 * semantics: {@link SEMANTIC_ATTR0} to {@link SEMANTIC_ATTR15}.
 * @property {number} elements[].numComponents The number of components of the vertex attribute.
 * Can be 1, 2, 3 or 4.
 * @property {number} elements[].dataType The data type of the attribute. Can be:
 *
 * - {@link TYPE_INT8}
 * - {@link TYPE_UINT8}
 * - {@link TYPE_INT16}
 * - {@link TYPE_UINT16}
 * - {@link TYPE_INT32}
 * - {@link TYPE_UINT32}
 * - {@link TYPE_FLOAT32}
 * - {@link TYPE_FLOAT16}
 * @property {boolean} elements[].normalize If true, vertex attribute data will be mapped from a 0
 * to 255 range down to 0 to 1 when fed to a shader. If false, vertex attribute data is left
 * unchanged. If this property is unspecified, false is assumed.
 * @property {number} elements[].offset The number of initial bytes at the start of a vertex that
 * are not relevant to this attribute.
 * @property {number} elements[].stride The number of total bytes that are between the start of one
 * vertex, and the start of the next.
 * @property {number} elements[].size The size of the attribute in bytes.
 * @category Graphics
 */
declare class VertexFormat {
    /**
     * The {@link VertexFormat} used to store matrices of type {@link Mat4} for hardware instancing.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device used to create this vertex
     * format.
     * @returns {VertexFormat} The default instancing vertex format.
     */
    static getDefaultInstancingFormat(graphicsDevice: GraphicsDevice): VertexFormat;
    static isElementValid(graphicsDevice: any, elementDesc: any): boolean;
    /**
     * @typedef {object} AttributeDescription
     * @property {string} semantic - The meaning of the vertex element. This is used to
     * link the vertex data to a shader input. Can be:
     *
     * - {@link SEMANTIC_POSITION}
     * - {@link SEMANTIC_NORMAL}
     * - {@link SEMANTIC_TANGENT}
     * - {@link SEMANTIC_BLENDWEIGHT}
     * - {@link SEMANTIC_BLENDINDICES}
     * - {@link SEMANTIC_COLOR}
     * - {@link SEMANTIC_TEXCOORD0}
     * - {@link SEMANTIC_TEXCOORD1}
     * - {@link SEMANTIC_TEXCOORD2}
     * - {@link SEMANTIC_TEXCOORD3}
     * - {@link SEMANTIC_TEXCOORD4}
     * - {@link SEMANTIC_TEXCOORD5}
     * - {@link SEMANTIC_TEXCOORD6}
     * - {@link SEMANTIC_TEXCOORD7}
     *
     * If vertex data has a meaning other than one of those listed above, use the user-defined
     * semantics: {@link SEMANTIC_ATTR0} to {@link SEMANTIC_ATTR15}.
     * @property {number} components - The number of components of the vertex attribute.
     * Can be 1, 2, 3 or 4.
     * @property {number} type - The data type of the attribute. Can be:
     *
     * - {@link TYPE_INT8}
     * - {@link TYPE_UINT8}
     * - {@link TYPE_INT16}
     * - {@link TYPE_UINT16}
     * - {@link TYPE_INT32}
     * - {@link TYPE_UINT32}
     * - {@link TYPE_FLOAT16}
     * - {@link TYPE_FLOAT32}
     *
     * @property {boolean} [normalize] - If true, vertex attribute data will be mapped
     * from a 0 to 255 range down to 0 to 1 when fed to a shader. If false, vertex attribute data
     * is left unchanged. If this property is unspecified, false is assumed. This property is
     * ignored when asInt is true.
     * @property {boolean} [asInt] - If true, vertex attribute data will be accessible
     * as integer numbers in shader code. Defaults to false, which means that vertex attribute data
     * will be accessible as floating point numbers. Can be only used with INT and UINT data types.
     */
    /**
     * Create a new VertexFormat instance.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device used to manage this vertex
     * format.
     * @param {AttributeDescription[]} description - An array of vertex attribute descriptions.
     * @param {number} [vertexCount] - When specified, vertex format will be set up for
     * non-interleaved format with a specified number of vertices. (example: PPPPNNNNCCCC), where
     * arrays of individual attributes will be stored one right after the other (subject to
     * alignment requirements). Note that in this case, the format depends on the number of
     * vertices, and needs to change when the number of vertices changes. When not specified,
     * vertex format will be interleaved. (example: PNCPNCPNCPNC).
     * @example
     * // Specify 3-component positions (x, y, z)
     * const vertexFormat = new pc.VertexFormat(graphicsDevice, [
     *     { semantic: pc.SEMANTIC_POSITION, components: 3, type: pc.TYPE_FLOAT32 }
     * ]);
     * @example
     * // Specify 2-component positions (x, y), a texture coordinate (u, v) and a vertex color (r, g, b, a)
     * const vertexFormat = new pc.VertexFormat(graphicsDevice, [
     *     { semantic: pc.SEMANTIC_POSITION, components: 2, type: pc.TYPE_FLOAT32 },
     *     { semantic: pc.SEMANTIC_TEXCOORD0, components: 2, type: pc.TYPE_FLOAT32 },
     *     { semantic: pc.SEMANTIC_COLOR, components: 4, type: pc.TYPE_UINT8, normalize: true }
     * ]);
     */
    constructor(graphicsDevice: GraphicsDevice, description: {
        /**
         * - The meaning of the vertex element. This is used to
         * link the vertex data to a shader input. Can be:
         *
         * - {@link SEMANTIC_POSITION}
         * - {@link SEMANTIC_NORMAL}
         * - {@link SEMANTIC_TANGENT}
         * - {@link SEMANTIC_BLENDWEIGHT}
         * - {@link SEMANTIC_BLENDINDICES}
         * - {@link SEMANTIC_COLOR}
         * - {@link SEMANTIC_TEXCOORD0}
         * - {@link SEMANTIC_TEXCOORD1}
         * - {@link SEMANTIC_TEXCOORD2}
         * - {@link SEMANTIC_TEXCOORD3}
         * - {@link SEMANTIC_TEXCOORD4}
         * - {@link SEMANTIC_TEXCOORD5}
         * - {@link SEMANTIC_TEXCOORD6}
         * - {@link SEMANTIC_TEXCOORD7}
         *
         * If vertex data has a meaning other than one of those listed above, use the user-defined
         * semantics: {@link SEMANTIC_ATTR0} to {@link SEMANTIC_ATTR15}.
         */
        semantic: string;
        /**
         * - The number of components of the vertex attribute.
         * Can be 1, 2, 3 or 4.
         */
        components: number;
        /**
         * - The data type of the attribute. Can be:
         *
         * - {@link TYPE_INT8}
         * - {@link TYPE_UINT8}
         * - {@link TYPE_INT16}
         * - {@link TYPE_UINT16}
         * - {@link TYPE_INT32}
         * - {@link TYPE_UINT32}
         * - {@link TYPE_FLOAT16}
         * - {@link TYPE_FLOAT32}
         */
        type: number;
        /**
         * - If true, vertex attribute data will be mapped
         * from a 0 to 255 range down to 0 to 1 when fed to a shader. If false, vertex attribute data
         * is left unchanged. If this property is unspecified, false is assumed. This property is
         * ignored when asInt is true.
         */
        normalize?: boolean;
        /**
         * - If true, vertex attribute data will be accessible
         * as integer numbers in shader code. Defaults to false, which means that vertex attribute data
         * will be accessible as floating point numbers. Can only be used with INT and UINT data types.
         */
        asInt?: boolean;
    }[], vertexCount?: number);
    /** The graphics device used to manage this vertex format. */
    device: GraphicsDevice;
    /**
     * Processed vertex element descriptors. Same shape as returned by the `elements` getter.
     *
     * @private
     */
    _elements: {
        name: string;
        offset: any;
        stride: any;
        dataType: number;
        numComponents: number;
        normalize: boolean;
        size: number;
        asInt: boolean;
    }[];
    // NOTE(review): the following four flags presumably record whether the format contains the
    // corresponding semantic (TEXCOORD0, TEXCOORD1, COLOR, TANGENT) — inferred from names; confirm.
    hasUv0: boolean;
    hasUv1: boolean;
    hasColor: boolean;
    hasTangents: boolean;
    // NOTE(review): presumably the total byte size of all vertices described by this format — confirm.
    verticesByteSize: number;
    /** Number of vertices (see the constructor's vertexCount parameter, non-interleaved formats). */
    vertexCount: number;
    /** True when the format is interleaved (see the constructor's vertexCount parameter). */
    interleaved: boolean;
    // NOTE(review): presumably true when this format is used for instancing data — confirm.
    instancing: boolean;
    // NOTE(review): presumably the byte size of a single vertex — confirm.
    size: number;
    /** Gets the array of processed vertex element descriptors for this format. */
    get elements(): {
        name: string;
        offset: any;
        stride: any;
        dataType: number;
        numComponents: number;
        normalize: boolean;
        size: number;
        asInt: boolean;
    }[];
    /**
     * Applies any changes made to the VertexFormat's properties.
     *
     * @private
     */
    private update;
    /**
     * Evaluates hash values for the format allowing fast compare of batching / rendering compatibility.
     *
     * @private
     */
    private _evaluateHash;
    /** Hash used for fast batching-compatibility comparison (set by _evaluateHash). */
    batchingHash: number;
    /** String hash input used for shader processing (set by _evaluateHash). */
    shaderProcessingHashString: string;
    /** String hash input used for rendering-compatibility comparison (set by _evaluateHash). */
    renderingHashString: string;
    /** Hash used for fast rendering-compatibility comparison (set by _evaluateHash). */
    renderingHash: number;
}

/**
 * A vertex buffer is the mechanism via which the application specifies vertex data to the graphics
 * hardware.
 *
 * @category Graphics
 */
declare class VertexBuffer {
    /**
     * Create a new VertexBuffer instance.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device used to manage this vertex
     * buffer.
     * @param {VertexFormat} format - The vertex format of this vertex buffer.
     * @param {number} numVertices - The number of vertices that this vertex buffer will hold.
     * @param {object} [options] - Object for passing optional arguments.
     * @param {number} [options.usage] - The usage type of the vertex buffer (see BUFFER_*).
     * Defaults to BUFFER_STATIC.
     * @param {ArrayBuffer} [options.data] - Initial data.
     * @param {boolean} [options.storage] - Defines if the vertex buffer can be used as a storage
     * buffer by a compute shader. Defaults to false. Only supported on WebGPU.
     */
    constructor(graphicsDevice: GraphicsDevice, format: VertexFormat, numVertices: number, options?: {
        usage?: number;
        data?: ArrayBuffer;
        storage?: boolean;
    }, ...args: any[]);
    /** The usage type of the vertex buffer (see BUFFER_*). */
    usage: number;
    /** The graphics device used to manage this vertex buffer. */
    device: GraphicsDevice;
    /** The vertex format of this vertex buffer. */
    format: VertexFormat;
    /** The number of vertices that this vertex buffer holds. */
    numVertices: number;
    // NOTE(review): presumably a unique identifier for this buffer — confirm.
    id: number;
    // NOTE(review): presumably the platform-specific (WebGL/WebGPU) implementation object — confirm.
    impl: any;
    // NOTE(review): presumably the size of the buffer in bytes — confirm.
    numBytes: number;
    // NOTE(review): presumably the client-side copy of the buffer data (see lock/unlock); the name
    // collides with the boolean options.storage constructor flag — confirm against engine source.
    storage: ArrayBuffer;
    /**
     * Frees resources associated with this vertex buffer.
     */
    destroy(): void;
    // NOTE(review): presumably adjusts VRAM usage tracking counters by the given size
    // (see TRACEID_VRAM_VB) — confirm.
    adjustVramSizeTracking(vram: any, size: any): void;
    /**
     * Called when the rendering context was lost. It releases all context related resources.
     *
     * @ignore
     */
    loseContext(): void;
    /**
     * Returns the data format of the specified vertex buffer.
     *
     * @returns {VertexFormat} The data format of the specified vertex buffer.
     */
    getFormat(): VertexFormat;
    /**
     * Returns the usage type of the specified vertex buffer. This indicates whether the buffer can
     * be modified once and used many times {@link BUFFER_STATIC}, modified repeatedly and used
     * many times {@link BUFFER_DYNAMIC} or modified once and used at most a few times
     * {@link BUFFER_STREAM}.
     *
     * @returns {number} The usage type of the vertex buffer (see BUFFER_*).
     */
    getUsage(): number;
    /**
     * Returns the number of vertices stored in the specified vertex buffer.
     *
     * @returns {number} The number of vertices stored in the vertex buffer.
     */
    getNumVertices(): number;
    /**
     * Returns a mapped memory block representing the content of the vertex buffer.
     *
     * @returns {ArrayBuffer} An array containing the byte data stored in the vertex buffer.
     */
    lock(): ArrayBuffer;
    /**
     * Notifies the graphics engine that the client side copy of the vertex buffer's memory can be
     * returned to the control of the graphics driver.
     */
    unlock(): void;
    /**
     * Copies data into vertex buffer's memory.
     *
     * @param {ArrayBuffer} [data] - Source data to copy.
     * @returns {boolean} True if function finished successfully, false otherwise.
     */
    setData(data?: ArrayBuffer): boolean;
}

/**
 * BlendState is a descriptor that defines how output of fragment shader is written and blended
 * into render target. A blend state can be set on a material using {@link Material#blendState},
 * or in some cases on the graphics device using {@link GraphicsDevice#setBlendState}.
 *
 * For the best performance, do not modify blend state after it has been created, but create
 * multiple blend states and assign them to the material or graphics device as needed.
 *
 * @category Graphics
 */
declare class BlendState {
    /**
     * A blend state that has blending disabled and writes to all color channels.
     *
     * @type {BlendState}
     * @readonly
     */
    static readonly NOBLEND: BlendState;
    /**
     * A blend state that does not write to color channels.
     *
     * @type {BlendState}
     * @readonly
     */
    static readonly NOWRITE: BlendState;
    /**
     * A blend state that does simple translucency using alpha channel.
     *
     * @type {BlendState}
     * @readonly
     */
    static readonly ALPHABLEND: BlendState;
    /**
     * A blend state that does simple additive blending.
     *
     * @type {BlendState}
     * @readonly
     */
    static readonly ADDBLEND: BlendState;
    /**
     * Create a new BlendState instance.
     *
     * All factor parameters can take the following values:
     *
     * - {@link BLENDMODE_ZERO}
     * - {@link BLENDMODE_ONE}
     * - {@link BLENDMODE_SRC_COLOR}
     * - {@link BLENDMODE_ONE_MINUS_SRC_COLOR}
     * - {@link BLENDMODE_DST_COLOR}
     * - {@link BLENDMODE_ONE_MINUS_DST_COLOR}
     * - {@link BLENDMODE_SRC_ALPHA}
     * - {@link BLENDMODE_SRC_ALPHA_SATURATE}
     * - {@link BLENDMODE_ONE_MINUS_SRC_ALPHA}
     * - {@link BLENDMODE_DST_ALPHA}
     * - {@link BLENDMODE_ONE_MINUS_DST_ALPHA}
     * - {@link BLENDMODE_CONSTANT}
     * - {@link BLENDMODE_ONE_MINUS_CONSTANT}
     *
     * All op parameters can take the following values:
     *
     * - {@link BLENDEQUATION_ADD}
     * - {@link BLENDEQUATION_SUBTRACT}
     * - {@link BLENDEQUATION_REVERSE_SUBTRACT}
     * - {@link BLENDEQUATION_MIN}
     * - {@link BLENDEQUATION_MAX}
     *
     * @param {boolean} [blend] - Enables or disables blending. Defaults to false.
     * @param {number} [colorOp] - Configures color blending operation. Defaults to
     * {@link BLENDEQUATION_ADD}.
     * @param {number} [colorSrcFactor] - Configures source color blending factor. Defaults to
     * {@link BLENDMODE_ONE}.
     * @param {number} [colorDstFactor] - Configures destination color blending factor. Defaults to
     * {@link BLENDMODE_ZERO}.
     * @param {number} [alphaOp] - Configures alpha blending operation. Defaults to
     * {@link BLENDEQUATION_ADD}.
     * @param {number} [alphaSrcFactor] - Configures source alpha blending factor. Defaults to
     * {@link BLENDMODE_ONE}.
     * @param {number} [alphaDstFactor] - Configures destination alpha blending factor. Defaults to
     * {@link BLENDMODE_ZERO}.
     * @param {boolean} [redWrite] - True to enable writing of the red channel and false otherwise.
     * Defaults to true.
     * @param {boolean} [greenWrite] - True to enable writing of the green channel and false
     * otherwise. Defaults to true.
     * @param {boolean} [blueWrite] - True to enable writing of the blue channel and false otherwise.
     * Defaults to true.
     * @param {boolean} [alphaWrite] - True to enable writing of the alpha channel and false
     * otherwise. Defaults to true.
     */
    constructor(blend?: boolean, colorOp?: number, colorSrcFactor?: number, colorDstFactor?: number, alphaOp?: number, alphaSrcFactor?: number, alphaDstFactor?: number, redWrite?: boolean, greenWrite?: boolean, blueWrite?: boolean, alphaWrite?: boolean);
    /**
     * Bit field representing the blend state for render target 0.
     *
     * @private
     */
    private target0;
    /**
     * Sets whether blending is enabled.
     *
     * @type {boolean}
     */
    set blend(value: boolean);
    /**
     * Gets whether blending is enabled.
     *
     * @type {boolean}
     */
    get blend(): boolean;
    /**
     * Sets the color blend operation and factors. See the constructor for allowed
     * BLENDEQUATION_* / BLENDMODE_* values.
     */
    setColorBlend(op: any, srcFactor: any, dstFactor: any): void;
    /**
     * Sets the alpha blend operation and factors. See the constructor for allowed
     * BLENDEQUATION_* / BLENDMODE_* values.
     */
    setAlphaBlend(op: any, srcFactor: any, dstFactor: any): void;
    /**
     * Enables or disables writing of the red, green, blue and alpha channels.
     */
    setColorWrite(redWrite: any, greenWrite: any, blueWrite: any, alphaWrite: any): void;
    /** Sets whether the red channel is written. */
    set redWrite(value: boolean);
    /** Gets whether the red channel is written. */
    get redWrite(): boolean;
    /** Sets whether the green channel is written. */
    set greenWrite(value: boolean);
    /** Gets whether the green channel is written. */
    get greenWrite(): boolean;
    /** Sets whether the blue channel is written. */
    set blueWrite(value: boolean);
    /** Gets whether the blue channel is written. */
    get blueWrite(): boolean;
    /** Sets whether the alpha channel is written. */
    set alphaWrite(value: boolean);
    /** Gets whether the alpha channel is written. */
    get alphaWrite(): boolean;
    /** Gets the color blending operation (BLENDEQUATION_*). */
    get colorOp(): number;
    /** Gets the source color blending factor (BLENDMODE_*). */
    get colorSrcFactor(): number;
    /** Gets the destination color blending factor (BLENDMODE_*). */
    get colorDstFactor(): number;
    /** Gets the alpha blending operation (BLENDEQUATION_*). */
    get alphaOp(): number;
    /** Gets the source alpha blending factor (BLENDMODE_*). */
    get alphaSrcFactor(): number;
    /** Gets the destination alpha blending factor (BLENDMODE_*). */
    get alphaDstFactor(): number;
    // NOTE(review): presumably the combined bit field of all four channel write flags — confirm.
    get allWrite(): number;
    /**
     * Copies the contents of a source blend state to this blend state.
     *
     * @param {BlendState} rhs - A blend state to copy from.
     * @returns {BlendState} Self for chaining.
     */
    copy(rhs: BlendState): BlendState;
    /**
     * Returns an identical copy of the specified blend state.
     *
     * @returns {this} The result of the cloning.
     */
    clone(): this;
    // NOTE(review): presumably a unique number representing the blend state for fast comparison,
    // by analogy with DepthState#key — confirm.
    get key(): number;
    /**
     * Reports whether two BlendStates are equal.
     *
     * @param {BlendState} rhs - The blend state to compare to.
     * @returns {boolean} True if the blend states are equal and false otherwise.
     */
    equals(rhs: BlendState): boolean;
}

/**
 * DepthState is a descriptor that defines how the depth value of the fragment is used by the
 * rendering pipeline. A depth state can be set on a material using {@link Material#depthState},
 * or in some cases on the graphics device using {@link GraphicsDevice#setDepthState}.
 *
 * For the best performance, do not modify depth state after it has been created, but create
 * multiple depth states and assign them to the material or graphics device as needed.
 *
 * @category Graphics
 */
declare class DepthState {
    /**
     * A default depth state that has the depth testing function set to {@link FUNC_LESSEQUAL} and
     * depth writes enabled.
     *
     * @type {DepthState}
     * @readonly
     */
    static readonly DEFAULT: DepthState;
    /**
     * A depth state that always passes the fragment but does not write depth to the depth buffer.
     *
     * @type {DepthState}
     * @readonly
     */
    static readonly NODEPTH: DepthState;
    /**
     * A depth state that always passes the fragment and writes depth to the depth buffer.
     *
     * @type {DepthState}
     * @readonly
     */
    static readonly WRITEDEPTH: DepthState;
    /**
     * Create a new DepthState instance.
     *
     * @param {number} [func] - Controls how the depth of the fragment is compared against the
     * current depth contained in the depth buffer. See {@link DepthState#func} for details.
     * Defaults to {@link FUNC_LESSEQUAL}.
     * @param {boolean} [write] - If true, depth values are written to the depth buffer of the
     * currently active render target. Defaults to true.
     */
    constructor(func?: number, write?: boolean);
    /**
     * Bit field representing the depth state.
     *
     * @private
     */
    private data;
    /** Constant depth bias backing field (see {@link DepthState#depthBias}). */
    _depthBias: number;
    /** Slope-scaled depth bias backing field (see {@link DepthState#depthBiasSlope}). */
    _depthBiasSlope: number;
    /**
     * A unique number representing the depth state. You can use this number to quickly compare
     * two depth states for equality. The key is always maintained valid without a dirty flag,
     * to avoid condition check at runtime, considering these change rarely.
     *
     * @type {number}
     */
    key: number;
    /**
     * Sets the depth testing function. Controls how the depth of the fragment is compared against
     * the current depth contained in the depth buffer. Can be:
     *
     * - {@link FUNC_NEVER}: don't draw
     * - {@link FUNC_LESS}: draw if new depth < depth buffer
     * - {@link FUNC_EQUAL}: draw if new depth == depth buffer
     * - {@link FUNC_LESSEQUAL}: draw if new depth <= depth buffer
     * - {@link FUNC_GREATER}: draw if new depth > depth buffer
     * - {@link FUNC_NOTEQUAL}: draw if new depth != depth buffer
     * - {@link FUNC_GREATEREQUAL}: draw if new depth >= depth buffer
     * - {@link FUNC_ALWAYS}: always draw
     *
     * @type {number}
     */
    set func(value: number);
    /**
     * Gets the depth testing function.
     *
     * @type {number}
     */
    get func(): number;
    /**
     * Sets whether depth writing is performed. If true, shaders write a depth value to the depth
     * buffer of the currently active render target. If false, no depth value is written.
     *
     * @type {boolean}
     */
    set write(value: boolean);
    /**
     * Gets whether depth writing is performed.
     *
     * @type {boolean}
     */
    get write(): boolean;
    /**
     * Sets whether depth testing is performed. If true, a shader fragment is only written to the
     * current render target if it passes the depth test. If false, it is written regardless of
     * what is in the depth buffer. Note that when depth testing is disabled, writes to the depth
     * buffer are also disabled. Defaults to true.
     *
     * @type {boolean}
     */
    set test(value: boolean);
    /**
     * Gets whether depth testing is performed.
     *
     * @type {boolean}
     */
    get test(): boolean;
    /**
     * Sets the constant depth bias added to each fragment's depth. Useful for decals to prevent
     * z-fighting. Typically a small negative value (-0.1) is used to render the mesh slightly
     * closer to the camera. Defaults to 0.
     *
     * @type {number}
     */
    set depthBias(value: number);
    /**
     * Gets the constant depth bias added to each fragment's depth.
     *
     * @type {number}
     */
    get depthBias(): number;
    /**
     * Sets the depth bias that scales with the fragment's slope. Defaults to 0.
     *
     * @type {number}
     */
    set depthBiasSlope(value: number);
    /**
     * Gets the depth bias that scales with the fragment's slope.
     *
     * @type {number}
     */
    get depthBiasSlope(): number;
    /**
     * Copies the contents of a source depth state to this depth state.
     *
     * @param {DepthState} rhs - A depth state to copy from.
     * @returns {DepthState} Self for chaining.
     */
    copy(rhs: DepthState): DepthState;
    /**
     * Returns an identical copy of the specified depth state.
     *
     * @returns {this} The result of the cloning.
     */
    clone(): this;
    /** Recomputes {@link DepthState#key} from the current state. */
    updateKey(): void;
    /**
     * Reports whether two DepthStates are equal.
     *
     * @param {DepthState} rhs - The depth state to compare to.
     * @returns {boolean} True if the depth states are equal and false otherwise.
     */
    equals(rhs: DepthState): boolean;
}

/**
 * Holds stencil test settings.
 *
 * @category Graphics
 */
declare class StencilParameters {
    /**
     * A default stencil state.
     *
     * @type {StencilParameters}
     * @readonly
     */
    static readonly DEFAULT: StencilParameters;
    /**
     * Create a new StencilParameters instance.
     *
     * @param {object} [options] - Options object to configure the stencil parameters.
     */
    constructor(options?: object);
    /**
     * @type {number}
     * @private
     */
    private _func;
    /**
     * @type {number}
     * @private
     */
    private _ref;
    /**
     * @type {number}
     * @private
     */
    private _fail;
    /**
     * @type {number}
     * @private
     */
    private _zfail;
    /**
     * @type {number}
     * @private
     */
    private _zpass;
    /**
     * @type {number}
     * @private
     */
    private _readMask;
    /**
     * @type {number}
     * @private
     */
    private _writeMask;
    /**
     * @type {boolean}
     * @private
     */
    private _dirty;
    /**
     * @type {number}
     * @private
     */
    private _key;
    /**
     * Sets the comparison function that decides if the pixel should be written, based on the
     * current stencil buffer value, reference value, and mask value. Can be:
     *
     * - {@link FUNC_NEVER}: never pass
     * - {@link FUNC_LESS}: pass if (ref & mask) < (stencil & mask)
     * - {@link FUNC_EQUAL}: pass if (ref & mask) == (stencil & mask)
     * - {@link FUNC_LESSEQUAL}: pass if (ref & mask) <= (stencil & mask)
     * - {@link FUNC_GREATER}: pass if (ref & mask) > (stencil & mask)
     * - {@link FUNC_NOTEQUAL}: pass if (ref & mask) != (stencil & mask)
     * - {@link FUNC_GREATEREQUAL}: pass if (ref & mask) >= (stencil & mask)
     * - {@link FUNC_ALWAYS}: always pass
     *
     * @type {number}
     */
    set func(value: number);
    /**
     * Gets the comparison function that decides if the pixel should be written.
     *
     * @type {number}
     */
    get func(): number;
    /**
     * Sets the stencil test reference value used in comparisons.
     *
     * @type {number}
     */
    set ref(value: number);
    /**
     * Gets the stencil test reference value used in comparisons.
     *
     * @type {number}
     */
    get ref(): number;
    /**
     * Sets the operation to perform if stencil test is failed. Can be:
     *
     * - {@link STENCILOP_KEEP}: don't change the stencil buffer value
     * - {@link STENCILOP_ZERO}: set value to zero
     * - {@link STENCILOP_REPLACE}: replace value with the reference value.
     * - {@link STENCILOP_INCREMENT}: increment the value
     * - {@link STENCILOP_INCREMENTWRAP}: increment the value, but wrap it to zero when it's larger
     * than a maximum representable value
     * - {@link STENCILOP_DECREMENT}: decrement the value
     * - {@link STENCILOP_DECREMENTWRAP}: decrement the value, but wrap it to a maximum
     * representable value, if the current value is 0
     * - {@link STENCILOP_INVERT}: invert the value bitwise
     *
     * @type {number}
     */
    set fail(value: number);
    /**
     * Gets the operation to perform if stencil test is failed.
     *
     * @type {number}
     */
    get fail(): number;
    /**
     * Sets the operation to perform if depth test is failed. Accepts the same values as `fail`.
     *
     * @type {number}
     */
    set zfail(value: number);
    /**
     * Gets the operation to perform if depth test is failed.
     *
     * @type {number}
     */
    get zfail(): number;
    /**
     * Sets the operation to perform if both stencil and depth test are passed. Accepts the same
     * values as `fail`.
     *
     * @type {number}
     */
    set zpass(value: number);
    /**
     * Gets the operation to perform if both stencil and depth test are passed.
     *
     * @type {number}
     */
    get zpass(): number;
    /**
     * Sets the mask applied to stencil buffer value and reference value before comparison.
     *
     * @type {number}
     */
    set readMask(value: number);
    /**
     * Gets the mask applied to stencil buffer value and reference value before comparison.
     *
     * @type {number}
     */
    get readMask(): number;
    /**
     * Sets the bit mask applied to the stencil value when written.
     *
     * @type {number}
     */
    set writeMask(value: number);
    /**
     * Gets the bit mask applied to the stencil value when written.
     *
     * @type {number}
     */
    get writeMask(): number;
    /** Re-evaluates the cached key value (see {@link StencilParameters#key}). */
    _evalKey(): void;
    // NOTE(review): presumably a unique number representing the stencil parameters for fast
    // comparison, by analogy with DepthState#key — confirm.
    get key(): number;
    /**
     * Copies the contents of a source stencil parameters to this stencil parameters.
     *
     * @param {StencilParameters} rhs - A stencil parameters to copy from.
     * @returns {StencilParameters} Self for chaining.
     */
    copy(rhs: StencilParameters): StencilParameters;
    /**
     * Clone the stencil parameters.
     *
     * @returns {StencilParameters} A cloned StencilParameters object.
     */
    clone(): StencilParameters;
}

/**
 * A uniform buffer represents a GPU memory buffer storing the uniforms.
 *
 * @ignore
 */
declare class UniformBuffer {
    /**
     * Create a new UniformBuffer instance.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device used to manage this uniform
     * buffer.
     * @param {UniformBufferFormat} format - Format of the uniform buffer.
     * @param {boolean} [persistent] - Whether the buffer is persistent. Defaults to true.
     */
    constructor(graphicsDevice: GraphicsDevice, format: UniformBufferFormat, persistent?: boolean);
    /** The graphics device used to manage this uniform buffer. */
    device: GraphicsDevice;
    /** @type {boolean} */
    persistent: boolean;
    /** @type {DynamicBufferAllocation} */
    allocation: DynamicBufferAllocation;
    /** @type {Float32Array} */
    storageFloat32: Float32Array;
    /** @type {Int32Array} */
    storageInt32: Int32Array;
    /** @type {Uint32Array} */
    storageUint32: Uint32Array;
    /**
     * A render version used to track the last time the properties requiring bind group to be
     * updated were changed.
     *
     * @type {number}
     */
    renderVersionDirty: number;
    /** Format of the uniform buffer. */
    format: UniformBufferFormat;
    // NOTE(review): presumably the platform-specific (WebGL/WebGPU) implementation object — confirm.
    impl: any;
    /**
     * Frees resources associated with this uniform buffer.
     */
    destroy(): void;
    // NOTE(review): presumably the byte offset of this buffer's allocation within the dynamic
    // buffer (see the allocation property) — confirm.
    get offset(): number;
    /**
     * Assign a storage to this uniform buffer.
     *
     * @param {Int32Array} storage - The storage to assign to this uniform buffer.
     */
    assignStorage(storage: Int32Array): void;
    /**
     * Called when the rendering context was lost. It releases all context related resources.
     */
    loseContext(): void;
    /**
     * Assign a value to the uniform specified by its format. This is the fast version of assigning
     * a value to a uniform, avoiding any lookups.
     *
     * @param {UniformFormat} uniformFormat - The format of the uniform.
     * @param {any} value - The value to assign to the uniform.
     */
    setUniform(uniformFormat: UniformFormat, value: any): void;
    /**
     * Assign a value to the uniform specified by name.
     *
     * @param {string} name - The name of the uniform.
     * @param {any} value - The value to assign to the uniform.
     */
    set(name: string, value: any): void;
    // NOTE(review): presumably begins an update cycle, pairing with endUpdate and used
    // internally by update — confirm.
    startUpdate(dynamicBindGroup: any): void;
    // NOTE(review): presumably finalizes the update cycle started by startUpdate — confirm.
    endUpdate(): void;
    /**
     * @param {DynamicBindGroup} [dynamicBindGroup] - The function fills in the info about the
     * dynamic bind group for this frame, which uses this uniform buffer. Only used if the uniform
     * buffer is non-persistent. This allows the uniform buffer to be used without having to create
     * a bind group for it. Note that the bind group can only contain this single uniform buffer,
     * and no other resources.
     */
    update(dynamicBindGroup?: DynamicBindGroup): void;
}

/**
 * A storage buffer represents a memory which both the CPU and the GPU can access. Typically it is
 * used to provide data for compute shader, and to store the result of the computation.
 * Note that this class is only supported on the WebGPU platform.
 *
 * @category Graphics
 */
declare class StorageBuffer {
    /**
     * Create a new StorageBuffer instance.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device used to manage this storage buffer.
     * @param {number} byteSize - The size of the storage buffer in bytes.
     * @param {number} [bufferUsage] - The usage type of the storage buffer. Can be a combination
     * of {@link BUFFERUSAGE_READ}, {@link BUFFERUSAGE_WRITE}, {@link BUFFERUSAGE_COPY_SRC} and
     * {@link BUFFERUSAGE_COPY_DST} flags. This parameter can be omitted if no special usage is
     * required.
     * @param {boolean} [addStorageUsage] - If true, automatically adds BUFFERUSAGE_STORAGE flag.
     * Set to false for staging buffers that use BUFFERUSAGE_WRITE. Defaults to true.
     */
    constructor(graphicsDevice: GraphicsDevice, byteSize: number, bufferUsage?: number, addStorageUsage?: boolean);
    // NOTE(review): presumably a unique identifier for this buffer — confirm.
    id: number;
    /** The graphics device used to manage this storage buffer. */
    device: GraphicsDevice;
    /** The size of the storage buffer in bytes. */
    byteSize: number;
    /** The usage type of the storage buffer (combination of BUFFERUSAGE_* flags). */
    bufferUsage: number;
    // NOTE(review): presumably the platform-specific (WebGPU) implementation object — confirm.
    impl: any;
    /**
     * Frees resources associated with this storage buffer.
     */
    destroy(): void;
    // NOTE(review): presumably adjusts VRAM usage tracking counters by the given size
    // (see TRACEID_VRAM_SB) — confirm.
    adjustVramSizeTracking(vram: any, size: any): void;
    /**
     * Read the contents of a storage buffer.
     *
     * @param {number} [offset] - The byte offset of data to read. Defaults to 0.
     * @param {number} [size] - The byte size of data to read. Defaults to the full size of the
     * buffer minus the offset.
     * @param {ArrayBufferView|null} [data] - Typed array to populate with the data read from the
     * storage buffer. When typed array is supplied, enough space needs to be reserved, otherwise
     * only partial data is copied. If not specified, the data is returned in an Uint8Array.
     * Defaults to null.
     * @param {boolean} [immediate] - If true, the read operation will be executed as soon as
     * possible. This has a performance impact, so it should be used only when necessary. Defaults
     * to false.
     * @returns {Promise<ArrayBufferView>} A promise that resolves with the data read from the
     * storage buffer.
     * @ignore
     */
    read(offset?: number, size?: number, data?: ArrayBufferView | null, immediate?: boolean): Promise<ArrayBufferView>;
    /**
     * Issues a write operation of the provided data into a storage buffer.
     *
     * @param {number} bufferOffset - The offset in bytes to start writing to the storage buffer.
     * @param {ArrayBufferView} data - The data to write to the storage buffer.
     * @param {number} dataOffset - Offset in data to begin writing from. Given in elements if data
     * is a TypedArray and bytes otherwise.
     * @param {number} size - Size of content to write from data to buffer. Given in elements if
     * data is a TypedArray and bytes otherwise.
     */
    write(bufferOffset: number, data: ArrayBufferView, dataOffset: number, size: number): void;
    /**
     * Clear the content of a storage buffer to 0.
     *
     * @param {number} [offset] - The byte offset of data to clear. Defaults to 0.
     * @param {number} [size] - The byte size of data to clear. Defaults to the full size of the
     * buffer minus the offset.
     */
    clear(offset?: number, size?: number): void;
    /**
     * Copy data from another storage buffer into this storage buffer.
     *
     * @param {StorageBuffer} srcBuffer - The source storage buffer to copy from.
     * @param {number} [srcOffset] - The byte offset in the source buffer. Defaults to 0.
     * @param {number} [dstOffset] - The byte offset in this buffer. Defaults to 0.
     * @param {number} [size] - The byte size of data to copy. Defaults to the full size of the
     * source buffer minus the source offset.
     */
    copy(srcBuffer: StorageBuffer, srcOffset?: number, dstOffset?: number, size?: number): void;
}

/**
 * A bind group represents a collection of {@link UniformBuffer}, {@link Texture} and
 * {@link StorageBuffer} instances, which can be bound on a GPU for rendering.
 *
 * @ignore
 */
declare class BindGroup {
    /**
     * Create a new Bind Group.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device used to manage this uniform buffer.
     * @param {BindGroupFormat} format - Format of the bind group.
     * @param {UniformBuffer} [defaultUniformBuffer] - The default uniform buffer. Typically a bind
     * group only has a single uniform buffer, and this allows easier access.
     */
    constructor(graphicsDevice: GraphicsDevice, format: BindGroupFormat, defaultUniformBuffer?: UniformBuffer);
    /**
     * A render version the bind group was last updated on.
     *
     * @type {number}
     * @private
     */
    private renderVersionUpdated;
    /** @type {UniformBuffer[]} */
    uniformBuffers: UniformBuffer[];
    /**
     * An array of offsets for each uniform buffer in the bind group. This is the offset in the
     * buffer where the uniform buffer data starts.
     *
     * @type {number[]}
     */
    uniformBufferOffsets: number[];
    // NOTE(review): presumably a unique identifier for this bind group — confirm.
    id: number;
    /** The graphics device used to manage this bind group. */
    device: GraphicsDevice;
    /** Format of the bind group. */
    format: BindGroupFormat;
    /** True when the bind group properties have changed and it needs to be updated. */
    dirty: boolean;
    // NOTE(review): presumably the platform-specific (WebGL/WebGPU) implementation object — confirm.
    impl: any;
    /** @type {(Texture|TextureView)[]} */
    textures: (Texture | TextureView)[];
    /** @type {(Texture|TextureView)[]} */
    storageTextures: (Texture | TextureView)[];
    /** Storage buffers assigned to this bind group (see {@link BindGroup#setStorageBuffer}). */
    storageBuffers: any[];
    /** @type {UniformBuffer} */
    defaultUniformBuffer: UniformBuffer;
    /**
     * Frees resources associated with this bind group.
     */
    destroy(): void;
    /**
     * Assign a uniform buffer to a slot.
     *
     * @param {string} name - The name of the uniform buffer slot
     * @param {UniformBuffer} uniformBuffer - The Uniform buffer to assign to the slot.
     */
    setUniformBuffer(name: string, uniformBuffer: UniformBuffer): void;
    /**
     * Assign a storage buffer to a slot.
     *
     * @param {string} name - The name of the storage buffer slot.
     * @param {StorageBuffer} storageBuffer - The storage buffer to assign to the slot.
     */
    setStorageBuffer(name: string, storageBuffer: StorageBuffer): void;
    /**
     * Assign a texture to a named slot.
     *
     * @param {string} name - The name of the texture slot.
     * @param {Texture|TextureView} value - Texture or TextureView to assign to the slot.
     */
    setTexture(name: string, value: Texture | TextureView): void;
    /**
     * Assign a storage texture to a named slot.
     *
     * @param {string} name - The name of the texture slot.
     * @param {Texture|TextureView} value - Texture or TextureView to assign to the slot.
     */
    setStorageTexture(name: string, value: Texture | TextureView): void;
    /**
     * Updates the uniform buffers in this bind group.
     */
    updateUniformBuffers(): void;
    /**
     * Applies any changes made to the bind group's properties. Note that the content of used
     * uniform buffers needs to be updated before calling this method.
     */
    update(): void;
}
/**
 * Data structure to hold a bind group and its offsets. This is used by {@link UniformBuffer#update}
 * to return a dynamic bind group and offset for the uniform buffer.
 *
 * @ignore
 */
declare class DynamicBindGroup {
    /** The bind group — declared loosely as `any`; presumably a BindGroup (confirm in source). */
    bindGroup: any;
    /** Dynamic offsets for the bind group — presumably numbers; declared loosely here. */
    offsets: any[];
}

/**
 * @import { GraphicsDevice } from './graphics-device.js'
 */
/**
 * A base class representing a single per platform buffer.
 *
 * @ignore
 */
declare class DynamicBuffer {
    /**
     * Create a new DynamicBuffer.
     *
     * @param {GraphicsDevice} device - The graphics device (declared loosely as `any` here).
     */
    constructor(device: any);
    /** @type {GraphicsDevice} */
    device: GraphicsDevice;
    /**
     * A cache of bind groups for each uniform buffer size, which is used to avoid creating a new
     * bind group for each uniform buffer.
     *
     * @type {Map<number, BindGroup>}
     */
    bindGroupCache: Map<number, BindGroup>;
    /** @type {BindGroupFormat} */
    bindGroupFormat: BindGroupFormat;
    /**
     * Returns a bind group for the given uniform buffer, using {@link DynamicBuffer#bindGroupCache}
     * to avoid creating a new one per buffer size.
     *
     * @param {UniformBuffer} ub - The uniform buffer (declared loosely as `any` here).
     * @returns {BindGroup} The bind group for the buffer.
     */
    getBindGroup(ub: any): BindGroup;
}

/**
 * The DynamicBuffers class provides a dynamic memory allocation system for uniform buffer data,
 * particularly for non-persistent uniform buffers. This class utilizes a bump allocator to
 * efficiently allocate aligned memory space from a set of large buffers managed internally. To
 * utilize this system, the user writes data to CPU-accessible staging buffers. When submitting
 * command buffers that require these buffers, the system automatically uploads the data to the GPU
 * buffers. This approach ensures efficient memory management and smooth data transfer between the
 * CPU and GPU.
 *
 * @ignore
 */
declare class DynamicBuffers {
    /**
     * Create the system of dynamic buffers.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {number} bufferSize - The size of the underlying large buffers.
     * @param {number} bufferAlignment - Alignment of each allocation.
     */
    constructor(device: GraphicsDevice, bufferSize: number, bufferAlignment: number);
    /**
     * Allocation size of the underlying buffers.
     *
     * @type {number}
     */
    bufferSize: number;
    /**
     * Internally allocated gpu buffers.
     *
     * @type {DynamicBuffer[]}
     */
    gpuBuffers: DynamicBuffer[];
    /**
     * Internally allocated staging buffers (CPU writable)
     *
     * @type {DynamicBuffer[]}
     */
    stagingBuffers: DynamicBuffer[];
    /**
     * @type {UsedBuffer[]}
     */
    usedBuffers: UsedBuffer[];
    /**
     * @type {UsedBuffer|null}
     */
    activeBuffer: UsedBuffer | null;
    /** @type {GraphicsDevice} */
    device: GraphicsDevice;
    /**
     * Alignment of each allocation, as passed to the constructor.
     *
     * @type {number}
     */
    bufferAlignment: number;
    /**
     * Destroy the system of dynamic buffers.
     */
    destroy(): void;
    /**
     * Allocate an aligned space of the given size from a dynamic buffer.
     *
     * @param {DynamicBufferAllocation} allocation - The allocation info to fill.
     * @param {number} size - The size of the allocation.
     */
    alloc(allocation: DynamicBufferAllocation, size: number): void;
    /**
     * Schedules an upload of the used staging buffers to the GPU buffers.
     * NOTE(review): summary inferred from the name and class description — confirm against the
     * implementation.
     */
    scheduleSubmit(): void;
    /**
     * Uploads the used staging buffers to their GPU buffers (see the class description for the
     * staging-to-GPU data flow).
     */
    submit(): void;
}
/**
 * A container for storing the return values of an allocation function. Filled in by
 * {@link DynamicBuffers#alloc}.
 *
 * @ignore
 */
declare class DynamicBufferAllocation {
    /**
     * The storage access to the allocated data in the staging buffer.
     *
     * @type {Int32Array}
     */
    storage: Int32Array;
    /**
     * The gpu buffer this allocation will be copied to.
     *
     * @type {DynamicBuffer}
     */
    gpuBuffer: DynamicBuffer;
    /**
     * Offset in the gpuBuffer where the data will be copied to.
     *
     * @type {number}
     */
    offset: number;
}

/**
 * @import { DynamicBuffer } from './dynamic-buffer.js'
 * @import { GraphicsDevice } from './graphics-device.js'
 */
/**
 * A container for storing the used areas of a pair of staging and gpu buffers.
 *
 * @ignore
 */
declare class UsedBuffer {
    /** @type {DynamicBuffer} */
    gpuBuffer: DynamicBuffer;
    /** @type {DynamicBuffer} */
    stagingBuffer: DynamicBuffer;
    /**
     * The beginning position of the used area that needs to be copied from the staging to the GPU
     * buffer.
     *
     * @type {number}
     */
    offset: number;
    /**
     * Used byte size of the buffer, from the offset.
     *
     * @type {number}
     */
    size: number;
}

/**
 * Base class of a simple GPU profiler.
 *
 * @ignore
 */
declare class GpuProfiler {
    /**
     * Profiling slots allocated for the current frame, storing the names of the slots.
     *
     * @type {string[]}
     * @ignore
     */
    frameAllocations: string[];
    /**
     * Map of past frame allocations, indexed by renderVersion
     *
     * @type {Map<number, string[]>}
     * @ignore
     */
    pastFrameAllocations: Map<number, string[]>;
    /**
     * True if enabled in the current frame.
     *
     * @private
     */
    private _enabled;
    /**
     * The enable request for the next frame.
     *
     * @private
     */
    private _enableRequest;
    /**
     * The time it took to render the last frame on GPU, or 0 if the profiler is not enabled.
     *
     * @private
     */
    private _frameTime;
    /**
     * Per-pass timing data, with accumulated timings for passes with the same name.
     *
     * @type {Map<string, number>}
     * @private
     */
    private _passTimings;
    /**
     * Cache for parsed pass names to avoid repeated string operations.
     *
     * @type {Map<string, string>}
     * @private
     */
    private _nameCache;
    /**
     * The maximum number of slots that can be allocated during the frame.
     *
     * @type {number}
     */
    maxCount: number;
    /**
     * Resets profiling state when the graphics context is lost.
     * NOTE(review): summary inferred from the name — confirm against the implementation.
     */
    loseContext(): void;
    /**
     * True to enable the profiler.
     *
     * @type {boolean}
     */
    set enabled(value: boolean);
    get enabled(): boolean;
    /**
     * Get the per-pass timing data.
     *
     * @type {Map<string, number>}
     * @ignore
     */
    get passTimings(): Map<string, number>;
    /**
     * Applies the pending enable request (set via {@link GpuProfiler#enabled}) to the current
     * frame's enabled state.
     */
    processEnableRequest(): void;
    /**
     * Stores the current frame's slot allocations under the given render version, so results
     * reported later can be matched back to them (see {@link GpuProfiler#pastFrameAllocations}).
     *
     * @param {number} renderVersion - The current render version (declared loosely as `any` here).
     */
    request(renderVersion: any): void;
    /**
     * Parse a render pass name to a simplified form for stats.
     * Uses a cache to avoid repeated string operations.
     *
     * @param {string} name - The original pass name (e.g., "RenderPassCompose").
     * @returns {string} The parsed name (e.g., "compose").
     * @private
     */
    private _parsePassName;
    /**
     * Reports GPU timing results for a past frame identified by its render version.
     * NOTE(review): parameter shapes inferred — confirm against the implementation.
     *
     * @param {number} renderVersion - The render version the timings belong to.
     * @param {number[]} timings - The measured timings.
     */
    report(renderVersion: any, timings: any): void;
    /**
     * Allocate a slot for GPU timing during the frame. This slot is valid only for the current
     * frame. This allows multiple timers to be used during the frame, each with a unique name.
     *
     * @param {string} name - The name of the slot.
     * @returns {number} The assigned slot index, or -1 if the slot count exceeds the maximum number
     * of slots.
     *
     * @ignore
     */
    getSlot(name: string): number;
    /**
     * Number of slots allocated during the frame.
     *
     * @ignore
     */
    get slotCount(): number;
}

/**
 * An RGBA color.
 *
 * Each color component is a floating point value in the range 0 to 1. The {@link r} (red),
 * {@link g} (green) and {@link b} (blue) components define a color in RGB color space. The
 * {@link a} (alpha) component defines transparency. An alpha of 1 is fully opaque. An alpha of
 * 0 is fully transparent.
 *
 * @category Math
 */
declare class Color {
    /**
     * A constant color set to black [0, 0, 0, 1].
     *
     * @type {Color}
     * @readonly
     */
    static readonly BLACK: Color;
    /**
     * A constant color set to blue [0, 0, 1, 1].
     *
     * @type {Color}
     * @readonly
     */
    static readonly BLUE: Color;
    /**
     * A constant color set to cyan [0, 1, 1, 1].
     *
     * @type {Color}
     * @readonly
     */
    static readonly CYAN: Color;
    /**
     * A constant color set to gray [0.5, 0.5, 0.5, 1].
     *
     * @type {Color}
     * @readonly
     */
    static readonly GRAY: Color;
    /**
     * A constant color set to green [0, 1, 0, 1].
     *
     * @type {Color}
     * @readonly
     */
    static readonly GREEN: Color;
    /**
     * A constant color set to magenta [1, 0, 1, 1].
     *
     * @type {Color}
     * @readonly
     */
    static readonly MAGENTA: Color;
    /**
     * A constant color set to red [1, 0, 0, 1].
     *
     * @type {Color}
     * @readonly
     */
    static readonly RED: Color;
    /**
     * A constant color set to white [1, 1, 1, 1].
     *
     * @type {Color}
     * @readonly
     */
    static readonly WHITE: Color;
    /**
     * A constant color set to yellow [1, 1, 0, 1].
     *
     * @type {Color}
     * @readonly
     */
    static readonly YELLOW: Color;
    /**
     * Creates a new Color instance.
     *
     * @overload
     * @param {number} [r] - The r value. Defaults to 0.
     * @param {number} [g] - The g value. Defaults to 0.
     * @param {number} [b] - The b value. Defaults to 0.
     * @param {number} [a] - The a value. Defaults to 1.
     * @example
     * const c1 = new pc.Color(); // defaults to 0, 0, 0, 1
     * const c2 = new pc.Color(0.1, 0.2, 0.3, 0.4);
     */
    constructor(r?: number, g?: number, b?: number, a?: number);
    /**
     * Creates a new Color instance.
     *
     * @overload
     * @param {number[]} arr - The array to set the color values from.
     * @example
     * const c = new pc.Color([0.1, 0.2, 0.3, 0.4]);
     */
    constructor(arr: number[]);
    /**
     * The red component of the color.
     *
     * @type {number}
     */
    r: number;
    /**
     * The green component of the color.
     *
     * @type {number}
     */
    g: number;
    /**
     * The blue component of the color.
     *
     * @type {number}
     */
    b: number;
    /**
     * The alpha component of the color.
     *
     * @type {number}
     */
    a: number;
    /**
     * Returns a clone of the specified color.
     *
     * @returns {this} A duplicate color object.
     * @example
     * const c = new pc.Color(1, 0, 0, 1);
     * const cClone = c.clone();
     * // cClone is [1, 0, 0, 1]
     */
    clone(): this;
    /**
     * Copies the contents of a source color to a destination color.
     *
     * @param {Color} rhs - A color to copy to the specified color.
     * @returns {Color} Self for chaining.
     * @example
     * const src = new pc.Color(1, 0, 0, 1);
     * const dst = new pc.Color();
     *
     * dst.copy(src);
     *
     * console.log("The two colors are " + (dst.equals(src) ? "equal" : "different"));
     */
    copy(rhs: Color): Color;
    /**
     * Reports whether two colors are equal.
     *
     * @param {Color} rhs - The color to compare to the specified color.
     * @returns {boolean} True if the colors are equal and false otherwise.
     * @example
     * const a = new pc.Color(1, 0, 0, 1);
     * const b = new pc.Color(1, 1, 0, 1);
     * console.log("The two colors are " + (a.equals(b) ? "equal" : "different"));
     */
    equals(rhs: Color): boolean;
    /**
     * Assign values to the color components, including alpha.
     *
     * @param {number} r - The value for red (0-1).
     * @param {number} g - The value for green (0-1).
     * @param {number} b - The value for blue (0-1).
     * @param {number} [a] - The value for the alpha (0-1), defaults to 1.
     * @returns {Color} Self for chaining.
     * @example
     * const c = new pc.Color();
     * c.set(1, 0, 0, 1);
     * // c is now red [1, 0, 0, 1]
     */
    set(r: number, g: number, b: number, a?: number): Color;
    /**
     * Returns the result of a linear interpolation between two specified colors.
     *
     * @param {Color} lhs - The color to interpolate from.
     * @param {Color} rhs - The color to interpolate to.
     * @param {number} alpha - The value controlling the point of interpolation. Between 0 and 1,
     * the linear interpolant will occur on a straight line between lhs and rhs. Outside of this
     * range, the linear interpolant will occur on a ray extrapolated from this line.
     * @returns {Color} Self for chaining.
     * @example
     * const a = new pc.Color(0, 0, 0);
     * const b = new pc.Color(1, 1, 0.5);
     * const r = new pc.Color();
     *
     * r.lerp(a, b, 0);   // r is equal to a
     * r.lerp(a, b, 0.5); // r is 0.5, 0.5, 0.25
     * r.lerp(a, b, 1);   // r is equal to b
     */
    lerp(lhs: Color, rhs: Color, alpha: number): Color;
    /**
     * Converts the color from gamma to linear color space.
     *
     * @param {Color} [src] - The color to convert to linear color space. If not set, the operation
     * is done in place.
     * @returns {Color} Self for chaining.
     * @example
     * const c = new pc.Color(0.5, 0.5, 0.5, 1);
     * c.linear();
     * // c is now approximately [0.218, 0.218, 0.218, 1]
     */
    linear(src?: Color): Color;
    /**
     * Converts the color from linear to gamma color space.
     *
     * @param {Color} [src] - The color to convert to gamma color space. If not set, the operation is
     * done in place.
     * @returns {Color} Self for chaining.
     * @example
     * const c = new pc.Color(0.218, 0.218, 0.218, 1);
     * c.gamma();
     * // c is now approximately [0.5, 0.5, 0.5, 1]
     */
    gamma(src?: Color): Color;
    /**
     * Multiplies RGB elements of a Color by a number. Note that the alpha value is left unchanged.
     *
     * @param {number} scalar - The number to multiply by.
     * @returns {Color} Self for chaining.
     * @example
     * const c = new pc.Color(0.2, 0.4, 0.6, 1);
     * c.mulScalar(2);
     * // c is now [0.4, 0.8, 1.2, 1]
     */
    mulScalar(scalar: number): Color;
    /**
     * Set the values of the color from a string representation '#11223344' or '#112233'.
     *
     * @param {string} hex - A string representation in the format '#RRGGBBAA' or '#RRGGBB'. Where
     * RR, GG, BB, AA are red, green, blue and alpha values. This is the same format used in
     * HTML/CSS.
     * @returns {Color} Self for chaining.
     * @example
     * const c = new pc.Color();
     * c.fromString('#ff0000');
     * // c is now [1, 0, 0, 1]
     */
    fromString(hex: string): Color;
    /**
     * Set the values of the color from an array.
     *
     * @param {number[]} arr - The array to set the color values from.
     * @param {number} [offset] - The zero-based index at which to start copying elements from the
     * array. Default is 0.
     * @returns {Color} Self for chaining.
     * @example
     * const c = new pc.Color();
     * c.fromArray([1, 0, 1, 1]);
     * // c is set to [1, 0, 1, 1]
     */
    fromArray(arr: number[], offset?: number): Color;
    /**
     * Converts the color to string form. The format is '#RRGGBBAA', where RR, GG, BB, AA are the
     * red, green, blue and alpha values. When the alpha value is not included (the default), this
     * is the same format as used in HTML/CSS.
     *
     * @param {boolean} alpha - If true, the output string will include the alpha value.
     * @param {boolean} [asArray] - If true, the output string is formatted as an array of numbers.
     * Defaults to false.
     * @returns {string} The color in string form.
     * @example
     * const c = new pc.Color(1, 1, 1);
     * // Outputs #ffffff
     * console.log(c.toString());
     */
    toString(alpha: boolean, asArray?: boolean): string;
    /**
     * @overload
     * @param {number[]} [arr] - The array to populate with the color's number
     * components. If not specified, a new array is created.
     * @param {number} [offset] - The zero-based index at which to start copying elements to the
     * array. Default is 0.
     * @returns {number[]} The color as an array.
     */
    toArray(arr?: number[], offset?: number): number[];
    /**
     * @overload
     * @param {ArrayBufferView} arr - The typed array to populate with the color's number
     * components.
     * @param {number} [offset] - The zero-based index at which to start copying elements to the
     * array. Default is 0.
     * @returns {ArrayBufferView} The color as an array.
     */
    toArray(arr: ArrayBufferView, offset?: number): ArrayBufferView;
}

/**
 * Container holding parameters for multi-draw commands.
 *
 * Obtain an instance via {@link MeshInstance#setMultiDraw} and populate it using
 * {@link DrawCommands#add} followed by {@link DrawCommands#update}.
 *
 * @category Graphics
 */
declare class DrawCommands {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {number} [indexSizeBytes] - Size of index in bytes for WebGL multi-draw (1, 2 or 4).
     * @ignore
     */
    constructor(device: GraphicsDevice, indexSizeBytes?: number);
    /**
     * Graphics device used to determine backend (WebGPU vs WebGL).
     *
     * @type {GraphicsDevice}
     * @ignore
     */
    device: GraphicsDevice;
    /**
     * Size of single index in bytes for WebGL multi-draw (1, 2 or 4). 0 represents non-indexed draw.
     *
     * @type {number}
     * @ignore
     */
    indexSizeBytes: number;
    /**
     * Maximum number of multi-draw calls the space is allocated for. Ignored for indirect draw commands.
     *
     * @type {number}
     * @private
     */
    private _maxCount;
    /**
     * Maximum number of multi-draw calls the space is allocated for.
     *
     * @type {number}
     */
    get maxCount(): number;
    /**
     * Platform-specific implementation.
     *
     * @type {any}
     * @ignore
     */
    impl: any;
    /**
     * Number of draw calls to perform.
     *
     * @type {number}
     * @private
     */
    private _count;
    /**
     * Number of draw calls to perform.
     *
     * @type {number}
     */
    get count(): number;
    /**
     * Slot index of the first indirect draw call. Ignored for multi-draw commands.
     *
     * @type {number}
     * @ignore
     */
    slotIndex: number;
    /**
     * Total number of primitives across all sub-draws (pre-calculated).
     *
     * @type {number}
     * @ignore
     */
    primitiveCount: number;
    /**
     * @ignore
     */
    destroy(): void;
    /**
     * Allocates persistent storage for the draw commands.
     *
     * @param {number} maxCount - Maximum number of draw calls to allocate storage for.
     * @ignore
     */
    allocate(maxCount: number): void;
    /**
     * Writes one draw command into the allocated storage.
     *
     * @param {number} i - Draw index to update.
     * @param {number} indexOrVertexCount - Number of indices or vertices to draw.
     * @param {number} instanceCount - Number of instances to draw (use 1 if not instanced).
     * @param {number} firstIndexOrVertex - Starting index (in indices, not bytes) or starting vertex.
     * @param {number} [baseVertex] - Signed base vertex (WebGPU only). Defaults to 0.
     * @param {number} [firstInstance] - First instance (WebGPU only). Defaults to 0.
     */
    add(i: number, indexOrVertexCount: number, instanceCount: number, firstIndexOrVertex: number, baseVertex?: number, firstInstance?: number): void;
    /**
     * Finalize and set draw count after all commands have been added.
     *
     * @param {number} count - Number of draws to execute.
     */
    update(count: number): void;
}

/**
 * A representation of a compute shader with the associated resources, that can be executed on the
 * GPU. Only supported on WebGPU platform.
 */
declare class Compute {
    /**
     * Calculate near-square 2D dispatch dimensions for a given workgroup count,
     * respecting the WebGPU per-dimension limit. When the count fits within a single
     * dimension, Y is 1. Otherwise, dimensions are chosen to be roughly square to
     * minimize wasted padding threads.
     *
     * @param {number} count - Total number of workgroups needed.
     * @param {Vec2} result - Output vector to receive X (x) and Y (y) dimensions.
     * @param {number} [maxDimension] - Maximum workgroups per dimension.
     * @returns {Vec2} The result vector with dimensions set.
     * @ignore
     */
    static calcDispatchSize(count: number, result: Vec2, maxDimension?: number): Vec2;
    /**
     * Create a compute instance. Note that this is supported on WebGPU only and is a no-op on
     * other platforms.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device.
     * @param {Shader} shader - The compute shader.
     * @param {string} [name] - The name of the compute instance, used for debugging only.
     */
    constructor(graphicsDevice: GraphicsDevice, shader: Shader, name?: string);
    /**
     * A compute shader.
     *
     * @type {Shader|null}
     * @ignore
     */
    shader: Shader | null;
    /**
     * The non-unique name of an instance of the class. Defaults to 'Unnamed'.
     *
     * @type {string}
     */
    name: string;
    /**
     * @type {Map<string, ComputeParameter>}
     * @ignore
     */
    parameters: Map<string, ComputeParameter>;
    /**
     * @type {number}
     * @ignore
     */
    countX: number;
    /**
     * @type {number|undefined}
     * @ignore
     */
    countY: number | undefined;
    /**
     * @type {number|undefined}
     * @ignore
     */
    countZ: number | undefined;
    /**
     * Slot index in the indirect dispatch buffer, or -1 for direct dispatch.
     *
     * @type {number}
     * @ignore
     */
    indirectSlotIndex: number;
    /**
     * Custom buffer for indirect dispatch, or null to use device's built-in buffer.
     *
     * @type {StorageBuffer|null}
     * @ignore
     */
    indirectBuffer: StorageBuffer | null;
    /**
     * Frame stamp (device.renderVersion) when indirect slot was set. Used for validation
     * when using the built-in buffer.
     *
     * @type {number}
     * @ignore
     */
    indirectFrameStamp: number;
    /** @type {GraphicsDevice} */
    device: GraphicsDevice;
    /**
     * Platform-specific implementation.
     *
     * @type {any}
     * @ignore
     */
    impl: any;
    /**
     * Sets a shader parameter on a compute instance.
     *
     * @param {string} name - The name of the parameter to set.
     * @param {number|number[]|Float32Array|Texture|StorageBuffer|VertexBuffer|IndexBuffer|TextureView} value -
     * The value for the specified parameter.
     */
    setParameter(name: string, value: number | number[] | Float32Array | Texture | StorageBuffer | VertexBuffer | IndexBuffer | TextureView): void;
    /**
     * Returns the value of a shader parameter from the compute instance.
     *
     * @param {string} name - The name of the parameter to get.
     * @returns {number|number[]|Float32Array|Texture|StorageBuffer|VertexBuffer|IndexBuffer|undefined}
     * The value of the specified parameter.
     */
    getParameter(name: string): number | number[] | Float32Array | Texture | StorageBuffer | VertexBuffer | IndexBuffer | undefined;
    /**
     * Deletes a shader parameter from the compute instance.
     *
     * @param {string} name - The name of the parameter to delete.
     */
    deleteParameter(name: string): void;
    /**
     * Frees resources associated with this compute instance.
     */
    destroy(): void;
    /**
     * Apply the parameters to the scope.
     *
     * @ignore
     */
    applyParameters(): void;
    /**
     * Prepare the compute work dispatch.
     *
     * @param {number} x - X dimension of the grid of work-groups to dispatch.
     * @param {number} [y] - Y dimension of the grid of work-groups to dispatch.
     * @param {number} [z] - Z dimension of the grid of work-groups to dispatch.
     */
    setupDispatch(x: number, y?: number, z?: number): void;
    /**
     * Prepare the compute work dispatch to use indirect parameters from a buffer. The dispatch
     * parameters (x, y, z workgroup counts) are read from the buffer at the specified slot index.
     *
     * When using the device's built-in buffer (buffer parameter is null), this method must be
     * called each frame as slots are only valid for the current frame.
     *
     * @param {number} slotIndex - Slot index in the indirect dispatch buffer. When using the
     * device's built-in buffer, obtain this by calling {@link GraphicsDevice#getIndirectDispatchSlot}.
     * @param {StorageBuffer|null} [buffer] - Optional custom storage buffer containing dispatch
     * parameters. If not provided, uses the device's built-in {@link GraphicsDevice#indirectDispatchBuffer}.
     * When providing a custom buffer, the user is responsible for its lifetime and contents.
     * @example
     * // Reserve a slot in the indirect dispatch buffer
     * const slot = device.getIndirectDispatchSlot();
     *
     * // First compute shader writes dispatch parameters to the buffer
     * prepareCompute.setParameter('indirectBuffer', device.indirectDispatchBuffer);
     * prepareCompute.setParameter('slot', slot);
     * prepareCompute.setupDispatch(1, 1, 1);
     * device.computeDispatch([prepareCompute]);
     *
     * // Second compute shader uses indirect dispatch
     * processCompute.setupIndirectDispatch(slot);
     * device.computeDispatch([processCompute]);
     */
    setupIndirectDispatch(slotIndex: number, buffer?: StorageBuffer | null): void;
}

/**
 * @import { GraphicsDevice } from './graphics-device.js'
 * @import { IndexBuffer } from './index-buffer.js'
 * @import { ScopeId } from './scope-id.js'
 * @import { Shader } from './shader.js'
 * @import { StorageBuffer } from './storage-buffer.js'
 * @import { Texture } from './texture.js'
 * @import { TextureView } from './texture-view.js'
 * @import { Vec2 } from '../../core/math/vec2.js'
 * @import { VertexBuffer } from './vertex-buffer.js'
 */
/**
 * A helper class storing a parameter value as well as its scope ID.
 *
 * @ignore
 */
declare class ComputeParameter {
    /** The parameter value — any of the value types accepted by {@link Compute#setParameter}. */
    value: any;
    /** @type {ScopeId} */
    scopeId: ScopeId;
}

/**
 * The graphics device manages the underlying graphics context. It is responsible for submitting
 * render state changes and graphics primitives to the hardware. A graphics device is tied to a
 * specific canvas HTML element. It is valid to have more than one canvas element per page and
 * create a new graphics device against each.
 *
 * @category Graphics
 */
declare class GraphicsDevice extends EventHandler {
    static EVENT_RESIZE: string;
    constructor(canvas: any, options: any);
    /**
     * Fired when the canvas is resized. The handler is passed the new width and height as number
     * parameters.
     *
     * @event
     * @example
     * graphicsDevice.on('resizecanvas', (width, height) => {
     *     console.log(`The canvas was resized to ${width}x${height}`);
     * });
     */
    /**
     * The canvas DOM element that provides the underlying WebGL context used by the graphics device.
     *
     * @type {HTMLCanvasElement}
     * @readonly
     */
    readonly canvas: HTMLCanvasElement;
    /**
     * The render target representing the main back-buffer.
     *
     * @type {RenderTarget|null}
     * @ignore
     */
    backBuffer: RenderTarget | null;
    /**
     * The dimensions of the back buffer.
     *
     * @ignore
     */
    backBufferSize: Vec2;
    /**
     * The pixel format of the back buffer. Typically PIXELFORMAT_RGBA8, PIXELFORMAT_BGRA8 or
     * PIXELFORMAT_RGB8.
     *
     * @ignore
     */
    backBufferFormat: any;
    /**
     * True if the back buffer should use anti-aliasing.
     *
     * @type {boolean}
     */
    backBufferAntialias: boolean;
    /**
     * True if the deviceType is WebGPU
     *
     * @type {boolean}
     * @readonly
     */
    readonly isWebGPU: boolean;
    /**
     * True if the deviceType is WebGL2
     *
     * @type {boolean}
     * @readonly
     */
    readonly isWebGL2: boolean;
    /**
     * True if the deviceType is Null
     *
     * @type {boolean}
     * @readonly
     */
    readonly isNull: boolean;
    /**
     * True if the back-buffer is using HDR format, which means that the browser will display the
     * rendered images in high dynamic range mode. This is true if the options.displayFormat is set
     * to {@link DISPLAYFORMAT_HDR} when creating the graphics device using
     * {@link createGraphicsDevice}, and HDR is supported by the device.
     */
    isHdr: boolean;
    /**
     * The scope namespace for shader attributes and variables.
     *
     * @type {ScopeSpace}
     * @readonly
     */
    readonly scope: ScopeSpace;
    /**
     * The maximum number of indirect draw calls that can be used within a single frame. Used on
     * WebGPU only. This needs to be adjusted based on the maximum number of draw calls that can
     * be used within a single frame. Defaults to 1024.
     *
     * @type {number}
     */
    maxIndirectDrawCount: number;
    /**
     * The maximum number of indirect compute dispatches that can be used within a single frame.
     * Used on WebGPU only. Defaults to 256.
     *
     * @type {number}
     */
    maxIndirectDispatchCount: number;
    /**
     * The maximum supported texture anisotropy setting.
     *
     * @type {number}
     * @readonly
     */
    readonly maxAnisotropy: number;
    /**
     * The maximum supported dimension of a cube map.
     *
     * @type {number}
     * @readonly
     */
    readonly maxCubeMapSize: number;
    /**
     * The maximum supported dimension of a texture.
     *
     * @type {number}
     * @readonly
     */
    readonly maxTextureSize: number;
    /**
     * The maximum supported dimension of a 3D texture (any axis).
     *
     * @type {number}
     * @readonly
     */
    readonly maxVolumeSize: number;
    /**
     * The maximum supported number of color buffers attached to a render target.
     *
     * @type {number}
     * @readonly
     */
    readonly maxColorAttachments: number;
    /**
     * The highest shader precision supported by this graphics device. Can be 'highp', 'mediump' or
     * 'lowp'.
     *
     * @type {string}
     * @readonly
     */
    readonly precision: string;
    /**
     * The number of hardware anti-aliasing samples used by the frame buffer.
     *
     * @readonly
     * @type {number}
     */
    readonly samples: number;
    /**
     * The maximum supported number of hardware anti-aliasing samples.
     *
     * @readonly
     * @type {number}
     */
    readonly maxSamples: number;
    /**
     * True if the main framebuffer contains stencil attachment.
     *
     * @ignore
     * @type {boolean}
     */
    supportsStencil: boolean;
    /**
     * True if the device supports multi-draw. This is always supported on WebGPU, and support on
     * WebGL2 is optional, but pretty common.
     *
     * @type {boolean}
     */
    supportsMultiDraw: boolean;
    /**
     * True if the device supports compute shaders.
     *
     * @readonly
     * @type {boolean}
     */
    readonly supportsCompute: boolean;
    /**
     * True if the device can read from StorageTexture in the compute shader. By default, the
     * storage texture can be only used with the write operation.
     * When a shader uses this feature, it's recommended to use a `requires` directive to signal the
     * potential for non-portability at the top of the WGSL shader code:
     * ```javascript
     * requires readonly_and_readwrite_storage_textures;
     * ```
     *
     * @readonly
     * @type {boolean}
     */
    readonly supportsStorageTextureRead: boolean;
    /**
     * True if the device supports subgroup operations in shaders (WebGPU only). When supported,
     * compute and fragment shaders can use WGSL subgroup builtins such as `subgroupBroadcast`,
     * `subgroupAll`, `subgroupAny`, `subgroupAdd`, `subgroupShuffle`, etc. The `enable subgroups;`
     * directive is automatically injected into WGSL shaders when this feature is available.
     *
     * @type {boolean}
     * @readonly
     */
    readonly supportsSubgroups: boolean;
    /**
     * True if the device supports the WGSL subgroup_uniformity extension, which allows
     * subgroup functionality to be considered uniform in more cases during shader compilation.
     * This is automatically enabled via the `enable subgroups;` directive when
     * {@link GraphicsDevice#supportsSubgroups} is true.
     *
     * @readonly
     * @type {boolean}
     */
    readonly supportsSubgroupUniformity: boolean;
    /**
     * True if the device supports the WGSL subgroup_id extension, which provides access to
     * `subgroup_id` and `num_subgroups` built-in values in workgroups. The `requires subgroup_id;`
     * directive is automatically injected into WGSL shaders when this feature is available.
     *
     * @type {boolean}
     * @readonly
     */
    readonly supportsSubgroupId: boolean;
    /**
     * Currently active render target.
     *
     * @type {RenderTarget|null}
     * @ignore
     */
    renderTarget: RenderTarget | null;
    /**
     * Array of objects that need to be re-initialized after a context restore event
     *
     * @type {Shader[]}
     * @ignore
     */
    shaders: Shader[];
    /**
     * A set of currently created textures.
     *
     * @type {Set<Texture>}
     * @ignore
     */
    textures: Set<Texture>;
    /**
     * A set of textures that need to be uploaded to the GPU.
     *
     * @type {Set<Texture>}
     * @ignore
     */
    texturesToUpload: Set<Texture>;
    /**
     * A set of currently created render targets.
     *
     * @type {Set<RenderTarget>}
     * @ignore
     */
    targets: Set<RenderTarget>;
    /**
     * A version number that is incremented every frame. This is used to detect if some object were
     * invalidated.
     *
     * @type {number}
     * @ignore
     */
    renderVersion: number;
    /**
     * Index of the currently active render pass.
     *
     * @type {number}
     * @ignore
     */
    renderPassIndex: number;
    /**
     * True while a render pass is active — presumably set between startRenderPass and
     * endRenderPass; confirm in implementation.
     *
     * @type {boolean}
     */
    insideRenderPass: boolean;
    /**
     * True if the device supports uniform buffers.
     *
     * @type {boolean}
     * @ignore
     */
    supportsUniformBuffers: boolean;
    /**
     * True if the device supports clip distances (WebGPU only). Clip distances allow you to restrict
     * primitives' clip volume with user-defined half-spaces in the output of vertex stage.
     *
     * @type {boolean}
     */
    supportsClipDistances: boolean;
    /**
     * True if the device supports WebGPU texture format tier 1 capabilities. When enabled, a wider
     * set of normalized texture formats can be used as render targets and storage textures.
     *
     * @type {boolean}
     * @readonly
     */
    readonly supportsTextureFormatTier1: boolean;
    /**
     * True if the device supports WebGPU texture format tier 2 capabilities. This extends tier 1
     * and enables read-write storage access for selected texture formats.
     *
     * @type {boolean}
     * @readonly
     */
    readonly supportsTextureFormatTier2: boolean;
    /**
     * True if the device supports primitive index in fragment shaders (WebGPU only). When
     * supported, fragment shaders can access the `pcPrimitiveIndex` built-in variable which
     * uniquely identifies the current primitive being processed.
     *
     * @type {boolean}
     * @readonly
     */
    readonly supportsPrimitiveIndex: boolean;
    /**
     * True if the device supports 16-bit floating-point types in shaders (WebGPU only). When
     * supported, shaders can use native WGSL types: `f16`, `vec2h`, `vec3h`, `vec4h`, `mat2x2h`,
     * `mat3x3h`, `mat4x4h`. For convenience, PlayCanvas also provides type aliases (`half`,
     * `half2`, `half3`, `half4`, `half2x2`, `half3x3`, `half4x4`) that resolve to f16 types when
     * supported, or fall back to f32 types when not supported.
     *
     * @type {boolean}
     * @readonly
     */
    readonly supportsShaderF16: boolean;
    /**
     * True if HTML elements (e.g. `<div>`) can be used as texture sources via the HTML-in-Canvas
     * API. When supported, an HTML element appended to a canvas with the `layoutsubtree` attribute
     * can be passed to {@link Texture#setSource} and rendered as a live texture in the 3D scene.
     *
     * @type {boolean}
     * @readonly
     */
    readonly supportsHtmlTextures: boolean;
    /**
     * True if 32-bit floating-point textures can be used as a frame buffer.
     *
     * @type {boolean}
     * @readonly
     */
    readonly textureFloatRenderable: boolean;
    /**
     * True if 16-bit floating-point textures can be used as a frame buffer.
     *
     * @type {boolean}
     * @readonly
     */
    readonly textureHalfFloatRenderable: boolean;
    /**
     * True if small-float textures with format {@link PIXELFORMAT_111110F} can be used as a frame
     * buffer. This is always true on WebGL2, but optional on WebGPU device.
     *
     * @type {boolean}
     * @readonly
     */
    readonly textureRG11B10Renderable: boolean;
    /**
     * True if filtering can be applied when sampling float textures.
     *
     * @type {boolean}
     * @readonly
     */
    readonly textureFloatFilterable: boolean;
    /**
     * A vertex buffer representing a quad.
     *
     * @type {VertexBuffer}
     * @ignore
     */
    quadVertexBuffer: VertexBuffer;
    /**
     * An index buffer for drawing a quad as an indexed triangle list.
     * Contains 6 indices: [0, 1, 2, 2, 1, 3] forming two triangles.
     *
     * @type {IndexBuffer}
     * @ignore
     */
    quadIndexBuffer: IndexBuffer;
    /**
     * An object representing current blend state
     *
     * @ignore
     */
    blendState: BlendState;
    /**
     * The current depth state.
     *
     * @ignore
     */
    depthState: DepthState;
    /**
     * True if stencil is enabled and stencilFront and stencilBack are used
     *
     * @ignore
     */
    stencilEnabled: boolean;
    /**
     * The current front stencil parameters.
     *
     * @ignore
     */
    stencilFront: StencilParameters;
    /**
     * The current back stencil parameters.
     *
     * @ignore
     */
    stencilBack: StencilParameters;
    /**
     * The dynamic buffer manager.
     *
     * @type {DynamicBuffers}
     * @ignore
     */
    dynamicBuffers: DynamicBuffers;
    /**
     * The GPU profiler.
     *
     * @type {GpuProfiler}
     */
    gpuProfiler: GpuProfiler;
    /**
     * @type {boolean}
     * @ignore
     */
    _destroyed: boolean;
    /**
     * Default options used when clearing the current render target: a clear color, a depth
     * value, a stencil value and a flags value (presumably a bit mask selecting which of the
     * color/depth/stencil buffers to clear — confirm against the clear implementation).
     *
     * @type {{ color: number[], depth: number, stencil: number, flags: number }}
     * @ignore
     */
    defaultClearOptions: {
        color: number[];
        depth: number;
        stencil: number;
        flags: number;
    };
    /**
     * The current client rect.
     *
     * @type {{ width: number, height: number }}
     * @ignore
     */
    clientRect: {
        width: number;
        height: number;
    };
    /**
     * A very heavy handed way to force all shaders to be rebuilt. Avoid using as much as possible.
     *
     * @type {boolean}
     * @ignore
     */
    _shadersDirty: boolean;
    /**
     * A list of shader defines based on the capabilities of the device.
     *
     * @type {Map<string, string>}
     * @ignore
     */
    capsDefines: Map<string, string>;
    /**
     * A set of maps to clear at the end of the frame.
     *
     * @type {Set<Map<any, any>>}
     * @ignore
     */
    mapsToClear: Set<Map<any, any>>;
    /**
     * The options object the device was created with — confirm exact shape in the
     * device constructor.
     *
     * @type {any}
     * @ignore
     */
    initOptions: any;
    /**
     * Backing field — presumably for the {@link GraphicsDevice#maxPixelRatio} accessor.
     *
     * @type {number}
     * @ignore
     */
    _maxPixelRatio: number;
    /**
     * A set of currently created buffers — confirm exact membership (vertex/index/storage)
     * in the implementation.
     *
     * @type {Set<any>}
     * @ignore
     */
    buffers: Set<any>;
    /**
     * VRAM usage counters per resource category (textures split into shadow/asset/lightmap,
     * plus vertex, index, uniform and storage buffers). Presumably tracked in bytes —
     * confirm against the VRAM trace channels (TRACEID_VRAM_*).
     *
     * @ignore
     */
    _vram: {
        texShadow: number;
        texAsset: number;
        texLightmap: number;
        tex: number;
        vb: number;
        ib: number;
        ub: number;
        sb: number;
    };
    /**
     * Shader compilation statistics: counts of compiled vertex/fragment shaders, linked
     * programs and material shaders, plus accumulated compile time (units not shown here —
     * presumably milliseconds; confirm).
     *
     * @ignore
     */
    _shaderStats: {
        vsCompiled: number;
        fsCompiled: number;
        linked: number;
        materialShaders: number;
        compileTime: number;
    };
    // Per-frame profiling counter: number of draw calls issued this frame.
    _drawCallsPerFrame: number;
    // Per-frame profiling counter: number of shader switches this frame.
    _shaderSwitchesPerFrame: number;
    // Per-frame profiling counters — presumably primitive counts indexed by primitive type; confirm.
    _primsPerFrame: number[];
    // Accumulated time spent creating render targets — units not shown here; confirm.
    _renderTargetCreationTime: number;
    // Scope id for a texture bias uniform — confirm usage in the shader system.
    textureBias: ScopeId;
    /**
     * Function that executes after the device has been created.
     */
    postInit(): void;
    /**
     * Initialize the map of device capabilities, which are supplied to shaders as defines.
     *
     * @ignore
     */
    initCapsDefines(): void;
    /**
     * Destroy the graphics device.
     */
    destroy(): void;
    /**
     * Called when a shader is destroyed — presumably removes it from the internal shader
     * tracking list (mirror of onTextureDestroyed); confirm in implementation.
     *
     * @param {*} shader - The shader being destroyed.
     * @ignore
     */
    onDestroyShader(shader: any): void;
    /**
     * Called when a texture is destroyed to remove it from internal tracking structures.
     *
     * @param {Texture} texture - The texture being destroyed.
     * @ignore
     */
    onTextureDestroyed(texture: Texture): void;
    postDestroy(): void;
    /**
     * Called when the device context was lost. It releases all context related resources.
     *
     * @ignore
     */
    loseContext(): void;
    /**
     * True while the device context is lost — presumably set by loseContext and cleared by
     * restoreContext; confirm in implementation.
     *
     * @type {boolean}
     * @ignore
     */
    contextLost: boolean;
    /**
     * Called when the device context is restored. It reinitializes all context related resources.
     *
     * @ignore
     */
    restoreContext(): void;
    /**
     * JSON.stringify serialization hook — presumably declared to prevent the device (and its
     * large internal state) from being serialized; confirm the returned value.
     *
     * @param {*} key - The property key, as passed by JSON.stringify.
     * @returns {*} The value to serialize.
     * @ignore
     */
    toJSON(key: any): any;
    /**
     * Initializes context-related caches — presumably called on creation and after context
     * restore; confirm call sites.
     *
     * @ignore
     */
    initializeContextCaches(): void;
    // Currently set vertex buffers.
    vertexBuffers: any[];
    // Currently set shader.
    shader: any;
    // Validity flag for the currently set shader — TODO confirm semantics.
    shaderValid: any;
    // True if shaders compile asynchronously — TODO confirm semantics.
    shaderAsyncCompile: boolean;
    /**
     * Resets the cached render state to default values.
     *
     * @ignore
     */
    initializeRenderState(): void;
    // Current cull mode (see setCullMode).
    cullMode: number;
    // Current front-face winding (see setFrontFace).
    frontFace: number;
    // Current viewport rect — presumably x, y, width, height in pixels; confirm.
    vx: number;
    vy: number;
    vw: number;
    vh: number;
    // Current scissor rect — presumably x, y, width, height in pixels; confirm.
    sx: number;
    sy: number;
    sw: number;
    sh: number;
    // Current constant blend color (see setBlendColor).
    blendColor: Color;
    /**
     * Sets the specified stencil state. If both stencilFront and stencilBack are null, stencil
     * operation is disabled.
     *
     * @param {StencilParameters} [stencilFront] - The front stencil parameters. Defaults to
     * {@link StencilParameters.DEFAULT} if not specified.
     * @param {StencilParameters} [stencilBack] - The back stencil parameters. Defaults to
     * {@link StencilParameters.DEFAULT} if not specified.
     */
    setStencilState(stencilFront?: StencilParameters, stencilBack?: StencilParameters): void;
    /**
     * Sets the specified blend state.
     *
     * @param {BlendState} blendState - New blend state.
     */
    setBlendState(blendState: BlendState): void;
    /**
     * Sets the constant blend color and alpha values used with {@link BLENDMODE_CONSTANT} and
     * {@link BLENDMODE_ONE_MINUS_CONSTANT} factors specified in {@link BlendState}. Defaults to
     * [0, 0, 0, 0].
     *
     * @param {number} r - The value for red.
     * @param {number} g - The value for green.
     * @param {number} b - The value for blue.
     * @param {number} a - The value for alpha.
     */
    setBlendColor(r: number, g: number, b: number, a: number): void;
    /**
     * Sets the specified depth state.
     *
     * @param {DepthState} depthState - New depth state.
     */
    setDepthState(depthState: DepthState): void;
    /**
     * Controls how triangles are culled based on their face direction. The default cull mode is
     * {@link CULLFACE_BACK}.
     *
     * @param {number} cullMode - The cull mode to set. Can be:
     *
     * - {@link CULLFACE_NONE}
     * - {@link CULLFACE_BACK}
     * - {@link CULLFACE_FRONT}
     */
    setCullMode(cullMode: number): void;
    /**
     * Controls whether polygons are front- or back-facing by setting a winding
     * orientation. The default frontFace is {@link FRONTFACE_CCW}.
     *
     * @param {number} frontFace - The front face to set. Can be:
     *
     * - {@link FRONTFACE_CW}
     * - {@link FRONTFACE_CCW}
     */
    setFrontFace(frontFace: number): void;
    /**
     * Sets all draw-related render states in a single call. All parameters have sensible defaults
     * for utility rendering (full-screen quads, particles, etc.), so calling `setDrawStates()` with
     * no arguments resets to a safe baseline.
     *
     * @param {BlendState} [blendState] - Blend state. Defaults to {@link BlendState.NOBLEND}.
     * @param {DepthState} [depthState] - Depth state. Defaults to {@link DepthState.NODEPTH}.
     * @param {number} [cullMode] - Cull mode. Defaults to {@link CULLFACE_NONE}.
     * @param {number} [frontFace] - Front face winding. Defaults to {@link FRONTFACE_CCW}.
     * @param {StencilParameters} [stencilFront] - Front stencil parameters.
     * @param {StencilParameters} [stencilBack] - Back stencil parameters.
     */
    setDrawStates(blendState?: BlendState, depthState?: DepthState, cullMode?: number, frontFace?: number, stencilFront?: StencilParameters, stencilBack?: StencilParameters): void;
    /**
     * Sets the specified render target on the device. If null is passed as a parameter, the back
     * buffer becomes the current target for all rendering operations.
     *
     * @param {RenderTarget|null} renderTarget - The render target to activate.
     * @example
     * // Set a render target to receive all rendering output
     * device.setRenderTarget(renderTarget);
     *
     * // Set the back buffer to receive all rendering output
     * device.setRenderTarget(null);
     */
    setRenderTarget(renderTarget: RenderTarget | null): void;
    /**
     * Sets the current vertex buffer on the graphics device. For subsequent draw calls, the
     * specified vertex buffer(s) will be used to provide vertex data for any primitives.
     *
     * @param {VertexBuffer} vertexBuffer - The vertex buffer to assign to the device.
     * @ignore
     */
    setVertexBuffer(vertexBuffer: VertexBuffer): void;
    /**
     * Clears the vertex buffer set on the graphics device. This is called automatically by the
     * renderer.
     * @ignore
     */
    clearVertexBuffer(): void;
    /**
     * Retrieves the first available slot in the {@link indirectDrawBuffer} used for indirect
     * rendering, which can be utilized by a {@link Compute} shader to generate indirect draw
     * parameters and by {@link MeshInstance#setIndirect} to configure indirect draw calls.
     *
     * When reserving multiple consecutive slots, specify the optional `count` parameter.
     *
     * @param {number} [count] - Number of consecutive slots to reserve. Defaults to 1.
     * @returns {number} - The first reserved slot index used for indirect rendering.
     */
    getIndirectDrawSlot(count?: number): number;
    /**
     * Returns the buffer used to store arguments for indirect draw calls. The size of the buffer is
     * controlled by the {@link maxIndirectDrawCount} property. This buffer can be passed to a
     * {@link Compute} shader along with a slot obtained by calling {@link getIndirectDrawSlot}, in
     * order to prepare indirect draw parameters. Also see {@link MeshInstance#setIndirect}.
     *
     * Only available on WebGPU, returns null on other platforms.
     *
     * @type {StorageBuffer|null}
     */
    get indirectDrawBuffer(): StorageBuffer | null;
    /**
     * Retrieves the first available slot in the {@link indirectDispatchBuffer} used for indirect
     * compute dispatch, which can be utilized by a {@link Compute} shader to generate indirect
     * dispatch parameters for another compute shader.
     *
     * When reserving multiple consecutive slots, specify the optional `count` parameter.
     *
     * @param {number} [count] - Number of consecutive slots to reserve. Defaults to 1.
     * @returns {number} - The first reserved slot index used for indirect dispatch.
     */
    getIndirectDispatchSlot(count?: number): number;
    /**
     * Returns the buffer used to store arguments for indirect compute dispatch calls. The size of
     * the buffer is controlled by the {@link maxIndirectDispatchCount} property. This buffer can
     * be passed to a {@link Compute} shader along with a slot obtained by calling
     * {@link getIndirectDispatchSlot}, in order to prepare indirect dispatch parameters.
     *
     * Only available on WebGPU, returns null on other platforms.
     *
     * @type {StorageBuffer|null}
     */
    get indirectDispatchBuffer(): StorageBuffer | null;
    /**
     * Queries the currently set render target on the device.
     *
     * @returns {RenderTarget} The current render target.
     * @example
     * // Get the current render target
     * const renderTarget = device.getRenderTarget();
     */
    getRenderTarget(): RenderTarget;
    /**
     * Initialize render target before it can be used.
     *
     * @param {RenderTarget} target - The render target to be initialized.
     * @ignore
     */
    initRenderTarget(target: RenderTarget): void;
    /**
     * Submits a graphical primitive to the hardware for immediate rendering.
     *
     * @param {object} primitive - Primitive object describing how to submit current vertex/index
     * buffers.
     * @param {number} primitive.type - The type of primitive to render. Can be:
     *
     * - {@link PRIMITIVE_POINTS}
     * - {@link PRIMITIVE_LINES}
     * - {@link PRIMITIVE_LINELOOP}
     * - {@link PRIMITIVE_LINESTRIP}
     * - {@link PRIMITIVE_TRIANGLES}
     * - {@link PRIMITIVE_TRISTRIP}
     * - {@link PRIMITIVE_TRIFAN}
     *
     * @param {number} primitive.base - The offset of the first index or vertex to dispatch in the
     * draw call.
     * @param {number} primitive.count - The number of indices or vertices to dispatch in the draw
     * call.
     * @param {boolean} [primitive.indexed] - True to interpret the primitive as indexed, thereby
     * using the currently set index buffer and false otherwise.
     * @param {IndexBuffer} [indexBuffer] - The index buffer to use for the draw call.
     * @param {number} [numInstances] - The number of instances to render when using instancing.
     * Defaults to 1.
     * @param {DrawCommands} [drawCommands] - The draw commands to use for the draw call.
     * @param {boolean} [first] - True if this is the first draw call in a sequence of draw calls.
     * When set to true, vertex and index buffers related state is set up. Defaults to true.
     * @param {boolean} [last] - True if this is the last draw call in a sequence of draw calls.
     * When set to true, vertex and index buffers related state is cleared. Defaults to true.
     * @example
     * // Render a single, unindexed triangle
     * device.draw({
     *     type: pc.PRIMITIVE_TRIANGLES,
     *     base: 0,
     *     count: 3,
     *     indexed: false
     * });
     *
     * @ignore
     */
    draw(primitive: {
        type: number;
        base: number;
        count: number;
        indexed?: boolean;
    }, indexBuffer?: IndexBuffer, numInstances?: number, drawCommands?: DrawCommands, first?: boolean, last?: boolean): void;
    /**
     * Reports whether a texture source is a canvas, image, video, ImageBitmap, or HTML element.
     *
     * @param {*} texture - Texture source data.
     * @returns {boolean} True if the texture is a canvas, image, video, ImageBitmap, or HTML
     * element and false otherwise.
     * @ignore
     */
    _isBrowserInterface(texture: any): boolean;
    /**
     * Reports whether a texture source is an image-like browser object (e.g. ImageBitmap or
     * HTMLImageElement — confirm exact set in implementation).
     *
     * @param {*} texture - Texture source data.
     * @returns {boolean} True if the source is an image-like object.
     * @ignore
     */
    _isImageBrowserInterface(texture: any): boolean;
    /**
     * Reports whether a texture source is a canvas element.
     *
     * @param {*} texture - Texture source data.
     * @returns {boolean} True if the source is a canvas.
     * @ignore
     */
    _isImageCanvasInterface(texture: any): boolean;
    /**
     * Reports whether a texture source is a video element.
     *
     * @param {*} texture - Texture source data.
     * @returns {boolean} True if the source is a video.
     * @ignore
     */
    _isImageVideoInterface(texture: any): boolean;
    /**
     * Reports whether a texture source is a generic HTML element (not image, canvas, or video).
     * Used for the HTML-in-Canvas proposal (texElementImage2D).
     *
     * @param {*} texture - Texture source data.
     * @returns {boolean} True if the texture is an HTMLElement that is not an image, canvas, or
     * video.
     * @ignore
     */
    _isHTMLElementInterface(texture: any): boolean;
    /**
     * Sets the width and height of the canvas, then fires the `resizecanvas` event. Note that the
     * specified width and height values will be multiplied by the value of
     * {@link GraphicsDevice#maxPixelRatio} to give the final resultant width and height for the
     * canvas.
     *
     * @param {number} width - The new width of the canvas.
     * @param {number} height - The new height of the canvas.
     * @ignore
     */
    resizeCanvas(width: number, height: number): void;
    /**
     * Sets the width and height of the canvas, then fires the `resizecanvas` event. Note that the
     * value of {@link GraphicsDevice#maxPixelRatio} is ignored.
     *
     * @param {number} width - The new width of the canvas.
     * @param {number} height - The new height of the canvas.
     * @ignore
     */
    setResolution(width: number, height: number): void;
    /**
     * Per-update device work — implementation specific; see subclass.
     *
     * @ignore
     */
    update(): void;
    /**
     * Refreshes the cached {@link GraphicsDevice#clientRect} — presumably from the canvas
     * element; confirm in implementation.
     *
     * @ignore
     */
    updateClientRect(): void;
    /**
     * Width of the back buffer in pixels.
     *
     * @type {number}
     */
    get width(): number;
    /**
     * Height of the back buffer in pixels.
     *
     * @type {number}
     */
    get height(): number;
    /**
     * Sets whether the device is currently in fullscreen mode.
     *
     * @type {boolean}
     */
    set fullscreen(fullscreen: boolean);
    /**
     * Gets whether the device is currently in fullscreen mode.
     *
     * @type {boolean}
     */
    get fullscreen(): boolean;
    /**
     * Sets the maximum pixel ratio.
     *
     * @type {number}
     */
    set maxPixelRatio(ratio: number);
    /**
     * Gets the maximum pixel ratio.
     *
     * @type {number}
     */
    get maxPixelRatio(): number;
    /**
     * Gets the type of the device. Can be:
     *
     * - {@link DEVICETYPE_WEBGL2}
     * - {@link DEVICETYPE_WEBGPU}
     *
     * @type {DEVICETYPE_WEBGL2|DEVICETYPE_WEBGPU}
     */
    get deviceType(): "webgl2" | "webgpu";
    /**
     * Starts the given render pass — implementation specific; see subclass.
     *
     * @param {*} renderPass - The render pass to start.
     * @ignore
     */
    startRenderPass(renderPass: any): void;
    /**
     * Ends the given render pass.
     *
     * @param {*} renderPass - The render pass to end.
     * @ignore
     */
    endRenderPass(renderPass: any): void;
    /**
     * Starts a compute pass — the name is presumably used for debugging/reporting; confirm.
     *
     * @param {*} name - The name of the compute pass.
     * @ignore
     */
    startComputePass(name: any): void;
    /**
     * Ends the currently active compute pass.
     *
     * @ignore
     */
    endComputePass(): void;
    /**
     * Function which executes at the start of the frame. This should not be called manually, as
     * it is handled by the AppBase instance.
     *
     * @ignore
     */
    frameStart(): void;
    /**
     * Function which executes at the end of the frame. This should not be called manually, as it is
     * handled by the AppBase instance.
     *
     * @ignore
     */
    frameEnd(): void;
    /**
     * Dispatch multiple compute shaders inside a single compute shader pass.
     *
     * @param {Array<Compute>} computes - An array of compute shaders to dispatch.
     * @param {string} [name] - The name of the dispatch, used for debugging and reporting only.
     */
    computeDispatch(computes: Array<Compute>, name?: string): void;
    /**
     * Get a renderable HDR pixel format supported by the graphics device.
     *
     * Note:
     *
     * - When the `filterable` parameter is set to false, this function returns one of the supported
     * formats on the majority of devices apart from some very old iOS and Android devices (99%).
     * - When the `filterable` parameter is set to true, the function returns a format on a
     * considerably lower number of devices (70%).
     *
     * @param {number[]} [formats] - An array of pixel formats to check for support. Can contain:
     *
     * - {@link PIXELFORMAT_111110F}
     * - {@link PIXELFORMAT_RGBA16F}
     * - {@link PIXELFORMAT_RGBA32F}
     *
     * @param {boolean} [filterable] - If true, the format also needs to be filterable. Defaults to
     * true.
     * @param {number} [samples] - The number of samples to check for. Some formats are not
     * compatible with multi-sampling, for example {@link PIXELFORMAT_RGBA32F} on WebGPU platform.
     * Defaults to 1.
     * @returns {number|undefined} The first supported renderable HDR format or undefined if none is
     * supported.
     */
    getRenderableHdrFormat(formats?: number[], filterable?: boolean, samples?: number): number | undefined;
    /**
     * Validate that all attributes required by the shader are present in the currently assigned
     * vertex buffers.
     *
     * @param {Shader} shader - The shader to validate.
     * @param {VertexFormat} vb0Format - The format of the first vertex buffer.
     * @param {VertexFormat} vb1Format - The format of the second vertex buffer.
     * @protected
     */
    protected validateAttributes(shader: Shader, vb0Format: VertexFormat, vb1Format: VertexFormat): void;
}

/**
 * An index buffer stores index values into a {@link VertexBuffer}. Indexed graphical primitives
 * can normally utilize less memory than unindexed primitives (if vertices are shared).
 *
 * Typically, index buffers are set on {@link Mesh} objects.
 *
 * @category Graphics
 */
declare class IndexBuffer {
    /**
     * Create a new IndexBuffer instance.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device used to manage this index buffer.
     * @param {number} format - The type of each index to be stored in the index buffer. Can be:
     *
     * - {@link INDEXFORMAT_UINT8}
     * - {@link INDEXFORMAT_UINT16}
     * - {@link INDEXFORMAT_UINT32}
     * @param {number} numIndices - The number of indices to be stored in the index buffer.
     * @param {number} [usage] - The usage type of the index buffer. Can be:
     *
     * - {@link BUFFER_DYNAMIC}
     * - {@link BUFFER_STATIC}
     * - {@link BUFFER_STREAM}
     *
     * Defaults to {@link BUFFER_STATIC}.
     * @param {ArrayBuffer} [initialData] - Initial data. If left unspecified, the index buffer
     * will be initialized to zeros.
     * @param {object} [options] - Object for passing optional arguments.
     * @param {boolean} [options.storage] - Defines if the index buffer can be used as a storage
     * buffer by a compute shader. Defaults to false. Only supported on WebGPU.
     * @example
     * // Create an index buffer holding 3 16-bit indices. The buffer is marked as
     * // static, hinting that the buffer will never be modified.
     * const indices = new Uint16Array([0, 1, 2]);
     * const indexBuffer = new pc.IndexBuffer(graphicsDevice,
     *                                        pc.INDEXFORMAT_UINT16,
     *                                        3,
     *                                        pc.BUFFER_STATIC,
     *                                        indices);
     */
    constructor(graphicsDevice: GraphicsDevice, format: number, numIndices: number, usage?: number, initialData?: ArrayBuffer, options?: {
        storage?: boolean;
    });
    // The graphics device that owns this index buffer.
    device: GraphicsDevice;
    // The index format (INDEXFORMAT_UINT8/16/32) — see getFormat().
    format: number;
    // The number of indices stored in the buffer — see getNumIndices().
    numIndices: number;
    // The buffer usage hint (BUFFER_STATIC/BUFFER_DYNAMIC/BUFFER_STREAM).
    usage: number;
    // Unique id of this buffer — presumably used for state caching; confirm in implementation.
    id: number;
    // Device-specific implementation object — presumably a WebGL2 or WebGPU backend; confirm.
    impl: any;
    // Size of a single index in bytes, derived from the format.
    bytesPerIndex: number;
    // Total size of the buffer in bytes — presumably numIndices * bytesPerIndex; confirm.
    numBytes: number;
    // CPU-side copy of the index data — presumably the buffer returned by lock(); confirm.
    storage: ArrayBuffer;
    /**
     * Frees resources associated with this index buffer.
     */
    destroy(): void;
    /**
     * Adjusts the VRAM usage tracking counters by the given size — presumably positive on
     * allocation and negative on release; confirm in implementation.
     *
     * @param {*} vram - The VRAM tracking object (see GraphicsDevice._vram).
     * @param {*} size - The size delta in bytes.
     * @ignore
     */
    adjustVramSizeTracking(vram: any, size: any): void;
    /**
     * Called when the rendering context was lost. It releases all context related resources.
     *
     * @ignore
     */
    loseContext(): void;
    /**
     * Returns the data format of the specified index buffer.
     *
     * @returns {number} The data format of the specified index buffer. Can be:
     *
     * - {@link INDEXFORMAT_UINT8}
     * - {@link INDEXFORMAT_UINT16}
     * - {@link INDEXFORMAT_UINT32}
     */
    getFormat(): number;
    /**
     * Returns the number of indices stored in the specified index buffer.
     *
     * @returns {number} The number of indices stored in the specified index buffer.
     */
    getNumIndices(): number;
    /**
     * Gives access to the block of memory that stores the buffer's indices.
     *
     * @returns {ArrayBuffer} A contiguous block of memory where index data can be written to.
     */
    lock(): ArrayBuffer;
    /**
     * Signals that the block of memory returned by a call to the lock function is ready to be
     * given to the graphics hardware. Only unlocked index buffers can be set on the currently
     * active device.
     */
    unlock(): void;
    /**
     * Set preallocated data on the index buffer.
     *
     * @param {ArrayBuffer} data - The index data to set.
     * @returns {boolean} True if the data was set successfully, false otherwise.
     * @ignore
     */
    setData(data: ArrayBuffer): boolean;
    /**
     * Get the appropriate typed array from an index buffer.
     *
     * @returns {Uint8Array|Uint16Array|Uint32Array} The typed array containing the index data.
     * @private
     */
    private _lockTypedArray;
    /**
     * Copies the specified number of elements from data into index buffer. Optimized for
     * performance from both typed array as well as array.
     *
     * @param {Uint8Array|Uint16Array|Uint32Array|number[]} data - The data to write.
     * @param {number} count - The number of indices to write.
     * @ignore
     */
    writeData(data: Uint8Array | Uint16Array | Uint32Array | number[], count: number): void;
    /**
     * Copies index data from index buffer into provided data array.
     *
     * @param {Uint8Array|Uint16Array|Uint32Array|number[]} data - The data array to write to.
     * @returns {number} The number of indices read.
     * @ignore
     */
    readData(data: Uint8Array | Uint16Array | Uint32Array | number[]): number;
}

/**
 * A 3-dimensional vector. Vec3 is commonly used to represent 3D positions, directions, Euler
 * angles or scales.
 *
 * @category Math
 */
declare class Vec3 {
    /**
     * A constant vector set to [0, 0, 0].
     *
     * @type {Vec3}
     * @readonly
     */
    static readonly ZERO: Vec3;
    /**
     * A constant vector set to [0.5, 0.5, 0.5].
     *
     * @type {Vec3}
     * @readonly
     */
    static readonly HALF: Vec3;
    /**
     * A constant vector set to [1, 1, 1].
     *
     * @type {Vec3}
     * @readonly
     */
    static readonly ONE: Vec3;
    /**
     * A constant vector set to [0, 1, 0].
     *
     * @type {Vec3}
     * @readonly
     */
    static readonly UP: Vec3;
    /**
     * A constant vector set to [0, -1, 0].
     *
     * @type {Vec3}
     * @readonly
     */
    static readonly DOWN: Vec3;
    /**
     * A constant vector set to [1, 0, 0].
     *
     * @type {Vec3}
     * @readonly
     */
    static readonly RIGHT: Vec3;
    /**
     * A constant vector set to [-1, 0, 0].
     *
     * @type {Vec3}
     * @readonly
     */
    static readonly LEFT: Vec3;
    /**
     * A constant vector set to [0, 0, -1].
     *
     * @type {Vec3}
     * @readonly
     */
    static readonly FORWARD: Vec3;
    /**
     * A constant vector set to [0, 0, 1].
     *
     * @type {Vec3}
     * @readonly
     */
    static readonly BACK: Vec3;
    /**
     * Creates a new Vec3 instance.
     *
     * @overload
     * @param {number} [x] - The x value. Defaults to 0.
     * @param {number} [y] - The y value. Defaults to 0.
     * @param {number} [z] - The z value. Defaults to 0.
     * @example
     * const v1 = new pc.Vec3(); // defaults to 0, 0, 0
     * const v2 = new pc.Vec3(1, 2, 3);
     */
    constructor(x?: number, y?: number, z?: number);
    /**
     * Creates a new Vec3 instance.
     *
     * @overload
     * @param {number[]} arr - The array to set the vector values from.
     * @example
     * const v = new pc.Vec3([1, 2, 3]);
     */
    constructor(arr: number[]);
    /**
     * The first component of the vector.
     *
     * @type {number}
     */
    x: number;
    /**
     * The second component of the vector.
     *
     * @type {number}
     */
    y: number;
    /**
     * The third component of the vector.
     *
     * @type {number}
     */
    z: number;
    /**
     * Adds a 3-dimensional vector to another in place.
     *
     * @param {Vec3} rhs - The vector to add to the specified vector.
     * @returns {Vec3} Self for chaining.
     * @example
     * const a = new pc.Vec3(10, 10, 10);
     * const b = new pc.Vec3(20, 20, 20);
     *
     * a.add(b);
     *
     * // Outputs [30, 30, 30]
     * console.log("The result of the addition is: " + a.toString());
     */
    add(rhs: Vec3): Vec3;
    /**
     * Adds two 3-dimensional vectors together and returns the result.
     *
     * @param {Vec3} lhs - The first vector operand for the addition.
     * @param {Vec3} rhs - The second vector operand for the addition.
     * @returns {Vec3} Self for chaining.
     * @example
     * const a = new pc.Vec3(10, 10, 10);
     * const b = new pc.Vec3(20, 20, 20);
     * const r = new pc.Vec3();
     *
     * r.add2(a, b);
     *
     * // Outputs [30, 30, 30]
     * console.log("The result of the addition is: " + r.toString());
     */
    add2(lhs: Vec3, rhs: Vec3): Vec3;
    /**
     * Adds a number to each element of a vector.
     *
     * @param {number} scalar - The number to add.
     * @returns {Vec3} Self for chaining.
     * @example
     * const vec = new pc.Vec3(3, 4, 5);
     *
     * vec.addScalar(2);
     *
     * // Outputs [5, 6, 7]
     * console.log("The result of the addition is: " + vec.toString());
     */
    addScalar(scalar: number): Vec3;
    /**
     * Adds a 3-dimensional vector scaled by scalar value. Does not modify the vector being added.
     *
     * @param {Vec3} rhs - The vector to add to the specified vector.
     * @param {number} scalar - The number to multiply the added vector with.
     * @returns {Vec3} Self for chaining.
     * @example
     * const vec = new pc.Vec3(1, 2, 3);
     *
     * vec.addScaled(pc.Vec3.UP, 2);
     *
     * // Outputs [1, 4, 3]
     * console.log("The result of the addition is: " + vec.toString());
     */
    addScaled(rhs: Vec3, scalar: number): Vec3;
    /**
     * Returns an identical copy of the specified 3-dimensional vector.
     *
     * @returns {this} A 3-dimensional vector containing the result of the cloning.
     * @example
     * const v = new pc.Vec3(10, 20, 30);
     * const vclone = v.clone();
     * console.log("The result of the cloning is: " + vclone.toString());
     */
    clone(): this;
    /**
     * Copies the contents of a source 3-dimensional vector to a destination 3-dimensional vector.
     *
     * @param {Vec3} rhs - A vector to copy to the specified vector.
     * @returns {Vec3} Self for chaining.
     * @example
     * const src = new pc.Vec3(10, 20, 30);
     * const dst = new pc.Vec3();
     *
     * dst.copy(src);
     *
     * console.log("The two vectors are " + (dst.equals(src) ? "equal" : "different"));
     */
    copy(rhs: Vec3): Vec3;
    /**
     * Returns the result of a cross product operation performed on the two specified 3-dimensional
     * vectors.
     *
     * @param {Vec3} lhs - The first 3-dimensional vector operand of the cross product.
     * @param {Vec3} rhs - The second 3-dimensional vector operand of the cross product.
     * @returns {Vec3} Self for chaining.
     * @example
     * const back = new pc.Vec3().cross(pc.Vec3.RIGHT, pc.Vec3.UP);
     *
     * // Prints the Z axis (i.e. [0, 0, 1])
     * console.log("The result of the cross product is: " + back.toString());
     */
    cross(lhs: Vec3, rhs: Vec3): Vec3;
    /**
     * Returns the distance between the two specified 3-dimensional vectors.
     *
     * @param {Vec3} rhs - The second 3-dimensional vector to test.
     * @returns {number} The distance between the two vectors.
     * @example
     * const v1 = new pc.Vec3(5, 10, 20);
     * const v2 = new pc.Vec3(10, 20, 40);
     * const d = v1.distance(v2);
     * console.log("The distance between v1 and v2 is: " + d);
     */
    distance(rhs: Vec3): number;
    /**
     * Divides a 3-dimensional vector by another in place.
     *
     * @param {Vec3} rhs - The vector to divide the specified vector by.
     * @returns {Vec3} Self for chaining.
     * @example
     * const a = new pc.Vec3(4, 9, 16);
     * const b = new pc.Vec3(2, 3, 4);
     *
     * a.div(b);
     *
     * // Outputs [2, 3, 4]
     * console.log("The result of the division is: " + a.toString());
     */
    div(rhs: Vec3): Vec3;
    /**
     * Divides one 3-dimensional vector by another and writes the result to the specified vector.
     *
     * @param {Vec3} lhs - The dividend vector (the vector being divided).
     * @param {Vec3} rhs - The divisor vector (the vector dividing the dividend).
     * @returns {Vec3} Self for chaining.
     * @example
     * const a = new pc.Vec3(4, 9, 16);
     * const b = new pc.Vec3(2, 3, 4);
     * const r = new pc.Vec3();
     *
     * r.div2(a, b);
     *
     * // Outputs [2, 3, 4]
     * console.log("The result of the division is: " + r.toString());
     */
    div2(lhs: Vec3, rhs: Vec3): Vec3;
    /**
     * Divides each element of a vector by a number.
     *
     * @param {number} scalar - The number to divide by.
     * @returns {Vec3} Self for chaining.
     * @example
     * const vec = new pc.Vec3(3, 6, 9);
     *
     * vec.divScalar(3);
     *
     * // Outputs [1, 2, 3]
     * console.log("The result of the division is: " + vec.toString());
     */
    divScalar(scalar: number): Vec3;
    /**
     * Returns the result of a dot product operation performed on the two specified 3-dimensional
     * vectors.
     *
     * @param {Vec3} rhs - The second 3-dimensional vector operand of the dot product.
     * @returns {number} The result of the dot product operation.
     * @example
     * const v1 = new pc.Vec3(5, 10, 20);
     * const v2 = new pc.Vec3(10, 20, 40);
     * const v1dotv2 = v1.dot(v2);
     * console.log("The result of the dot product is: " + v1dotv2);
     */
    dot(rhs: Vec3): number;
    /**
     * Reports whether two vectors are equal.
     *
     * @param {Vec3} rhs - The vector to compare to the specified vector.
     * @returns {boolean} True if the vectors are equal and false otherwise.
     * @example
     * const a = new pc.Vec3(1, 2, 3);
     * const b = new pc.Vec3(4, 5, 6);
     * console.log("The two vectors are " + (a.equals(b) ? "equal" : "different"));
     */
    equals(rhs: Vec3): boolean;
    /**
     * Reports whether two vectors are equal using an absolute error tolerance.
     *
     * @param {Vec3} rhs - The vector to be compared against.
     * @param {number} [epsilon] - The maximum difference between each component of the two
     * vectors. Defaults to 1e-6.
     * @returns {boolean} True if the vectors are equal and false otherwise.
     * @example
     * const a = new pc.Vec3();
     * const b = new pc.Vec3();
     * console.log("The two vectors are approximately " + (a.equalsApprox(b, 1e-9) ? "equal" : "different"));
     */
    equalsApprox(rhs: Vec3, epsilon?: number): boolean;
    /**
     * Returns the magnitude of the specified 3-dimensional vector.
     *
     * @returns {number} The magnitude of the specified 3-dimensional vector.
     * @example
     * const vec = new pc.Vec3(3, 4, 0);
     * const len = vec.length();
     * // Outputs 5
     * console.log("The length of the vector is: " + len);
     */
    length(): number;
    /**
     * Returns the magnitude squared of the specified 3-dimensional vector.
     *
     * @returns {number} The magnitude squared of the specified 3-dimensional vector.
     * @example
     * const vec = new pc.Vec3(3, 4, 0);
     * const len = vec.lengthSq();
     * // Outputs 25
     * console.log("The length squared of the vector is: " + len);
     */
    lengthSq(): number;
    /**
     * Returns the result of a linear interpolation between two specified 3-dimensional vectors.
     *
     * @param {Vec3} lhs - The 3-dimensional vector to interpolate from.
     * @param {Vec3} rhs - The 3-dimensional vector to interpolate to.
     * @param {number} alpha - The value controlling the point of interpolation. Between 0 and 1,
     * the linear interpolant will occur on a straight line between lhs and rhs. Outside of this
     * range, the linear interpolant will occur on a ray extrapolated from this line.
     * @returns {Vec3} Self for chaining.
     * @example
     * const a = new pc.Vec3(0, 0, 0);
     * const b = new pc.Vec3(10, 10, 10);
     * const r = new pc.Vec3();
     *
     * r.lerp(a, b, 0);   // r is equal to a
     * r.lerp(a, b, 0.5); // r is 5, 5, 5
     * r.lerp(a, b, 1);   // r is equal to b
     */
    lerp(lhs: Vec3, rhs: Vec3, alpha: number): Vec3;
    /**
     * Multiplies a 3-dimensional vector by another in place.
     *
     * @param {Vec3} rhs - The 3-dimensional vector used as the second multiplicand of the operation.
     * @returns {Vec3} Self for chaining.
     * @example
     * const a = new pc.Vec3(2, 3, 4);
     * const b = new pc.Vec3(4, 5, 6);
     *
     * a.mul(b);
     *
     * // Outputs [8, 15, 24]
     * console.log("The result of the multiplication is: " + a.toString());
     */
    mul(rhs: Vec3): Vec3;
    /**
     * Returns the result of multiplying the specified 3-dimensional vectors together.
     *
     * @param {Vec3} lhs - The 3-dimensional vector used as the first multiplicand of the operation.
     * @param {Vec3} rhs - The 3-dimensional vector used as the second multiplicand of the operation.
     * @returns {Vec3} Self for chaining.
     * @example
     * const a = new pc.Vec3(2, 3, 4);
     * const b = new pc.Vec3(4, 5, 6);
     * const r = new pc.Vec3();
     *
     * r.mul2(a, b);
     *
     * // Outputs [8, 15, 24]
     * console.log("The result of the multiplication is: " + r.toString());
     */
    mul2(lhs: Vec3, rhs: Vec3): Vec3;
    /**
     * Multiplies each element of a vector by a number.
     *
     * @param {number} scalar - The number to multiply by.
     * @returns {Vec3} Self for chaining.
     * @example
     * const vec = new pc.Vec3(3, 6, 9);
     *
     * vec.mulScalar(3);
     *
     * // Outputs [9, 18, 27]
     * console.log("The result of the multiplication is: " + vec.toString());
     */
    mulScalar(scalar: number): Vec3;
    /**
     * Returns this 3-dimensional vector converted to a unit vector in place. If the vector has a
     * length of zero, the vector's elements will be set to zero.
     *
     * @param {Vec3} [src] - The vector to normalize. If not set, the operation is done in place.
     * @returns {Vec3} Self for chaining.
     * @example
     * const v = new pc.Vec3(25, 0, 0);
     *
     * v.normalize();
     *
     * // Outputs [1, 0, 0]
     * console.log("The result of the vector normalization is: " + v.toString());
     */
    normalize(src?: Vec3): Vec3;
    /**
     * Each element is set to the largest integer less than or equal to its value.
     *
     * @param {Vec3} [src] - The vector to floor. If not set, the operation is done in place.
     * @returns {Vec3} Self for chaining.
     * @example
     * const v = new pc.Vec3(1.2, 3.9, 5.5);
     * v.floor();
     * // v is now [1, 3, 5]
     */
    floor(src?: Vec3): Vec3;
    /**
     * Each element is rounded up to the next largest integer.
     *
     * @param {Vec3} [src] - The vector to ceil. If not set, the operation is done in place.
     * @returns {Vec3} Self for chaining.
     * @example
     * const v = new pc.Vec3(1.2, 3.1, 5.9);
     * v.ceil();
     * // v is now [2, 4, 6]
     */
    ceil(src?: Vec3): Vec3;
    /**
     * Each element is rounded up or down to the nearest integer.
     *
     * @param {Vec3} [src] - The vector to round. If not set, the operation is done in place.
     * @returns {Vec3} Self for chaining.
     * @example
     * const v = new pc.Vec3(1.4, 3.6, 5.5);
     * v.round();
     * // v is now [1, 4, 6]
     */
    round(src?: Vec3): Vec3;
    /**
     * Each element is assigned a value from rhs parameter if it is smaller.
     *
     * @param {Vec3} rhs - The 3-dimensional vector used as the source of elements to compare to.
     * @returns {Vec3} Self for chaining.
     * @example
     * const a = new pc.Vec3(5, 1, 7);
     * const b = new pc.Vec3(2, 8, 3);
     * a.min(b);
     * // a is now [2, 1, 3]
     */
    min(rhs: Vec3): Vec3;
    /**
     * Each element is assigned a value from rhs parameter if it is larger.
     *
     * @param {Vec3} rhs - The 3-dimensional vector used as the source of elements to compare to.
     * @returns {Vec3} Self for chaining.
     * @example
     * const a = new pc.Vec3(5, 1, 7);
     * const b = new pc.Vec3(2, 8, 3);
     * a.max(b);
     * // a is now [5, 8, 7]
     */
    max(rhs: Vec3): Vec3;
    /**
     * Projects this 3-dimensional vector onto the specified vector.
     *
     * @param {Vec3} rhs - The vector onto which the original vector will be projected on.
     * @returns {Vec3} Self for chaining.
     * @example
     * const v = new pc.Vec3(5, 5, 5);
     * const normal = new pc.Vec3(1, 0, 0);
     *
     * v.project(normal);
     *
     * // Outputs [5, 0, 0]
     * console.log("The result of the vector projection is: " + v.toString());
     */
    project(rhs: Vec3): Vec3;
    /**
     * Sets the specified 3-dimensional vector to the supplied numerical values.
     *
     * @param {number} x - The value to set on the first component of the vector.
     * @param {number} y - The value to set on the second component of the vector.
     * @param {number} z - The value to set on the third component of the vector.
     * @returns {Vec3} Self for chaining.
     * @example
     * const v = new pc.Vec3();
     * v.set(5, 10, 20);
     *
     * // Outputs [5, 10, 20]
     * console.log("The result of the vector set is: " + v.toString());
     */
    set(x: number, y: number, z: number): Vec3;
    /**
     * Subtracts a 3-dimensional vector from another in place.
     *
     * @param {Vec3} rhs - The vector to subtract from the specified vector.
     * @returns {Vec3} Self for chaining.
     * @example
     * const a = new pc.Vec3(10, 10, 10);
     * const b = new pc.Vec3(20, 20, 20);
     *
     * a.sub(b);
     *
     * // Outputs [-10, -10, -10]
     * console.log("The result of the subtraction is: " + a.toString());
     */
    sub(rhs: Vec3): Vec3;
    /**
     * Subtracts two 3-dimensional vectors from one another and returns the result.
     *
     * @param {Vec3} lhs - The first vector operand for the subtraction.
     * @param {Vec3} rhs - The second vector operand for the subtraction.
     * @returns {Vec3} Self for chaining.
     * @example
     * const a = new pc.Vec3(10, 10, 10);
     * const b = new pc.Vec3(20, 20, 20);
     * const r = new pc.Vec3();
     *
     * r.sub2(a, b);
     *
     * // Outputs [-10, -10, -10]
     * console.log("The result of the subtraction is: " + r.toString());
     */
    sub2(lhs: Vec3, rhs: Vec3): Vec3;
    /**
     * Subtracts a number from each element of a vector.
     *
     * @param {number} scalar - The number to subtract.
     * @returns {Vec3} Self for chaining.
     * @example
     * const vec = new pc.Vec3(3, 4, 5);
     *
     * vec.subScalar(2);
     *
     * // Outputs [1, 2, 3]
     * console.log("The result of the subtraction is: " + vec.toString());
     */
    subScalar(scalar: number): Vec3;
    /**
     * Set the values of the vector from an array.
     *
     * @param {number[]|ArrayBufferView} arr - The array to set the vector values from.
     * @param {number} [offset] - The zero-based index at which to start copying elements from the
     * array. Default is 0.
     * @returns {Vec3} Self for chaining.
     * @example
     * const v = new pc.Vec3();
     * v.fromArray([20, 10, 5]);
     * // v is set to [20, 10, 5]
     */
    fromArray(arr: number[] | ArrayBufferView, offset?: number): Vec3;
    /**
     * Converts the vector to string form.
     *
     * @returns {string} The vector in string form.
     * @example
     * const v = new pc.Vec3(20, 10, 5);
     * // Outputs [20, 10, 5]
     * console.log(v.toString());
     */
    toString(): string;
    /**
     * @overload
     * @param {number[]} [arr] - The array to populate with the vector's number
     * components. If not specified, a new array is created.
     * @param {number} [offset] - The zero-based index at which to start copying elements to the
     * array. Default is 0.
     * @returns {number[]} The vector as an array.
     */
    toArray(arr?: number[], offset?: number): number[];
    /**
     * @overload
     * @param {ArrayBufferView} arr - The typed array to populate with the vector's number
     * components. Unlike the number[] overload, this argument is required.
     * @param {number} [offset] - The zero-based index at which to start copying elements to the
     * array. Default is 0.
     * @returns {ArrayBufferView} The vector as an array.
     */
    toArray(arr: ArrayBufferView, offset?: number): ArrayBufferView;
}

/**
 * A 4-dimensional vector. Vec4 is commonly used to represent homogeneous coordinates or shader
 * uniforms requiring four components.
 *
 * @category Math
 */
declare class Vec4 {
    /**
     * A constant vector set to [0, 0, 0, 0].
     *
     * @type {Vec4}
     * @readonly
     */
    static readonly ZERO: Vec4;
    /**
     * A constant vector set to [0.5, 0.5, 0.5, 0.5].
     *
     * @type {Vec4}
     * @readonly
     */
    static readonly HALF: Vec4;
    /**
     * A constant vector set to [1, 1, 1, 1].
     *
     * @type {Vec4}
     * @readonly
     */
    static readonly ONE: Vec4;
    /**
     * Creates a new Vec4 instance.
     *
     * @overload
     * @param {number} [x] - The x value. Defaults to 0.
     * @param {number} [y] - The y value. Defaults to 0.
     * @param {number} [z] - The z value. Defaults to 0.
     * @param {number} [w] - The w value. Defaults to 0.
     * @example
     * const v1 = new pc.Vec4(); // defaults to 0, 0, 0, 0
     * const v2 = new pc.Vec4(1, 2, 3, 4);
     */
    constructor(x?: number, y?: number, z?: number, w?: number);
    /**
     * Creates a new Vec4 instance.
     *
     * @overload
     * @param {number[]} arr - The array to set the vector values from.
     * @example
     * const v = new pc.Vec4([1, 2, 3, 4]);
     */
    constructor(arr: number[]);
    /**
     * The first component of the vector.
     *
     * @type {number}
     */
    x: number;
    /**
     * The second component of the vector.
     *
     * @type {number}
     */
    y: number;
    /**
     * The third component of the vector.
     *
     * @type {number}
     */
    z: number;
    /**
     * The fourth component of the vector.
     *
     * @type {number}
     */
    w: number;
    /**
     * Adds a 4-dimensional vector to another in place.
     *
     * @param {Vec4} rhs - The vector to add to the specified vector.
     * @returns {Vec4} Self for chaining.
     * @example
     * const a = new pc.Vec4(10, 10, 10, 10);
     * const b = new pc.Vec4(20, 20, 20, 20);
     *
     * a.add(b);
     *
     * // Outputs [30, 30, 30, 30]
     * console.log("The result of the addition is: " + a.toString());
     */
    add(rhs: Vec4): Vec4;
    /**
     * Adds two 4-dimensional vectors together and returns the result.
     *
     * @param {Vec4} lhs - The first vector operand for the addition.
     * @param {Vec4} rhs - The second vector operand for the addition.
     * @returns {Vec4} Self for chaining.
     * @example
     * const a = new pc.Vec4(10, 10, 10, 10);
     * const b = new pc.Vec4(20, 20, 20, 20);
     * const r = new pc.Vec4();
     *
     * r.add2(a, b);
     *
     * // Outputs [30, 30, 30, 30]
     * console.log("The result of the addition is: " + r.toString());
     */
    add2(lhs: Vec4, rhs: Vec4): Vec4;
    /**
     * Adds a number to each element of a vector.
     *
     * @param {number} scalar - The number to add.
     * @returns {Vec4} Self for chaining.
     * @example
     * const vec = new pc.Vec4(3, 4, 5, 6);
     *
     * vec.addScalar(2);
     *
     * // Outputs [5, 6, 7, 8]
     * console.log("The result of the addition is: " + vec.toString());
     */
    addScalar(scalar: number): Vec4;
    /**
     * Adds a 4-dimensional vector scaled by scalar value. Does not modify the vector being added.
     *
     * @param {Vec4} rhs - The vector to add to the specified vector.
     * @param {number} scalar - The number to multiply the added vector with.
     * @returns {Vec4} Self for chaining.
     * @example
     * const vec = new pc.Vec4(1, 2, 3, 4);
     *
     * vec.addScaled(pc.Vec4.ONE, 2);
     *
     * // Outputs [3, 4, 5, 6]
     * console.log("The result of the addition is: " + vec.toString());
     */
    addScaled(rhs: Vec4, scalar: number): Vec4;
    /**
     * Returns an identical copy of the specified 4-dimensional vector.
     *
     * @returns {this} A 4-dimensional vector containing the result of the cloning.
     * @example
     * const v = new pc.Vec4(10, 20, 30, 40);
     * const vclone = v.clone();
     * console.log("The result of the cloning is: " + vclone.toString());
     */
    clone(): this;
    /**
     * Copies the contents of a source 4-dimensional vector to a destination 4-dimensional vector.
     *
     * @param {Vec4} rhs - A vector to copy to the specified vector.
     * @returns {Vec4} Self for chaining.
     * @example
     * const src = new pc.Vec4(10, 20, 30, 40);
     * const dst = new pc.Vec4();
     *
     * dst.copy(src);
     *
     * console.log("The two vectors are " + (dst.equals(src) ? "equal" : "different"));
     */
    copy(rhs: Vec4): Vec4;
    /**
     * Divides a 4-dimensional vector by another in place.
     *
     * @param {Vec4} rhs - The vector to divide the specified vector by.
     * @returns {Vec4} Self for chaining.
     * @example
     * const a = new pc.Vec4(4, 9, 16, 25);
     * const b = new pc.Vec4(2, 3, 4, 5);
     *
     * a.div(b);
     *
     * // Outputs [2, 3, 4, 5]
     * console.log("The result of the division is: " + a.toString());
     */
    div(rhs: Vec4): Vec4;
    /**
     * Divides one 4-dimensional vector by another and writes the result to the specified vector.
     *
     * @param {Vec4} lhs - The dividend vector (the vector being divided).
     * @param {Vec4} rhs - The divisor vector (the vector dividing the dividend).
     * @returns {Vec4} Self for chaining.
     * @example
     * const a = new pc.Vec4(4, 9, 16, 25);
     * const b = new pc.Vec4(2, 3, 4, 5);
     * const r = new pc.Vec4();
     *
     * r.div2(a, b);
     *
     * // Outputs [2, 3, 4, 5]
     * console.log("The result of the division is: " + r.toString());
     */
    div2(lhs: Vec4, rhs: Vec4): Vec4;
    /**
     * Divides each element of a vector by a number.
     *
     * @param {number} scalar - The number to divide by.
     * @returns {Vec4} Self for chaining.
     * @example
     * const vec = new pc.Vec4(3, 6, 9, 12);
     *
     * vec.divScalar(3);
     *
     * // Outputs [1, 2, 3, 4]
     * console.log("The result of the division is: " + vec.toString());
     */
    divScalar(scalar: number): Vec4;
    /**
     * Returns the result of a dot product operation performed on the two specified 4-dimensional
     * vectors.
     *
     * @param {Vec4} rhs - The second 4-dimensional vector operand of the dot product.
     * @returns {number} The result of the dot product operation.
     * @example
     * const v1 = new pc.Vec4(5, 10, 20, 40);
     * const v2 = new pc.Vec4(10, 20, 40, 80);
     * const v1dotv2 = v1.dot(v2);
     * console.log("The result of the dot product is: " + v1dotv2);
     */
    dot(rhs: Vec4): number;
    /**
     * Reports whether two vectors are equal.
     *
     * @param {Vec4} rhs - The vector to compare to the specified vector.
     * @returns {boolean} True if the vectors are equal and false otherwise.
     * @example
     * const a = new pc.Vec4(1, 2, 3, 4);
     * const b = new pc.Vec4(5, 6, 7, 8);
     * console.log("The two vectors are " + (a.equals(b) ? "equal" : "different"));
     */
    equals(rhs: Vec4): boolean;
    /**
     * Reports whether two vectors are equal using an absolute error tolerance.
     *
     * @param {Vec4} rhs - The vector to be compared against.
     * @param {number} [epsilon] - The maximum difference between each component of the two
     * vectors. Defaults to 1e-6.
     * @returns {boolean} True if the vectors are equal and false otherwise.
     * @example
     * const a = new pc.Vec4();
     * const b = new pc.Vec4();
     * console.log("The two vectors are approximately " + (a.equalsApprox(b, 1e-9) ? "equal" : "different"));
     */
    equalsApprox(rhs: Vec4, epsilon?: number): boolean;
    /**
     * Returns the magnitude of the specified 4-dimensional vector.
     *
     * @returns {number} The magnitude of the specified 4-dimensional vector.
     * @example
     * const vec = new pc.Vec4(3, 4, 0, 0);
     * const len = vec.length();
     * // Outputs 5
     * console.log("The length of the vector is: " + len);
     */
    length(): number;
    /**
     * Returns the magnitude squared of the specified 4-dimensional vector.
     *
     * @returns {number} The magnitude squared of the specified 4-dimensional vector.
     * @example
     * const vec = new pc.Vec4(3, 4, 0, 0);
     * const len = vec.lengthSq();
     * // Outputs 25
     * console.log("The length squared of the vector is: " + len);
     */
    lengthSq(): number;
    /**
     * Returns the result of a linear interpolation between two specified 4-dimensional vectors.
     *
     * @param {Vec4} lhs - The 4-dimensional vector to interpolate from.
     * @param {Vec4} rhs - The 4-dimensional vector to interpolate to.
     * @param {number} alpha - The value controlling the point of interpolation. Between 0 and 1,
     * the linear interpolant will occur on a straight line between lhs and rhs. Outside of this
     * range, the linear interpolant will occur on a ray extrapolated from this line.
     * @returns {Vec4} Self for chaining.
     * @example
     * const a = new pc.Vec4(0, 0, 0, 0);
     * const b = new pc.Vec4(10, 10, 10, 10);
     * const r = new pc.Vec4();
     *
     * r.lerp(a, b, 0);   // r is equal to a
     * r.lerp(a, b, 0.5); // r is 5, 5, 5, 5
     * r.lerp(a, b, 1);   // r is equal to b
     */
    lerp(lhs: Vec4, rhs: Vec4, alpha: number): Vec4;
    /**
     * Multiplies a 4-dimensional vector by another in place.
     *
     * @param {Vec4} rhs - The 4-dimensional vector used as the second multiplicand of the operation.
     * @returns {Vec4} Self for chaining.
     * @example
     * const a = new pc.Vec4(2, 3, 4, 5);
     * const b = new pc.Vec4(4, 5, 6, 7);
     *
     * a.mul(b);
     *
     * // Outputs [8, 15, 24, 35]
     * console.log("The result of the multiplication is: " + a.toString());
     */
    mul(rhs: Vec4): Vec4;
    /**
     * Returns the result of multiplying the specified 4-dimensional vectors together.
     *
     * @param {Vec4} lhs - The 4-dimensional vector used as the first multiplicand of the operation.
     * @param {Vec4} rhs - The 4-dimensional vector used as the second multiplicand of the operation.
     * @returns {Vec4} Self for chaining.
     * @example
     * const a = new pc.Vec4(2, 3, 4, 5);
     * const b = new pc.Vec4(4, 5, 6, 7);
     * const r = new pc.Vec4();
     *
     * r.mul2(a, b);
     *
     * // Outputs [8, 15, 24, 35]
     * console.log("The result of the multiplication is: " + r.toString());
     */
    mul2(lhs: Vec4, rhs: Vec4): Vec4;
    /**
     * Multiplies each element of a vector by a number.
     *
     * @param {number} scalar - The number to multiply by.
     * @returns {Vec4} Self for chaining.
     * @example
     * const vec = new pc.Vec4(3, 6, 9, 12);
     *
     * vec.mulScalar(3);
     *
     * // Outputs [9, 18, 27, 36]
     * console.log("The result of the multiplication is: " + vec.toString());
     */
    mulScalar(scalar: number): Vec4;
    /**
     * Returns this 4-dimensional vector converted to a unit vector in place. If the vector has a
     * length of zero, the vector's elements will be set to zero.
     *
     * @param {Vec4} [src] - The vector to normalize. If not set, the operation is done in place.
     * @returns {Vec4} Self for chaining.
     * @example
     * const v = new pc.Vec4(25, 0, 0, 0);
     *
     * v.normalize();
     *
     * // Outputs [1, 0, 0, 0]
     * console.log("The result of the vector normalization is: " + v.toString());
     */
    normalize(src?: Vec4): Vec4;
    /**
     * Each element is set to the largest integer less than or equal to its value.
     *
     * @param {Vec4} [src] - The vector to floor. If not set, the operation is done in place.
     * @returns {Vec4} Self for chaining.
     * @example
     * const v = new pc.Vec4(1.2, 3.9, 5.5, 7.8);
     * v.floor();
     * // v is now [1, 3, 5, 7]
     */
    floor(src?: Vec4): Vec4;
    /**
     * Each element is rounded up to the next largest integer.
     *
     * @param {Vec4} [src] - The vector to ceil. If not set, the operation is done in place.
     * @returns {Vec4} Self for chaining.
     * @example
     * const v = new pc.Vec4(1.2, 3.1, 5.9, 7.4);
     * v.ceil();
     * // v is now [2, 4, 6, 8]
     */
    ceil(src?: Vec4): Vec4;
    /**
     * Each element is rounded up or down to the nearest integer.
     *
     * @param {Vec4} [src] - The vector to round. If not set, the operation is done in place.
     * @returns {Vec4} Self for chaining.
     * @example
     * const v = new pc.Vec4(1.4, 3.6, 5.5, 7.2);
     * v.round();
     * // v is now [1, 4, 6, 7]
     */
    round(src?: Vec4): Vec4;
    /**
     * Each element is assigned a value from rhs parameter if it is smaller.
     *
     * @param {Vec4} rhs - The 4-dimensional vector used as the source of elements to compare to.
     * @returns {Vec4} Self for chaining.
     * @example
     * const a = new pc.Vec4(5, 1, 7, 3);
     * const b = new pc.Vec4(2, 8, 3, 9);
     * a.min(b);
     * // a is now [2, 1, 3, 3]
     */
    min(rhs: Vec4): Vec4;
    /**
     * Each element is assigned a value from rhs parameter if it is larger.
     *
     * @param {Vec4} rhs - The 4-dimensional vector used as the source of elements to compare to.
     * @returns {Vec4} Self for chaining.
     * @example
     * const a = new pc.Vec4(5, 1, 7, 3);
     * const b = new pc.Vec4(2, 8, 3, 9);
     * a.max(b);
     * // a is now [5, 8, 7, 9]
     */
    max(rhs: Vec4): Vec4;
    /**
     * Sets the specified 4-dimensional vector to the supplied numerical values.
     *
     * @param {number} x - The value to set on the first component of the vector.
     * @param {number} y - The value to set on the second component of the vector.
     * @param {number} z - The value to set on the third component of the vector.
     * @param {number} w - The value to set on the fourth component of the vector.
     * @returns {Vec4} Self for chaining.
     * @example
     * const v = new pc.Vec4();
     * v.set(5, 10, 20, 40);
     *
     * // Outputs [5, 10, 20, 40]
     * console.log("The result of the vector set is: " + v.toString());
     */
    set(x: number, y: number, z: number, w: number): Vec4;
    /**
     * Subtracts a 4-dimensional vector from another in place.
     *
     * @param {Vec4} rhs - The vector to subtract from the specified vector.
     * @returns {Vec4} Self for chaining.
     * @example
     * const a = new pc.Vec4(10, 10, 10, 10);
     * const b = new pc.Vec4(20, 20, 20, 20);
     *
     * a.sub(b);
     *
     * // Outputs [-10, -10, -10, -10]
     * console.log("The result of the subtraction is: " + a.toString());
     */
    sub(rhs: Vec4): Vec4;
    /**
     * Subtracts two 4-dimensional vectors from one another and returns the result.
     *
     * @param {Vec4} lhs - The first vector operand for the subtraction.
     * @param {Vec4} rhs - The second vector operand for the subtraction.
     * @returns {Vec4} Self for chaining.
     * @example
     * const a = new pc.Vec4(10, 10, 10, 10);
     * const b = new pc.Vec4(20, 20, 20, 20);
     * const r = new pc.Vec4();
     *
     * r.sub2(a, b);
     *
     * // Outputs [-10, -10, -10, -10]
     * console.log("The result of the subtraction is: " + r.toString());
     */
    sub2(lhs: Vec4, rhs: Vec4): Vec4;
    /**
     * Subtracts a number from each element of a vector.
     *
     * @param {number} scalar - The number to subtract.
     * @returns {Vec4} Self for chaining.
     * @example
     * const vec = new pc.Vec4(3, 4, 5, 6);
     *
     * vec.subScalar(2);
     *
     * // Outputs [1, 2, 3, 4]
     * console.log("The result of the subtraction is: " + vec.toString());
     */
    subScalar(scalar: number): Vec4;
    /**
     * Set the values of the vector from an array.
     *
     * @param {number[]|ArrayBufferView} arr - The array to set the vector values from.
     * @param {number} [offset] - The zero-based index at which to start copying elements from the
     * array. Default is 0.
     * @returns {Vec4} Self for chaining.
     * @example
     * const v = new pc.Vec4();
     * v.fromArray([20, 10, 5, 0]);
     * // v is set to [20, 10, 5, 0]
     */
    fromArray(arr: number[] | ArrayBufferView, offset?: number): Vec4;
    /**
     * Converts the vector to string form.
     *
     * @returns {string} The vector in string form.
     * @example
     * const v = new pc.Vec4(20, 10, 5, 0);
     * // Outputs [20, 10, 5, 0]
     * console.log(v.toString());
     */
    toString(): string;
    /**
     * @overload
     * @param {number[]} [arr] - The array to populate with the vector's number
     * components. If not specified, a new array is created.
     * @param {number} [offset] - The zero-based index at which to start copying elements to the
     * array. Default is 0.
     * @returns {number[]} The vector as an array.
     */
    toArray(arr?: number[], offset?: number): number[];
    /**
     * @overload
     * @param {ArrayBufferView} arr - The typed array to populate with the vector's number
     * components. Required for this overload.
     * @param {number} [offset] - The zero-based index at which to start copying elements to the
     * array. Default is 0.
     * @returns {ArrayBufferView} The vector as an array.
     */
    toArray(arr: ArrayBufferView, offset?: number): ArrayBufferView;
}

/**
 * @import { Mat4 } from './mat4.js'
 */
/**
 * A quaternion representing rotation in 3D space. Quaternions are typically used to represent
 * rotations in 3D applications, offering advantages over Euler angles including no gimbal lock and
 * more efficient interpolation.
 *
 * @category Math
 */
declare class Quat {
    /**
     * A constant quaternion set to [0, 0, 0, 1] (the identity). Represents no rotation.
     *
     * @type {Quat}
     * @readonly
     */
    static readonly IDENTITY: Quat;
    /**
     * A constant quaternion set to [0, 0, 0, 0].
     *
     * @type {Quat}
     * @readonly
     */
    static readonly ZERO: Quat;
    /**
     * Creates a new Quat instance.
     *
     * @overload
     * @param {number} [x] - The x value. Defaults to 0.
     * @param {number} [y] - The y value. Defaults to 0.
     * @param {number} [z] - The z value. Defaults to 0.
     * @param {number} [w] - The w value. Defaults to 1.
     * @example
     * const q1 = new pc.Quat(); // defaults to 0, 0, 0, 1
     * const q2 = new pc.Quat(1, 2, 3, 4);
     */
    constructor(x?: number, y?: number, z?: number, w?: number);
    /**
     * Creates a new Quat instance.
     *
     * @overload
     * @param {number[]} arr - The array to set the quaternion values from.
     * @example
     * const q = new pc.Quat([1, 2, 3, 4]);
     */
    constructor(arr: number[]);
    /**
     * The x component of the quaternion.
     *
     * @type {number}
     */
    x: number;
    /**
     * The y component of the quaternion.
     *
     * @type {number}
     */
    y: number;
    /**
     * The z component of the quaternion.
     *
     * @type {number}
     */
    z: number;
    /**
     * The w component of the quaternion.
     *
     * @type {number}
     */
    w: number;
    /**
     * Returns an identical copy of the specified quaternion.
     *
     * @returns {this} A new quaternion identical to this one.
     * @example
     * const q = new pc.Quat(-0.11, -0.15, -0.46, 0.87);
     * const qclone = q.clone();
     *
     * console.log("The result of the cloning is: " + qclone.toString());
     */
    clone(): this;
    /**
     * Conjugates a quaternion.
     *
     * @param {Quat} [src] - The quaternion to conjugate. If not set, the operation is done in place.
     * @returns {Quat} Self for chaining.
     * @example
     * const q = new pc.Quat(1, 2, 3, 4);
     * q.conjugate();
     * // q is now [-1, -2, -3, 4]
     * @ignore
     */
    conjugate(src?: Quat): Quat;
    /**
     * Copies the contents of a source quaternion to a destination quaternion.
     *
     * @param {Quat} rhs - The quaternion to be copied.
     * @returns {Quat} Self for chaining.
     * @example
     * const src = new pc.Quat();
     * const dst = new pc.Quat();
     * dst.copy(src);
     * console.log("The two quaternions are " + (src.equals(dst) ? "equal" : "different"));
     */
    copy(rhs: Quat): Quat;
    /**
     * Calculates the dot product of two quaternions.
     *
     * @param {Quat} other - The quaternion to calculate the dot product with.
     * @returns {number} The dot product of the two quaternions.
     * @example
     * const a = new pc.Quat(1, 0, 0, 0);
     * const b = new pc.Quat(0, 1, 0, 0);
     * console.log("Dot product: " + a.dot(b)); // Outputs 0
     */
    dot(other: Quat): number;
    /**
     * Reports whether two quaternions are equal.
     *
     * @param {Quat} rhs - The quaternion to be compared against.
     * @returns {boolean} True if the quaternions are equal and false otherwise.
     * @example
     * const a = new pc.Quat();
     * const b = new pc.Quat();
     * console.log("The two quaternions are " + (a.equals(b) ? "equal" : "different"));
     */
    equals(rhs: Quat): boolean;
    /**
     * Reports whether two quaternions are equal using an absolute error tolerance.
     *
     * @param {Quat} rhs - The quaternion to be compared against.
     * @param {number} [epsilon] - The maximum difference between each component of the two
     * quaternions. Defaults to 1e-6.
     * @returns {boolean} True if the quaternions are equal and false otherwise.
     * @example
     * const a = new pc.Quat();
     * const b = new pc.Quat();
     * console.log("The two quaternions are approximately " + (a.equalsApprox(b, 1e-9) ? "equal" : "different"));
     */
    equalsApprox(rhs: Quat, epsilon?: number): boolean;
    /**
     * Gets the rotation axis and angle for a given quaternion. If a quaternion is created with
     * `setFromAxisAngle`, this method will return the same values as provided in the original
     * parameter list OR functionally equivalent values.
     *
     * @param {Vec3} axis - The 3-dimensional vector to receive the axis of rotation.
     * @returns {number} Angle, in degrees, of the rotation.
     * @example
     * const q = new pc.Quat();
     * q.setFromAxisAngle(new pc.Vec3(0, 1, 0), 90);
     * const v = new pc.Vec3();
     * const angle = q.getAxisAngle(v);
     * // Outputs 90
     * console.log(angle);
     * // Outputs [0, 1, 0]
     * console.log(v.toString());
     */
    getAxisAngle(axis: Vec3): number;
    /**
     * Converts this quaternion to Euler angles, specified in degrees. The decomposition uses an
     * **intrinsic XYZ** order, representing the angles required to achieve the quaternion's
     * orientation by rotating sequentially: first around the X-axis, then around the newly
     * transformed Y-axis, and finally around the resulting Z-axis.
     *
     * @param {Vec3} [eulers] - An optional 3-dimensional vector to receive the calculated
     * Euler angles (output parameter). If not provided, a new Vec3 object will be allocated
     * and returned.
     * @returns {Vec3} The 3-dimensional vector holding the Euler angles in degrees. This will be
     * the same object passed in as the `eulers` parameter (if one was provided).
     * @example
     * const q = new pc.Quat();
     * q.setFromAxisAngle(pc.Vec3.UP, 90);
     * const e = new pc.Vec3();
     * q.getEulerAngles(e);
     * // Outputs [0, 90, 0]
     * console.log(e.toString());
     */
    getEulerAngles(eulers?: Vec3): Vec3;
    /**
     * Generates the inverse of the specified quaternion.
     *
     * @param {Quat} [src] - The quaternion to invert. If not set, the operation is done in place.
     * @returns {Quat} Self for chaining.
     * @example
     * // Create a quaternion rotated 180 degrees around the y-axis
     * const rot = new pc.Quat().setFromEulerAngles(0, 180, 0);
     *
     * // Invert in place
     * rot.invert();
     */
    invert(src?: Quat): Quat;
    /**
     * Returns the magnitude of the specified quaternion.
     *
     * @returns {number} The magnitude of the specified quaternion.
     * @example
     * const q = new pc.Quat(0, 0, 0, 5);
     * const len = q.length();
     * // Outputs 5
     * console.log("The length of the quaternion is: " + len);
     */
    length(): number;
    /**
     * Returns the magnitude squared of the specified quaternion.
     *
     * @returns {number} The magnitude squared of the quaternion.
     * @example
     * const q = new pc.Quat(3, 4, 0, 0);
     * const lenSq = q.lengthSq();
     * // Outputs 25
     * console.log("The length squared of the quaternion is: " + lenSq);
     */
    lengthSq(): number;
    /**
     * Performs a linear interpolation between two quaternions. The result of the interpolation
     * is written to the quaternion calling the function.
     *
     * @param {Quat} lhs - The quaternion to interpolate from.
     * @param {Quat} rhs - The quaternion to interpolate to.
     * @param {number} alpha - The value controlling the interpolation in relation to the two input
     * quaternions. The value is in the range 0 to 1, 0 generating q1, 1 generating q2 and anything
     * in between generating a linear interpolation between the two.
     * @returns {Quat} Self for chaining.
     * @example
     * const q1 = new pc.Quat(-0.11, -0.15, -0.46, 0.87);
     * const q2 = new pc.Quat(-0.21, -0.21, -0.67, 0.68);
     *
     * const result = new pc.Quat();
     * result.lerp(q1, q2, 0);   // Return q1
     * result.lerp(q1, q2, 0.5); // Return the midpoint interpolant
     * result.lerp(q1, q2, 1);   // Return q2
     */
    lerp(lhs: Quat, rhs: Quat, alpha: number): Quat;
    /**
     * Multiplies the current instance by the specified quaternion and stores the result in the
     * current instance.
     *
     * @param {Quat} rhs - The quaternion used as the second multiplicand of the operation.
     * @returns {Quat} Self for chaining.
     * @example
     * const a = new pc.Quat().setFromEulerAngles(0, 30, 0);
     * const b = new pc.Quat().setFromEulerAngles(0, 60, 0);
     *
     * // a becomes a 90 degree rotation around the Y axis
     * // In other words, a = a * b
     * a.mul(b);
     *
     * console.log("The result of the multiplication is: " + a.toString());
     */
    mul(rhs: Quat): Quat;
    /**
     * Multiplies each element of a quaternion by a number.
     *
     * @param {number} scalar - The number to multiply by.
     * @param {Quat} [src] - The quaternion to scale. If not set, the operation is done in place.
     * @returns {Quat} Self for chaining.
     * @example
     * const q = new pc.Quat(1, 2, 3, 4);
     * q.mulScalar(2);
     * // q is now [2, 4, 6, 8]
     */
    mulScalar(scalar: number, src?: Quat): Quat;
    /**
     * Multiplies the specified quaternions together and stores the result in the current
     * instance.
     *
     * @param {Quat} lhs - The quaternion used as the first multiplicand of the operation.
     * @param {Quat} rhs - The quaternion used as the second multiplicand of the operation.
     * @returns {Quat} Self for chaining.
     * @example
     * const a = new pc.Quat().setFromEulerAngles(0, 30, 0);
     * const b = new pc.Quat().setFromEulerAngles(0, 60, 0);
     * const r = new pc.Quat();
     *
     * // r is set to a 90 degree rotation around the Y axis
     * // In other words, r = a * b
     * r.mul2(a, b);
     */
    mul2(lhs: Quat, rhs: Quat): Quat;
    /**
     * Normalizes the specified quaternion.
     *
     * @param {Quat} [src] - The quaternion to normalize. If not set, the operation is done in place.
     * @returns {Quat} Self for chaining.
     * @example
     * const v = new pc.Quat(0, 0, 0, 5);
     * v.normalize();
     * // Outputs [0, 0, 0, 1]
     * console.log(v.toString());
     */
    normalize(src?: Quat): Quat;
    /**
     * Sets the specified quaternion to the supplied numerical values.
     *
     * @param {number} x - The x component of the quaternion.
     * @param {number} y - The y component of the quaternion.
     * @param {number} z - The z component of the quaternion.
     * @param {number} w - The w component of the quaternion.
     * @returns {Quat} Self for chaining.
     * @example
     * const q = new pc.Quat();
     * q.set(1, 0, 0, 0);
     *
     * // Outputs 1, 0, 0, 0
     * console.log("The result of the quaternion set is: " + q.toString());
     */
    set(x: number, y: number, z: number, w: number): Quat;
    /**
     * Sets a quaternion from an angular rotation around an axis.
     *
     * @param {Vec3} axis - World space axis around which to rotate. Should be normalized.
     * @param {number} angle - Angle to rotate around the given axis in degrees.
     * @returns {Quat} Self for chaining.
     * @example
     * const q = new pc.Quat();
     * q.setFromAxisAngle(pc.Vec3.UP, 90);
     */
    setFromAxisAngle(axis: Vec3, angle: number): Quat;
    /**
     * Sets this quaternion to represent a rotation specified by Euler angles in degrees.
     * The rotation is applied using an **intrinsic XYZ** order: first around the X-axis, then
     * around the newly transformed Y-axis, and finally around the resulting Z-axis.
     *
     * @param {number|Vec3} ex - The angle to rotate around the X-axis in degrees, or a Vec3
     * object containing the X, Y, and Z angles in degrees in its respective components (`ex.x`,
     * `ex.y`, `ex.z`).
     * @param {number} [ey] - The angle to rotate around the Y-axis in degrees. This parameter is
     * only used if `ex` is provided as a number.
     * @param {number} [ez] - The angle to rotate around the Z-axis in degrees. This parameter is
     * only used if `ex` is provided as a number.
     * @returns {Quat} The quaternion itself (this), now representing the orientation from the
     * specified XYZ Euler angles. Allows for method chaining.
     * @example
     * // Create a quaternion from 3 individual Euler angles (interpreted as X, Y, Z order)
     * const q1 = new pc.Quat();
     * q1.setFromEulerAngles(45, 90, 180); // 45 deg around X, then 90 deg around Y', then 180 deg around Z''
     * console.log("From numbers:", q1.toString());
     * @example
     * // Create the same quaternion from a Vec3 containing the angles (X, Y, Z)
     * const anglesVec = new pc.Vec3(45, 90, 180);
     * const q2 = new pc.Quat();
     * q2.setFromEulerAngles(anglesVec);
     * console.log("From Vec3:", q2.toString()); // Should match q1
     */
    setFromEulerAngles(ex: number | Vec3, ey?: number, ez?: number): Quat;
    /**
     * Converts the specified 4x4 matrix to a quaternion. Note that since a quaternion is purely a
     * representation for orientation, only the rotational part of the matrix is used.
     *
     * @param {Mat4} m - The 4x4 matrix to convert.
     * @returns {Quat} Self for chaining.
     * @example
     * // Create a 4x4 rotation matrix of 180 degrees around the y-axis
     * const rot = new pc.Mat4().setFromAxisAngle(pc.Vec3.UP, 180);
     *
     * // Convert to a quaternion
     * const q = new pc.Quat().setFromMat4(rot);
     */
    setFromMat4(m: Mat4): Quat;
    /**
     * Set the quaternion that represents the shortest rotation from one direction to another.
     *
     * @param {Vec3} from - The direction to rotate from. It should be normalized.
     * @param {Vec3} to - The direction to rotate to. It should be normalized.
     * @returns {Quat} Self for chaining.
     * @example
     * const q = new pc.Quat();
     * const from = new pc.Vec3(0, 0, 1);
     * const to = new pc.Vec3(0, 1, 0);
     * q.setFromDirections(from, to);
     */
    setFromDirections(from: Vec3, to: Vec3): Quat;
    /**
     * Performs a spherical interpolation between two quaternions. The result of the interpolation
     * is written to the quaternion calling the function.
     *
     * @param {Quat} lhs - The quaternion to interpolate from.
     * @param {Quat} rhs - The quaternion to interpolate to.
     * @param {number} alpha - The value controlling the interpolation in relation to the two input
     * quaternions. The value is in the range 0 to 1, 0 generating q1, 1 generating q2 and anything
     * in between generating a spherical interpolation between the two.
     * @returns {Quat} Self for chaining.
     * @example
     * const q1 = new pc.Quat(-0.11, -0.15, -0.46, 0.87);
     * const q2 = new pc.Quat(-0.21, -0.21, -0.67, 0.68);
     *
     * const result = new pc.Quat();
     * result.slerp(q1, q2, 0);   // Return q1
     * result.slerp(q1, q2, 0.5); // Return the midpoint interpolant
     * result.slerp(q1, q2, 1);   // Return q2
     */
    slerp(lhs: Quat, rhs: Quat, alpha: number): Quat;
    /**
     * Transforms a 3-dimensional vector by the specified quaternion.
     *
     * @param {Vec3} vec - The 3-dimensional vector to be transformed.
     * @param {Vec3} [res] - An optional 3-dimensional vector to receive the result of the transformation.
     * @returns {Vec3} The transformed vector (res if specified, otherwise a new Vec3).
     * @example
     * // Create a 3-dimensional vector
     * const v = new pc.Vec3(1, 2, 3);
     *
     * // Create a quaternion rotation
     * const q = new pc.Quat().setFromEulerAngles(10, 20, 30);
     *
     * const tv = q.transformVector(v);
     */
    transformVector(vec: Vec3, res?: Vec3): Vec3;
    /**
     * Set the values of the quaternion from an array.
     *
     * @param {number[]|ArrayBufferView} arr - The array to set the quaternion values from.
     * @param {number} [offset] - The zero-based index at which to start copying elements from the
     * array. Default is 0.
     * @returns {Quat} Self for chaining.
     * @example
     * const q = new pc.Quat();
     * q.fromArray([20, 10, 5, 0]);
     * // q is set to [20, 10, 5, 0]
     */
    fromArray(arr: number[] | ArrayBufferView, offset?: number): Quat;
    /**
     * Converts the quaternion to string form.
     *
     * @returns {string} The quaternion in string form.
     * @example
     * const q = new pc.Quat(0, 0, 0, 1);
     * // Outputs [0, 0, 0, 1]
     * console.log(q.toString());
     */
    toString(): string;
    /**
     * @overload
     * @param {number[]} [arr] - The array to populate with the quaternion's number
     * components. If not specified, a new array is created.
     * @param {number} [offset] - The zero-based index at which to start copying elements to the
     * array. Default is 0.
     * @returns {number[]} The quaternion as an array.
     */
    toArray(arr?: number[], offset?: number): number[];
    /**
     * @overload
     * @param {ArrayBufferView} arr - The typed array to populate with the quaternion's number
     * components. Required for this overload.
     * @param {number} [offset] - The zero-based index at which to start copying elements to the
     * array. Default is 0.
     * @returns {ArrayBufferView} The quaternion as an array.
     */
    toArray(arr: ArrayBufferView, offset?: number): ArrayBufferView;
}

/**
 * A 4x4 matrix. Mat4 is commonly used to represent world, view and projection transformations in
 * 3D graphics, combining rotation, translation and scale into a single matrix.
 *
 * @category Math
 */
declare class Mat4 {
    /**
     * Calculates the half size of the perspective projection plane at the near clip distance.
     * Internal helper used by perspective projection setup — parameters are untyped in the
     * declaration; presumably `halfSize` is a 2D vector output parameter. TODO(review): confirm
     * parameter types against the implementation.
     *
     * @ignore
     */
    static _getPerspectiveHalfSize(halfSize: any, fov: any, aspect: any, znear: any, fovIsHorizontal: any): void;
    /**
     * A constant matrix set to the identity.
     *
     * @type {Mat4}
     * @readonly
     */
    static readonly IDENTITY: Mat4;
    /**
     * A constant matrix with all elements set to 0.
     *
     * @type {Mat4}
     * @readonly
     */
    static readonly ZERO: Mat4;
    /**
     * Matrix elements in the form of a flat array.
     *
     * @type {Float32Array}
     */
    data: Float32Array;
    /**
     * Adds the specified 4x4 matrices together and stores the result in the current instance.
     *
     * @param {Mat4} lhs - The 4x4 matrix used as the first operand of the addition.
     * @param {Mat4} rhs - The 4x4 matrix used as the second operand of the addition.
     * @returns {Mat4} Self for chaining.
     * @example
     * const m = new pc.Mat4();
     *
     * m.add2(pc.Mat4.IDENTITY, pc.Mat4.ZERO);
     *
     * console.log("The result of the addition is: " + m.toString());
     */
    add2(lhs: Mat4, rhs: Mat4): Mat4;
    /**
     * Adds the specified 4x4 matrix to the current instance.
     *
     * @param {Mat4} rhs - The 4x4 matrix used as the second operand of the addition.
     * @returns {Mat4} Self for chaining.
     * @example
     * const m = new pc.Mat4();
     *
     * m.add(pc.Mat4.IDENTITY);
     *
     * console.log("The result of the addition is: " + m.toString());
     */
    add(rhs: Mat4): Mat4;
    /**
     * Creates a duplicate of the specified matrix.
     *
     * @returns {this} A duplicate matrix.
     * @example
     * const src = new pc.Mat4().setFromEulerAngles(10, 20, 30);
     * const dst = src.clone();
     * console.log("The two matrices are " + (src.equals(dst) ? "equal" : "different"));
     */
    clone(): this;
    /**
     * Copies the contents of a source 4x4 matrix to a destination 4x4 matrix.
     *
     * @param {Mat4} rhs - A 4x4 matrix to be copied.
     * @returns {Mat4} Self for chaining.
     * @example
     * const src = new pc.Mat4().setFromEulerAngles(10, 20, 30);
     * const dst = new pc.Mat4();
     * dst.copy(src);
     * console.log("The two matrices are " + (src.equals(dst) ? "equal" : "different"));
     */
    copy(rhs: Mat4): Mat4;
    /**
     * Reports whether two matrices are equal.
     *
     * @param {Mat4} rhs - The other matrix.
     * @returns {boolean} True if the matrices are equal and false otherwise.
     * @example
     * const a = new pc.Mat4().setFromEulerAngles(10, 20, 30);
     * const b = new pc.Mat4();
     * console.log("The two matrices are " + (a.equals(b) ? "equal" : "different"));
     */
    equals(rhs: Mat4): boolean;
    /**
     * Reports whether the specified matrix is the identity matrix.
     *
     * @returns {boolean} True if the matrix is identity and false otherwise.
     * @example
     * const m = new pc.Mat4();
     * console.log("The matrix is " + (m.isIdentity() ? "identity" : "not identity"));
     */
    isIdentity(): boolean;
    /**
     * Multiplies the specified 4x4 matrices together and stores the result in the current
     * instance.
     *
     * @param {Mat4} lhs - The 4x4 matrix used as the first multiplicand of the operation.
     * @param {Mat4} rhs - The 4x4 matrix used as the second multiplicand of the operation.
     * @returns {Mat4} Self for chaining.
     * @example
     * const a = new pc.Mat4().setFromEulerAngles(10, 20, 30);
     * const b = new pc.Mat4().setFromAxisAngle(pc.Vec3.UP, 180);
     * const r = new pc.Mat4();
     *
     * // r = a * b
     * r.mul2(a, b);
     *
     * console.log("The result of the multiplication is: " + r.toString());
     */
    mul2(lhs: Mat4, rhs: Mat4): Mat4;
    /**
     * Multiplies the specified 4x4 matrices together and stores the result in the current
     * instance. This function assumes the matrices are affine transformation matrices, where the
     * upper left 3x3 elements are a rotation matrix, and the bottom left 3 elements are
     * translation. The rightmost column is assumed to be [0, 0, 0, 1]. The parameters are not
     * verified to be in the expected format. This function is faster than general
     * {@link Mat4#mul2}.
     *
     * @param {Mat4} lhs - The affine transformation 4x4 matrix used as the first multiplicand of
     * the operation.
     * @param {Mat4} rhs - The affine transformation 4x4 matrix used as the second multiplicand of
     * the operation.
     * @returns {Mat4} Self for chaining.
     * @example
     * const a = new pc.Mat4().setFromEulerAngles(10, 20, 30);
     * const b = new pc.Mat4().setFromAxisAngle(pc.Vec3.UP, 180);
     * const r = new pc.Mat4();
     *
     * // r = a * b (optimized for affine transforms)
     * r.mulAffine2(a, b);
     */
    mulAffine2(lhs: Mat4, rhs: Mat4): Mat4;
    /**
     * Multiplies the current instance by the specified 4x4 matrix.
     *
     * @param {Mat4} rhs - The 4x4 matrix used as the second multiplicand of the operation.
     * @returns {Mat4} Self for chaining.
     * @example
     * const a = new pc.Mat4().setFromEulerAngles(10, 20, 30);
     * const b = new pc.Mat4().setFromAxisAngle(pc.Vec3.UP, 180);
     *
     * // a = a * b
     * a.mul(b);
     *
     * console.log("The result of the multiplication is: " + a.toString());
     */
    mul(rhs: Mat4): Mat4;
    /**
     * Transforms a 3-dimensional point by a 4x4 matrix.
     *
     * @param {Vec3} vec - The 3-dimensional point to be transformed.
     * @param {Vec3} [res] - An optional 3-dimensional point to receive the result of the
     * transformation.
     * @returns {Vec3} The input point vec transformed by the current instance.
     * @example
     * // Create a 3-dimensional point
     * const v = new pc.Vec3(1, 2, 3);
     *
     * // Create a 4x4 rotation matrix
     * const m = new pc.Mat4().setFromEulerAngles(10, 20, 30);
     *
     * const tv = m.transformPoint(v);
     */
    transformPoint(vec: Vec3, res?: Vec3): Vec3;
    /**
     * Transforms a 3-dimensional vector by a 4x4 matrix.
     *
     * @param {Vec3} vec - The 3-dimensional vector to be transformed.
     * @param {Vec3} [res] - An optional 3-dimensional vector to receive the result of the
     * transformation.
     * @returns {Vec3} The input vector vec transformed by the current instance.
     * @example
     * // Create a 3-dimensional vector
     * const v = new pc.Vec3(1, 2, 3);
     *
     * // Create a 4x4 rotation matrix
     * const m = new pc.Mat4().setFromEulerAngles(10, 20, 30);
     *
     * const tv = m.transformVector(v);
     */
    transformVector(vec: Vec3, res?: Vec3): Vec3;
    /**
     * Transforms a 4-dimensional vector by a 4x4 matrix.
     *
     * @param {Vec4} vec - The 4-dimensional vector to be transformed.
     * @param {Vec4} [res] - An optional 4-dimensional vector to receive the result of the
     * transformation.
     * @returns {Vec4} The input vector vec transformed by the current instance.
     * @example
     * // Create an input 4-dimensional vector
     * const v = new pc.Vec4(1, 2, 3, 4);
     *
     * // Create an output 4-dimensional vector
     * const result = new pc.Vec4();
     *
     * // Create a 4x4 rotation matrix
     * const m = new pc.Mat4().setFromEulerAngles(10, 20, 30);
     *
     * m.transformVec4(v, result);
     */
    transformVec4(vec: Vec4, res?: Vec4): Vec4;
    /**
     * Sets the specified matrix to a viewing matrix derived from an eye point, a target point and
     * an up vector. The matrix maps the target point to the negative z-axis and the eye point to
     * the origin, so that when you use a typical projection matrix, the center of the scene maps
     * to the center of the viewport. Similarly, the direction described by the up vector projected
     * onto the viewing plane is mapped to the positive y-axis so that it points upward in the
     * viewport. The up vector must not be parallel to the line of sight from the eye to the
     * reference point.
     *
     * @param {Vec3} position - 3-d vector holding view position.
     * @param {Vec3} target - 3-d vector holding reference point.
     * @param {Vec3} up - 3-d vector holding the up direction.
     * @returns {Mat4} Self for chaining.
     * @example
     * const position = new pc.Vec3(10, 10, 10);
     * const target = new pc.Vec3(0, 0, 0);
     * const up = new pc.Vec3(0, 1, 0);
     * const m = new pc.Mat4().setLookAt(position, target, up);
     */
    setLookAt(position: Vec3, target: Vec3, up: Vec3): Mat4;
    /**
     * Sets the specified matrix to a perspective projection matrix. The function's parameters
     * define the shape of a frustum.
     *
     * @param {number} left - The x-coordinate for the left edge of the camera's projection plane
     * in eye space.
     * @param {number} right - The x-coordinate for the right edge of the camera's projection plane
     * in eye space.
     * @param {number} bottom - The y-coordinate for the bottom edge of the camera's projection
     * plane in eye space.
     * @param {number} top - The y-coordinate for the top edge of the camera's projection plane in
     * eye space.
     * @param {number} znear - The near clip plane in eye coordinates.
     * @param {number} zfar - The far clip plane in eye coordinates.
     * @returns {Mat4} Self for chaining.
     * @example
     * // Create a 4x4 perspective projection matrix
     * const f = new pc.Mat4().setFrustum(-2, 2, -1, 1, 1, 1000);
     * @ignore
     */
    setFrustum(left: number, right: number, bottom: number, top: number, znear: number, zfar: number): Mat4;
    /**
     * Sets the specified matrix to a perspective projection matrix. The function's parameters
     * define the shape of a frustum.
     *
     * @param {number} fov - The frustum's field of view in degrees. The fovIsHorizontal parameter
     * controls whether this is a vertical or horizontal field of view. By default, it's a vertical
     * field of view.
     * @param {number} aspect - The aspect ratio of the frustum's projection plane
     * (width / height).
     * @param {number} znear - The near clip plane in eye coordinates.
     * @param {number} zfar - The far clip plane in eye coordinates.
     * @param {boolean} [fovIsHorizontal] - Set to true to treat the fov as horizontal (x-axis) and
     * false for vertical (y-axis). Defaults to false.
     * @returns {Mat4} Self for chaining.
     * @example
     * // Create a 4x4 perspective projection matrix
     * const persp = new pc.Mat4().setPerspective(45, 16 / 9, 1, 1000);
     */
    setPerspective(fov: number, aspect: number, znear: number, zfar: number, fovIsHorizontal?: boolean): Mat4;
    /**
     * Sets the specified matrix to an orthographic projection matrix. The function's parameters
     * define the shape of a cuboid-shaped frustum.
     *
     * @param {number} left - The x-coordinate for the left edge of the camera's projection plane
     * in eye space.
     * @param {number} right - The x-coordinate for the right edge of the camera's projection plane
     * in eye space.
     * @param {number} bottom - The y-coordinate for the bottom edge of the camera's projection
     * plane in eye space.
     * @param {number} top - The y-coordinate for the top edge of the camera's projection plane in
     * eye space.
     * @param {number} near - The near clip plane in eye coordinates.
     * @param {number} far - The far clip plane in eye coordinates.
     * @returns {Mat4} Self for chaining.
     * @example
     * // Create a 4x4 orthographic projection matrix
     * const ortho = new pc.Mat4().setOrtho(-2, 2, -2, 2, 1, 1000);
     */
    setOrtho(left: number, right: number, bottom: number, top: number, near: number, far: number): Mat4;
    /**
     * Sets the specified matrix to a rotation matrix equivalent to a rotation around an axis. The
     * axis must be normalized (unit length) and the angle must be specified in degrees.
     *
     * @param {Vec3} axis - The normalized axis vector around which to rotate.
     * @param {number} angle - The angle of rotation in degrees.
     * @returns {Mat4} Self for chaining.
     * @example
     * // Create a 4x4 rotation matrix
     * const rm = new pc.Mat4().setFromAxisAngle(pc.Vec3.UP, 90);
     */
    setFromAxisAngle(axis: Vec3, angle: number): Mat4;
    /**
     * Sets the specified matrix to a translation matrix.
     *
     * @param {number} x - The x-component of the translation.
     * @param {number} y - The y-component of the translation.
     * @param {number} z - The z-component of the translation.
     * @returns {Mat4} Self for chaining.
     * @example
     * // Create a 4x4 translation matrix
     * const tm = new pc.Mat4().setTranslate(10, 10, 10);
     * @ignore
     */
    setTranslate(x: number, y: number, z: number): Mat4;
    /**
     * Sets the specified matrix to a scale matrix.
     *
     * @param {number} x - The x-component of the scale.
     * @param {number} y - The y-component of the scale.
     * @param {number} z - The z-component of the scale.
     * @returns {Mat4} Self for chaining.
     * @example
     * // Create a 4x4 scale matrix
     * const sm = new pc.Mat4().setScale(10, 10, 10);
     * @ignore
     */
    setScale(x: number, y: number, z: number): Mat4;
    /**
     * Sets the specified matrix to a matrix transforming a normalized view volume (in range of
     * -1 .. 1) to their position inside a viewport (in range of 0 .. 1). This encapsulates a
     * scaling to the size of the viewport and a translation to the position of the viewport.
     *
     * @param {number} x - The x-component of the position of the viewport (in 0..1 range).
     * @param {number} y - The y-component of the position of the viewport (in 0..1 range).
     * @param {number} width - The width of the viewport (in 0..1 range).
     * @param {number} height - The height of the viewport (in 0..1 range).
     * @returns {Mat4} Self for chaining.
     * @example
     * // Create a 4x4 viewport matrix which scales normalized view volume to full texture viewport
     * const vm = new pc.Mat4().setViewport(0, 0, 1, 1);
     * @ignore
     */
    setViewport(x: number, y: number, width: number, height: number): Mat4;
    /**
     * Sets the matrix to a reflection matrix, which can be used as a mirror transformation by the
     * plane.
     *
     * @param {Vec3} normal - The normal of the plane to reflect by.
     * @param {number} distance - The distance of plane to reflect by.
     * @returns {Mat4} Self for chaining.
     * @example
     * // Create a reflection matrix for a horizontal plane at y=0
     * const reflection = new pc.Mat4().setReflection(pc.Vec3.UP, 0);
     */
    setReflection(normal: Vec3, distance: number): Mat4;
    /**
     * Sets the matrix to the inverse of a source matrix.
     *
     * @param {Mat4} [src] - The matrix to invert. If not set, the matrix is inverted in-place.
     * @returns {Mat4} Self for chaining.
     * @example
     * // Create a 4x4 rotation matrix of 180 degrees around the y-axis
     * const rot = new pc.Mat4().setFromAxisAngle(pc.Vec3.UP, 180);
     *
     * // Invert in place
     * rot.invert();
     */
    invert(src?: Mat4): Mat4;
    /**
     * Sets matrix data from an array.
     *
     * @param {number[]} src - Source array. Must have 16 values.
     * @returns {Mat4} Self for chaining.
     * @example
     * const m = new pc.Mat4();
     * m.set([1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 10, 20, 30, 1]);
     */
    set(src: number[]): Mat4;
    /**
     * Sets the specified matrix to the identity matrix.
     *
     * @returns {Mat4} Self for chaining.
     * @example
     * m.setIdentity();
     * console.log("The matrix is " + (m.isIdentity() ? "identity" : "not identity"));
     */
    setIdentity(): Mat4;
    /**
     * Sets the specified matrix to the concatenation of a translation, a quaternion rotation and a
     * scale.
     *
     * @param {Vec3} t - A 3-d vector translation.
     * @param {Quat} r - A quaternion rotation.
     * @param {Vec3} s - A 3-d vector scale.
     * @returns {Mat4} Self for chaining.
     * @example
     * const t = new pc.Vec3(10, 20, 30);
     * const r = new pc.Quat();
     * const s = new pc.Vec3(2, 2, 2);
     *
     * const m = new pc.Mat4();
     * m.setTRS(t, r, s);
     */
    setTRS(t: Vec3, r: Quat, s: Vec3): Mat4;
    /**
     * Sets the matrix to the transpose of a source matrix.
     *
     * @param {Mat4} [src] - The matrix to transpose. If not set, the matrix is transposed in-place.
     * @returns {Mat4} Self for chaining.
     * @example
     * const m = new pc.Mat4();
     *
     * // Transpose in place
     * m.transpose();
     */
    transpose(src?: Mat4): Mat4;
    /**
     * Extracts the translational component from the specified 4x4 matrix.
     *
     * @param {Vec3} [t] - The vector to receive the translation of the matrix.
     * @returns {Vec3} The translation of the specified 4x4 matrix.
     * @example
     * // Create a 4x4 matrix
     * const m = new pc.Mat4();
     *
     * // Query the translation component
     * const t = new pc.Vec3();
     * m.getTranslation(t);
     */
    getTranslation(t?: Vec3): Vec3;
    /**
     * Extracts the x-axis from the specified 4x4 matrix.
     *
     * @param {Vec3} [x] - The vector to receive the x axis of the matrix.
     * @returns {Vec3} The x-axis of the specified 4x4 matrix.
     * @example
     * // Create a 4x4 matrix
     * const m = new pc.Mat4();
     *
     * // Query the x-axis component
     * const x = new pc.Vec3();
     * m.getX(x);
     */
    getX(x?: Vec3): Vec3;
    /**
     * Extracts the y-axis from the specified 4x4 matrix.
     *
     * @param {Vec3} [y] - The vector to receive the y axis of the matrix.
     * @returns {Vec3} The y-axis of the specified 4x4 matrix.
     * @example
     * // Create a 4x4 matrix
     * const m = new pc.Mat4();
     *
     * // Query the y-axis component
     * const y = new pc.Vec3();
     * m.getY(y);
     */
    getY(y?: Vec3): Vec3;
    /**
     * Extracts the z-axis from the specified 4x4 matrix.
     *
     * @param {Vec3} [z] - The vector to receive the z axis of the matrix.
     * @returns {Vec3} The z-axis of the specified 4x4 matrix.
     * @example
     * // Create a 4x4 matrix
     * const m = new pc.Mat4();
     *
     * // Query the z-axis component
     * const z = new pc.Vec3();
     * m.getZ(z);
     */
    getZ(z?: Vec3): Vec3;
    /**
     * Extracts the scale component from the specified 4x4 matrix.
     *
     * @param {Vec3} [scale] - Vector to receive the scale.
     * @returns {Vec3} The scale in X, Y and Z of the specified 4x4 matrix.
     * @example
     * // Query the scale component
     * const scale = m.getScale();
     */
    getScale(scale?: Vec3): Vec3;
    /**
     * -1 if the matrix has an odd number of negative scales (mirrored); 1 otherwise.
     *
     * @type {number}
     * @ignore
     */
    get scaleSign(): number;
    /**
     * Sets the specified matrix to a rotation matrix defined by Euler angles. The rotation is
     * applied using an **intrinsic XYZ** order: first around the X-axis, then around the newly
     * transformed Y-axis, and finally around the resulting Z-axis. Angles are specified in
     * degrees.
     *
     * @param {number} ex - Angle to rotate around X axis in degrees.
     * @param {number} ey - Angle to rotate around Y axis in degrees.
     * @param {number} ez - Angle to rotate around Z axis in degrees.
     * @returns {Mat4} Self for chaining.
     * @example
     * const m = new pc.Mat4();
     * m.setFromEulerAngles(45, 90, 180);
     */
    setFromEulerAngles(ex: number, ey: number, ez: number): Mat4;
    /**
     * Extracts the Euler angles equivalent to the rotational portion of the specified matrix. The
     * returned Euler angles are in **intrinsic XYZ** order and in degrees.
     *
     * @param {Vec3} [eulers] - A 3-d vector to receive the Euler angles.
     * @returns {Vec3} A 3-d vector containing the Euler angles.
     * @example
     * // Create a 4x4 rotation matrix of 45 degrees around the y-axis
     * const m = new pc.Mat4().setFromAxisAngle(pc.Vec3.UP, 45);
     *
     * const eulers = m.getEulerAngles();
     */
    getEulerAngles(eulers?: Vec3): Vec3;
    /**
     * Converts the specified matrix to string form.
     *
     * @returns {string} The matrix in string form.
     * @example
     * const m = new pc.Mat4();
     * // Outputs [1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1]
     * console.log(m.toString());
     */
    toString(): string;
}

/**
 * @import { GraphicsDevice } from '../platform/graphics/graphics-device.js'
 * @import { Mat4 } from '../core/math/mat4.js'
 */
/**
 * A skin contains data about the bones in a hierarchy that drive a skinned mesh animation.
 * Specifically, the skin stores the bone name and inverse bind matrix for each bone. Inverse
 * bind matrices are instrumental in the mathematics of vertex skinning.
 *
 * @category Graphics
 */
declare class Skin {
    /**
     * Create a new Skin instance.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device used to manage this skin.
     * @param {Mat4[]} ibp - The array of inverse bind matrices.
     * @param {string[]} boneNames - The array of bone names for the bones referenced by this skin.
     */
    constructor(graphicsDevice: GraphicsDevice, ibp: Mat4[], boneNames: string[]);
    /**
     * The graphics device used to manage this skin.
     *
     * @type {GraphicsDevice}
     */
    device: GraphicsDevice;
    /**
     * The array of inverse bind matrices, one per bone.
     *
     * @type {Mat4[]}
     */
    inverseBindPose: Mat4[];
    /**
     * The array of bone names for the bones referenced by this skin.
     *
     * @type {string[]}
     */
    boneNames: string[];
}

/**
 * An infinite ray. Rays are commonly used for picking, raycasting and intersection tests.
 *
 * @category Math
 */
declare class Ray {
    /**
     * Creates a new Ray instance. The ray is infinite, starting at a given origin and pointing in
     * a given direction.
     *
     * @param {Vec3} [origin] - The starting point of the ray. The constructor copies
     * this parameter. Defaults to the origin (0, 0, 0).
     * @param {Vec3} [direction] - The direction of the ray. The constructor copies
     * this parameter. Defaults to a direction down the world negative Z axis (0, 0, -1).
     * @example
     * // Create a new ray starting at the position of this entity and pointing down
     * // the entity's negative Z axis
     * const ray = new pc.Ray(this.entity.getPosition(), this.entity.forward);
     */
    constructor(origin?: Vec3, direction?: Vec3);
    /**
     * The starting point of the ray.
     *
     * @readonly
     * @type {Vec3}
     */
    readonly origin: Vec3;
    /**
     * The direction of the ray. Note that intersection tests (for example
     * {@link BoundingSphere#intersectsRay}) expect this vector to be normalized.
     *
     * @readonly
     * @type {Vec3}
     */
    readonly direction: Vec3;
    /**
     * Sets origin and direction to the supplied vector values.
     *
     * @param {Vec3} origin - The starting point of the ray.
     * @param {Vec3} direction - The direction of the ray.
     * @returns {Ray} Self for chaining.
     */
    set(origin: Vec3, direction: Vec3): Ray;
    /**
     * Copies the contents of a source Ray into this Ray.
     *
     * @param {Ray} src - The Ray to copy from.
     * @returns {Ray} Self for chaining.
     */
    copy(src: Ray): Ray;
    /**
     * Returns a clone of the Ray.
     *
     * @returns {this} A duplicate Ray.
     */
    clone(): this;
}

/**
 * A bounding sphere is a volume for facilitating fast intersection testing.
 *
 * @category Math
 */
declare class BoundingSphere {
    /**
     * Creates a new BoundingSphere instance.
     *
     * @param {Vec3} [center] - The world space coordinate marking the center of the sphere. The
     * constructor takes a reference of this parameter. Defaults to the origin (0, 0, 0).
     * @param {number} [radius] - The radius of the bounding sphere. Defaults to 0.5.
     * @example
     * // Create a new bounding sphere centered on the origin with a radius of 0.5
     * const sphere = new pc.BoundingSphere();
     */
    constructor(center?: Vec3, radius?: number);
    /**
     * Center of sphere.
     *
     * @type {Vec3}
     * @readonly
     */
    readonly center: Vec3;
    /**
     * The radius of the bounding sphere.
     *
     * @type {number}
     */
    radius: number;
    /**
     * Test if a point is inside the sphere.
     *
     * @param {Vec3} point - Point to test.
     * @returns {boolean} True if the point is inside the sphere and false otherwise.
     * @example
     * const sphere = new pc.BoundingSphere(new pc.Vec3(0, 0, 0), 1);
     * const point = new pc.Vec3(0.5, 0, 0);
     * const isInside = sphere.containsPoint(point); // true
     */
    containsPoint(point: Vec3): boolean;
    /**
     * Test if a ray intersects with the sphere.
     *
     * @param {Ray} ray - Ray to test against (direction must be normalized).
     * @param {Vec3} [point] - If there is an intersection, the intersection point will be copied
     * into here.
     * @returns {boolean} True if there is an intersection.
     */
    intersectsRay(ray: Ray, point?: Vec3): boolean;
    /**
     * Test if a Bounding Sphere is overlapping, enveloping, or inside this Bounding Sphere.
     *
     * @param {BoundingSphere} sphere - Bounding Sphere to test.
     * @returns {boolean} True if the Bounding Sphere is overlapping, enveloping, or inside this
     * Bounding Sphere and false otherwise.
     */
    intersectsBoundingSphere(sphere: BoundingSphere): boolean;
}

/**
 * Axis-Aligned Bounding Box. An AABB is commonly used for fast overlap tests in collision
 * detection, spatial indexing and frustum culling.
 *
 * @category Math
 */
declare class BoundingBox {
    /**
     * Compute the min and max bounding values to encapsulate all specified vertices.
     *
     * @param {number[]|Float32Array} vertices - The vertices used to compute the new size for the
     * AABB.
     * @param {Vec3} min - Stored computed min value.
     * @param {Vec3} max - Stored computed max value.
     * @param {number} [numVerts] - Number of vertices to use from the beginning of vertices array.
     * All vertices are used if not specified.
     */
    static computeMinMax(vertices: number[] | Float32Array, min: Vec3, max: Vec3, numVerts?: number): void;
    /**
     * Create a new BoundingBox instance. The bounding box is axis-aligned.
     *
     * @param {Vec3} [center] - Center of box. The constructor copies this parameter. Defaults to
     * (0, 0, 0).
     * @param {Vec3} [halfExtents] - Half the distance across the box in each axis. The constructor
     * copies this parameter. Defaults to (0.5, 0.5, 0.5).
     */
    constructor(center?: Vec3, halfExtents?: Vec3);
    /**
     * Center of box.
     *
     * @type {Vec3}
     * @readonly
     */
    readonly center: Vec3;
    /**
     * Half the distance across the box in each axis.
     *
     * @type {Vec3}
     * @readonly
     */
    readonly halfExtents: Vec3;
    /**
     * @type {Vec3}
     * @private
     */
    private _min;
    /**
     * @type {Vec3}
     * @private
     */
    private _max;
    /**
     * Combines two bounding boxes into one, enclosing both.
     *
     * @param {BoundingBox} other - Bounding box to add.
     */
    add(other: BoundingBox): void;
    /**
     * Copies the contents of a source AABB.
     *
     * @param {BoundingBox} src - The AABB to copy from.
     */
    copy(src: BoundingBox): void;
    /**
     * Returns a clone of the AABB.
     *
     * @returns {BoundingBox} A duplicate AABB.
     */
    clone(): BoundingBox;
    /**
     * Test whether two axis-aligned bounding boxes intersect.
     *
     * @param {BoundingBox} other - Bounding box to test against.
     * @returns {boolean} True if there is an intersection.
     */
    intersects(other: BoundingBox): boolean;
    /**
     * Internal ray/AABB test that also reports the intersection point — see
     * {@link BoundingBox#intersectsRay}.
     *
     * @ignore
     */
    _intersectsRay(ray: any, point: any): boolean;
    /**
     * Internal ray/AABB test with no intersection point output.
     *
     * @ignore
     */
    _fastIntersectsRay(ray: any): boolean;
    /**
     * Test if a ray intersects with the AABB.
     *
     * @param {Ray} ray - Ray to test against (direction must be normalized).
     * @param {Vec3} [point] - If there is an intersection, the intersection point will be copied
     * into here.
     * @returns {boolean} True if there is an intersection.
     */
    intersectsRay(ray: Ray, point?: Vec3): boolean;
    /**
     * Sets the minimum and maximum corner of the AABB. Using this function is faster than
     * assigning min and max separately.
     *
     * @param {Vec3} min - The minimum corner of the AABB.
     * @param {Vec3} max - The maximum corner of the AABB.
     */
    setMinMax(min: Vec3, max: Vec3): void;
    /**
     * Return the minimum corner of the AABB.
     *
     * @returns {Vec3} Minimum corner.
     */
    getMin(): Vec3;
    /**
     * Return the maximum corner of the AABB.
     *
     * @returns {Vec3} Maximum corner.
     */
    getMax(): Vec3;
    /**
     * Test if a point is inside an AABB.
     *
     * @param {Vec3} point - Point to test.
     * @returns {boolean} True if the point is inside the AABB and false otherwise.
     */
    containsPoint(point: Vec3): boolean;
    /**
     * Return the point on the AABB closest to a given point. If the point is inside the AABB, the
     * point itself is returned.
     *
     * @param {Vec3} point - Point to find the closest point to.
     * @param {Vec3} [result] - The vector to store the result in. If not provided, a new Vec3 is
     * created and returned.
     * @returns {Vec3} The closest point on the AABB.
     * @example
     * const box = new BoundingBox(new Vec3(0, 0, 0), new Vec3(1, 1, 1));
     * const point = new Vec3(2, 0, 0);
     * const closest = box.closestPoint(point); // Returns Vec3(1, 0, 0)
     * @example
     * // Reuse a result vector to avoid allocations in hot paths
     * const result = new Vec3();
     * box.closestPoint(point, result);
     */
    closestPoint(point: Vec3, result?: Vec3): Vec3;
    /**
     * Set an AABB to enclose the specified AABB if it were to be transformed by the specified 4x4
     * matrix.
     *
     * @param {BoundingBox} aabb - Box to transform and enclose.
     * @param {Mat4} m - Transformation matrix to apply to source AABB.
     * @param {boolean} [ignoreScale] - If true is specified, a scale from the matrix is ignored.
     * Defaults to false.
     */
    setFromTransformedAabb(aabb: BoundingBox, m: Mat4, ignoreScale?: boolean): void;
    /**
     * Compute the size of the AABB to encapsulate all specified vertices.
     *
     * @param {number[]|Float32Array} vertices - The vertices used to compute the new size for the
     * AABB.
     * @param {number} [numVerts] - Number of vertices to use from the beginning of vertices array.
     * All vertices are used if not specified.
     */
    compute(vertices: number[] | Float32Array, numVerts?: number): void;
    /**
     * Test if a Bounding Sphere is overlapping, enveloping, or inside this AABB.
     *
     * @param {BoundingSphere} sphere - Bounding Sphere to test.
     * @returns {boolean} True if the Bounding Sphere is overlapping, enveloping, or inside the
     * AABB and false otherwise.
     */
    intersectsBoundingSphere(sphere: BoundingSphere): boolean;
    /**
     * Squared-distance helper for sphere/AABB intersection tests.
     *
     * @ignore
     */
    _distanceToBoundingSphereSq(sphere: any): number;
    /**
     * Expands the AABB bounds by the supplied min/max deltas — internal.
     *
     * @ignore
     */
    _expand(expandMin: any, expandMax: any): void;
}

/**
 * A Morph Target (also known as Blend Shape) contains deformation data to apply to existing mesh.
 * Multiple morph targets can be blended together on a mesh. This is useful for effects that are
 * hard to achieve with conventional animation and skinning.
 *
 * @category Graphics
 */
declare class MorphTarget {
    /**
     * Create a new MorphTarget instance.
     *
     * @param {object} options - Object for passing optional arguments.
     * @param {ArrayBuffer} options.deltaPositions - An array of 3-dimensional vertex position
     * offsets.
     * @param {ArrayBuffer} [options.deltaNormals] - An array of 3-dimensional vertex normal
     * offsets.
     * @param {string} [options.name] - Name.
     * @param {BoundingBox} [options.aabb] - Bounding box. Will be automatically generated, if
     * undefined.
     * @param {number} [options.defaultWeight] - Default blend weight to use for this morph target.
     * @param {boolean} [options.preserveData] - When true, the morph target keeps its data passed
     * using the options, allowing the clone operation.
     */
    constructor(options: {
        deltaPositions: ArrayBuffer;
        deltaNormals?: ArrayBuffer;
        name?: string;
        aabb?: BoundingBox;
        defaultWeight?: number;
        preserveData?: boolean;
    }, ...args: any[]);
    /**
     * A used flag. A morph target can be used / owned by the Morph class only one time.
     *
     * @type {boolean}
     */
    used: boolean;
    /**
     * The constructor options. Retained (when options.preserveData is true) to allow
     * {@link MorphTarget#clone}.
     *
     * @ignore
     */
    options: {
        deltaPositions: ArrayBuffer;
        deltaNormals?: ArrayBuffer;
        name?: string;
        aabb?: BoundingBox;
        defaultWeight?: number;
        preserveData?: boolean;
    };
    /**
     * Backing field for {@link MorphTarget#name}.
     *
     * @ignore
     */
    _name: string;
    /**
     * Backing field for {@link MorphTarget#defaultWeight}.
     *
     * @ignore
     */
    _defaultWeight: number;
    /**
     * Backing field for {@link MorphTarget#aabb}.
     *
     * @ignore
     */
    _aabb: BoundingBox;
    /**
     * An array of 3-dimensional vertex position offsets (see constructor options).
     *
     * @type {ArrayBuffer}
     */
    deltaPositions: ArrayBuffer;
    /**
     * True if this morph target morphs vertex positions. NOTE(review): presumably derived from
     * the presence of position deltas — confirm in implementation.
     *
     * @type {boolean}
     */
    morphPositions: boolean;
    /**
     * True if this morph target morphs vertex normals. NOTE(review): presumably derived from
     * the presence of normal deltas — confirm in implementation.
     *
     * @type {boolean}
     */
    morphNormals: boolean;
    /**
     * Gets the name of the morph target.
     *
     * @type {string}
     */
    get name(): string;
    /**
     * Gets the default weight of the morph target.
     *
     * @type {number}
     */
    get defaultWeight(): number;
    /**
     * Gets the bounding box of the morph target. Automatically generated if not supplied to the
     * constructor (see options.aabb).
     *
     * @type {BoundingBox}
     */
    get aabb(): BoundingBox;
    /**
     * Returns an identical copy of the specified morph target. This can only be used if the morph
     * target was created with options.preserveData set to true.
     *
     * @returns {MorphTarget} A morph target instance containing the result of the cloning.
     */
    clone(): MorphTarget;
    /**
     * @ignore
     */
    _postInit(): void;
}

/**
 * @import { GraphicsDevice } from '../platform/graphics/graphics-device.js'
 * @import { MorphTarget } from './morph-target.js'
 */
/**
 * Contains a list of {@link MorphTarget}s, a combined delta AABB and some associated data.
 *
 * @category Graphics
 */
declare class Morph extends RefCountedObject {
    /**
     * Create a new Morph instance.
     *
     * @param {MorphTarget[]} targets - A list of morph targets.
     * @param {GraphicsDevice} graphicsDevice - The graphics device used to manage this morph.
     * @param {object} [options] - Object for passing optional arguments.
     * @param {boolean} [options.preferHighPrecision] - True if high precision storage should be
     * preferred. This is faster to create and allows higher precision, but takes more memory and
     * might be slower to render. Defaults to false.
     */
    constructor(targets: MorphTarget[], graphicsDevice: GraphicsDevice, { preferHighPrecision }?: {
        preferHighPrecision?: boolean;
    });
    /**
     * @type {BoundingBox}
     * @private
     */
    private _aabb;
    /** @type {boolean} */
    preferHighPrecision: boolean;
    /**
     * The graphics device used to manage this morph.
     *
     * @type {GraphicsDevice}
     */
    device: GraphicsDevice;
    /**
     * Backing array for {@link Morph#targets}.
     *
     * @ignore
     */
    _targets: MorphTarget[];
    /** @ignore */
    _renderTextureFormat: number;
    intRenderFormat: boolean;
    /** @ignore */
    _textureFormat: number;
    /**
     * Frees video memory allocated by this object.
     */
    destroy(): void;
    vertexBufferIds: VertexBuffer;
    targetsTexturePositions: Texture;
    targetsTextureNormals: Texture;
    /**
     * Gets the combined delta AABB of the morph targets.
     *
     * @type {BoundingBox}
     */
    get aabb(): BoundingBox;
    /**
     * True if vertex positions are morphed. NOTE(review): presumably set when any target
     * contains position deltas — confirm in implementation.
     *
     * @type {boolean}
     */
    get morphPositions(): boolean;
    /**
     * True if vertex normals are morphed. NOTE(review): presumably set when any target
     * contains normal deltas — confirm in implementation.
     *
     * @type {boolean}
     */
    get morphNormals(): boolean;
    /** @ignore */
    _init(): void;
    /** @ignore */
    _findSparseSet(deltaArrays: any, ids: any, usedDataIndices: any): number;
    /** @ignore */
    _initTextureBased(): boolean;
    morphTextureWidth: number;
    morphTextureHeight: number;
    /**
     * Gets the array of morph targets.
     *
     * @type {MorphTarget[]}
     */
    get targets(): MorphTarget[];
    /** @ignore */
    _updateMorphFlags(): void;
    /**
     * Backing field for {@link Morph#morphPositions}.
     *
     * @ignore
     */
    _morphPositions: boolean;
    /**
     * Backing field for {@link Morph#morphNormals}.
     *
     * @ignore
     */
    _morphNormals: boolean;
    /**
     * Creates a texture / texture array. Used to create both source morph target data, as well as
     * render target used to morph these into, positions and normals.
     *
     * @param {string} name - The name of the texture.
     * @param {number} format - The format of the texture.
     * @param {Array} [levels] - The levels of the texture.
     * @param {number} [arrayLength] - The length of the texture array.
     * @returns {Texture} The created texture.
     * @private
     */
    private _createTexture;
}

/**
 * The Geometry class serves as a container for storing geometric information. It encapsulates data
 * such as positions, normals, colors, and indices.
 *
 * @category Graphics
 */
declare class Geometry {
    /**
     * Vertex positions.
     *
     * @type {number[]|undefined}
     */
    positions: number[] | undefined;
    /**
     * Vertex normals.
     *
     * @type {number[]|undefined}
     */
    normals: number[] | undefined;
    /**
     * Vertex colors.
     *
     * @type {number[]|undefined}
     */
    colors: number[] | undefined;
    /**
     * UVs (texture coordinates).
     *
     * @type {number[]|undefined}
     */
    uvs: number[] | undefined;
    /**
     * Additional UVs (second set of texture coordinates).
     *
     * @type {number[]|undefined}
     */
    uvs1: number[] | undefined;
    /**
     * Blend indices.
     *
     * @type {number[]|undefined}
     */
    blendIndices: number[] | undefined;
    /**
     * Blend weights.
     *
     * @type {number[]|undefined}
     */
    blendWeights: number[] | undefined;
    /**
     * Tangents.
     *
     * @type {number[]|undefined}
     */
    tangents: number[] | undefined;
    /**
     * Indices.
     *
     * @type {number[]|undefined}
     */
    indices: number[] | undefined;
    /**
     * Generates normal information from the positions and triangle indices. Requires positions
     * and indices to be set.
     */
    calculateNormals(): void;
    /**
     * Generates tangent information from the positions, normals, texture coordinates and triangle
     * indices. Requires those attributes to be set.
     */
    calculateTangents(): void;
}

/**
 * A graphical primitive. The mesh is defined by a {@link VertexBuffer} and an optional
 * {@link IndexBuffer}. It also contains a primitive definition which controls the type of the
 * primitive and the portion of the vertex or index buffer to use.
 *
 * ## Mesh APIs
 * There are two ways a mesh can be generated or updated.
 *
 * ### Simple Mesh API
 * {@link Mesh} class provides interfaces such as {@link Mesh#setPositions} and {@link Mesh#setUvs}
 * that provide a simple way to provide vertex and index data for the Mesh, and hiding the
 * complexity of creating the {@link VertexFormat}. This is the recommended interface to use.
 *
 * A simple example which creates a Mesh with 3 vertices, containing position coordinates only, to
 * form a single triangle.
 *
 * ```javascript
 * const mesh = new pc.Mesh(device);
 * const positions = [
 *     0, 0, 0, // pos 0
 *     1, 0, 0, // pos 1
 *     1, 1, 0  // pos 2
 * ];
 * mesh.setPositions(positions);
 * mesh.update();
 * ```
 *
 * An example which creates a Mesh with 4 vertices, containing position and uv coordinates in
 * channel 0, and an index buffer to form two triangles. Float32Array is used for positions and uvs.
 *
 * ```javascript
 * const mesh = new pc.Mesh(device);
 * const positions = new Float32Array([
 *     0, 0, 0, // pos 0
 *     1, 0, 0, // pos 1
 *     1, 1, 0, // pos 2
 *     0, 1, 0  // pos 3
 * ]);
 * const uvs = new Float32Array([
 *     0, 0, // uv 0
 *     1, 0, // uv 1
 *     1, 1, // uv 2
 *     0, 1  // uv 3
 * ]);
 * const indices = [
 *     0, 1, 2, // triangle 0
 *     0, 2, 3  // triangle 1
 * ];
 * mesh.setPositions(positions);
 * mesh.setNormals(pc.calculateNormals(positions, indices));
 * mesh.setUvs(0, uvs);
 * mesh.setIndices(indices);
 * mesh.update();
 * ```
 *
 * This example demonstrates that vertex attributes such as position and normals, and also indices
 * can be provided using Arrays ([]) and also Typed Arrays (Float32Array and similar). Note that
 * typed arrays have higher performance, and are generally recommended for per-frame operations or
 * larger meshes, but their construction using new operator is costly operation. If you only need
 * to operate on a small number of vertices or indices, consider using Arrays to avoid the overhead
 * associated with allocating Typed Arrays.
 *
 * Follow these links for more complex examples showing the functionality.
 *
 * - {@link https://playcanvas.github.io/#graphics/mesh-decals}
 * - {@link https://playcanvas.github.io/#graphics/mesh-deformation}
 * - {@link https://playcanvas.github.io/#graphics/mesh-generation}
 * - {@link https://playcanvas.github.io/#graphics/point-cloud-simulation}
 *
 * ### Update Vertex and Index buffers
 * This allows greater flexibility, but is more complex to use. It allows more advanced setups, for
 * example sharing a Vertex or Index Buffer between multiple meshes. See {@link VertexBuffer},
 * {@link IndexBuffer} and {@link VertexFormat} for details.
 *
 * @category Graphics
 */
declare class Mesh extends RefCountedObject {
    /**
     * Create a new Mesh instance from {@link Geometry} object.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device used to manage this mesh.
     * @param {Geometry} geometry - The geometry object to create the mesh from.
     * @param {object} [options] - An object that specifies optional inputs for the function as follows:
     * @param {boolean} [options.storageVertex] - Defines if the vertex buffer of the mesh can be used as
     * a storage buffer by a compute shader. Defaults to false. Only supported on WebGPU.
     * @param {boolean} [options.storageIndex] - Defines if the index buffer of the mesh can be used as
     * a storage buffer by a compute shader. Defaults to false. Only supported on WebGPU.
     * @returns {Mesh} A new mesh.
     */
    static fromGeometry(graphicsDevice: GraphicsDevice, geometry: Geometry, options?: {
        storageVertex?: boolean;
        storageIndex?: boolean;
    }): Mesh;
    /**
     * Create a new Mesh instance.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device used to manage this mesh.
     * @param {object} [options] - Object for passing optional arguments.
     * @param {boolean} [options.storageVertex] - Defines if the vertex buffer can be used as
     * a storage buffer by a compute shader. Defaults to false. Only supported on WebGPU.
     * @param {boolean} [options.storageIndex] - Defines if the index buffer can be used as
     * a storage buffer by a compute shader. Defaults to false. Only supported on WebGPU.
     */
    constructor(graphicsDevice: GraphicsDevice, options?: {
        storageVertex?: boolean;
        storageIndex?: boolean;
    });
    /**
     * An array of index buffers. For unindexed meshes, this array can be empty. The first index
     * buffer in the array is used by {@link MeshInstance}s with a `renderStyle` property set to
     * {@link RENDERSTYLE_SOLID}. The second index buffer in the array is used if `renderStyle` is
     * set to {@link RENDERSTYLE_WIREFRAME}.
     *
     * @type {IndexBuffer[]}
     */
    indexBuffer: IndexBuffer[];
    /**
     * The vertex buffer holding the vertex data of the mesh.
     *
     * @type {VertexBuffer}
     */
    vertexBuffer: VertexBuffer;
    /**
     * Array of primitive objects defining how vertex (and index) data in the mesh should be
     * interpreted by the graphics device.
     *
     * - `type` is the type of primitive to render. Can be:
     *
     *   - {@link PRIMITIVE_POINTS}
     *   - {@link PRIMITIVE_LINES}
     *   - {@link PRIMITIVE_LINELOOP}
     *   - {@link PRIMITIVE_LINESTRIP}
     *   - {@link PRIMITIVE_TRIANGLES}
     *   - {@link PRIMITIVE_TRISTRIP}
     *   - {@link PRIMITIVE_TRIFAN}
     *
     * - `base` is the offset of the first index or vertex to dispatch in the draw call.
     * - `baseVertex` is the number added to each index value before indexing into the vertex buffers. (supported only in WebGPU, ignored in WebGL2)
     * - `count` is the number of indices or vertices to dispatch in the draw call.
     * - `indexed` specifies whether to interpret the primitive as indexed, thereby using the
     * currently set index buffer.
     *
     * @type {{type: number, base: number, baseVertex: number, count: number, indexed?: boolean}[]}
     */
    primitive: {
        type: number;
        base: number;
        baseVertex: number;
        count: number;
        indexed?: boolean;
    }[];
    /**
     * The skin data (if any) that drives skinned mesh animations for this mesh.
     *
     * @type {Skin|null}
     */
    skin: Skin | null;
    /**
     * Array of object space AABBs of vertices affected by each bone.
     *
     * @type {BoundingBox[]|null}
     * @ignore
     */
    boneAabb: BoundingBox[] | null;
    /**
     * Internal version of AABB, incremented when local AABB changes.
     *
     * @ignore
     */
    _aabbVer: number;
    /**
     * AABB representing object space bounds of the mesh.
     *
     * @type {BoundingBox}
     * @private
     */
    private _aabb;
    /**
     * @type {GeometryData|null}
     * @private
     */
    private _geometryData;
    /**
     * @type {Morph|null}
     * @private
     */
    private _morph;
    /**
     * True if the created index buffer should be accessible as a storage buffer in compute shader.
     *
     * @type {boolean}
     * @private
     */
    private _storageIndex;
    /**
     * True if the created vertex buffer should be accessible as a storage buffer in compute shader.
     *
     * @type {boolean}
     * @private
     */
    private _storageVertex;
    /**
     * Identifier of the mesh. NOTE(review): presumably unique per instance — confirm against
     * the implementation.
     *
     * @type {number}
     */
    id: number;
    /**
     * The graphics device associated with this mesh.
     *
     * @type {GraphicsDevice}
     */
    device: GraphicsDevice;
    /**
     * Sets the morph data that drives morph target animations for this mesh. Set to null if
     * morphing is not used.
     *
     * @type {Morph|null}
     */
    set morph(morph: Morph | null);
    /**
     * Gets the morph data that drives morph target animations for this mesh.
     *
     * @type {Morph|null}
     */
    get morph(): Morph | null;
    /**
     * Sets the axis-aligned bounding box for the object space vertices of this mesh.
     *
     * @type {BoundingBox}
     */
    set aabb(aabb: BoundingBox);
    /**
     * Gets the axis-aligned bounding box for the object space vertices of this mesh.
     *
     * @type {BoundingBox}
     */
    get aabb(): BoundingBox;
    /**
     * Destroys the {@link VertexBuffer} and {@link IndexBuffer}s associated with the mesh. This is
     * normally called by {@link Model#destroy} and does not need to be called manually.
     */
    destroy(): void;
    /**
     * Destroys the index buffer stored at the given index. NOTE(review): behavior inferred from
     * the name — confirm against the implementation. The parameter is untyped (`any`) in this
     * declaration.
     *
     * @ignore
     */
    _destroyIndexBuffer(index: any): void;
    /**
     * Initializes the per-bone AABBs stored in {@link Mesh#boneAabb}, optionally taking morph
     * targets into account. NOTE(review): inferred from the name and the `boneAabb` field —
     * confirm.
     *
     * @ignore
     */
    _initBoneAabbs(morphTargets: any): void;
    /**
     * Per-bone usage data populated alongside {@link Mesh#boneAabb}. NOTE(review): element type
     * and exact semantics are not visible here — confirm.
     *
     * @ignore
     */
    boneUsed: any[];
    /**
     * Initializes the internal {@link GeometryData} used by the set*/get* vertex and index
     * methods. NOTE(review): inferred from the name — confirm.
     *
     * @ignore
     */
    _initGeometryData(): void;
    /**
     * Clears the mesh of existing vertices and indices and resets the {@link VertexFormat}
     * associated with the mesh. This call is typically followed by calls to methods such as
     * {@link Mesh#setPositions}, {@link Mesh#setVertexStream} or {@link Mesh#setIndices} and
     * finally {@link Mesh#update} to rebuild the mesh, allowing different {@link VertexFormat}.
     *
     * @param {boolean} [verticesDynamic] - Indicates the {@link VertexBuffer} should be created
     * with {@link BUFFER_DYNAMIC} usage. If not specified, {@link BUFFER_STATIC} is used.
     * @param {boolean} [indicesDynamic] - Indicates the {@link IndexBuffer} should be created with
     * {@link BUFFER_DYNAMIC} usage. If not specified, {@link BUFFER_STATIC} is used.
     * @param {number} [maxVertices] - A {@link VertexBuffer} will be allocated with at least
     * maxVertices, allowing additional vertices to be added to it without the allocation. If no
     * value is provided, a size to fit the provided vertices will be allocated.
     * @param {number} [maxIndices] - An {@link IndexBuffer} will be allocated with at least
     * maxIndices, allowing additional indices to be added to it without the allocation. If no
     * value is provided, a size to fit the provided indices will be allocated.
     */
    clear(verticesDynamic?: boolean, indicesDynamic?: boolean, maxVertices?: number, maxIndices?: number): void;
    /**
     * Sets the vertex data for any supported semantic.
     *
     * @param {string} semantic - The meaning of the vertex element. For supported semantics, see
     * SEMANTIC_* in {@link VertexFormat}.
     * @param {number[]|ArrayBufferView} data - Vertex data for the specified semantic.
     * @param {number} componentCount - The number of values that form a single Vertex element. For
     * example when setting a 3D position represented by 3 numbers per vertex, number 3 should be
     * specified.
     * @param {number} [numVertices] - The number of vertices to be used from data array. If not
     * provided, the whole data array is used. This allows to use only part of the data array.
     * @param {number} [dataType] - The format of data when stored in the {@link VertexBuffer}, see
     * TYPE_* in {@link VertexFormat}. When not specified, {@link TYPE_FLOAT32} is used.
     * @param {boolean} [dataTypeNormalize] - If true, vertex attribute data will be mapped from a
     * 0 to 255 range down to 0 to 1 when fed to a shader. If false, vertex attribute data is left
     * unchanged. If this property is unspecified, false is assumed.
     * @param {boolean} [asInt] - If true, vertex attribute data will be accessible as integer
     * numbers in shader code. Defaults to false, which means that vertex attribute data will be
     * accessible as floating point numbers. Can be only used with INT and UINT data types.
     */
    setVertexStream(semantic: string, data: number[] | ArrayBufferView, componentCount: number, numVertices?: number, dataType?: number, dataTypeNormalize?: boolean, asInt?: boolean): void;
    /**
     * Gets the vertex data corresponding to a semantic.
     *
     * @param {string} semantic - The semantic of the vertex element to get. For supported
     * semantics, see SEMANTIC_* in {@link VertexFormat}.
     * @param {number[]|ArrayBufferView} data - An array to populate with the vertex data. When
     * typed array is supplied, enough space needs to be reserved, otherwise only partial data is
     * copied.
     * @returns {number} Returns the number of vertices populated.
     */
    getVertexStream(semantic: string, data: number[] | ArrayBufferView): number;
    /**
     * Sets the vertex positions array. Vertices are stored using {@link TYPE_FLOAT32} format.
     *
     * @param {number[]|ArrayBufferView} positions - Vertex data containing positions.
     * @param {number} [componentCount] - The number of values that form a single position element.
     * Defaults to 3 if not specified, corresponding to x, y and z coordinates.
     * @param {number} [numVertices] - The number of vertices to be used from data array. If not
     * provided, the whole data array is used. This allows to use only part of the data array.
     */
    setPositions(positions: number[] | ArrayBufferView, componentCount?: number, numVertices?: number): void;
    /**
     * Sets the vertex normals array. Normals are stored using {@link TYPE_FLOAT32} format.
     *
     * @param {number[]|ArrayBufferView} normals - Vertex data containing normals.
     * @param {number} [componentCount] - The number of values that form a single normal element.
     * Defaults to 3 if not specified, corresponding to x, y and z direction.
     * @param {number} [numVertices] - The number of vertices to be used from data array. If not
     * provided, the whole data array is used. This allows to use only part of the data array.
     */
    setNormals(normals: number[] | ArrayBufferView, componentCount?: number, numVertices?: number): void;
    /**
     * Sets the vertex uv array. Uvs are stored using {@link TYPE_FLOAT32} format.
     *
     * @param {number} channel - The uv channel in [0..7] range.
     * @param {number[]|ArrayBufferView} uvs - Vertex data containing uv-coordinates.
     * @param {number} [componentCount] - The number of values that form a single uv element.
     * Defaults to 2 if not specified, corresponding to u and v coordinates.
     * @param {number} [numVertices] - The number of vertices to be used from data array. If not
     * provided, the whole data array is used. This allows to use only part of the data array.
     */
    setUvs(channel: number, uvs: number[] | ArrayBufferView, componentCount?: number, numVertices?: number): void;
    /**
     * Sets the vertex color array. Colors are stored using {@link TYPE_FLOAT32} format, which is
     * useful for HDR colors.
     *
     * @param {number[]|ArrayBufferView} colors - Vertex data containing colors.
     * @param {number} [componentCount] - The number of values that form a single color element.
     * Defaults to 4 if not specified, corresponding to r, g, b and a.
     * @param {number} [numVertices] - The number of vertices to be used from data array. If not
     * provided, the whole data array is used. This allows to use only part of the data array.
     */
    setColors(colors: number[] | ArrayBufferView, componentCount?: number, numVertices?: number): void;
    /**
     * Sets the vertex color array. Colors are stored using {@link TYPE_UINT8} format, which is
     * useful for LDR colors. Values in the array are expected in [0..255] range, and are mapped to
     * [0..1] range in the shader.
     *
     * @param {number[]|ArrayBufferView} colors - Vertex data containing colors. The array is
     * expected to contain 4 components per vertex, corresponding to r, g, b and a.
     * @param {number} [numVertices] - The number of vertices to be used from data array. If not
     * provided, the whole data array is used. This allows to use only part of the data array.
     */
    setColors32(colors: number[] | ArrayBufferView, numVertices?: number): void;
    /**
     * Sets the index array. Indices are stored using 16-bit format by default, unless more than
     * 65535 vertices are specified, in which case 32-bit format is used.
     *
     * @param {number[]|Uint8Array|Uint16Array|Uint32Array} indices - The array of indices that
     * define primitives (lines, triangles, etc.).
     * @param {number} [numIndices] - The number of indices to be used from data array. If not
     * provided, the whole data array is used. This allows to use only part of the data array.
     */
    setIndices(indices: number[] | Uint8Array | Uint16Array | Uint32Array, numIndices?: number): void;
    /**
     * Gets the vertex positions data.
     *
     * @param {number[]|ArrayBufferView} positions - An array to populate with the vertex data.
     * When typed array is supplied, enough space needs to be reserved, otherwise only partial data
     * is copied.
     * @returns {number} Returns the number of vertices populated.
     */
    getPositions(positions: number[] | ArrayBufferView): number;
    /**
     * Gets the vertex normals data.
     *
     * @param {number[]|ArrayBufferView} normals - An array to populate with the vertex data. When
     * typed array is supplied, enough space needs to be reserved, otherwise only partial data is
     * copied.
     * @returns {number} Returns the number of vertices populated.
     */
    getNormals(normals: number[] | ArrayBufferView): number;
    /**
     * Gets the vertex uv data.
     *
     * @param {number} channel - The uv channel in [0..7] range.
     * @param {number[]|ArrayBufferView} uvs - An array to populate with the vertex data. When
     * typed array is supplied, enough space needs to be reserved, otherwise only partial data is
     * copied.
     * @returns {number} Returns the number of vertices populated.
     */
    getUvs(channel: number, uvs: number[] | ArrayBufferView): number;
    /**
     * Gets the vertex color data.
     *
     * @param {number[]|ArrayBufferView} colors - An array to populate with the vertex data. When
     * typed array is supplied, enough space needs to be reserved, otherwise only partial data is
     * copied.
     * @returns {number} Returns the number of vertices populated.
     */
    getColors(colors: number[] | ArrayBufferView): number;
    /**
     * Gets the index data.
     *
     * @param {number[]|Uint8Array|Uint16Array|Uint32Array} indices - An array to populate with the
     * index data. When a typed array is supplied, enough space needs to be reserved, otherwise
     * only partial data is copied.
     * @returns {number} Returns the number of indices populated.
     */
    getIndices(indices: number[] | Uint8Array | Uint16Array | Uint32Array): number;
    /**
     * Applies any changes to vertex stream and indices to mesh. This allocates or reallocates
     * {@link vertexBuffer} or {@link indexBuffer} to fit all provided vertices and indices, and
     * fills them with data.
     *
     * @param {number} [primitiveType] - The type of primitive to render. Can be:
     *
     * - {@link PRIMITIVE_POINTS}
     * - {@link PRIMITIVE_LINES}
     * - {@link PRIMITIVE_LINELOOP}
     * - {@link PRIMITIVE_LINESTRIP}
     * - {@link PRIMITIVE_TRIANGLES}
     * - {@link PRIMITIVE_TRISTRIP}
     * - {@link PRIMITIVE_TRIFAN}
     *
     * Defaults to {@link PRIMITIVE_TRIANGLES} if not specified.
     * @param {boolean} [updateBoundingBox] - True to update bounding box. Bounding box is updated
     * only if positions were set since last time update was called, and `componentCount` for
     * position was 3, otherwise bounding box is not updated. See {@link Mesh#setPositions}.
     * Defaults to true if not specified. Set this to false to avoid update of the bounding box and
     * use aabb property to set it instead.
     */
    update(primitiveType?: number, updateBoundingBox?: boolean): void;
    /**
     * Builds a {@link VertexFormat} matching the currently set vertex streams. NOTE(review):
     * inferred from the name and return type — confirm.
     *
     * @ignore
     */
    _buildVertexFormat(vertexCount: any): VertexFormat;
    /**
     * Applies pending vertex data to the vertex buffer. NOTE(review): inferred — confirm.
     *
     * @ignore
     */
    _updateVertexBuffer(): void;
    /**
     * Applies pending index data to the index buffer. NOTE(review): inferred — confirm.
     *
     * @ignore
     */
    _updateIndexBuffer(): void;
    /**
     * Prepares internal state for the given render style (for example wireframe, see
     * {@link Mesh#generateWireframe}). NOTE(review): inferred — confirm.
     *
     * @ignore
     */
    prepareRenderState(renderStyle: any): void;
    /**
     * @ignore
     */
    updateRenderStates(): void;
    /**
     * Generates the data needed to render this mesh in wireframe style. NOTE(review): inferred
     * from the name — confirm.
     */
    generateWireframe(): void;
}

/**
 * An Animation contains the data that defines how a {@link Skeleton} animates over time. The
 * Animation contains an array of {@link AnimationNode}s, where each AnimationNode targets a
 * specific {@link GraphNode} referenced by a {@link Skeleton}.
 *
 * An Animation can be played back by an {@link AnimationComponent}.
 *
 * @category Animation
 */
declare class Animation {
    /**
     * Human-readable name of the animation.
     *
     * @type {string}
     */
    name: string;
    /**
     * Duration of the animation in seconds.
     *
     * @type {number}
     */
    duration: number;
    /**
     * Internal list of animation nodes, exposed via {@link Animation#nodes}.
     *
     * @ignore
     */
    _nodes: any[];
    /**
     * Internal lookup of animation nodes. NOTE(review): presumably keyed by node name, given
     * {@link Animation#getNode} — confirm against the implementation.
     *
     * @ignore
     */
    _nodeDict: {};
    /**
     * Gets an {@link AnimationNode} by name.
     *
     * @param {string} name - The name of the {@link AnimationNode}.
     * @returns {AnimationNode} The {@link AnimationNode} with the specified name.
     */
    getNode(name: string): AnimationNode;
    /**
     * Adds a node to the internal nodes array.
     *
     * @param {AnimationNode} node - The node to add.
     */
    addNode(node: AnimationNode): void;
    /**
     * A read-only property to get the array of animation nodes.
     *
     * @type {AnimationNode[]}
     */
    get nodes(): AnimationNode[];
}
/**
 * A single keyframe holding a time plus position, rotation and scale values. NOTE(review):
 * purpose inferred from the field names; all members are untyped (`any`) in this declaration —
 * confirm concrete types against the implementation.
 *
 * @ignore
 */
declare class AnimationKey {
    /**
     * Create a new AnimationKey.
     *
     * @param {*} time - Time of the key. NOTE(review): presumably in seconds, matching
     * {@link Animation#duration} — confirm.
     * @param {*} position - Position value at this key.
     * @param {*} rotation - Rotation value at this key.
     * @param {*} scale - Scale value at this key.
     */
    constructor(time: any, position: any, rotation: any, scale: any);
    time: any;
    position: any;
    rotation: any;
    scale: any;
}
/**
 * AnimationNode represents an array of keyframes that animate the transform of a {@link GraphNode}
 * over time. Typically, an {@link Animation} maintains a collection of AnimationNodes, one for
 * each GraphNode in a {@link Skeleton}.
 *
 * @category Animation
 */
declare class AnimationNode {
    /**
     * Name of this node. NOTE(review): presumably matches the name of the target
     * {@link GraphNode} — confirm against the implementation.
     *
     * @ignore
     */
    _name: string;
    /**
     * The keyframes of this node. NOTE(review): element type is untyped (`any`) here;
     * presumably {@link AnimationKey} — confirm.
     *
     * @ignore
     */
    _keys: any[];
}

/**
 * Validation options applied when a shader chunk is set. Registered via
 * {@link ShaderChunks.registerValidation} and consumed by {@link ShaderChunkMap}.
 */
type ChunkValidation = {
    /**
     * - Deprecation message to display.
     */
    message?: string;
    /**
     * - Validation callback receiving chunk name and code.
     */
    callback?: (arg0: string, arg1: string) => void;
    /**
     * - Default GLSL code. If matches, no warning.
     */
    defaultCodeGLSL?: string;
    /**
     * - Default WGSL code. If matches, no warning.
     */
    defaultCodeWGSL?: string;
};
/**
 * @typedef {object} ChunkValidation
 * @property {string} [message] - Deprecation message to display.
 * @property {function(string, string):void} [callback] - Validation callback receiving chunk name and code.
 * @property {string} [defaultCodeGLSL] - Default GLSL code. If matches, no warning.
 * @property {string} [defaultCodeWGSL] - Default WGSL code. If matches, no warning.
 */
/**
 * A collection of shader chunks, used by {@link ShaderChunks}. This is a map of shader chunk names
 * to their code. As this class extends `Map`, it can be used as a `Map` as well in addition to
 * custom functionality it provides.
 *
 * @category Graphics
 */
declare class ShaderChunkMap extends Map<any, any> {
    /**
     * Create a new ShaderChunkMap instance.
     *
     * @param {Map<string, ChunkValidation>} [validations] - Optional map of chunk validations.
     * @ignore
     */
    constructor(validations?: Map<string, ChunkValidation>);
    /**
     * Reference to chunk validations map.
     *
     * @type {Map<string, ChunkValidation>|undefined}
     * @private
     */
    private _validations;
    /**
     * Internal flag tracking whether {@link ShaderChunkMap#key} needs rebuilding. NOTE(review):
     * semantics inferred from the name — confirm against the implementation.
     *
     * @ignore
     */
    _keyDirty: boolean;
    /**
     * Cached value backing {@link ShaderChunkMap#key}. NOTE(review): inferred — confirm.
     *
     * @ignore
     */
    _key: string;
    /**
     * Adds a new shader chunk with a specified name and shader source code to the Map. If an
     * element with the same name already exists, the element will be updated.
     *
     * @param {string} name - The name of the shader chunk.
     * @param {string} code - The shader source code.
     * @returns {this} The ShaderChunkMap instance.
     */
    set(name: string, code: string): this;
    /**
     * Adds multiple shader chunks to the Map. This method accepts an object where the keys are the
     * names of the shader chunks and the values are the shader source code. If an element with the
     * same name already exists, the element will be updated.
     *
     * @param {Object} object - Object containing shader chunks.
     * @param {boolean} [override] - Whether to override existing shader chunks. Defaults to true.
     * @returns {this} The ShaderChunkMap instance.
     */
    add(object: any, override?: boolean): this;
    /**
     * Removes a shader chunk by name from the Map. If the element does not exist, no action is
     * taken.
     *
     * @param {string} name - The name of the shader chunk to remove.
     * @returns {boolean} True if an element in the Map existed and has been removed, or false if the
     * element does not exist.
     */
    delete(name: string): boolean;
    /**
     * Marks the map as dirty, so that {@link ShaderChunkMap#isDirty} reports true until
     * {@link ShaderChunkMap#resetDirty} is called. NOTE(review): inferred from the surrounding
     * API — confirm.
     */
    markDirty(): void;
    /**
     * Internal dirty flag backing {@link ShaderChunkMap#isDirty}.
     *
     * @ignore
     */
    _dirty: boolean;
    /**
     * @returns {boolean} True if the map has been marked dirty since the last
     * {@link ShaderChunkMap#resetDirty}.
     */
    isDirty(): boolean;
    /**
     * Clears the dirty state.
     */
    resetDirty(): void;
    /**
     * A string key derived from the map contents. NOTE(review): presumably used for shader
     * caching — confirm against the implementation.
     *
     * @type {string}
     */
    get key(): string;
    /**
     * Copy the shader chunk map.
     *
     * @param {ShaderChunkMap} source - The instance to copy.
     * @returns {this} The destination instance.
     * @ignore
     */
    copy(source: ShaderChunkMap): this;
}

/**
 * A collection of GLSL and WGSL shader chunks, used to generate shaders.
 *
 * @category Graphics
 */
declare class ShaderChunks {
    /**
     * Static map of chunk validations shared by all instances.
     *
     * @type {Map<string, ChunkValidation>}
     * @private
     */
    private static _validations;
    /**
     * Returns a shader chunks map for the given device and shader language.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {string} [shaderLanguage] - The shader language to use (GLSL or WGSL).
     * @returns {ShaderChunkMap} The shader chunks for the specified language.
     */
    static get(device: GraphicsDevice, shaderLanguage?: string): ShaderChunkMap;
    /**
     * Register a validation for a shader chunk. When the chunk is set, the validation will be
     * executed. This is useful for deprecation warnings or content validation.
     *
     * @param {string} name - The name of the shader chunk.
     * @param {ChunkValidation} options - Validation options.
     * @example
     * // Deprecate an existing chunk - only warn when overridden with non-default code
     * import { myChunksGLSL } from './glsl/collections/my-chunks-glsl.js';
     * import { myChunksWGSL } from './wgsl/collections/my-chunks-wgsl.js';
     *
     * ShaderChunks.registerValidation('myChunkVS', {
     *     message: 'myChunkVS is deprecated. Use newChunkVS instead.',
     *     defaultCodeGLSL: myChunksGLSL.myChunkVS,
     *     defaultCodeWGSL: myChunksWGSL.myChunkVS
     * });
     * @example
     * // Warn for a removed chunk - any attempt to use it triggers warning
     * ShaderChunks.registerValidation('removedChunkVS', {
     *     message: 'removedChunkVS has been removed. Use replacementChunkVS instead.'
     * });
     * @example
     * // Use callback for custom validation logic
     * ShaderChunks.registerValidation('myChunkVS', {
     *     callback: (name, code) => {
     *         if (code.includes('gl_FragColor')) {
     *             Debug.error(`Chunk ${name} uses deprecated gl_FragColor. Use pcFragColor instead.`);
     *         }
     *     }
     * });
     * @ignore
     */
    static registerValidation(name: string, options: ChunkValidation): void;
    /**
     * A map of shader chunks for GLSL.
     *
     * @type {ShaderChunkMap}
     * @ignore
     */
    glsl: ShaderChunkMap;
    /**
     * A map of shader chunks for WGSL.
     *
     * @type {ShaderChunkMap}
     * @ignore
     */
    wgsl: ShaderChunkMap;
    /**
     * Specifies the API version of the shader chunks.
     *
     * This should be a string containing the current engine major and minor version (e.g., '2.8'
     * for engine v2.8.1) and ensures compatibility with the current engine version. When providing
     * custom shader chunks, set this to the latest supported version. If a future engine release no
     * longer supports the specified version, a warning will be issued. In that case, update your
     * shader chunks to match the new format and set this to the latest version accordingly.
     *
     * @type {string}
     */
    version: string;
    /**
     * True if the WGSL chunks are in use. NOTE(review): inferred from the name — confirm
     * against the implementation.
     */
    get useWGSL(): boolean;
    /**
     * A string key derived from the chunk maps. NOTE(review): presumably combines the keys of
     * {@link ShaderChunks#glsl} and {@link ShaderChunks#wgsl} for caching — confirm.
     */
    get key(): string;
    /**
     * @returns {boolean} True if the chunks have been modified since the dirty state was last
     * reset. NOTE(review): inferred from the parallel {@link ShaderChunkMap} API — confirm.
     */
    isDirty(): boolean;
    /**
     * Clears the dirty state.
     */
    resetDirty(): void;
    /**
     * Copy the shader chunks.
     *
     * @param {ShaderChunks} source - The instance to copy.
     * @returns {ShaderChunks} The destination instance.
     * @ignore
     */
    copy(source: ShaderChunks): ShaderChunks;
}

/**
 * @import { ShaderChunks } from '../shader-chunks.js';
 */
/**
 * The lit shader options determines how the lit-shader gets generated. It specifies a set of
 * parameters which triggers different fragment and vertex shader generation in the backend.
 *
 * Members without JSDoc below are engine-internal generation flags. NOTE(review): their
 * semantics are not documented in this declaration — confirm against the lit-shader backend.
 *
 * @category Graphics
 */
declare class LitShaderOptions {
    // NOTE(review): presumably true when the mesh provides tangent data — confirm.
    hasTangents: boolean;
    /**
     * Custom shader chunks that will replace default ones.
     *
     * @type {ShaderChunks|null}
     */
    shaderChunks: ShaderChunks | null;
    // NOTE(review): presumably a SHADER_*** pass constant — confirm.
    pass: number;
    /**
     * Enable alpha testing. See {@link Material#alphaTest}.
     *
     * @type {boolean}
     */
    alphaTest: boolean;
    /**
     * The value of {@link Material#blendType}.
     *
     * @type {number}
     */
    blendType: number;
    separateAmbient: boolean;
    screenSpace: boolean;
    skin: boolean;
    batch: boolean;
    /**
     * If hardware instancing compatible shader should be generated. Transform is read from
     * per-instance {@link VertexBuffer} instead of shader's uniforms.
     *
     * @type {boolean}
     */
    useInstancing: boolean;
    /**
     * If morphing code should be generated to morph positions.
     *
     * @type {boolean}
     */
    useMorphPosition: boolean;
    /**
     * If morphing code should be generated to morph normals.
     *
     * @type {boolean}
     */
    useMorphNormal: boolean;
    useMorphTextureBasedInt: boolean;
    nineSlicedMode: number;
    // Clustered lighting configuration flags.
    clusteredLightingEnabled: boolean;
    clusteredLightingCookiesEnabled: boolean;
    clusteredLightingShadowsEnabled: boolean;
    clusteredLightingShadowType: number;
    clusteredLightingAreaLightsEnabled: boolean;
    vertexColors: boolean;
    useVertexColorGamma: boolean;
    lightMapEnabled: boolean;
    dirLightMapEnabled: boolean;
    useHeights: boolean;
    useNormals: boolean;
    useClearCoatNormals: boolean;
    useAo: boolean;
    diffuseMapEnabled: boolean;
    pixelSnap: boolean;
    /**
     * If ambient spherical harmonics are used. Ambient SH replace prefiltered cubemap ambient on
     * certain platforms (mostly Android) for performance reasons.
     *
     * @type {boolean}
     */
    ambientSH: boolean;
    /**
     * Apply SSAO during the lighting.
     *
     * @type {boolean}
     */
    ssao: boolean;
    /**
     * The value of {@link StandardMaterial#twoSidedLighting}.
     *
     * @type {boolean}
     */
    twoSidedLighting: boolean;
    /**
     * The value of {@link StandardMaterial#occludeDirect}.
     *
     * @type {boolean}
     */
    occludeDirect: boolean;
    /**
     * The value of {@link StandardMaterial#occludeSpecular}.
     *
     * @type {number}
     */
    occludeSpecular: number;
    /**
     * Defines if {@link StandardMaterial#occludeSpecularIntensity} constant should affect specular
     * occlusion.
     *
     * @type {boolean}
     */
    occludeSpecularFloat: boolean;
    // MSDF (multi-channel signed distance field) text rendering flags.
    useMsdf: boolean;
    msdfTextAttribute: boolean;
    /**
     * Enable alpha to coverage. See {@link Material#alphaToCoverage}.
     *
     * @type {boolean}
     */
    alphaToCoverage: boolean;
    /**
     * Enable specular fade. See {@link StandardMaterial#opacityFadesSpecular}.
     *
     * @type {boolean}
     */
    opacityFadesSpecular: boolean;
    /**
     * Enable opacity dithering. See {@link StandardMaterial#opacityDither}.
     *
     * @type {string}
     */
    opacityDither: string;
    /**
     * Enable opacity shadow dithering. See {@link StandardMaterial#opacityShadowDither}.
     *
     * @type {string}
     */
    opacityShadowDither: string;
    /**
     * The value of {@link StandardMaterial#cubeMapProjection}.
     *
     * @type {number}
     */
    cubeMapProjection: number;
    /**
     * If any specular or reflections are needed at all.
     *
     * @type {boolean}
     */
    useSpecular: boolean;
    useSpecularityFactor: boolean;
    enableGGXSpecular: boolean;
    /**
     * The value of {@link StandardMaterial#fresnelModel}.
     *
     * @type {number}
     */
    fresnelModel: number;
    /**
     * If refraction is used.
     *
     * @type {boolean}
     */
    useRefraction: boolean;
    useClearCoat: boolean;
    useSheen: boolean;
    useIridescence: boolean;
    /**
     * The value of {@link StandardMaterial#useMetalness}.
     *
     * @type {boolean}
     */
    useMetalness: boolean;
    useDynamicRefraction: boolean;
    dispersion: boolean;
    /**
     * The type of fog being applied in the shader. See {@link Scene#fog} for the list of possible
     * values.
     *
     * @type {string}
     */
    fog: string;
    /**
     * The type of gamma correction being applied in the shader. See
     * {@link CameraComponent#gammaCorrection} for the list of possible values.
     *
     * @type {number}
     */
    gamma: number;
    /**
     * The type of tone mapping being applied in the shader. See {@link CameraComponent#toneMapping}
     * for the list of possible values.
     *
     * @type {number}
     */
    toneMap: number;
    /**
     * One of REFLECTIONSRC_*** constants.
     *
     * @type {string}
     */
    reflectionSource: string;
    // NOTE(review): encoding fields are untyped (`any`) here — confirm expected values.
    reflectionEncoding: any;
    reflectionCubemapEncoding: any;
    /**
     * One of "ambientSH", "envAtlas", "constant".
     *
     * @type {string}
     */
    ambientSource: string;
    ambientEncoding: any;
    /**
     * Skybox intensity factor.
     *
     * @type {number}
     */
    skyboxIntensity: number;
    /**
     * If cube map rotation is enabled.
     *
     * @type {boolean}
     */
    useCubeMapRotation: boolean;
    lightMapWithoutAmbient: boolean;
    // NOTE(review): presumably the set of lights affecting the object — confirm element type.
    lights: any[];
    noShadow: boolean;
    lightMaskDynamic: number;
    /**
     * Object containing a map of user defined vertex attributes to attached shader semantics.
     *
     * @type {Object<string, string>}
     */
    userAttributes: {
        [x: string]: string;
    };
    /**
     * Make vLinearDepth available in the shader.
     *
     * @type {boolean}
     */
    linearDepth: boolean;
    /**
     * Shader outputs the accumulated shadow value, used for shadow catcher materials.
     */
    shadowCatcher: boolean;
}

// Procedural mesh creation helpers. NOTE(review): parameters are untyped (`any`) in this
// declaration — see the engine source for the supported `opts` fields of each shape.
declare function createSphere(device: any, opts: any): Mesh;
declare function createPlane(device: any, opts: any): Mesh;
declare function createBox(device: any, opts: any): Mesh;
declare function createTorus(device: any, opts: any): Mesh;
declare function createCapsule(device: any, opts: any): Mesh;
declare function createCone(device: any, opts: any): Mesh;
declare function createCylinder(device: any, opts: any): Mesh;
declare function createMesh(device: any, positions: any, opts?: {}): Mesh;
declare function drawFullscreenQuad(device: any, target: any, vertexBuffer: any, shader: any, rect: any): void;
// Pixel format constants. NOTE(review): the naming style differs from the engine's current
// PIXELFORMAT_* constants — these look like legacy aliases kept for backwards compatibility;
// confirm before relying on them in new code.
declare const PIXELFORMAT_L8_A8: 2;
declare const PIXELFORMAT_R5_G6_B5: 3;
declare const PIXELFORMAT_R5_G5_B5_A1: 4;
declare const PIXELFORMAT_R4_G4_B4_A4: 5;
declare const PIXELFORMAT_R8_G8_B8: 6;
declare const PIXELFORMAT_R8_G8_B8_A8: 7;
declare const PIXELFORMAT_SRGB: 19;
declare const PIXELFORMAT_SRGBA: 20;
// Constant-factor blend modes. Note the *_ALPHA constants share numeric values with the
// corresponding *_COLOR constants (11 and 12), so each pair is interchangeable.
declare const BLENDMODE_CONSTANT_COLOR: 11;
declare const BLENDMODE_ONE_MINUS_CONSTANT_COLOR: 12;
declare const BLENDMODE_CONSTANT_ALPHA: 11;
declare const BLENDMODE_ONE_MINUS_CONSTANT_ALPHA: 12;
// Shader chunk API version strings (see ShaderChunks#version).
declare const CHUNKAPI_1_51: "1.51";
declare const CHUNKAPI_1_55: "1.55";
declare const CHUNKAPI_1_56: "1.56";
declare const CHUNKAPI_1_57: "1.57";
declare const CHUNKAPI_1_58: "1.58";
declare const CHUNKAPI_1_60: "1.60";
declare const CHUNKAPI_1_62: "1.62";
declare const CHUNKAPI_1_65: "1.65";
declare const CHUNKAPI_1_70: "1.70";
declare const CHUNKAPI_2_1: "2.1";
declare const CHUNKAPI_2_3: "2.3";
declare const CHUNKAPI_2_5: "2.5";
declare const CHUNKAPI_2_6: "2.6";
declare const CHUNKAPI_2_7: "2.7";
declare const CHUNKAPI_2_8: "2.8";
// Aliases of renamed classes. NOTE(review): presumably legacy/deprecated names — confirm.
declare const Key: typeof AnimationKey;
declare const Node: typeof AnimationNode;
declare const LitOptions: typeof LitShaderOptions;
// NOTE(review): declared as an empty object literal type; likely a placeholder/legacy export —
// confirm against the engine source.
declare const shaderChunks: {};
// Input event name constants (keyboard, mouse, touch, gamepad, XR selection).
declare const EVENT_KEYDOWN: "keydown";
declare const EVENT_KEYUP: "keyup";
declare const EVENT_MOUSEDOWN: "mousedown";
declare const EVENT_MOUSEMOVE: "mousemove";
declare const EVENT_MOUSEUP: "mouseup";
declare const EVENT_MOUSEWHEEL: "mousewheel";
declare const EVENT_TOUCHSTART: "touchstart";
declare const EVENT_TOUCHEND: "touchend";
declare const EVENT_TOUCHMOVE: "touchmove";
declare const EVENT_TOUCHCANCEL: "touchcancel";
declare const EVENT_GAMEPADCONNECTED: "gamepadconnected";
declare const EVENT_GAMEPADDISCONNECTED: "gamepaddisconnected";
declare const EVENT_SELECT: "select";
declare const EVENT_SELECTSTART: "selectstart";
declare const EVENT_SELECTEND: "selectend";
// Rigid body type constants.
declare const RIGIDBODY_TYPE_STATIC: "static";
declare const RIGIDBODY_TYPE_DYNAMIC: "dynamic";
declare const RIGIDBODY_TYPE_KINEMATIC: "kinematic";
// Rigid body collision flags. NOTE(review): values match Bullet's btCollisionObject
// CollisionFlags — confirm.
declare const RIGIDBODY_CF_STATIC_OBJECT: 1;
declare const RIGIDBODY_CF_KINEMATIC_OBJECT: 2;
declare const RIGIDBODY_CF_NORESPONSE_OBJECT: 4;
// Rigid body activation states. NOTE(review): values match Bullet's activation state
// constants — confirm.
declare const RIGIDBODY_ACTIVE_TAG: 1;
declare const RIGIDBODY_ISLAND_SLEEPING: 2;
declare const RIGIDBODY_WANTS_DEACTIVATION: 3;
declare const RIGIDBODY_DISABLE_DEACTIVATION: 4;
declare const RIGIDBODY_DISABLE_SIMULATION: 5;

/**
 * Renders a set of words into a texture atlas, tracking the placement of each word.
 * NOTE(review): purpose inferred from the API surface (placements map, texture, render) —
 * confirm against the implementation.
 *
 * @ignore
 */
declare class WordAtlas {
    constructor(device: any, words: any);
    // NOTE(review): presumably maps each word to its location in `texture` — confirm.
    placements: Map<any, any>;
    // The texture backing the atlas.
    texture: Texture;
    destroy(): void;
    render(render2d: any, word: any, x: any, y: any): any;
}

/**
 * Tags is a powerful tag management system for categorizing and filtering objects in PlayCanvas
 * applications. It provides an efficient way to attach string identifiers to objects and query them
 * using logical operations.
 *
 * Tags are automatically available on {@link Asset}s and {@link Entity}s (see {@link Asset#tags}
 * and {@link GraphNode#tags}). You can search for specific assets via {@link AssetRegistry#findByTag}
 * and specific entities via {@link GraphNode#findByTag}.
 */
declare class Tags extends EventHandler {
    /**
     * Fired for each individual tag that is added.
     *
     * @event
     * @example
     * tags.on('add', (tag, parent) => {
     *    console.log(`${tag} added to ${parent.name}`);
     * });
     */
    static EVENT_ADD: string;
    /**
     * Fired for each individual tag that is removed.
     *
     * @event
     * @example
     * tags.on('remove', (tag, parent) => {
     *   console.log(`${tag} removed from ${parent.name}`);
     * });
     */
    static EVENT_REMOVE: string;
    /**
     * Fired when tags have been added or removed. It will fire once on bulk changes, while `add`
     * and `remove` will fire on each tag operation.
     *
     * @event
     * @example
     * tags.on('change', (parent) => {
     *    console.log(`Tags changed on ${parent.name}`);
     * });
     */
    static EVENT_CHANGE: string;
    /**
     * Create a new Tags instance.
     *
     * @param {object} [parent] - Parent object that the tags belong to.
     */
    constructor(parent?: object);
    /** @private */
    private _index;
    /** @private */
    private _list;
    /** The parent object that these tags belong to, as passed to the constructor (untyped here). */
    _parent: any;
    /**
     * Add a tag, duplicates are ignored. Can be array or comma separated arguments for multiple tags.
     *
     * @param {...*} args - Name of a tag, or array of tags.
     * @returns {boolean} True if any tags were added.
     * @example
     * tags.add('level-1');
     * @example
     * tags.add('ui', 'settings');
     * @example
     * tags.add(['level-2', 'mob']);
     */
    add(...args: any[]): boolean;
    /**
     * Remove tag.
     *
     * @param {...*} args - Name of a tag or array of tags.
     * @returns {boolean} True if any tags were removed.
     * @example
     * tags.remove('level-1');
     * @example
     * tags.remove('ui', 'settings');
     * @example
     * tags.remove(['level-2', 'mob']);
     */
    remove(...args: any[]): boolean;
    /**
     * Remove all tags.
     *
     * @example
     * tags.clear();
     */
    clear(): void;
    /**
     * Check if tags satisfy filters. Filters can be provided by simple name of tag, as well as by
     * array of tags. When an array is provided it will check if tags contain each tag within the
     * array. If any of comma separated argument is satisfied, then it will return true. Any number
     * of combinations are valid, and order is irrelevant.
     *
     * @param {...*} query - Name of a tag or array of tags.
     * @returns {boolean} True if filters are satisfied.
     * @example
     * tags.has('player'); // player
     * @example
     * tags.has('mob', 'player'); // player OR mob
     * @example
     * tags.has(['level-1', 'mob']); // mob AND level-1
     * @example
     * tags.has(['ui', 'settings'], ['ui', 'levels']); // (ui AND settings) OR (ui AND levels)
     */
    has(...query: any[]): boolean;
    /**
     * @param {string[]|string[][]} tags - Array of tags.
     * @returns {boolean} True if the supplied tags are present.
     * @private
     */
    private _has;
    /**
     * Returns immutable array of tags.
     *
     * @returns {string[]} Copy of tags array.
     */
    list(): string[];
    /**
     * @param {Array} args - Arguments to process.
     * @param {boolean} [flat] - If true, will flatten array of tags. Defaults to false.
     * @returns {string[]|string[][]} Array of tags.
     * @private
     */
    private _processArguments;
    /**
     * Number of tags in set.
     *
     * @type {number}
     */
    get size(): number;
}

/**
 * @import { Mat4 } from './mat4.js'
 * @import { Quat } from './quat.js'
 */
/**
 * A 3x3 matrix. Mat3 is commonly used to represent rotation matrices, 2D transformations or the
 * upper-left portion of a 4x4 matrix for transforming normals.
 *
 * @category Math
 */
declare class Mat3 {
    /**
     * A constant matrix set to the identity.
     *
     * @type {Mat3}
     * @readonly
     */
    static readonly IDENTITY: Mat3;
    /**
     * A constant matrix with all elements set to 0.
     *
     * @type {Mat3}
     * @readonly
     */
    static readonly ZERO: Mat3;
    /**
     * Matrix elements in the form of a flat array.
     *
     * @type {Float32Array}
     */
    data: Float32Array;
    /**
     * Creates a duplicate of the specified matrix.
     *
     * @returns {this} A duplicate matrix.
     * @example
     * const src = new pc.Mat3().setFromQuat(new pc.Quat(0, 0, 0.383, 0.924));
     * const dst = src.clone();
     * console.log("The two matrices are " + (src.equals(dst) ? "equal" : "different"));
     */
    clone(): this;
    /**
     * Copies the contents of a source 3x3 matrix to a destination 3x3 matrix.
     *
     * @param {Mat3} rhs - A 3x3 matrix to be copied.
     * @returns {Mat3} Self for chaining.
     * @example
     * const src = new pc.Mat3().setFromQuat(new pc.Quat(0, 0, 0.383, 0.924));
     * const dst = new pc.Mat3();
     * dst.copy(src);
     * console.log("The two matrices are " + (src.equals(dst) ? "equal" : "different"));
     */
    copy(rhs: Mat3): Mat3;
    /**
     * Copies the contents of a source array[9] to a destination 3x3 matrix.
     *
     * @param {number[]} src - An array[9] to be copied.
     * @returns {Mat3} Self for chaining.
     * @example
     * const dst = new pc.Mat3();
     * dst.set([0, 1, 2, 3, 4, 5, 6, 7, 8]);
     */
    set(src: number[]): Mat3;
    /**
     * Extracts the x-axis from the specified matrix.
     *
     * @param {Vec3} [x] - The vector to receive the x axis of the matrix.
     * @returns {Vec3} The x-axis of the specified matrix.
     * @example
     * const m = new pc.Mat3();
     * const xAxis = m.getX(); // Vec3(1, 0, 0) for identity matrix
     */
    getX(x?: Vec3): Vec3;
    /**
     * Extracts the y-axis from the specified matrix.
     *
     * @param {Vec3} [y] - The vector to receive the y axis of the matrix.
     * @returns {Vec3} The y-axis of the specified matrix.
     * @example
     * const m = new pc.Mat3();
     * const yAxis = m.getY(); // Vec3(0, 1, 0) for identity matrix
     */
    getY(y?: Vec3): Vec3;
    /**
     * Extracts the z-axis from the specified matrix.
     *
     * @param {Vec3} [z] - The vector to receive the z axis of the matrix.
     * @returns {Vec3} The z-axis of the specified matrix.
     * @example
     * const m = new pc.Mat3();
     * const zAxis = m.getZ(); // Vec3(0, 0, 1) for identity matrix
     */
    getZ(z?: Vec3): Vec3;
    /**
     * Reports whether two matrices are equal.
     *
     * @param {Mat3} rhs - The other matrix.
     * @returns {boolean} True if the matrices are equal and false otherwise.
     * @example
     * const a = new pc.Mat3().setFromQuat(new pc.Quat(0, 0, 0.383, 0.924));
     * const b = new pc.Mat3();
     * console.log("The two matrices are " + (a.equals(b) ? "equal" : "different"));
     */
    equals(rhs: Mat3): boolean;
    /**
     * Reports whether the specified matrix is the identity matrix.
     *
     * @returns {boolean} True if the matrix is identity and false otherwise.
     * @example
     * const m = new pc.Mat3();
     * console.log("The matrix is " + (m.isIdentity() ? "identity" : "not identity"));
     */
    isIdentity(): boolean;
    /**
     * Sets the matrix to the identity matrix.
     *
     * @returns {Mat3} Self for chaining.
     * @example
     * m.setIdentity();
     * console.log("The matrix is " + (m.isIdentity() ? "identity" : "not identity"));
     */
    setIdentity(): Mat3;
    /**
     * Converts the matrix to string form.
     *
     * @returns {string} The matrix in string form.
     * @example
     * const m = new pc.Mat3();
     * // Outputs [1, 0, 0, 0, 1, 0, 0, 0, 1]
     * console.log(m.toString());
     */
    toString(): string;
    /**
     * Generates the transpose of the specified 3x3 matrix.
     *
     * @param {Mat3} [src] - The matrix to transpose. If not set, the matrix is transposed in-place.
     * @returns {Mat3} Self for chaining.
     * @example
     * const m = new pc.Mat3();
     *
     * // Transpose in place
     * m.transpose();
     */
    transpose(src?: Mat3): Mat3;
    /**
     * Converts the specified 4x4 matrix to a Mat3.
     *
     * @param {Mat4} m - The 4x4 matrix to convert.
     * @returns {Mat3} Self for chaining.
     * @example
     * const m4 = new pc.Mat4();
     * const m3 = new pc.Mat3().setFromMat4(m4);
     */
    setFromMat4(m: Mat4): Mat3;
    /**
     * Sets this matrix to the given quaternion rotation.
     *
     * @param {Quat} r - A quaternion rotation.
     * @returns {Mat3} Self for chaining.
     * @example
     * const r = new pc.Quat(1, 2, 3, 4).normalize();
     *
     * const m = new pc.Mat3();
     * m.setFromQuat(r);
     */
    setFromQuat(r: Quat): Mat3;
    /**
     * Set the matrix to the inverse of the specified 4x4 matrix.
     *
     * @param {Mat4} src - The 4x4 matrix to invert.
     * @returns {Mat3} Self for chaining.
     *
     * @ignore
     */
    invertMat4(src: Mat4): Mat3;
    /**
     * Transforms a 3-dimensional vector by a 3x3 matrix.
     *
     * @param {Vec3} vec - The 3-dimensional vector to be transformed.
     * @param {Vec3} [res] - An optional 3-dimensional vector to receive the result of the
     * transformation.
     * @returns {Vec3} The input vector vec transformed by the current instance.
     * @example
     * const m = new pc.Mat3();
     * const v = new pc.Vec3(1, 2, 3);
     * const result = m.transformVector(v);
     */
    transformVector(vec: Vec3, res?: Vec3): Vec3;
}

/**
 * Callback used by {@link GraphNode#find} and {@link GraphNode#findOne} to search through a graph
 * node and all of its descendants.
 */
type FindNodeCallback = (node: GraphNode) => boolean;
/**
 * Callback used by {@link GraphNode#forEach} to iterate through a graph node and all of its
 * descendants.
 */
type ForEachNodeCallback = (node: GraphNode) => void;
// NOTE(review): the @callback blocks below restate the type aliases above — presumably emitted by
// the declaration generator for JSDoc-based tooling. Harmless duplication; keep both in sync.
/**
 * @callback FindNodeCallback
 * Callback used by {@link GraphNode#find} and {@link GraphNode#findOne} to search through a graph
 * node and all of its descendants.
 * @param {GraphNode} node - The current graph node.
 * @returns {boolean} Returning `true` will result in that node being returned from
 * {@link GraphNode#find} or {@link GraphNode#findOne}.
 */
/**
 * @callback ForEachNodeCallback
 * Callback used by {@link GraphNode#forEach} to iterate through a graph node and all of its
 * descendants.
 * @param {GraphNode} node - The current graph node.
 * @returns {void}
 */
/**
 * The GraphNode class represents a node within a hierarchical scene graph. Each GraphNode can
 * reference an array of {@link children}. This creates a tree-like structure that is fundamental
 * for organizing and managing the spatial relationships between objects in a 3D scene. This class
 * provides a comprehensive API for manipulating the position, rotation, and scale of nodes both
 * locally (relative to the {@link parent}) and in world space (relative to the {@link Scene}
 * origin).
 *
 * During the application's (see {@link AppBase}) main update loop, the engine automatically
 * synchronizes the entire GraphNode hierarchy each frame. This process ensures that the world
 * transformation matrices for all nodes are up-to-date. A node's world transformation matrix is
 * calculated by combining its local transformation matrix (derived from its local position,
 * rotation, and scale) with the world transformation matrix of its parent node. For the scene
 * graph's {@link root} node (which has no parent), its world matrix is simply its local matrix.
 * This hierarchical update mechanism ensures that changes made to a parent node's transform
 * correctly propagate down to all its children and descendants, accurately reflecting their final
 * position, orientation, and scale in the world. This synchronized world transform is essential
 * for systems like rendering and physics.
 *
 * GraphNode is the superclass of {@link Entity}, which is the primary class for creating objects
 * in a PlayCanvas application. For this reason, developers typically interact with the scene
 * hierarchy and transformations through the Entity interface rather than using GraphNode directly.
 * However, GraphNode provides the underlying powerful set of features for hierarchical
 * transformations that Entity leverages.
 */
declare class GraphNode extends EventHandler {
    /**
     * Create a new GraphNode instance.
     *
     * @param {string} [name] - The non-unique name of a graph node. Defaults to 'Untitled'.
     */
    constructor(name?: string);
    /**
     * The non-unique name of a graph node. Defaults to 'Untitled'.
     *
     * @type {string}
     */
    name: string;
    /**
     * Interface for tagging graph nodes. Tag based searches can be performed using the
     * {@link findByTag} function.
     *
     * @type {Tags}
     */
    tags: Tags;
    /**
     * @type {Vec3}
     * @private
     */
    private localPosition;
    /**
     * @type {Quat}
     * @private
     */
    private localRotation;
    /**
     * @type {Vec3}
     * @private
     */
    private localScale;
    /**
     * @type {Vec3}
     * @private
     */
    private localEulerAngles;
    /**
     * @type {Vec3}
     * @private
     */
    private position;
    /**
     * @type {Quat}
     * @private
     */
    private rotation;
    /**
     * @type {Vec3}
     * @private
     */
    private eulerAngles;
    /**
     * @type {Vec3|null}
     * @private
     */
    private _scale;
    /**
     * @type {Mat4}
     * @private
     */
    private localTransform;
    /**
     * @type {boolean}
     * @private
     */
    private _dirtyLocal;
    /**
     * @type {number}
     * @private
     */
    private _aabbVer;
    /**
     * Marks the node to ignore hierarchy sync entirely (including children nodes). The engine code
     * automatically freezes and unfreezes objects whenever required. Segregating dynamic and
     * stationary nodes into subhierarchies allows to reduce sync time significantly.
     *
     * @type {boolean}
     * @private
     */
    private _frozen;
    /**
     * @type {Mat4}
     * @private
     */
    private worldTransform;
    /**
     * @type {boolean}
     * @private
     */
    private _dirtyWorld;
    /**
     * Cached value representing the negatively scaled world transform. If the value is 0, this
     * marks this value as dirty and it needs to be recalculated. If the value is 1, the world
     * transform is not negatively scaled. If the value is -1, the world transform is negatively
     * scaled.
     *
     * @type {number}
     * @private
     */
    private _worldScaleSign;
    /**
     * @type {Mat3}
     * @private
     */
    private _normalMatrix;
    /**
     * @type {boolean}
     * @private
     */
    private _dirtyNormal;
    /**
     * @type {Vec3|null}
     * @private
     */
    private _right;
    /**
     * @type {Vec3|null}
     * @private
     */
    private _up;
    /**
     * @type {Vec3|null}
     * @private
     */
    private _forward;
    /**
     * @type {GraphNode|null}
     * @private
     */
    private _parent;
    /**
     * @type {GraphNode[]}
     * @protected
     */
    protected _children: GraphNode[];
    /**
     * @type {number}
     * @private
     */
    private _graphDepth;
    /**
     * Represents enabled state of the entity. If the entity is disabled, the entity including all
     * children are excluded from updates.
     *
     * @type {boolean}
     * @private
     */
    private _enabled;
    /**
     * Represents enabled state of the entity in the hierarchy. It's true only if this entity and
     * all parent entities all the way to the scene's root are enabled.
     *
     * @type {boolean}
     * @private
     */
    private _enabledInHierarchy;
    /**
     * @type {boolean}
     * @ignore
     */
    scaleCompensation: boolean;
    /**
     * Gets the normalized local space X-axis vector of the graph node in world space.
     *
     * @type {Vec3}
     */
    get right(): Vec3;
    /**
     * Gets the normalized local space Y-axis vector of the graph node in world space.
     *
     * @type {Vec3}
     */
    get up(): Vec3;
    /**
     * Gets the normalized local space negative Z-axis vector of the graph node in world space.
     *
     * @type {Vec3}
     */
    get forward(): Vec3;
    /**
     * Gets the 3x3 transformation matrix used to transform normals.
     *
     * @type {Mat3}
     * @ignore
     */
    get normalMatrix(): Mat3;
    /**
     * Sets the enabled state of the GraphNode. If one of the GraphNode's parents is disabled there
     * will be no other side effects. If all the parents are enabled then the new value will
     * activate or deactivate all the enabled children of the GraphNode.
     *
     * @type {boolean}
     */
    set enabled(enabled: boolean);
    /**
     * Gets the enabled state of the GraphNode.
     *
     * @type {boolean}
     */
    get enabled(): boolean;
    /**
     * Gets the parent of this graph node.
     *
     * @type {GraphNode|null}
     */
    get parent(): GraphNode | null;
    /**
     * Gets the path of this graph node relative to the root of the hierarchy.
     *
     * @type {string}
     */
    get path(): string;
    /**
     * Gets the oldest ancestor graph node from this graph node.
     *
     * @type {GraphNode}
     */
    get root(): GraphNode;
    /**
     * Gets the children of this graph node.
     *
     * @type {GraphNode[]}
     */
    get children(): GraphNode[];
    /**
     * Gets the depth of this child within the graph. Note that for performance reasons this is
     * only recalculated when a node is added to a new parent. In other words, it is not
     * recalculated when a node is simply removed from the graph.
     *
     * @type {number}
     */
    get graphDepth(): number;
    /**
     * @param {GraphNode} node - Graph node to update.
     * @param {boolean} enabled - True if enabled in the hierarchy, false if disabled.
     * @protected
     */
    protected _notifyHierarchyStateChanged(node: GraphNode, enabled: boolean): void;
    /**
     * Called when the enabled flag of the entity or one of its parents changes.
     *
     * @param {boolean} enabled - True if enabled in the hierarchy, false if disabled.
     * @protected
     */
    protected _onHierarchyStateChanged(enabled: boolean): void;
    /**
     * @param {this} clone - The cloned graph node to copy into.
     * @private
     */
    private _cloneInternal;
    /**
     * Clone a graph node.
     *
     * @returns {this} A clone of the specified graph node.
     */
    clone(): this;
    /**
     * Copy a graph node.
     *
     * @param {GraphNode} source - The graph node to copy.
     * @returns {GraphNode} The destination graph node.
     * @ignore
     */
    copy(source: GraphNode): GraphNode;
    /**
     * Destroy the graph node and all of its descendants. First, the graph node is removed from the
     * hierarchy. This is then repeated recursively for all descendants of the graph node.
     *
     * The last thing the graph node does is fire the `destroy` event.
     *
     * @example
     * const firstChild = graphNode.children[0];
     * firstChild.destroy(); // destroy child and all of its descendants
     */
    destroy(): void;
    /**
     * Search the graph node and all of its descendants for the nodes that satisfy some search
     * criteria.
     *
     * @param {FindNodeCallback|string} attr - This can either be a function or a string. If it's a
     * function, it is executed for each descendant node to test if node satisfies the search
     * logic. Returning true from the function will include the node into the results. If it's a
     * string then it represents the name of a field or a method of the node. If this is the name
     * of a field then the value passed as the second argument will be checked for equality. If
     * this is the name of a function then the return value of the function will be checked for
     * equality against the valued passed as the second argument to this function.
     * @param {*} [value] - If the first argument (attr) is a property name then this value
     * will be checked against the value of the property.
     * @returns {GraphNode[]} The array of graph nodes that match the search criteria.
     * @example
     * // Finds all nodes that have a model component and have 'door' in their lower-cased name
     * const doors = house.find((node) => {
     *     return node.model && node.name.toLowerCase().indexOf('door') !== -1;
     * });
     * @example
     * // Finds all nodes that have the name property set to 'Test'
     * const entities = parent.find('name', 'Test');
     */
    find(attr: FindNodeCallback | string, value?: any): GraphNode[];
    /**
     * Search the graph node and all of its descendants for the first node that satisfies some
     * search criteria.
     *
     * @param {FindNodeCallback|string} attr - This can either be a function or a string. If it's a
     * function, it is executed for each descendant node to test if node satisfies the search
     * logic. Returning true from the function will result in that node being returned from
     * findOne. If it's a string then it represents the name of a field or a method of the node. If
     * this is the name of a field then the value passed as the second argument will be checked for
     * equality. If this is the name of a function then the return value of the function will be
     * checked for equality against the valued passed as the second argument to this function.
     * @param {*} [value] - If the first argument (attr) is a property name then this value
     * will be checked against the value of the property.
     * @returns {GraphNode|null} A graph node that match the search criteria. Returns null if no
     * node is found.
     * @example
     * // Find the first node that is called 'head' and has a model component
     * const head = player.findOne((node) => {
     *     return node.model && node.name === 'head';
     * });
     * @example
     * // Finds the first node that has the name property set to 'Test'
     * const node = parent.findOne('name', 'Test');
     */
    findOne(attr: FindNodeCallback | string, value?: any): GraphNode | null;
    /**
     * Return all graph nodes that satisfy the search query. Query can be simply a string, or comma
     * separated strings, to have inclusive results of assets that match at least one query. A
     * query that consists of an array of tags can be used to match graph nodes that have each tag
     * of array.
     *
     * @param {...*} query - Name of a tag or array of tags.
     * @returns {GraphNode[]} A list of all graph nodes that match the query.
     * @example
     * // Return all graph nodes that tagged by `animal`
     * const animals = node.findByTag("animal");
     * @example
     * // Return all graph nodes that tagged by `bird` OR `mammal`
     * const birdsAndMammals = node.findByTag("bird", "mammal");
     * @example
     * // Return all assets that tagged by `carnivore` AND `mammal`
     * const meatEatingMammals = node.findByTag(["carnivore", "mammal"]);
     * @example
     * // Return all assets that tagged by (`carnivore` AND `mammal`) OR (`carnivore` AND `reptile`)
     * const meatEatingMammalsAndReptiles = node.findByTag(["carnivore", "mammal"], ["carnivore", "reptile"]);
     */
    findByTag(...query: any[]): GraphNode[];
    /**
     * Get the first node found in the graph with the name. The search is depth first.
     *
     * @param {string} name - The name of the graph.
     * @returns {GraphNode|null} The first node to be found matching the supplied name. Returns
     * null if no node is found.
     */
    findByName(name: string): GraphNode | null;
    /**
     * Get the first node found in the graph by its full path in the graph. The full path has this
     * form 'parent/child/sub-child'. The search is depth first.
     *
     * @param {string|string[]} path - The full path of the GraphNode as either a string or array
     * of GraphNode names.
     * @returns {GraphNode|null} The first node to be found matching the supplied path. Returns
     * null if no node is found.
     * @example
     * // String form
     * const grandchild = this.entity.findByPath('child/grandchild');
     * @example
     * // Array form
     * const grandchild = this.entity.findByPath(['child', 'grandchild']);
     */
    findByPath(path: string | string[]): GraphNode | null;
    /**
     * Executes a provided function once on this graph node and all of its descendants.
     *
     * @param {ForEachNodeCallback} callback - The function to execute on the graph node and each
     * descendant.
     * @param {object} [thisArg] - Optional value to use as this when executing callback function.
     * @example
     * // Log the path and name of each node in descendant tree starting with "parent"
     * parent.forEach((node) => {
     *     console.log(node.path + "/" + node.name);
     * });
     */
    forEach(callback: ForEachNodeCallback, thisArg?: object): void;
    /**
     * Check if node is descendant of another node.
     *
     * @param {GraphNode} node - Potential ancestor of node.
     * @returns {boolean} If node is descendant of another node.
     * @example
     * if (roof.isDescendantOf(house)) {
     *     // roof is descendant of house entity
     * }
     */
    isDescendantOf(node: GraphNode): boolean;
    /**
     * Check if node is ancestor for another node.
     *
     * @param {GraphNode} node - Potential descendant of node.
     * @returns {boolean} If node is ancestor for another node.
     * @example
     * if (body.isAncestorOf(foot)) {
     *     // foot is within body's hierarchy
     * }
     */
    isAncestorOf(node: GraphNode): boolean;
    /**
     * Get the world space rotation for the specified GraphNode in Euler angles. The angles are in
     * degrees and in XYZ order.
     *
     * Important: The value returned by this function should be considered read-only. In order to
     * set the world space rotation of the graph node, use {@link setEulerAngles}.
     *
     * @returns {Vec3} The world space rotation of the graph node in Euler angle form.
     * @example
     * const angles = this.entity.getEulerAngles();
     * angles.y = 180; // rotate the entity around Y by 180 degrees
     * this.entity.setEulerAngles(angles);
     */
    getEulerAngles(): Vec3;
    /**
     * Get the local space rotation for the specified GraphNode in Euler angles. The angles are in
     * degrees and in XYZ order.
     *
     * Important: The value returned by this function should be considered read-only. In order to
     * set the local space rotation of the graph node, use {@link setLocalEulerAngles}.
     *
     * @returns {Vec3} The local space rotation of the graph node as Euler angles in XYZ order.
     * @example
     * const angles = this.entity.getLocalEulerAngles();
     * angles.y = 180;
     * this.entity.setLocalEulerAngles(angles);
     */
    getLocalEulerAngles(): Vec3;
    /**
     * Get the position in local space for the specified GraphNode. The position is returned as a
     * {@link Vec3}. The returned vector should be considered read-only. To update the local
     * position, use {@link setLocalPosition}.
     *
     * @returns {Vec3} The local space position of the graph node.
     * @example
     * const position = this.entity.getLocalPosition();
     * position.x += 1; // move the entity 1 unit along x.
     * this.entity.setLocalPosition(position);
     */
    getLocalPosition(): Vec3;
    /**
     * Get the rotation in local space for the specified GraphNode. The rotation is returned as a
     * {@link Quat}. The returned quaternion should be considered read-only. To update the local
     * rotation, use {@link setLocalRotation}.
     *
     * @returns {Quat} The local space rotation of the graph node as a quaternion.
     * @example
     * const rotation = this.entity.getLocalRotation();
     */
    getLocalRotation(): Quat;
    /**
     * Get the scale in local space for the specified GraphNode. The scale is returned as a
     * {@link Vec3}. The returned vector should be considered read-only. To update the local scale,
     * use {@link setLocalScale}.
     *
     * @returns {Vec3} The local space scale of the graph node.
     * @example
     * const scale = this.entity.getLocalScale();
     * scale.x = 100;
     * this.entity.setLocalScale(scale);
     */
    getLocalScale(): Vec3;
    /**
     * Get the local transform matrix for this graph node. This matrix is the transform relative to
     * the node's parent's world transformation matrix.
     *
     * @returns {Mat4} The node's local transformation matrix.
     * @example
     * const transform = this.entity.getLocalTransform();
     */
    getLocalTransform(): Mat4;
    /**
     * Get the world space position for the specified GraphNode. The position is returned as a
     * {@link Vec3}. The value returned by this function should be considered read-only. In order
     * to set the world space position of the graph node, use {@link setPosition}.
     *
     * @returns {Vec3} The world space position of the graph node.
     * @example
     * const position = this.entity.getPosition();
     * position.x = 10;
     * this.entity.setPosition(position);
     */
    getPosition(): Vec3;
    /**
     * Get the world space rotation for the specified GraphNode. The rotation is returned as a
     * {@link Quat}. The value returned by this function should be considered read-only. In order
     * to set the world space rotation of the graph node, use {@link setRotation}.
     *
     * @returns {Quat} The world space rotation of the graph node as a quaternion.
     * @example
     * const rotation = this.entity.getRotation();
     */
    getRotation(): Quat;
    /**
     * Get the world space scale for the specified GraphNode. The returned value will only be
     * correct for graph nodes that have a non-skewed world transform (a skew can be introduced by
     * the compounding of rotations and scales higher in the graph node hierarchy). The scale is
     * returned as a {@link Vec3}. The value returned by this function should be considered
     * read-only. Note that it is not possible to set the world space scale of a graph node
     * directly.
     *
     * @returns {Vec3} The world space scale of the graph node.
     * @example
     * const scale = this.entity.getScale();
     * @ignore
     */
    getScale(): Vec3;
    /**
     * Get the world transformation matrix for this graph node.
     *
     * @returns {Mat4} The node's world transformation matrix.
     * @example
     * const transform = this.entity.getWorldTransform();
     */
    getWorldTransform(): Mat4;
    /**
     * Gets the cached value of negative scale sign of the world transform.
     *
     * @returns {number} -1 if world transform has negative scale, 1 otherwise.
     * @ignore
     */
    get worldScaleSign(): number;
    /**
     * Remove graph node from current parent.
     */
    remove(): void;
    /**
     * Remove graph node from current parent and add as child to new parent.
     *
     * @param {GraphNode} parent - New parent to attach graph node to.
     * @param {number} [index] - The child index where the child node should be placed.
     */
    reparent(parent: GraphNode, index?: number): void;
    /**
     * Sets the local space rotation of the specified graph node using Euler angles. Eulers are
     * interpreted in XYZ order.
     *
     * @overload
     * @param {number} x - Rotation around local space x-axis in degrees.
     * @param {number} y - Rotation around local space y-axis in degrees.
     * @param {number} z - Rotation around local space z-axis in degrees.
     * @returns {void}
     * @example
     * // Set rotation of 90 degrees around y-axis via 3 numbers
     * this.entity.setLocalEulerAngles(0, 90, 0);
     */
    setLocalEulerAngles(x: number, y: number, z: number): void;
    /**
     * Sets the local space rotation of the specified graph node using Euler angles. Eulers are
     * interpreted in XYZ order.
     *
     * @overload
     * @param {Vec3} angles - Vector holding rotations around local space axes in degrees.
     * @returns {void}
     * @example
     * // Set rotation of 90 degrees around y-axis via a vector
     * const angles = new pc.Vec3(0, 90, 0);
     * this.entity.setLocalEulerAngles(angles);
     */
    setLocalEulerAngles(angles: Vec3): void;
    /**
     * Sets the local space position of the specified graph node.
     *
     * @overload
     * @param {number} x - X-coordinate of local space position.
     * @param {number} y - Y-coordinate of local space position.
     * @param {number} z - Z-coordinate of local space position.
     * @returns {void}
     * @example
     * this.entity.setLocalPosition(0, 10, 0);
     */
    setLocalPosition(x: number, y: number, z: number): void;
    /**
     * Sets the local space position of the specified graph node.
     *
     * @overload
     * @param {Vec3} position - Vector holding local space position.
     * @returns {void}
     * @example
     * const pos = new pc.Vec3(0, 10, 0);
     * this.entity.setLocalPosition(pos);
     */
    setLocalPosition(position: Vec3): void;
    /**
     * Sets the local space rotation of the specified graph node.
     *
     * @overload
     * @param {number} x - X-component of local space quaternion rotation.
     * @param {number} y - Y-component of local space quaternion rotation.
     * @param {number} z - Z-component of local space quaternion rotation.
     * @param {number} w - W-component of local space quaternion rotation.
     * @returns {void}
     * @example
     * this.entity.setLocalRotation(0, 0, 0, 1);
     */
    setLocalRotation(x: number, y: number, z: number, w: number): void;
    /**
     * Sets the local space rotation of the specified graph node.
     *
     * @overload
     * @param {Quat} rotation - Quaternion holding local space rotation.
     * @returns {void}
     * @example
     * const q = new pc.Quat();
     * this.entity.setLocalRotation(q);
     */
    setLocalRotation(rotation: Quat): void;
    /**
     * Sets the local space scale factor of the specified graph node.
     *
     * @overload
     * @param {number} x - X-coordinate of local space scale.
     * @param {number} y - Y-coordinate of local space scale.
     * @param {number} z - Z-coordinate of local space scale.
     * @returns {void}
     * @example
     * this.entity.setLocalScale(10, 10, 10);
     */
    setLocalScale(x: number, y: number, z: number): void;
    /**
     * Sets the local space scale factor of the specified graph node.
     *
     * @overload
     * @param {Vec3} scale - Vector holding local space scale.
     * @returns {void}
     * @example
     * const scale = new pc.Vec3(10, 10, 10);
     * this.entity.setLocalScale(scale);
     */
    setLocalScale(scale: Vec3): void;
    /**
     * Marks this node's cached local transform as needing recomputation — inferred from naming;
     * confirm against implementation.
     *
     * @private
     */
    private _dirtifyLocal;
    /**
     * Walks from the parent up to the graph root, clearing any "frozen" optimization state —
     * inferred from naming; confirm against implementation.
     *
     * @private
     */
    private _unfreezeParentToRoot;
    /**
     * Marks this node's cached world transform (and presumably its descendants') as needing
     * recomputation — inferred from naming; confirm against implementation.
     *
     * @private
     */
    private _dirtifyWorld;
    /**
     * Recursive/internal worker for {@link GraphNode#_dirtifyWorld} — inferred from naming;
     * confirm against implementation.
     *
     * @private
     */
    private _dirtifyWorldInternal;
    /**
     * Sets the world space position of the specified graph node.
     *
     * @overload
     * @param {number} x - X-coordinate of world space position.
     * @param {number} y - Y-coordinate of world space position.
     * @param {number} z - Z-coordinate of world space position.
     * @returns {void}
     * @example
     * this.entity.setPosition(0, 10, 0);
     */
    setPosition(x: number, y: number, z: number): void;
    /**
     * Sets the world space position of the specified graph node.
     *
     * @overload
     * @param {Vec3} position - Vector holding world space position.
     * @returns {void}
     * @example
     * const position = new pc.Vec3(0, 10, 0);
     * this.entity.setPosition(position);
     */
    setPosition(position: Vec3): void;
    /**
     * Sets the world space rotation of the specified graph node.
     *
     * @overload
     * @param {number} x - X-component of world space quaternion rotation.
     * @param {number} y - Y-component of world space quaternion rotation.
     * @param {number} z - Z-component of world space quaternion rotation.
     * @param {number} w - W-component of world space quaternion rotation.
     * @returns {void}
     * @example
     * this.entity.setRotation(0, 0, 0, 1);
     */
    setRotation(x: number, y: number, z: number, w: number): void;
    /**
     * Sets the world space rotation of the specified graph node.
     *
     * @overload
     * @param {Quat} rotation - Quaternion holding world space rotation.
     * @returns {void}
     * @example
     * const rotation = new pc.Quat();
     * this.entity.setRotation(rotation);
     */
    setRotation(rotation: Quat): void;
    /**
     * Sets the world space position and rotation of the specified graph node. This is faster than
     * setting the position and rotation independently.
     *
     * @param {Vec3} position - The world space position to set.
     * @param {Quat} rotation - The world space rotation to set.
     * @example
     * const position = new pc.Vec3(0, 10, 0);
     * const rotation = new pc.Quat().setFromEulerAngles(0, 90, 0);
     * this.entity.setPositionAndRotation(position, rotation);
     */
    setPositionAndRotation(position: Vec3, rotation: Quat): void;
    /**
     * Sets the world space rotation of the specified graph node using Euler angles. Eulers are
     * interpreted in XYZ order.
     *
     * @overload
     * @param {number} x - Rotation around world space x-axis in degrees.
     * @param {number} y - Rotation around world space y-axis in degrees.
     * @param {number} z - Rotation around world space z-axis in degrees.
     * @returns {void}
     * @example
     * this.entity.setEulerAngles(0, 90, 0);
     */
    setEulerAngles(x: number, y: number, z: number): void;
    /**
     * Sets the world space rotation of the specified graph node using Euler angles. Eulers are
     * interpreted in XYZ order.
     *
     * @overload
     * @param {Vec3} angles - Vector holding rotations around world space axes in degrees.
     * @returns {void}
     * @example
     * const angles = new pc.Vec3(0, 90, 0);
     * this.entity.setEulerAngles(angles);
     */
    setEulerAngles(angles: Vec3): void;
    /**
     * Add a new child to the child list and update the parent value of the child node.
     * If the node already had a parent, it is removed from its child list.
     *
     * @param {GraphNode} node - The new child to add.
     * @example
     * const e = new pc.Entity(app);
     * this.entity.addChild(e);
     */
    addChild(node: GraphNode): void;
    /**
     * Add a child to this node, maintaining the child's transform in world space.
     * If the node already had a parent, it is removed from its child list.
     *
     * @param {GraphNode} node - The child to add.
     * @example
     * const e = new pc.Entity(app);
     * this.entity.addChildAndSaveTransform(e);
     * @ignore
     */
    addChildAndSaveTransform(node: GraphNode): void;
    /**
     * Insert a new child to the child list at the specified index and update the parent value of
     * the child node. If the node already had a parent, it is removed from its child list.
     *
     * @param {GraphNode} node - The new child to insert.
     * @param {number} index - The index in the child list of the parent where the new node will be
     * inserted.
     * @example
     * const e = new pc.Entity(app);
     * this.entity.insertChild(e, 1);
     */
    insertChild(node: GraphNode, index: number): void;
    /**
     * Prepares node for being inserted to a parent node, and removes it from the previous parent.
     *
     * @param {GraphNode} node - The node being inserted.
     * @private
     */
    private _prepareInsertChild;
    /**
     * Fires an event on all children of the node. The event `name` is fired on the first (root)
     * node only. The event `nameHierarchy` is fired for all children.
     *
     * @param {string} name - The name of the event to fire on the root.
     * @param {string} nameHierarchy - The name of the event to fire for all descendants.
     * @param {GraphNode} parent - The parent of the node being added/removed from the hierarchy.
     * @private
     */
    private _fireOnHierarchy;
    /**
     * Called when a node is inserted into a node's child list.
     *
     * @param {GraphNode} node - The node that was inserted.
     * @private
     */
    private _onInsertChild;
    /**
     * Recurse the hierarchy and update the graph depth at each node.
     *
     * @private
     */
    private _updateGraphDepth;
    /**
     * Remove the node from the child list and update the parent value of the child.
     *
     * @param {GraphNode} child - The node to remove.
     * @example
     * const child = this.entity.children[0];
     * this.entity.removeChild(child);
     */
    removeChild(child: GraphNode): void;
    /**
     * Updates this node's cached transform state from its local transform and parent — inferred
     * from its relationship to {@link GraphNode#syncHierarchy}, which syncs this node and all
     * descendants; confirm against implementation.
     *
     * @ignore
     */
    _sync(): void;
    /**
     * Updates the world transformation matrices at this node and all of its descendants.
     *
     * @ignore
     */
    syncHierarchy(): void;
    /**
     * Reorients the graph node so that the negative z-axis points towards the target.
     *
     * @overload
     * @param {number} x - X-component of the world space coordinate to look at.
     * @param {number} y - Y-component of the world space coordinate to look at.
     * @param {number} z - Z-component of the world space coordinate to look at.
     * @param {number} [ux] - X-component of the up vector for the look at transform. Defaults to 0.
     * @param {number} [uy] - Y-component of the up vector for the look at transform. Defaults to 1.
     * @param {number} [uz] - Z-component of the up vector for the look at transform. Defaults to 0.
     * @returns {void}
     * @example
     * // Look at the world space origin, using the (default) positive y-axis for up
     * this.entity.lookAt(0, 0, 0);
     * @example
     * // Look at world space coordinate [10, 10, 10], using the negative world y-axis for up
     * this.entity.lookAt(10, 10, 10, 0, -1, 0);
     */
    lookAt(x: number, y: number, z: number, ux?: number, uy?: number, uz?: number): void;
    /**
     * Reorients the graph node so that the negative z-axis points towards the target.
     *
     * @overload
     * @param {Vec3} target - The world space coordinate to look at.
     * @param {Vec3} [up] - The world space up vector for look at transform. Defaults to {@link Vec3.UP}.
     * @returns {void}
     * @example
     * // Look at another entity, using the (default) positive y-axis for up
     * const target = otherEntity.getPosition();
     * this.entity.lookAt(target);
     * @example
     * // Look at another entity, using the negative world y-axis for up
     * const target = otherEntity.getPosition();
     * this.entity.lookAt(target, pc.Vec3.DOWN);
     */
    lookAt(target: Vec3, up?: Vec3): void;
    /**
     * Translates the graph node in world space by the specified translation vector.
     *
     * @overload
     * @param {number} x - X-coordinate of world space translation.
     * @param {number} y - Y-coordinate of world space translation.
     * @param {number} z - Z-coordinate of world space translation.
     * @returns {void}
     * @example
     * this.entity.translate(10, 0, 0);
     */
    translate(x: number, y: number, z: number): void;
    /**
     * Translates the graph node in world space by the specified translation vector.
     *
     * @overload
     * @param {Vec3} translation - Vector holding world space translation.
     * @returns {void}
     * @example
     * const translation = new pc.Vec3(10, 0, 0);
     * this.entity.translate(translation);
     */
    translate(translation: Vec3): void;
    /**
     * Translates the graph node in local space by the specified translation vector.
     *
     * @overload
     * @param {number} x - X-coordinate of local space translation.
     * @param {number} y - Y-coordinate of local space translation.
     * @param {number} z - Z-coordinate of local space translation.
     * @returns {void}
     * @example
     * this.entity.translateLocal(10, 0, 0);
     */
    translateLocal(x: number, y: number, z: number): void;
    /**
     * Translates the graph node in local space by the specified translation vector.
     *
     * @overload
     * @param {Vec3} translation - Vector holding local space translation.
     * @returns {void}
     * @example
     * const t = new pc.Vec3(10, 0, 0);
     * this.entity.translateLocal(t);
     */
    translateLocal(translation: Vec3): void;
    /**
     * Rotates the graph node in world space by the specified Euler angles. Eulers are specified in
     * degrees in XYZ order.
     *
     * @overload
     * @param {number} x - Rotation around world space x-axis in degrees.
     * @param {number} y - Rotation around world space y-axis in degrees.
     * @param {number} z - Rotation around world space z-axis in degrees.
     * @returns {void}
     * @example
     * this.entity.rotate(0, 90, 0);
     */
    rotate(x: number, y: number, z: number): void;
    /**
     * Rotates the graph node in world space by the specified Euler angles. Eulers are specified in
     * degrees in XYZ order.
     *
     * @overload
     * @param {Vec3} rotation - Vector holding world space rotation.
     * @returns {void}
     * @example
     * const rotation = new pc.Vec3(0, 90, 0);
     * this.entity.rotate(rotation);
     */
    rotate(rotation: Vec3): void;
    /**
     * Rotates the graph node in local space by the specified Euler angles. Eulers are specified in
     * degrees in XYZ order.
     *
     * @overload
     * @param {number} x - Rotation around local space x-axis in degrees.
     * @param {number} y - Rotation around local space y-axis in degrees.
     * @param {number} z - Rotation around local space z-axis in degrees.
     * @returns {void}
     * @example
     * this.entity.rotateLocal(0, 90, 0);
     */
    rotateLocal(x: number, y: number, z: number): void;
    /**
     * Rotates the graph node in local space by the specified Euler angles. Eulers are specified in
     * degrees in XYZ order.
     *
     * @overload
     * @param {Vec3} rotation - Vector holding local space rotation.
     * @returns {void}
     * @example
     * const rotation = new pc.Vec3(0, 90, 0);
     * this.entity.rotateLocal(rotation);
     */
    rotateLocal(rotation: Vec3): void;
}

/**
 * @import { AppBase } from '../app-base.js'
 * @import { Component } from './component.js'
 * @import { Entity } from '../entity.js'
 */
/**
 * Component Systems contain the logic and functionality to update all Components of a particular
 * type.
 */
declare class ComponentSystem extends EventHandler {
    /**
     * Create a new ComponentSystem instance.
     *
     * @param {AppBase} app - The application managing this system.
     */
    constructor(app: AppBase);
    /**
     * The id type of the ComponentSystem.
     *
     * @type {string}
     * @readonly
     */
    readonly id: string;
    /**
     * The application managing this system.
     *
     * @type {AppBase}
     */
    app: AppBase;
    // Component data store. Declared as an empty object type here; the concrete shape is
    // defined by each derived system — confirm against implementation.
    store: {};
    // Array of property descriptors making up this system's component schema (see
    // initializeComponentData's `properties` parameter for the descriptor shape).
    schema: any[];
    /**
     * Create new {@link Component} and component data instances and attach them to the entity.
     *
     * @param {Entity} entity - The Entity to attach this component to.
     * @param {object} [data] - The source data with which to create the component.
     * @returns {Component} Returns a Component of type defined by the component system.
     * @example
     * const entity = new pc.Entity(app);
     * app.systems.model.addComponent(entity, { type: 'box' });
     * // entity.model is now set to a pc.ModelComponent
     * @ignore
     */
    addComponent(entity: Entity, data?: object): Component;
    /**
     * Remove the {@link Component} from the entity and delete the associated component data.
     *
     * @param {Entity} entity - The entity to remove the component from.
     * @example
     * app.systems.model.removeComponent(entity);
     * // entity.model === undefined
     * @ignore
     */
    removeComponent(entity: Entity): void;
    /**
     * Create a clone of component. This creates a copy of all component data variables.
     *
     * @param {Entity} entity - The entity to clone the component from.
     * @param {Entity} clone - The entity to clone the component into.
     * @returns {Component} The newly cloned component.
     * @ignore
     */
    cloneComponent(entity: Entity, clone: Entity): Component;
    /**
     * Called during {@link ComponentSystem#addComponent} to initialize the component data in the
     * store. This can be overridden by derived Component Systems and either called by the derived
     * System or replaced entirely.
     *
     * @param {Component} component - The component being initialized.
     * @param {object} data - The data block used to initialize the component.
     * @param {Array<string | {name: string, type: string}>} properties - The array of property
     * descriptors for the component. A descriptor can be either a plain property name, or an
     * object specifying the name and type.
     * @ignore
     */
    initializeComponentData(component: Component, data: object, properties: Array<string | {
        name: string;
        type: string;
    }>): void;
    /**
     * Searches the component schema for properties that match the specified type.
     *
     * @param {string} type - The type to search for.
     * @returns {string[]|object[]} An array of property descriptors matching the specified type.
     * @ignore
     */
    getPropertiesOfType(type: string): string[] | object[];
    /**
     * Releases resources held by the system — presumably detaches event listeners and clears the
     * store; confirm against implementation.
     *
     * @ignore
     */
    destroy(): void;
}

/**
 * @import { ComponentSystem } from './system.js'
 * @import { Entity } from '../entity.js'
 */
/**
 * Components are used to attach functionality on a {@link Entity}. Components can receive update
 * events each frame, and expose properties to the PlayCanvas Editor.
 *
 * @hideconstructor
 */
declare class Component extends EventHandler {
    /**
     * Component order. When an entity with multiple components gets enabled, this order specifies
     * in which order the components get enabled. The lowest number gets enabled first.
     *
     * @type {number}
     * @private
     */
    private static order;
    /**
     * Defines accessors on `obj` for each property described by `schema` — inferred from naming
     * and its pairing with {@link Component#buildAccessors}; confirm against implementation.
     *
     * @ignore
     */
    static _buildAccessors(obj: any, schema: any): void;
    /**
     * Base constructor for a Component.
     *
     * @param {ComponentSystem} system - The ComponentSystem used to create this component.
     * @param {Entity} entity - The Entity that this Component is attached to.
     */
    constructor(system: ComponentSystem, entity: Entity);
    /**
     * The ComponentSystem used to create this Component.
     *
     * @type {ComponentSystem}
     */
    system: ComponentSystem;
    /**
     * The Entity that this Component is attached to.
     *
     * @type {Entity}
     */
    entity: Entity;
    /** @ignore */
    buildAccessors(schema: any): void;
    // Internal lifecycle hooks invoked by the component framework — names suggest they respond
    // to enabled-state changes; confirm exact call order against implementation.
    /** @ignore */
    onSetEnabled(name: any, oldValue: any, newValue: any): void;
    /** @ignore */
    onEnable(): void;
    /** @ignore */
    onDisable(): void;
    /** @ignore */
    onPostStateChange(): void;
    /**
     * Access the component data directly. Usually you should access the data properties via the
     * individual properties as modifying this data directly will not fire 'set' events.
     *
     * @type {*}
     * @ignore
     */
    get data(): any;
    /**
     * Sets the enabled state of the component.
     *
     * @type {boolean}
     */
    set enabled(arg: boolean);
    /**
     * Gets the enabled state of the component.
     *
     * @type {boolean}
     */
    get enabled(): boolean;
}

/**
 * Wraps a set of data used in animation.
 *
 * @category Animation
 */
declare class AnimData {
    /**
     * Create a new animation AnimData instance.
     *
     * @param {number} components - Specifies how many components make up an element of data. For
     * example, specify 3 for a set of 3-dimensional vectors. The number of elements in data array
     * must be a multiple of components.
     * @param {Float32Array|number[]} data - The set of data.
     */
    constructor(components: number, data: Float32Array | number[]);
    // Number of components per element (e.g. 3 for a set of 3-dimensional vectors).
    _components: number;
    // Flat data array; its length must be a multiple of _components (see constructor doc).
    _data: number[] | Float32Array<ArrayBufferLike>;
    /**
     * Gets the number of components that make up an element.
     *
     * @type {number}
     */
    get components(): number;
    /**
     * Gets the data.
     *
     * @type {Float32Array|number[]}
     */
    get data(): Float32Array | number[];
}

/**
 * Animation curve links an input data set to an output data set and defines the interpolation
 * method to use.
 *
 * @category Animation
 */
declare class AnimCurve {
    /**
     * Create a new animation curve.
     *
     * @param {string[]} paths - Array of path strings identifying the targets of this curve, for
     * example "rootNode.translation".
     * @param {number} input - Index of the curve which specifies the key data.
     * @param {number} output - Index of the curve which specifies the value data.
     * @param {number} interpolation - The interpolation method to use. One of the following:
     *
     * - {@link INTERPOLATION_STEP}
     * - {@link INTERPOLATION_LINEAR}
     * - {@link INTERPOLATION_CUBIC}
     */
    constructor(paths: string[], input: number, output: number, interpolation: number);
    // Target path strings (see constructor's `paths`).
    _paths: string[];
    // Index of the key (input) data.
    _input: number;
    // Index of the value (output) data.
    _output: number;
    // One of the INTERPOLATION_* constants.
    _interpolation: number;
    /**
     * The list of paths which identify targets of this curve.
     *
     * @type {string[]}
     */
    get paths(): string[];
    /**
     * The index of the AnimTrack input which contains the key data for this curve.
     *
     * @type {number}
     */
    get input(): number;
    /**
     * The index of the AnimTrack output which contains the value data for this curve.
     *
     * @type {number}
     */
    get output(): number;
    /**
     * The interpolation method used by this curve.
     *
     * @type {number}
     */
    get interpolation(): number;
}

/**
 * AnimEvents stores a sorted array of animation events which should fire sequentially during the
 * playback of an {@link AnimTrack}.
 *
 * @category Animation
 */
declare class AnimEvents {
    /**
     * Create a new AnimEvents instance.
     *
     * @param {object[]} events - An array of animation events.
     * @example
     * const events = new pc.AnimEvents([
     *     {
     *         name: 'my_event',
     *         time: 1.3, // given in seconds
     *         // any additional properties added are optional and will be available in the EventHandler callback's event object
     *         myProperty: 'test',
     *         myOtherProperty: true
     *     }
     * ]);
     * animTrack.events = events;
     */
    constructor(events: object[]);
    // Events held in playback order (the class stores them sorted — see class description).
    _events: any[];
    /**
     * Gets the sorted array of animation events.
     *
     * @type {object[]}
     */
    get events(): any[];
}

/**
 * @import { AnimCurve } from './anim-curve.js'
 * @import { AnimData } from './anim-data.js'
 */
/**
 * An AnimTrack stores the curve data necessary to animate a set of target nodes. It can be linked
 * to the nodes it should animate using the {@link AnimComponent#assignAnimation} method.
 *
 * @category Animation
 */
declare class AnimTrack {
    /**
     * This AnimTrack can be used as a placeholder track when creating a state graph before having all associated animation data available.
     *
     * @type {AnimTrack}
     */
    static EMPTY: AnimTrack;
    /**
     * Create a new AnimTrack instance.
     *
     * @param {string} name - The track name.
     * @param {number} duration - The duration of the track in seconds.
     * @param {AnimData[]} inputs - List of curve key data.
     * @param {AnimData[]} outputs - List of curve value data.
     * @param {AnimCurve[]} curves - The list of curves.
     * @param {AnimEvents} [animEvents] - A sequence of animation events.
     * @ignore
     */
    constructor(name: string, duration: number, inputs: AnimData[], outputs: AnimData[], curves: AnimCurve[], animEvents?: AnimEvents);
    // Track name (see `name` getter).
    _name: string;
    // Duration in seconds (see `duration` getter).
    _duration: number;
    // Curve key data, indexed by AnimCurve.input.
    _inputs: AnimData[];
    // Curve value data, indexed by AnimCurve.output.
    _outputs: AnimData[];
    // The curves making up this track.
    _curves: AnimCurve[];
    // Events fired during playback of this track (see `events` accessor).
    _animEvents: AnimEvents;
    /**
     * Gets the name of the AnimTrack.
     *
     * @type {string}
     */
    get name(): string;
    /**
     * Gets the duration of the AnimTrack.
     *
     * @type {number}
     */
    get duration(): number;
    /**
     * Gets the list of curve key data contained in the AnimTrack.
     *
     * @type {AnimData[]}
     */
    get inputs(): AnimData[];
    /**
     * Gets the list of curve values contained in the AnimTrack.
     *
     * @type {AnimData[]}
     */
    get outputs(): AnimData[];
    /**
     * Gets the list of curves contained in the AnimTrack.
     *
     * @type {AnimCurve[]}
     */
    get curves(): AnimCurve[];
    /**
     * Sets the animation events that will fire during the playback of this anim track.
     *
     * @type {AnimEvents}
     */
    set events(animEvents: AnimEvents);
    /**
     * Gets the animation events that will fire during the playback of this anim track.
     *
     * @type {AnimEvents}
     */
    get events(): AnimEvents;
    /**
     * Evaluates the track at the given time, writing the results into the supplied snapshot
     * (an {@link AnimSnapshot} stores the state of a track at a particular time).
     *
     * @param {number} time - The time in seconds at which to evaluate the track.
     * @param {AnimSnapshot} snapshot - The snapshot that receives the evaluated values.
     * @ignore
     */
    eval(time: any, snapshot: any): void;
}

/**
 * Stores the information required by {@link AnimEvaluator} for updating a target value.
 *
 * @ignore
 */
declare class AnimTarget {
    /**
     * Create a new AnimTarget instance.
     *
     * @param {(value: number[]) => void} func - This function will be called when a new animation value is output
     * by the {@link AnimEvaluator}.
     * @param {'vector'|'quaternion'} type - The type of animation data this target expects.
     * @param {number} components - The number of components on this target (this should ideally
     * match the number of components found on all attached animation curves).
     * @param {string} targetPath - The path to the target value.
     */
    constructor(func: (value: number[]) => void, type: "vector" | "quaternion", components: number, targetPath: string);
    // Setter invoked with new animation values (the constructor's `func` parameter).
    _set: any;
    // Getter counterpart — not part of the documented constructor signature; confirm how it is
    // assigned against the implementation.
    _get: any;
    // The type of animation data this target expects.
    _type: "quaternion" | "vector";
    // Number of components on this target.
    _components: number;
    // Path to the target value.
    _targetPath: string;
    // Whether the target path refers to a transform — presumed from naming; confirm.
    _isTransform: boolean;
    // Whether the target path refers to a morph weight — presumed from naming; confirm.
    _isWeight: boolean;
    get set(): any;
    get get(): any;
    get type(): "quaternion" | "vector";
    get components(): number;
    get targetPath(): string;
    get isTransform(): boolean;
    get isWeight(): boolean;
    /**
     * Returns true if this target should use layer blending (transforms and weights).
     */
    get usesLayerBlending(): boolean;
}

/**
 * @import { AnimTarget } from '../evaluator/anim-target.js'
 */
/**
 * This interface is used by {@link AnimEvaluator} to resolve unique animation target path strings
 * into instances of {@link AnimTarget}.
 *
 * @ignore
 */
declare class AnimBinder {
    /**
     * Joins an array of path segments into a single path string using the given separator
     * character — presumed from naming as the inverse of {@link AnimBinder.splitPath}; confirm
     * against implementation.
     */
    static joinPath(pathSegments: any, character: any): any;
    /**
     * Splits a path string into its segments using the given separator character — presumed from
     * naming; confirm against implementation.
     */
    static splitPath(path: any, character: any): string[];
    /**
     * Converts a locator array into its string version.
     *
     * @param {string|string[]} entityPath - The entity location in the scene defined as an array or
     * string path.
     * @param {string} component - The component of the entity the property is located under.
     * @param {string|string[]} propertyPath - The property location in the entity defined as an array
     * or string path.
     * @returns {string} The locator encoded as a string.
     * @example
     * // returns 'spotLight/light/color.r'
     * encode(['spotLight'], 'light', ['color', 'r']);
     */
    static encode(entityPath: string | string[], component: string, propertyPath: string | string[]): string;
    /**
     * Resolve the provided target path and return an instance of {@link AnimTarget} which will
     * handle setting the value, or return null if no such target exists.
     *
     * @param {string} path - The animation curve path to resolve.
     * @returns {AnimTarget|null} - Returns the target
     * instance on success and null otherwise.
     */
    resolve(path: string): AnimTarget | null;
    /**
     * Called when the {@link AnimEvaluator} no longer has a curve driving the given key.
     *
     * @param {string} path - The animation curve path which is no longer driven.
     */
    unresolve(path: string): void;
    /**
     * Called by {@link AnimEvaluator} once a frame after animation updates are done.
     *
     * @param {number} deltaTime - Amount of time that passed in the current update.
     */
    update(deltaTime: number): void;
}

/**
 * Internal cache data for the evaluation of a single curve timeline.
 *
 * @ignore
 */
declare class AnimCache {
    // Indices of the keys bracketing the most recent evaluation time — presumed from naming
    // (_findKey/update); confirm against implementation.
    _left: number;
    _right: number;
    // Length of the bracketing key interval and its reciprocal — presumed; confirm.
    _len: number;
    _recip: number;
    // Key values / interpolation factor for the current interval — presumed; confirm.
    _p0: number;
    _p1: number;
    _t: number;
    // Cached Hermite basis coefficients for cubic interpolation; `valid` flags whether they are
    // up to date for the current interval.
    _hermite: {
        valid: boolean;
        p0: number;
        m0: number;
        p1: number;
        m1: number;
    };
    // Updates the cached key interval for the given time against the input (key) data.
    update(time: any, input: any): void;
    // Returns the index of the key bracketing `time` in the input data — presumed; confirm.
    _findKey(time: any, input: any): number;
    // Evaluates the curve with the given interpolation mode over the output (value) data,
    // writing into `result`.
    eval(result: any, interpolation: any, output: any): void;
}

/**
 * @import { AnimTrack } from './anim-track.js'
 */
/**
 * AnimSnapshot stores the state of an animation track at a particular time.
 *
 * @ignore
 */
declare class AnimSnapshot {
    /**
     * Create a new animation snapshot.
     *
     * @param {AnimTrack} animTrack - The source track.
     */
    constructor(animTrack: AnimTrack);
    // Name of the snapshot — presumably derived from the source track's name; confirm.
    _name: string;
    // The time, in seconds, at which the snapshot was last evaluated.
    _time: number;
    // Per-curve evaluation caches (one AnimCache per curve timeline).
    _cache: AnimCache[];
    // Per-curve evaluated values; each inner array holds one curve's components.
    _results: number[][];
}

/**
 * @import { AnimTrack } from './anim-track.js'
 * @import { EventHandler } from '../../../core/event-handler.js'
 */
/**
 * AnimClip wraps the running state of an animation track. It contains and update the animation
 * 'cursor' and performs looping logic.
 *
 * @ignore
 */
declare class AnimClip {
    // Scratch object describing the current frame's time window — presumed from naming and the
    // frameStartTime/frameEndTime parameters below; confirm against implementation.
    static eventFrame: {
        start: number;
        end: number;
        residual: number;
    };
    /**
     * Create a new animation clip.
     *
     * @param {AnimTrack} track - The animation data.
     * @param {number} time - The initial time of the clip.
     * @param {number} speed - Speed of the animation playback.
     * @param {boolean} playing - true if the clip is playing and false otherwise.
     * @param {boolean} loop - Whether the clip should loop.
     * @param {EventHandler} [eventHandler] - The handler to call when an event is fired by the clip.
     */
    constructor(track: AnimTrack, time: number, speed: number, playing: boolean, loop: boolean, eventHandler?: EventHandler);
    _name: string;
    // The animation data driving this clip (see constructor).
    _track: AnimTrack;
    // Evaluated state of the track at the clip's current time.
    _snapshot: AnimSnapshot;
    // true if the clip is playing and false otherwise.
    _playing: boolean;
    // Playback cursor in seconds.
    _time: number;
    // Speed of the animation playback.
    _speed: number;
    // Whether the clip should loop.
    _loop: boolean;
    // Blend weight/order used when multiple clips are combined — presumed, used by
    // AnimEvaluator; confirm.
    _blendWeight: number;
    _blendOrder: number;
    // Handler to call when an event is fired by the clip.
    _eventHandler: EventHandler;
    set name(name: string);
    get name(): string;
    set track(track: AnimTrack);
    get track(): AnimTrack;
    get snapshot(): AnimSnapshot;
    set time(time: number);
    get time(): number;
    set speed(speed: number);
    get speed(): number;
    set loop(loop: boolean);
    get loop(): boolean;
    set blendWeight(blendWeight: number);
    get blendWeight(): number;
    set blendOrder(blendOrder: number);
    get blendOrder(): number;
    set eventCursor(value: any);
    get eventCursor(): any;
    // Cursor into the track's event list, tracking which events have fired — presumed from
    // naming; confirm against implementation.
    _eventCursor: any;
    get eventCursorEnd(): number;
    get nextEvent(): any;
    get isReverse(): boolean;
    // --- Event-firing helpers: decide which track events fall inside a frame's time window and
    // fire them via _eventHandler — presumed from naming; confirm against implementation. ---
    nextEventAheadOfTime(time: any): boolean;
    nextEventBehindTime(time: any): boolean;
    resetEventCursor(): void;
    moveEventCursor(): void;
    clipFrameTime(frameEndTime: any): void;
    alignCursorToCurrentTime(): void;
    fireNextEvent(): void;
    fireNextEventInFrame(frameStartTime: any, frameEndTime: any): boolean;
    activeEventsForFrame(frameStartTime: any, frameEndTime: any): void;
    progressForTime(time: any): number;
    // Advances the clip's time cursor by deltaTime (scaled by speed, with looping) — presumed;
    // confirm against implementation.
    _update(deltaTime: any): void;
    // Playback controls — presumably toggle the _playing flag and/or reset the time cursor;
    // confirm exact semantics against implementation.
    play(): void;
    stop(): void;
    pause(): void;
    resume(): void;
    reset(): void;
}

/**
 * @import { AnimBinder } from '../binder/anim-binder.js'
 * @import { AnimClip } from './anim-clip.js'
 */
/**
 * AnimEvaluator blends multiple sets of animation clips together.
 *
 * @ignore
 */
declare class AnimEvaluator {
    /**
     * Create a new animation evaluator.
     *
     * @param {AnimBinder} binder - Interface that resolves curve paths to instances of
     * {@link AnimTarget}.
     */
    constructor(binder: AnimBinder);
    // Resolves curve paths to AnimTarget instances (see constructor).
    _binder: AnimBinder;
    // The attached clips (see `clips` getter, typed AnimClip[]).
    _clips: any[];
    // Per-clip binding data — presumed to cache resolved inputs/outputs per curve; confirm
    // against implementation.
    _inputs: any[];
    _outputs: any[];
    // Map of resolved targets keyed by curve path — presumed; confirm.
    _targets: {};
    /**
     * The list of animation clips.
     *
     * @type {AnimClip[]}
     */
    get clips(): AnimClip[];
    /**
     * Add a clip to the evaluator.
     *
     * @param {AnimClip} clip - The clip to add to the evaluator.
     */
    addClip(clip: AnimClip): void;
    /**
     * Remove a clip from the evaluator.
     *
     * @param {number} index - Index of the clip to remove.
     */
    removeClip(index: number): void;
    /**
     * Remove all clips from the evaluator.
     */
    removeClips(): void;
    // Replaces the track of the clip(s) matching `name` with `animTrack` — presumed from naming;
    // confirm against implementation.
    updateClipTrack(name: any, animTrack: any): void;
    /**
     * Returns the first clip which matches the given name, or null if no such clip was found.
     *
     * @param {string} name - Name of the clip to find.
     * @returns {AnimClip|null} - The clip with the given name or null if no such clip was found.
     */
    findClip(name: string): AnimClip | null;
    // Re-resolves all curve paths through the binder — presumed from naming; confirm.
    rebind(): void;
    // Applies a curve mask to the evaluator — presumed from naming; confirm semantics and the
    // meaning of the return value against the implementation.
    assignMask(mask: any): any;
    /**
     * Evaluator frame update function. All the attached {@link AnimClip}s are evaluated, blended
     * and the results set on the {@link AnimTarget}.
     *
     * @param {number} deltaTime - The amount of time that has passed since the last update, in
     * seconds.
     * @param {boolean} [outputAnimation] - Whether the evaluator should output the results of the
     * update to the bound animation targets.
     */
    update(deltaTime: number, outputAnimation?: boolean): void;
}

/**
 * @import { AnimState } from './anim-state.js'
 * @import { Vec2 } from '../../../core/math/vec2.js'
 */
/**
 * AnimBlendTrees are used to store and blend multiple {@link AnimNode}s together. BlendTrees can
 * be the child of other AnimBlendTrees, in order to create a hierarchy of AnimNodes. It takes a
 * blend type as an argument which defines which function should be used to determine the weights
 * of each of its children, based on the current parameter value.
 *
 * @category Animation
 */
declare class AnimBlendTree extends AnimNode {
    /**
     * Create a new AnimBlendTree instance.
     *
     * @param {AnimState} state - The AnimState that this AnimBlendTree belongs to.
     * @param {AnimBlendTree|null} parent - The parent of the AnimBlendTree. If not null, the
     * AnimNode is stored as part of a {@link AnimBlendTree} hierarchy.
     * @param {string} name - The name of the BlendTree. Used when assigning an {@link AnimTrack}
     * to its children.
     * @param {number|Vec2} point - The coordinate/vector that's used to determine the weight of
     * this node when it's part of an {@link AnimBlendTree}.
     * @param {string[]} parameters - The anim component parameters which are used to calculate the
     * current weights of the blend trees children.
     * @param {object[]} children - The child nodes that this blend tree should create. Can either
     * be of type {@link AnimNode} or {@link AnimBlendTree}.
     * @param {boolean} syncAnimations - If true, the speed of each blended animation will be
     * synchronized.
     * @param {Function} createTree - Used to create child blend trees of varying types.
     * @param {Function} findParameter - Used at runtime to get the current parameter values.
     */
    constructor(state: AnimState, parent: AnimBlendTree | null, name: string, point: number | Vec2, parameters: string[], children: object[], syncAnimations: boolean, createTree: Function, findParameter: Function);
    /** Names of the anim component parameters that drive the child weights. */
    _parameters: string[];
    /** Last-read values of {@link AnimBlendTree#_parameters} — refreshed by {@link AnimBlendTree#updateParameterValues}. */
    _parameterValues: any[];
    /** The child {@link AnimNode}/{@link AnimBlendTree} instances of this tree. */
    _children: any[];
    /** Callback used at runtime to get the current parameter values. */
    _findParameter: Function;
    /** If true, the speed of each blended animation is synchronized. */
    _syncAnimations: boolean;
    /** Cache of per-child point data — NOTE(review): structure not visible here; confirm in anim-blend-tree.js. */
    _pointCache: {};
    /** The weight of this tree within its parent hierarchy. */
    get weight(): any;
    /** Whether the speed of each blended animation is synchronized. */
    get syncAnimations(): boolean;
    /**
     * Returns the child node with the given name (assumption based on the signature — verify).
     *
     * @param {string} name - Name of the child to find.
     */
    getChild(name: any): any;
    /** Refreshes the cached parameter values; the boolean presumably indicates whether any value changed — confirm. */
    updateParameterValues(): boolean;
    /** Returns the duration of child node i scaled by its weight (assumption from name — verify). */
    getNodeWeightedDuration(i: any): number;
    /** Returns the number of nodes in this tree, including nested children. */
    getNodeCount(): number;
}

/**
 * @import { AnimBlendTree } from './anim-blend-tree.js'
 * @import { AnimState } from './anim-state.js'
 */
/**
 * AnimNodes are used to represent a single animation track in the current state. Each state can
 * contain multiple AnimNodes, in which case they are stored in a BlendTree hierarchy, which will
 * control the weight (contribution to the states final animation) of its child AnimNodes.
 *
 * @category Animation
 */
declare class AnimNode {
    /**
     * Create a new AnimNode instance.
     *
     * @param {AnimState} state - The AnimState that this BlendTree belongs to.
     * @param {AnimBlendTree|null} parent - The parent of the AnimNode. If not null, the AnimNode
     * is stored as part of an {@link AnimBlendTree} hierarchy.
     * @param {string} name - The name of the AnimNode. Used when assigning an {@link AnimTrack} to
     * it.
     * @param {number[]|number} point - The coordinate/vector that's used to determine the weight of
     * this node when it's part of an {@link AnimBlendTree}.
     * @param {number} [speed] - The speed that its {@link AnimTrack} should play at. Defaults to 1.
     */
    constructor(state: AnimState, parent: AnimBlendTree | null, name: string, point: number[] | number, speed?: number);
    /** The AnimState that this node belongs to. */
    _state: AnimState;
    /** The parent blend tree, or null for a root node. */
    _parent: AnimBlendTree;
    /** The name of this node. */
    _name: string;
    /** NOTE(review): declared as number|Vec2 while the constructor accepts number[]|number — verify which is correct. */
    _point: number | Vec2;
    /** The length (magnitude) of {@link AnimNode#_point}. */
    _pointLength: number;
    /** The playback speed of this node's track. */
    _speed: number;
    /** Speed after blend-tree weighting is applied — confirm exact semantics in anim-node.js. */
    _weightedSpeed: number;
    /** This node's blend weight. */
    _weight: number;
    /** The {@link AnimTrack} assigned to this node, if any. */
    _animTrack: any;
    /** The parent blend tree, or null for a root node. */
    get parent(): AnimBlendTree;
    /** The name of this node. */
    get name(): string;
    /** The full path of this node within its blend tree hierarchy — confirm format in anim-node.js. */
    get path(): any;
    /** The coordinate/vector used to determine this node's weight within an {@link AnimBlendTree}. */
    get point(): number | Vec2;
    /** The length (magnitude) of {@link AnimNode#point}. */
    get pointLength(): number;
    /** Sets this node's blend weight. */
    set weight(value: number);
    /** Gets this node's blend weight. */
    get weight(): number;
    /** This node's weight normalized against the hierarchy's total weight — confirm in anim-node.js. */
    get normalizedWeight(): number;
    /** The playback speed of this node's track. */
    get speed(): number;
    /** The effective playback speed including parent contributions — confirm in anim-node.js. */
    get absoluteSpeed(): number;
    /** Sets the weighted playback speed. */
    set weightedSpeed(weightedSpeed: number);
    /** Gets the weighted playback speed. */
    get weightedSpeed(): number;
    /** Sets the {@link AnimTrack} assigned to this node. */
    set animTrack(value: any);
    /** Gets the {@link AnimTrack} assigned to this node. */
    get animTrack(): any;
}

/**
 * @import { AnimState } from './anim-state.js'
 * @import { Vec2 } from '../../../core/math/vec2.js'
 */
/**
 * An AnimBlendTree that calculates its weights using a 1D algorithm based on the thesis
 * http://runevision.com/thesis/rune_skovbo_johansen_thesis.pdf Chapter 6.
 *
 * @category Animation
 */
declare class AnimBlendTree1D extends AnimBlendTree {
    /** Recalculates the blend weights of this tree's child nodes using the 1D algorithm. */
    calculateWeights(): void;
}

/**
 * An AnimBlendTree that calculates its weights using a 2D Cartesian algorithm based on the thesis
 * http://runevision.com/thesis/rune_skovbo_johansen_thesis.pdf Chapter 6 Section 3.
 *
 * @category Animation
 */
declare class AnimBlendTreeCartesian2D extends AnimBlendTree {
    /** Scratch vector shared by weight calculations to avoid per-frame allocation. */
    static _p: Vec2;
    /** Scratch vector shared by weight calculations to avoid per-frame allocation. */
    static _pip: Vec2;
    /** Returns the cached distance between child points i and j (assumption from name — verify). */
    pointDistanceCache(i: any, j: any): any;
    /** Recalculates the blend weights of this tree's child nodes using the 2D Cartesian algorithm. */
    calculateWeights(): void;
}

/**
 * An AnimBlendTree that calculates its weights using a 2D directional algorithm based on the thesis
 * http://runevision.com/thesis/rune_skovbo_johansen_thesis.pdf Chapter 6.
 *
 * @category Animation
 */
declare class AnimBlendTreeDirectional2D extends AnimBlendTree {
    /** Scratch vector shared by weight calculations to avoid per-frame allocation. */
    static _p: Vec2;
    /** Scratch vector shared by weight calculations to avoid per-frame allocation. */
    static _pip: Vec2;
    /** Returns cached point data for child pair (i, j) (assumption from name — verify). */
    pointCache(i: any, j: any): any;
    /** Recalculates the blend weights of this tree's child nodes using the 2D directional algorithm. */
    calculateWeights(): void;
}

/**
 * An AnimBlendTree that calculates normalized weight values based on the total weight.
 *
 * @category Animation
 */
declare class AnimBlendTreeDirect extends AnimBlendTree {
    /** Recalculates the blend weights of this tree's child nodes, normalized by the total weight. */
    calculateWeights(): void;
}

/**
 * @import { AnimController } from './anim-controller.js'
 */
/**
 * Defines a single state that the controller can be in. Each state contains either a single
 * {@link AnimNode} or an {@link AnimBlendTree} of multiple {@link AnimNode}s, which will be used
 * to animate the {@link Entity} while the state is active. An AnimState will stay active and play
 * as long as there is no {@link AnimTransition} with its conditions met that has that AnimState
 * as its source state.
 *
 * @category Animation
 */
declare class AnimState {
    /**
     * Create a new AnimState instance.
     *
     * @param {AnimController} controller - The controller this AnimState is associated with.
     * @param {string} name - The name of the state. Used to find this state when the controller
     * transitions between states and links animations.
     * @param {number} [speed] - The speed animations in the state should play at. Individual
     * {@link AnimNode}s can override this value.
     * @param {boolean} [loop] - Determines whether animations in this state should loop.
     * @param {object|null} [blendTree] - If supplied, the AnimState will recursively build a
     * {@link AnimBlendTree} hierarchy, used to store, blend and play multiple animations.
     */
    constructor(controller: AnimController, name: string, speed?: number, loop?: boolean, blendTree?: object | null);
    /** @private */
    private _animations;
    /** @private */
    private _animationList;
    /** The controller this state is associated with. */
    _controller: AnimController;
    /** The name of this state. */
    _name: string;
    /** The speed animations in this state should play at. */
    _speed: number;
    /** Whether animations in this state should loop. */
    _loop: boolean;
    /** True when every node in this state has an animation assigned — confirm in anim-state.js. */
    _hasAnimations: boolean;
    /** The root node of this state: a single node, or the root of a blend tree hierarchy. */
    _blendTree: AnimNode | AnimBlendTree1D | AnimBlendTreeCartesian2D | AnimBlendTreeDirectional2D | AnimBlendTreeDirect;
    /** Factory that creates a blend tree of the given type (passed recursively to child trees). */
    _createTree(type: any, state: any, parent: any, name: any, point: any, parameters: any, children: any, syncAnimations: any, createTree: any, findParameter: any): AnimBlendTree1D | AnimBlendTreeCartesian2D | AnimBlendTreeDirectional2D | AnimBlendTreeDirect;
    /** Resolves a period-separated blend tree path to a node in this state's hierarchy. */
    _getNodeFromPath(path: any): AnimNode | AnimBlendTree1D | AnimBlendTreeCartesian2D | AnimBlendTreeDirectional2D | AnimBlendTreeDirect;
    /** Assigns the given track to the node at the given path (assumption from signature — verify). */
    addAnimation(path: any, animTrack: any): void;
    /** Recomputes {@link AnimState#_hasAnimations}. @private-like internal helper. */
    _updateHasAnimations(): void;
    /** The name of this state. */
    get name(): string;
    /** Sets the list of animations assigned to this state. */
    set animations(value: any[]);
    /** Gets the list of animations assigned to this state. */
    get animations(): any[];
    /** Whether every node in this state has an animation assigned. */
    get hasAnimations(): boolean;
    /** Sets the playback speed for this state. */
    set speed(value: number);
    /** Gets the playback speed for this state. */
    get speed(): number;
    /** Sets whether animations in this state should loop. */
    set loop(value: boolean);
    /** Gets whether animations in this state should loop. */
    get loop(): boolean;
    /** The number of nodes in this state's hierarchy. */
    get nodeCount(): any;
    /** Whether this state is ready to play — confirm exact conditions in anim-state.js. */
    get playable(): boolean;
    /** Whether this state plays in a loop — confirm relation to {@link AnimState#loop} in anim-state.js. */
    get looping(): boolean;
    /** The sum of the weights of this state's nodes. */
    get totalWeight(): number;
    /** The duration of this state's timeline — confirm how blended durations combine in anim-state.js. */
    get timelineDuration(): number;
}

/**
 * @import { AnimEvaluator } from '../evaluator/anim-evaluator.js'
 * @import { EventHandler } from '../../../core/event-handler.js'
 */
/**
 * The AnimController manages the animations for its entity, based on the provided state graph and
 * parameters. Its update method determines which state the controller should be in based on the
 * current time, parameters and available states / transitions. It also ensures the AnimEvaluator
 * is supplied with the correct animations, based on the currently active state.
 *
 * @ignore
 */
declare class AnimController {
    /**
     * Create a new AnimController.
     *
     * @param {AnimEvaluator} animEvaluator - The animation evaluator used to blend all current
     * playing animation keyframes and update the entities properties based on the current
     * animation values.
     * @param {object[]} states - The list of states used to form the controller state graph.
     * @param {object[]} transitions - The list of transitions used to form the controller state
     * graph.
     * @param {boolean} activate - Determines whether the anim controller should automatically play
     * once all {@link AnimNode}s are assigned animations.
     * @param {EventHandler} eventHandler - The event handler which should be notified with anim
     * events.
     * @param {Function} findParameter - Retrieves a parameter which is used to control the
     * transition between states.
     * @param {Function} consumeTrigger - Used to set triggers back to their default state after
     * they have been consumed by a transition.
     */
    constructor(animEvaluator: AnimEvaluator, states: object[], transitions: object[], activate: boolean, eventHandler: EventHandler, findParameter: Function, consumeTrigger: Function);
    /**
     * @type {Object<string, AnimState>}
     * @private
     */
    private _states;
    /**
     * @type {string[]}
     * @private
     */
    private _stateNames;
    /**
     * @type {Object<string, AnimTransition[]>}
     * @private
     */
    private _findTransitionsFromStateCache;
    /**
     * @type {Object<string, AnimTransition[]>}
     * @private
     */
    private _findTransitionsBetweenStatesCache;
    /**
     * @type {string|null}
     * @private
     */
    private _previousStateName;
    /** @private */
    private _activeStateName;
    /** @private */
    private _activeStateDuration;
    /** @private */
    private _activeStateDurationDirty;
    /** @private */
    private _playing;
    /**
     * @type {boolean}
     * @private
     */
    private _activate;
    /**
     * @type {AnimTransition[]}
     * @private
     */
    private _transitions;
    /** @private */
    private _currTransitionTime;
    /** @private */
    private _totalTransitionTime;
    /** @private */
    private _isTransitioning;
    /** @private */
    private _transitionInterruptionSource;
    /** @private */
    private _transitionPreviousStates;
    /** @private */
    private _timeInState;
    /** @private */
    private _timeInStateBefore;
    /** The evaluator used to blend the current animations. */
    _animEvaluator: AnimEvaluator;
    /** Event handler notified with anim events. */
    _eventHandler: EventHandler;
    /** Retrieves a parameter used to control transitions between states. */
    _findParameter: Function;
    /** Resets triggers to their default state after a transition has consumed them. */
    _consumeTrigger: Function;
    /** The evaluator used to blend the current animations. */
    get animEvaluator(): AnimEvaluator;
    /** Sets the active state by name. NOTE(review): setter takes a state name despite the {AnimState} type — verify. */
    set activeState(stateName: AnimState);
    /** Gets the currently active state. */
    get activeState(): AnimState;
    /** The name of the currently active state. */
    get activeStateName(): string;
    /** The animations of the currently active state. */
    get activeStateAnimations(): any[];
    /** Sets the previous state by name. NOTE(review): setter takes a state name despite the {AnimState} type — verify. */
    set previousState(stateName: AnimState);
    /** Gets the previously active state. */
    get previousState(): AnimState;
    /** The name of the previously active state. */
    get previousStateName(): string;
    /** Whether all states in the graph are ready to play — confirm exact conditions in anim-controller.js. */
    get playable(): boolean;
    /** Sets whether the controller is playing. */
    set playing(value: boolean);
    /** Gets whether the controller is playing. */
    get playing(): boolean;
    /** The active state's progress, normalized by its duration. */
    get activeStateProgress(): number;
    /** The duration of the active state, in seconds. */
    get activeStateDuration(): number;
    /** Sets the active state's current time, in seconds. */
    set activeStateCurrentTime(time: number);
    /** Gets the active state's current time, in seconds. */
    get activeStateCurrentTime(): number;
    /** Whether the controller is currently transitioning between states. */
    get transitioning(): boolean;
    /** The progress of the current transition, normalized by its total time. */
    get transitionProgress(): number;
    /** The names of all states in the graph. */
    get states(): string[];
    /** Applies a bone mask — NOTE(review): return value semantics not visible here. */
    assignMask(mask: any): any;
    /**
     * @param {string} stateName - The name of the state to find.
     * @returns {AnimState} The state with the given name.
     * @private
     */
    private _findState;
    /** Computes the active state's progress for the given absolute time. @private-like internal helper. */
    _getActiveStateProgressForTime(time: any): number;
    /**
     * Return all the transitions that have the given stateName as their source state.
     *
     * @param {string} stateName - The name of the state to find transitions from.
     * @returns {AnimTransition[]} The transitions that have the given stateName as their source
     * state.
     * @private
     */
    private _findTransitionsFromState;
    /**
     * Return all the transitions that contain the given source and destination states.
     *
     * @param {string} sourceStateName - The name of the source state to find transitions from.
     * @param {string} destinationStateName - The name of the destination state to find transitions
     * to.
     * @returns {AnimTransition[]} The transitions that have the given source and destination states.
     * @private
     */
    private _findTransitionsBetweenStates;
    /** Returns true if the transition's parameter conditions are currently met. */
    _transitionHasConditionsMet(transition: any): boolean;
    /** Finds a valid transition between the given source and destination state names — confirm priority rules in anim-controller.js. */
    _findTransition(from: any, to: any): any;
    /** Moves the controller into the transition's destination state. */
    updateStateFromTransition(transition: any): void;
    /** Transitions directly to the named state — confirm blending behavior in anim-controller.js. */
    _transitionToState(newStateName: any): void;
    /** Assigns an animation track to the state or blend tree node at the given path. */
    assignAnimation(pathString: any, animTrack: any, speed: any, loop: any): void;
    /** Removes the animations from the named node; the boolean presumably indicates success — confirm. */
    removeNodeAnimations(nodeName: any): boolean;
    /** Starts playing, optionally from the start of the named state. */
    play(stateName: any): void;
    /** Pauses playback. */
    pause(): void;
    /** Resets the controller to its initial state. */
    reset(): void;
    /** Rebinds the evaluator's animations — presumably after the target hierarchy changes; confirm. */
    rebind(): void;
    /** Advances the controller by dt seconds, evaluating transitions and updating the evaluator. */
    update(dt: any): void;
    /** Retrieves a parameter by name via the component-supplied callback. */
    findParameter: (name: any) => any;
}

/**
 * @import { AnimComponent } from './component.js'
 * @import { AnimController } from '../../anim/controller/anim-controller.js'
 */
/**
 * The Anim Component Layer allows managing a single layer of the animation state graph.
 *
 * @category Animation
 */
declare class AnimComponentLayer {
    /**
     * Create a new AnimComponentLayer instance.
     *
     * @param {string} name - The name of the layer.
     * @param {AnimController} controller - The controller to manage this layers animations.
     * @param {AnimComponent} component - The component that this layer is a member of.
     * @param {number} [weight] - The weight of this layer. Defaults to 1.
     * @param {string} [blendType] - The blend type of this layer. Defaults to {@link ANIM_LAYER_OVERWRITE}.
     * @ignore
     */
    constructor(name: string, controller: AnimController, component: AnimComponent, weight?: number, blendType?: string);
    /**
     * @type {string}
     * @private
     */
    private _name;
    /**
     * @type {AnimController}
     * @private
     */
    private _controller;
    /**
     * @type {AnimComponent}
     * @private
     */
    private _component;
    /**
     * @type {number}
     * @private
     */
    private _weight;
    /**
     * @type {string}
     * @private
     */
    private _blendType;
    /** @private */
    private _mask;
    /** @private */
    private _blendTime;
    /** @private */
    private _blendTimeElapsed;
    /** @private */
    private _startingWeight;
    /** @private */
    private _targetWeight;
    /**
     * Returns the name of the layer.
     *
     * @type {string}
     */
    get name(): string;
    /**
     * Sets whether this layer is currently playing.
     *
     * @type {boolean}
     */
    set playing(value: boolean);
    /**
     * Gets whether this layer is currently playing.
     *
     * @type {boolean}
     */
    get playing(): boolean;
    /**
     * Returns true if a state graph has been loaded and all states in the graph have been assigned
     * animation tracks.
     *
     * @type {boolean}
     */
    get playable(): boolean;
    /**
     * Gets the currently active state name.
     *
     * @type {string}
     */
    get activeState(): string;
    /**
     * Gets the previously active state name.
     *
     * @type {string|null}
     */
    get previousState(): string | null;
    /**
     * Gets the currently active state's progress as a value normalized by the state's animation
     * duration. Looped animations will return values greater than 1.
     *
     * @type {number}
     */
    get activeStateProgress(): number;
    /**
     * Gets the currently active states duration.
     *
     * @type {number}
     */
    get activeStateDuration(): number;
    /**
     * Sets the active state's time in seconds.
     *
     * @type {number}
     */
    set activeStateCurrentTime(time: number);
    /**
     * Gets the active state's time in seconds.
     *
     * @type {number}
     */
    get activeStateCurrentTime(): number;
    /**
     * Gets whether the anim component layer is currently transitioning between states.
     *
     * @type {boolean}
     */
    get transitioning(): boolean;
    /**
     * Gets the progress, if the anim component layer is currently transitioning between states.
     * Otherwise returns null.
     *
     * @type {number|null}
     */
    get transitionProgress(): number | null;
    /**
     * Gets all available states in this layers state graph.
     *
     * @type {string[]}
     */
    get states(): string[];
    /**
     * Sets the blending weight of this layer. Used when calculating the value of properties that
     * are animated by more than one layer.
     *
     * @type {number}
     */
    set weight(value: number);
    /**
     * Gets the blending weight of this layer.
     *
     * @type {number}
     */
    get weight(): number;
    /**
     * Sets how properties animated by this layer blend with the same properties animated by
     * previous layers, e.g. {@link ANIM_LAYER_OVERWRITE}.
     *
     * @type {string}
     */
    set blendType(value: string);
    /**
     * Gets the blend type of this layer.
     *
     * @type {string}
     */
    get blendType(): string;
    /**
     * Sets the mask of bones which should be animated or ignored by this layer.
     *
     * @type {object}
     * @example
     * entity.anim.baseLayer.mask = {
     *     // include the spine of the current model and all of its children
     *     "path/to/spine": {
     *         children: true
     *     },
     *     // include the hip of the current model but not all of its children
     *     "path/to/hip": true
     * };
     */
    set mask(value: object);
    /**
     * Gets the mask of bones which should be animated or ignored by this layer.
     *
     * @type {object}
     */
    get mask(): object;
    /**
     * Start playing the animation in the current state.
     *
     * @param {string} [name] - If provided, will begin playing from the start of the state with
     * this name.
     */
    play(name?: string): void;
    /**
     * Pause the animation in the current state.
     */
    pause(): void;
    /**
     * Reset the animation component to its initial state, including all parameters. The system
     * will be paused.
     */
    reset(): void;
    /**
     * Rebind any animations in the layer to the currently present components and model of the anim
     * components entity.
     */
    rebind(): void;
    /** Advances this layer's controller and any in-progress weight blend by dt seconds (internal). */
    update(dt: any): void;
    /**
     * Blend from the current weight value to the provided weight value over a given amount of time.
     *
     * @param {number} weight - The new weight value to blend to.
     * @param {number} time - The duration of the blend in seconds.
     */
    blendToWeight(weight: number, time: number): void;
    /**
     * Assigns an animation track to a state or blend tree node in the current graph. If a state
     * for the given nodePath doesn't exist, it will be created. If all states nodes are linked and
     * the {@link AnimComponent#activate} value was set to true then the component will begin
     * playing.
     *
     * @param {string} nodePath - Either the state name or the path to a blend tree node that this
     * animation should be associated with. Each section of a blend tree path is split using a
     * period (`.`) therefore state names should not include this character (e.g "MyStateName" or
     * "MyStateName.BlendTreeNode").
     * @param {AnimTrack} animTrack - The animation track that will be assigned to this state and
     * played whenever this state is active.
     * @param {number} [speed] - Update the speed of the state you are assigning an animation to.
     * Defaults to 1.
     * @param {boolean} [loop] - Update the loop property of the state you are assigning an
     * animation to. Defaults to true.
     */
    assignAnimation(nodePath: string, animTrack: AnimTrack, speed?: number, loop?: boolean): void;
    /**
     * Removes animations from a node in the loaded state graph.
     *
     * @param {string} nodeName - The name of the node that should have its animation tracks removed.
     */
    removeNodeAnimations(nodeName: string): void;
    /**
     * Returns an object holding the animation asset id that is associated with the given state.
     *
     * @param {string} stateName - The name of the state to get the asset for.
     * @returns {{ asset: number }} An object containing the animation asset id associated with the given state.
     */
    getAnimationAsset(stateName: string): {
        asset: number;
    };
    /**
     * Transition to any state in the current layers graph. Transitions can be instant or take an
     * optional blend time.
     *
     * @param {string} to - The state that this transition will transition to.
     * @param {number} [time] - The duration of the transition in seconds. Defaults to 0.
     * @param {number} [transitionOffset] - If provided, the destination state will begin playing
     * its animation at this time. Given in normalized time, based on the states duration & must be
     * between 0 and 1. Defaults to null.
     */
    transition(to: string, time?: number, transitionOffset?: number): void;
}

/**
 * The AnimComponent allows an {@link Entity} to play back animations on models and entity
 * properties.
 *
 * @hideconstructor
 * @category Animation
 */
declare class AnimComponent extends Component {
    /** @private */
    private _stateGraphAsset;
    /** @private */
    private _animationAssets;
    /** @private */
    private _speed;
    /** @private */
    private _activate;
    /** @private */
    private _playing;
    /** @private */
    private _rootBone;
    /** @private */
    private _stateGraph;
    /** @private */
    private _layers;
    /** @private */
    private _layerIndices;
    /** @private */
    private _parameters;
    /** @private */
    private _targets;
    /** @private */
    private _consumedTriggers;
    /** @private */
    private _normalizeWeights;
    set stateGraphAsset(value: any);
    get stateGraphAsset(): any;
    /**
     * Sets whether the animation component will normalize the weights of its layers by their sum total.
     *
     * @type {boolean}
     */
    set normalizeWeights(value: boolean);
    /**
     * Gets whether the animation component will normalize the weights of its layers by their sum total.
     *
     * @type {boolean}
     */
    get normalizeWeights(): boolean;
    set animationAssets(value: {});
    get animationAssets(): {};
    /**
     * Sets the speed multiplier for animation playback speed. 1.0 is playback at normal speed, 0.0 pauses
     * the animation.
     *
     * @type {number}
     */
    set speed(value: number);
    /**
     * Gets the speed multiplier for animation play back speed.
     *
     * @type {number}
     */
    get speed(): number;
    /**
     * Sets whether the first animation will begin playing when the scene is loaded.
     *
     * @type {boolean}
     */
    set activate(value: boolean);
    /**
     * Gets whether the first animation will begin playing when the scene is loaded.
     *
     * @type {boolean}
     */
    get activate(): boolean;
    /**
     * Sets whether to play or pause all animations in the component.
     *
     * @type {boolean}
     */
    set playing(value: boolean);
    /**
     * Gets whether to play or pause all animations in the component.
     *
     * @type {boolean}
     */
    get playing(): boolean;
    /**
     * Sets the entity that this anim component should use as the root of the animation hierarchy.
     *
     * @type {Entity}
     */
    set rootBone(value: Entity);
    /**
     * Gets the entity that this anim component should use as the root of the animation hierarchy.
     *
     * @type {Entity}
     */
    get rootBone(): Entity;
    set stateGraph(value: any);
    get stateGraph(): any;
    /**
     * Returns the animation layers available in this anim component.
     *
     * @type {AnimComponentLayer[]}
     */
    get layers(): AnimComponentLayer[];
    set layerIndices(value: {});
    get layerIndices(): {};
    set parameters(value: {});
    get parameters(): {};
    set targets(value: {});
    get targets(): {};
    /**
     * Returns whether all component layers are currently playable.
     *
     * @type {boolean}
     */
    get playable(): boolean;
    /**
     * Returns the base layer of the state graph.
     *
     * @type {AnimComponentLayer|null}
     */
    get baseLayer(): AnimComponentLayer | null;
    _onStateGraphAssetChangeEvent(asset: any): void;
    dirtifyTargets(): void;
    _addLayer({ name, states, transitions, weight, mask, blendType }: {
        name: any;
        states: any;
        transitions: any;
        weight: any;
        mask: any;
        blendType: any;
    }): any;
    /**
     * Adds a new anim component layer to the anim component.
     *
     * @param {string} name - The name of the layer to create.
     * @param {number} [weight] - The blending weight of the layer. Defaults to 1.
     * @param {object[]} [mask] - A list of paths to bones in the model which should be animated in
     * this layer. If omitted the full model is used. Defaults to null.
     * @param {string} [blendType] - Defines how properties animated by this layer blend with
     * animations of those properties in previous layers. Defaults to pc.ANIM_LAYER_OVERWRITE.
     * @returns {AnimComponentLayer} The created anim component layer.
     */
    addLayer(name: string, weight?: number, mask?: object[], blendType?: string): AnimComponentLayer;
    _assignParameters(stateGraph: any): void;
    /**
     * Initializes component animation controllers using the provided state graph.
     *
     * @param {object} stateGraph - The state graph asset to load into the component. Contains the
     * states, transitions and parameters used to define a complete animation controller.
     * @example
     * entity.anim.loadStateGraph({
     *     "layers": [
     *         {
     *             "name": layerName,
     *             "states": [
     *                 {
     *                     "name": "START",
     *                     "speed": 1
     *                 },
     *                 {
     *                     "name": "Initial State",
     *                     "speed": speed,
     *                     "loop": loop,
     *                     "defaultState": true
     *                 }
     *             ],
     *             "transitions": [
     *                 {
     *                     "from": "START",
     *                     "to": "Initial State"
     *                 }
     *             ]
     *         }
     *     ],
     *     "parameters": {}
     * });
     */
    loadStateGraph(stateGraph: object): void;
    setupAnimationAssets(): void;
    loadAnimationAssets(): void;
    onAnimationAssetLoaded(layerName: any, stateName: any, asset: any): void;
    /**
     * Removes all layers from the anim component.
     */
    removeStateGraph(): void;
    /**
     * Reset all of the component's layers and parameters to their initial states. If a layer was
     * playing before it will continue playing.
     */
    reset(): void;
    unbind(): void;
    /**
     * Rebind all of the components layers.
     */
    rebind(): void;
    /**
     * Finds an {@link AnimComponentLayer} in this component.
     *
     * @param {string} name - The name of the anim component layer to find.
     * @returns {AnimComponentLayer} Layer.
     */
    findAnimationLayer(name: string): AnimComponentLayer;
    addAnimationState(nodeName: any, animTrack: any, speed?: number, loop?: boolean, layerName?: string): void;
    /**
     * Associates an animation with a state or blend tree node in the loaded state graph. If all
     * states are linked and the {@link activate} value was set to true then the component will
     * begin playing. If no state graph is loaded, a default state graph will be created with a
     * single state based on the provided nodePath parameter.
     *
     * @param {string} nodePath - Either the state name or the path to a blend tree node that this
     * animation should be associated with. Each section of a blend tree path is split using a
     * period (`.`) therefore state names should not include this character (e.g "MyStateName" or
     * "MyStateName.BlendTreeNode").
     * @param {AnimTrack} animTrack - The animation track that will be assigned to this state and
     * played whenever this state is active.
     * @param {string} [layerName] - The name of the anim component layer to update. If omitted the
     * default layer is used. If no state graph has been previously loaded this parameter is
     * ignored.
     * @param {number} [speed] - Update the speed of the state you are assigning an animation to.
     * Defaults to 1.
     * @param {boolean} [loop] - Update the loop property of the state you are assigning an
     * animation to. Defaults to true.
     */
    assignAnimation(nodePath: string, animTrack: AnimTrack, layerName?: string, speed?: number, loop?: boolean): void;
    /**
     * Removes animations from a node in the loaded state graph.
     *
     * @param {string} nodeName - The name of the node that should have its animation tracks removed.
     * @param {string} [layerName] - The name of the anim component layer to update. If omitted the
     * default layer is used.
     */
    removeNodeAnimations(nodeName: string, layerName?: string): void;
    getParameterValue(name: any, type: any): any;
    setParameterValue(name: any, type: any, value: any): void;
    /**
     * Returns the parameter object for the specified parameter name. This function is anonymous so that it can be passed to the AnimController
     * while still being called in the scope of the AnimComponent.
     *
     * @param {string} name - The name of the parameter to return the value of.
     * @returns {object} The parameter object.
     * @private
     */
    private findParameter;
    /**
     * Sets a trigger parameter as having been used by a transition. This function is anonymous so that it can be passed to the AnimController
     * while still being called in the scope of the AnimComponent.
     *
     * @param {string} name - The name of the trigger to set as consumed.
     * @private
     */
    private consumeTrigger;
    /**
     * Returns a float parameter value by name.
     *
     * @param {string} name - The name of the float to return the value of.
     * @returns {number} A float.
     */
    getFloat(name: string): number;
    /**
     * Sets the value of a float parameter that was defined in the animation component's state
     * graph.
     *
     * @param {string} name - The name of the parameter to set.
     * @param {number} value - The new float value to set this parameter to.
     */
    setFloat(name: string, value: number): void;
    /**
     * Returns an integer parameter value by name.
     *
     * @param {string} name - The name of the integer to return the value of.
     * @returns {number} An integer.
     */
    getInteger(name: string): number;
    /**
     * Sets the value of an integer parameter that was defined in the animation component's state
     * graph.
     *
     * @param {string} name - The name of the parameter to set.
     * @param {number} value - The new integer value to set this parameter to.
     */
    setInteger(name: string, value: number): void;
    /**
     * Returns a boolean parameter value by name.
     *
     * @param {string} name - The name of the boolean to return the value of.
     * @returns {boolean} A boolean.
     */
    getBoolean(name: string): boolean;
    /**
     * Sets the value of a boolean parameter that was defined in the animation component's state
     * graph.
     *
     * @param {string} name - The name of the parameter to set.
     * @param {boolean} value - The new boolean value to set this parameter to.
     */
    setBoolean(name: string, value: boolean): void;
    /**
     * Returns a trigger parameter value by name.
     *
     * @param {string} name - The name of the trigger to return the value of.
     * @returns {boolean} A boolean.
     */
    getTrigger(name: string): boolean;
    /**
     * Sets the value of a trigger parameter that was defined in the animation component's state
     * graph to true.
     *
     * @param {string} name - The name of the parameter to set.
     * @param {boolean} [singleFrame] - If true, this trigger will be set back to false at the end
     * of the animation update. Defaults to false.
     */
    setTrigger(name: string, singleFrame?: boolean): void;
    /**
     * Resets the value of a trigger parameter that was defined in the animation component's state
     * graph to false.
     *
     * @param {string} name - The name of the parameter to set.
     */
    resetTrigger(name: string): void;
    /**
     * Called before the component is removed from its entity, performing internal cleanup.
     *
     * @ignore
     */
    onBeforeRemove(): void;
    /**
     * Advances the animation state.
     *
     * @param {number} dt - The time step. NOTE(review): presumably in seconds — confirm against
     * the caller.
     * @ignore
     */
    update(dt: any): void;
    /**
     * Re-points entity reference properties to their duplicates after the entity hierarchy
     * containing this component has been cloned. NOTE(review): semantics inferred from the
     * name — confirm against the implementation.
     *
     * @param {object} oldAnim - The original anim component that was duplicated.
     * @param {object} duplicatedIdsMap - Map from original entity ids to their duplicates.
     * @ignore
     */
    resolveDuplicatedEntityReferenceProperties(oldAnim: any, duplicatedIdsMap: any): void;
}

/**
 * Lighting parameters allow configuration of the global lighting parameters. For details see
 * [Clustered Lighting](https://developer.playcanvas.com/user-manual/graphics/lighting/clustered-lighting/).
 *
 * @category Graphics
 */
declare class LightingParams {
    /**
     * Creates a new LightingParams object.
     *
     * NOTE(review): parameters are untyped (`any`) in this declaration — meanings below are
     * inferred from the names; confirm against the implementation.
     *
     * @param {boolean} supportsAreaLights - Whether the graphics device supports area lights.
     * @param {number} maxTextureSize - The maximum texture size supported by the device.
     * @param {Function} dirtyLightsFnc - Callback invoked when a change requires lights to be
     * re-evaluated.
     * @ignore
     */
    constructor(supportsAreaLights: any, maxTextureSize: any, dirtyLightsFnc: any);
    /** @private */
    private _areaLightsEnabled;
    /** @private */
    private _cells;
    /** @private */
    private _maxLightsPerCell;
    /** @private */
    private _shadowsEnabled;
    /** @private */
    private _shadowType;
    /** @private */
    private _shadowAtlasResolution;
    /** @private */
    private _cookiesEnabled;
    /** @private */
    private _cookieAtlasResolution;
    /**
     * Layer ID of a layer to contain the debug rendering of clustered lighting. Defaults to
     * undefined, which disables the debug rendering. Debug rendering is only included in the debug
     * version of the engine.
     *
     * @type {number}
     */
    debugLayer: number;
    /**
     * Atlas textures split description, which applies to both the shadow and cookie texture atlas.
     * Defaults to null, which enables the automatic split mode. For details see [Configuring Atlas
     * Split](https://developer.playcanvas.com/user-manual/graphics/lighting/clustered-lighting/#configuring-atlas).
     *
     * @type {number[]|null}
     */
    atlasSplit: number[] | null;
    /** Value captured from the constructor's `supportsAreaLights` argument. @ignore */
    _supportsAreaLights: any;
    /** Value captured from the constructor's `maxTextureSize` argument. @ignore */
    _maxTextureSize: any;
    /** Value captured from the constructor's `dirtyLightsFnc` argument. @ignore */
    _dirtyLightsFnc: any;
    /**
     * Applies these lighting parameters to the supplied render settings object. NOTE(review):
     * behavior inferred from the name — confirm against the implementation.
     *
     * @param {object} render - The render settings object to apply the parameters to.
     * @ignore
     */
    applySettings(render: any): void;
    /**
     * Sets whether clustered lighting supports shadow casting. Defaults to true.
     *
     * @type {boolean}
     */
    set shadowsEnabled(value: boolean);
    /**
     * Gets whether clustered lighting supports shadow casting.
     *
     * @type {boolean}
     */
    get shadowsEnabled(): boolean;
    /**
     * Sets whether clustered lighting supports cookie textures. Defaults to false.
     *
     * @type {boolean}
     */
    set cookiesEnabled(value: boolean);
    /**
     * Gets whether clustered lighting supports cookie textures.
     *
     * @type {boolean}
     */
    get cookiesEnabled(): boolean;
    /**
     * Sets whether clustered lighting supports area lights. Defaults to false.
     *
     * @type {boolean}
     */
    set areaLightsEnabled(value: boolean);
    /**
     * Gets whether clustered lighting supports area lights.
     *
     * @type {boolean}
     */
    get areaLightsEnabled(): boolean;
    /**
     * Sets the resolution of the atlas texture storing all non-directional shadow textures.
     * Defaults to 2048.
     *
     * @type {number}
     */
    set shadowAtlasResolution(value: number);
    /**
     * Gets the resolution of the atlas texture storing all non-directional shadow textures.
     *
     * @type {number}
     */
    get shadowAtlasResolution(): number;
    /**
     * Sets the resolution of the atlas texture storing all non-directional cookie textures.
     * Defaults to 2048.
     *
     * @type {number}
     */
    set cookieAtlasResolution(value: number);
    /**
     * Gets the resolution of the atlas texture storing all non-directional cookie textures.
     *
     * @type {number}
     */
    get cookieAtlasResolution(): number;
    /**
     * Sets the maximum number of lights a cell can store. Defaults to 255.
     *
     * @type {number}
     */
    set maxLightsPerCell(value: number);
    /**
     * Gets the maximum number of lights a cell can store.
     *
     * @type {number}
     */
    get maxLightsPerCell(): number;
    /**
     * Sets the type of shadow filtering used by all shadows. Can be:
     *
     * - {@link SHADOW_PCF1_32F}
     * - {@link SHADOW_PCF3_32F}
     * - {@link SHADOW_PCF5_32F}
     * - {@link SHADOW_PCF1_16F}
     * - {@link SHADOW_PCF3_16F}
     * - {@link SHADOW_PCF5_16F}
     *
     * Defaults to {@link SHADOW_PCF3_32F}
     *
     * @type {number}
     */
    set shadowType(value: number);
    /**
     * Gets the type of shadow filtering used by all shadows.
     *
     * @type {number}
     */
    get shadowType(): number;
    /**
     * Sets the number of cells along each world space axis the space containing lights is
     * subdivided into. Defaults to `[10, 3, 10]`.
     *
     * @type {Vec3}
     */
    set cells(value: Vec3);
    /**
     * Gets the number of cells along each world space axis the space containing lights is
     * subdivided into.
     *
     * @type {Vec3}
     */
    get cells(): Vec3;
}

/**
 * Defines the vertex and fragment shader source for {@link ShaderMaterial}, supporting both GLSL
 * and WGSL formats. WebGL always uses the GLSL code. WebGPU prefers the WGSL code if available,
 * otherwise it automatically transpiles the provided GLSL code at runtime.
 */
type ShaderDesc = {
    /**
     * Unique name for the shader. If a shader with this name already exists, it will be returned
     * instead of a new shader instance.
     */
    uniqueName: string;
    /**
     * The vertex shader code in GLSL.
     */
    vertexGLSL?: string;
    /**
     * The fragment shader code in GLSL.
     */
    fragmentGLSL?: string;
    /**
     * The vertex shader code in WGSL.
     */
    vertexWGSL?: string;
    /**
     * The fragment shader code in WGSL.
     */
    fragmentWGSL?: string;
    /**
     * Object detailing the mapping of vertex shader attribute names to semantics SEMANTIC_*.
     * This enables the engine to match vertex buffer data as inputs to the shader. Defaults to
     * undefined, which generates the default attributes.
     */
    attributes?: {
        [x: string]: string;
    };
    /**
     * Fragment shader output types, which default to vec4. Passing a string will set the output
     * type for all color attachments. Passing an array will set the output type for each color
     * attachment.
     */
    fragmentOutputTypes?: string | string[];
};
/**
 * @typedef {object} ShaderDesc - Defines the vertex and fragment shader source for
 * {@link ShaderMaterial}, supporting both GLSL and WGSL formats. WebGL always uses the GLSL code.
 * WebGPU prefers the WGSL code if available, otherwise it automatically transpiles the provided
 * GLSL code at runtime.
 * @property {string} uniqueName - Unique name for the shader. If a shader with this name already
 * exists, it will be returned instead of a new shader instance.
 * @property {string} [vertexGLSL] - The vertex shader code in GLSL.
 * @property {string} [fragmentGLSL] - The fragment shader code in GLSL.
 * @property {string} [vertexWGSL] - The vertex shader code in WGSL.
 * @property {string} [fragmentWGSL] - The fragment shader code in WGSL.
 * @property {Object<string, string>} [attributes] - Object detailing the mapping of vertex shader
 * attribute names to semantics SEMANTIC_*. This enables the engine to match vertex buffer data as
 * inputs to the shader. Defaults to undefined, which generates the default attributes.
 * @property {string | string[]} [fragmentOutputTypes] - Fragment shader output types, which
 * default to vec4. Passing a string will set the output type for all color attachments. Passing
 * an array will set the output type for each color attachment.
 * @see ShaderDefinitionUtils.createDefinition
 */
/**
 * A ShaderMaterial is a type of material that utilizes a specified shader for rendering purposes.
 *
 * A simple example which creates a material with custom vertex and fragment shaders specified in
 * GLSL format:
 *
 * ```javascript
 * const material = new pc.ShaderMaterial({
 *     uniqueName: 'MyShader',
 *     attributes: { aPosition: pc.SEMANTIC_POSITION },
 *     vertexGLSL: `
 *         attribute vec3 aPosition;
 *         uniform mat4 matrix_viewProjection;
 *         void main(void)
 *         {
 *             gl_Position = matrix_viewProjection * vec4(aPosition, 1.0);
 *         }`,
 *     fragmentGLSL: `
 *         void main(void) {
 *             gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
 *         }`
 * });
 * ```
 *
 * @category Graphics
 */
declare class ShaderMaterial extends Material {
    /**
     * Create a new ShaderMaterial instance.
     *
     * @param {ShaderDesc} [shaderDesc] - The description of the shader to be used by the material.
     */
    constructor(shaderDesc?: ShaderDesc);
    /**
     * @type {ShaderDesc|undefined}
     * @private
     */
    private _shaderDesc;
    /**
     * Sets the shader description.
     *
     * @type {ShaderDesc|undefined}
     */
    set shaderDesc(value: ShaderDesc | undefined);
    /**
     * Gets the shader description.
     *
     * @type {ShaderDesc|undefined}
     */
    get shaderDesc(): ShaderDesc | undefined;
    /**
     * Copy a `ShaderMaterial`.
     *
     * @param {ShaderMaterial} source - The material to copy from.
     * @returns {ShaderMaterial} The destination material.
     */
    copy(source: ShaderMaterial): ShaderMaterial;
    /**
     * Returns the shader variant to use for the given generation parameters. NOTE(review):
     * behavior inferred from the name and return type — confirm against the implementation.
     *
     * @param {object} params - The shader generation parameters.
     * @returns {Shader} The shader variant.
     * @ignore
     */
    getShaderVariant(params: any): Shader;
}

/**
 * Describes a single per-splat texture stream (name, pixel format and storage type) used by
 * {@link GSplatFormat}.
 */
type GSplatStreamDescriptor = {
    /**
     * The name of the stream (used as the texture uniform name).
     */
    name: string;
    /**
     * The pixel format of the texture (e.g. PIXELFORMAT_RGBA32F). When used as an extra stream
     * for work buffers or as a destination stream for GSplatProcessor, the format must be
     * renderable as these textures are used as render targets. Ensure the format is renderable
     * on all target devices. See {@link Texture} for details on renderable formats and device
     * capabilities.
     */
    format: number;
    /**
     * Storage type: GSPLAT_STREAM_RESOURCE (default, shared across instances) or
     * GSPLAT_STREAM_INSTANCE (per-component instance). Note: Work buffer formats (accessed via
     * `app.scene.gsplat.format`) do not support GSPLAT_STREAM_INSTANCE.
     */
    storage?: number;
};
/**
 * Gsplat resources store per-splat data (positions, colors, rotations, scales, spherical
 * harmonics) in GPU textures. This class describes those texture streams and generates the
 * shader code needed to access them.
 *
 * Each stream defines a texture with a name and pixel format. The class automatically generates
 * shader declarations (uniforms/samplers) and load functions (e.g. `loadColor()`) for each
 * stream. A read shader can be provided to define how splat attributes are extracted from
 * these textures.
 *
 * Users can add extra streams via {@link addExtraStreams} for custom per-splat data. These
 * can be per-resource (shared across instances) or per-instance (unique to each gsplat
 * component).
 *
 * For loaded gsplat resources, base streams are automatically configured based on the loaded
 * data format. For {@link GSplatContainer}, users define both base and extra streams to
 * specify the complete data layout.
 *
 * @category Graphics
 */
declare class GSplatFormat {
    /**
     * Creates a default format using 32F/16F textures, simple to use for CPU data population.
     * This format can be rendered to by {@link GSplatProcessor} when supported. Check
     * {@link GraphicsDevice#textureFloatRenderable} (for RGBA32F) and
     * {@link GraphicsDevice#textureHalfFloatRenderable} (for RGBA16F).
     *
     * The format stores:
     * - `dataColor` (RGBA16F): color.rgba as half floats
     * - `dataCenter` (RGBA32F): center.xyz as floats (w unused)
     * - `dataScale` (RGBA16F): scale.xyz as half floats (w unused)
     * - `dataRotation` (RGBA16F): rotation.xyzw as half floats (w stored directly, not derived)
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @returns {GSplatFormat} The default format.
     */
    static createDefaultFormat(device: GraphicsDevice): GSplatFormat;
    /**
     * Creates a simple format with uniform-scale splats and no rotation.
     * Streams:
     * - `dataCenter` (RGBA32F): center.xyz + uniform size in w
     * - `dataColor` (RGBA16F): color.rgba as half floats
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @returns {GSplatFormat} The simple format.
     */
    static createSimpleFormat(device: GraphicsDevice): GSplatFormat;
    /**
     * Creates a new GSplatFormat instance.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {GSplatStreamDescriptor[]} streams - Array of stream descriptors.
     * @param {object} options - Format options.
     * @param {string} [options.readGLSL] - GLSL code defining getCenter(), getColor(),
     * getRotation(), getScale() functions. Can include additional declarations at module scope.
     * Required for WebGL.
     * @param {string} [options.readWGSL] - WGSL code defining getCenter(), getColor(),
     * getRotation(), getScale() functions. Can include additional declarations at module scope.
     * Required for WebGPU.
     */
    constructor(device: GraphicsDevice, streams: GSplatStreamDescriptor[], options: {
        readGLSL?: string;
        readWGSL?: string;
    });
    /**
     * @type {GraphicsDevice}
     * @private
     */
    private _device;
    /**
     * Array of stream descriptors.
     *
     * @type {GSplatStreamDescriptor[]}
     * @readonly
     */
    readonly streams: GSplatStreamDescriptor[];
    /**
     * User-provided code for reading splat data (GLSL or WGSL based on device).
     * Must define getCenter(), getColor(), getRotation(), getScale() functions.
     *
     * @type {string}
     * @private
     */
    private _read;
    /**
     * When true, allows extra streams to be removed via {@link removeExtraStreams}.
     * Only work buffer formats (returned by {@link GSplatParams#format}) should set this.
     *
     * @type {boolean}
     * @ignore
     */
    allowStreamRemoval: boolean;
    /**
     * Extra streams added via addExtraStreams(). For resource formats, streams can only be
     * added, never removed. For work buffer formats (where {@link allowStreamRemoval} is true),
     * streams can also be removed via {@link removeExtraStreams}.
     *
     * @type {GSplatStreamDescriptor[]}
     * @private
     */
    private _extraStreams;
    /**
     * Set of all stream names (base + extra) for fast duplicate checking.
     *
     * @type {Set<string>}
     * @private
     */
    private _streamNames;
    /**
     * Version counter that increments when extra streams change.
     *
     * @type {number}
     * @private
     */
    private _extraStreamsVersion;
    /**
     * Cached hash value.
     *
     * @type {number|undefined}
     * @private
     */
    private _hash;
    /**
     * Cached resource streams array.
     *
     * @type {GSplatStreamDescriptor[]|null}
     * @private
     */
    private _resourceStreams;
    /**
     * Cached instance streams array.
     *
     * @type {GSplatStreamDescriptor[]|null}
     * @private
     */
    private _instanceStreams;
    /**
     * Returns a hash of this format's configuration. Used for shader caching.
     * Computed from raw inputs to avoid generating shader code just for the hash.
     *
     * @type {number}
     * @ignore
     */
    get hash(): number;
    /**
     * Returns the version counter. Increments when extra streams change.
     *
     * @type {number}
     * @ignore
     */
    get extraStreamsVersion(): number;
    /**
     * Gets the extra streams array. Streams are added via {@link addExtraStreams}; removal is
     * only supported on work buffer formats (see {@link removeExtraStreams} and
     * {@link allowStreamRemoval}). Do not modify the returned array directly.
     *
     * @type {GSplatStreamDescriptor[]}
     */
    get extraStreams(): GSplatStreamDescriptor[];
    /**
     * Returns all resource-level streams (base streams + extra streams whose storage is not
     * GSPLAT_STREAM_INSTANCE). Used by GSplatStreams for resource texture management.
     *
     * @type {GSplatStreamDescriptor[]}
     * @ignore
     */
    get resourceStreams(): GSplatStreamDescriptor[];
    /**
     * Returns all instance-level streams (extra streams with GSPLAT_STREAM_INSTANCE storage).
     * Used by GSplatStreams for per-component-instance texture management.
     *
     * @type {GSplatStreamDescriptor[]}
     * @ignore
     */
    get instanceStreams(): GSplatStreamDescriptor[];
    /**
     * Adds additional texture streams for custom gsplat data. Each stream defines a texture
     * that can store extra information, accessible in shaders via generated load functions.
     * Streams with `storage: GSPLAT_STREAM_INSTANCE` are created per gsplat component instance,
     * while others are shared across all instances of the same resource.
     *
     * Note: streams cannot be removed from resource formats once added; only work buffer
     * formats support removal (see {@link removeExtraStreams}).
     *
     * @param {GSplatStreamDescriptor[]} streams - Array of stream descriptors to add.
     */
    addExtraStreams(streams: GSplatStreamDescriptor[]): void;
    /**
     * Removes extra streams by name. Only supported on work buffer formats
     * (returned by {@link GSplatParams#format}). Removing streams from resource
     * formats is not supported.
     *
     * @param {string[]} names - Array of stream names to remove.
     * @ignore
     */
    removeExtraStreams(names: string[]): void;
    /**
     * Generates input declarations (texture uniforms + load functions).
     *
     * @param {string[]} [streamNames] - Optional array of stream names to filter. If not provided,
     * generates declarations for all streams.
     * @returns {string} Shader code for declarations.
     * @ignore
     */
    getInputDeclarations(streamNames?: string[]): string;
    /**
     * Returns the read code.
     *
     * @returns {string} Shader code for reading splat data.
     * @ignore
     */
    getReadCode(): string;
    /**
     * Generates compute shader input declarations with explicit binding annotations.
     * Format texture bindings are placed at indices starting from startBinding.
     *
     * @param {number} startBinding - The first @group(0) @binding() index for format textures.
     * @param {string[]} [streamNames] - Optional array of stream names to filter.
     * @returns {string} WGSL code for compute shader declarations.
     * @ignore
     */
    getComputeInputDeclarations(startBinding: number, streamNames?: string[]): string;
    /**
     * Returns an array of BindTextureFormat entries for the format's streams, suitable for
     * appending to a compute shader's BindGroupFormat. Sample types are derived from pixel formats.
     *
     * @param {string[]} [streamNames] - Optional array of stream names to filter.
     * @returns {BindTextureFormat[]} Array of bind texture format entries.
     * @ignore
     */
    getComputeBindFormats(streamNames?: string[]): BindTextureFormat[];
    /**
     * Sets the write code for encoding splat data into the work buffer. The appropriate code
     * for the current backend (GLSL or WGSL) is stored.
     *
     * @param {string} writeGLSL - GLSL code for writing/encoding splat data.
     * @param {string} writeWGSL - WGSL code for writing/encoding splat data.
     * @ignore
     */
    setWriteCode(writeGLSL: string, writeWGSL: string): void;
    /** Backend-appropriate write code stored by {@link setWriteCode}. @ignore */
    _write: string;
    /**
     * Returns the write code for encoding splat data into the work buffer.
     *
     * @returns {string|undefined} Shader code for writing splat data, or undefined if not set.
     * @ignore
     */
    getWriteCode(): string | undefined;
    /**
     * Generates output declarations (write functions) for MRT output streams.
     * Used by GSplatProcessor to generate output functions for dstStreams.
     * Each stream maps to an MRT slot (pcFragColor0, pcFragColor1, etc. in GLSL or
     * processOutput.color, processOutput.color1, etc. in WGSL).
     *
     * @param {GSplatStreamDescriptor[]} outputStreams - Stream descriptors for output.
     * @returns {string} Shader code for output write functions.
     * @ignore
     */
    getOutputDeclarations(outputStreams: GSplatStreamDescriptor[]): string;
    /**
     * Generates no-op stub functions for streams that aren't render targets.
     * Used in color-only mode so user modifier code compiles but writes are ignored.
     *
     * @param {GSplatStreamDescriptor[]} streams - Stream descriptors to generate stubs for.
     * @returns {string} Shader code for no-op write functions.
     * @ignore
     */
    getOutputStubs(streams: GSplatStreamDescriptor[]): string;
    /**
     * Returns a stream descriptor by name.
     *
     * @param {string} name - The name of the stream to find.
     * @returns {GSplatStreamDescriptor|undefined} The stream descriptor, or undefined if not found.
     * @ignore
     */
    getStream(name: string): GSplatStreamDescriptor | undefined;
    /**
     * Invalidates all cached values when streams change.
     *
     * @private
     */
    private _invalidateCaches;
}

/**
 * @import { GraphicsDevice } from '../../platform/graphics/graphics-device.js'
 * @import { Texture } from '../../platform/graphics/texture.js'
 */
/**
 * Parameters for GSplat unified system.
 *
 * @category Graphics
 */
declare class GSplatParams {
    /**
     * Creates a new GSplatParams instance.
     *
     * @param {GraphicsDevice} device - The graphics device.
     */
    constructor(device: GraphicsDevice);
    /**
     * @type {ShaderMaterial}
     * @private
     */
    private _material;
    /**
     * Format descriptor for work buffer streams.
     *
     * @type {GSplatFormat}
     * @private
     */
    private _format;
    /**
     * @type {GraphicsDevice}
     * @private
     */
    private _device;
    /**
     * @type {string}
     * @private
     */
    private _dataFormat;
    /**
     * @param {string} dataFormat - The data format constant.
     * @returns {GSplatFormat} The created format.
     * @private
     */
    private _createFormat;
    /**
     * Enables debug rendering of AABBs for GSplat objects. Defaults to false.
     *
     * @type {boolean}
     */
    debugAabbs: boolean;
    /**
     * Enables radial sorting based on distance from camera (for cubemap rendering). When false,
     * uses directional sorting along camera forward vector. Defaults to false.
     *
     * Note: Radial sorting helps reduce sorting artifacts when the camera rotates (looks around),
     * while linear sorting is better at minimizing artifacts when the camera translates (moves).
     *
     * @type {boolean}
     */
    radialSorting: boolean;
    /**
     * @type {number}
     * @private
     */
    private _renderer;
    /**
     * @type {number}
     * @private
     */
    private _currentRenderer;
    /**
     * The rendering pipeline used for gaussian splatting. Can be:
     *
     * - {@link GSPLAT_RENDERER_AUTO}: Automatically selects the best pipeline for the platform.
     * - {@link GSPLAT_RENDERER_RASTER_CPU_SORT}: Rasterization with CPU-side sorting.
     * - {@link GSPLAT_RENDERER_RASTER_GPU_SORT}: Rasterization with compute shader sorting
     * (WebGPU only, experimental).
     * - {@link GSPLAT_RENDERER_COMPUTE}: Full compute pipeline (WebGPU only, experimental).
     *
     * Defaults to {@link GSPLAT_RENDERER_AUTO}. Modes requiring WebGPU fall back to
     * {@link GSPLAT_RENDERER_RASTER_CPU_SORT} on WebGL devices.
     *
     * @type {number}
     */
    set renderer(value: number);
    get renderer(): number;
    /**
     * The current rendering pipeline in effect after platform-based fallback resolution. When
     * {@link renderer} is set to a mode requiring WebGPU on a WebGL device, this returns the
     * fallback mode actually being used.
     *
     * @type {number}
     */
    get currentRenderer(): number;
    /**
     * Enables debug rendering of AABBs for GSplat octree nodes. Defaults to false.
     *
     * @type {boolean}
     */
    debugNodeAabbs: boolean;
    /**
     * Internal dirty flag to trigger update of gsplat managers when some params change.
     *
     * @ignore
     * @type {boolean}
     */
    dirty: boolean;
    /**
     * @type {number}
     * @private
     */
    private _debug;
    /**
     * Debug rendering mode for Gaussian splats. Can be:
     *
     * - {@link GSPLAT_DEBUG_NONE}: Normal rendering (default).
     * - {@link GSPLAT_DEBUG_LOD}: Colorize splats by their selected LOD level.
     * - {@link GSPLAT_DEBUG_SH_UPDATE}: Random color per SH update pass to visualize update
     * frequency.
     * - {@link GSPLAT_DEBUG_HEATMAP}: Heatmap visualization of average splats processed per
     * pixel in each tile. Only supported with {@link GSPLAT_RENDERER_COMPUTE}.
     *
     * Only one debug mode can be active at a time. Defaults to {@link GSPLAT_DEBUG_NONE}.
     *
     * @type {number}
     */
    set debug(value: number);
    get debug(): number;
    /** @deprecated Use {@link GSplatParams#debug} with {@link GSPLAT_DEBUG_LOD} instead. */
    set colorizeLod(value: boolean);
    /**
     * @deprecated Use {@link GSplatParams#debug} with {@link GSPLAT_DEBUG_LOD} instead.
     * @returns {boolean} Whether LOD colorization is enabled.
     */
    get colorizeLod(): boolean;
    /**
     * @type {boolean}
     * @private
     */
    private _enableIds;
    /**
     * Enables or disables per-component ID storage in the work buffer. When enabled, each GSplat
     * component gets a unique ID written to the work buffer. This ID is used by the picking
     * system to identify which component was picked, but is also available to custom shaders for
     * effects like highlighting, animation, or any per-component differentiation.
     *
     * @type {boolean}
     */
    set enableIds(value: boolean);
    /**
     * Gets the ID storage enabled state.
     *
     * @type {boolean}
     */
    get enableIds(): boolean;
    /**
     * Distance threshold in world units to trigger LOD updates for camera and gsplat instances.
     * Defaults to 1.
     *
     * @type {number}
     */
    lodUpdateDistance: number;
    /**
     * Angle threshold in degrees to trigger LOD updates based on camera rotation. Set to 0 to
     * disable rotation-based updates. Defaults to 0.
     *
     * @type {number}
     */
    lodUpdateAngle: number;
    /**
     * @type {number}
     * @private
     */
    private _lodBehindPenalty;
    /**
     * Multiplier applied to effective distance for nodes behind the camera when determining LOD.
     * Value 1 means no penalty; higher values drop LOD faster for nodes behind the camera.
     *
     * Note: when using a penalty > 1, it often makes sense to set a positive
     * {@link GSplatParams#lodUpdateAngle} so LOD is re-evaluated on camera rotation,
     * not just translation.
     *
     * @type {number}
     */
    set lodBehindPenalty(value: number);
    /**
     * Gets behind-camera LOD penalty multiplier.
     *
     * @type {number}
     */
    get lodBehindPenalty(): number;
    /**
     * @type {number}
     * @private
     */
    private _lodRangeMin;
    /**
     * Minimum allowed LOD index (inclusive). Defaults to 0.
     *
     * @type {number}
     */
    set lodRangeMin(value: number);
    /**
     * Gets minimum allowed LOD index (inclusive).
     *
     * @type {number}
     */
    get lodRangeMin(): number;
    /**
     * @type {number}
     * @private
     */
    private _lodRangeMax;
    /**
     * Maximum allowed LOD index (inclusive). Defaults to 10.
     *
     * @type {number}
     */
    set lodRangeMax(value: number);
    /**
     * Gets maximum allowed LOD index (inclusive).
     *
     * @type {number}
     */
    get lodRangeMax(): number;
    /**
     * @type {number}
     * @private
     */
    private _lodUnderfillLimit;
    /**
     * Maximum number of LOD levels allowed below the optimal level when the optimal data is not
     * resident in memory. The system may temporarily use a coarser LOD within this limit until the
     * optimal LOD is available. Defaults to 0, which disables fallback (always load optimal).
     * Higher values allow faster loading by using lower-quality data.
     *
     * @type {number}
     */
    set lodUnderfillLimit(value: number);
    /**
     * Gets the maximum allowed underfill LOD range.
     *
     * @type {number}
     */
    get lodUnderfillLimit(): number;
    /**
     * @type {number}
     * @private
     */
    private _splatBudget;
    /**
     * Target number of splats across all GSplats in the scene. When set > 0,
     * the system adjusts LOD levels globally to stay within this budget.
     * Set to 0 to disable budget enforcement and use LOD distances only (default).
     *
     * @type {number}
     */
    set splatBudget(value: number);
    /**
     * Gets the target number of splats across all GSplats in the scene.
     *
     * @type {number}
     */
    get splatBudget(): number;
    /**
     * @type {import('../../platform/graphics/texture.js').Texture|null}
     * @private
     */
    private _colorRamp;
    /**
     * Gradient texture for elevation-based coloring in overdraw visualization mode.
     * When set, enables overdraw mode with additive blending. When null, uses normal rendering.
     * Texture should be (width x 1) size. World Y coordinate (0-20 range) maps to texture U coordinate.
     * Defaults to null.
     *
     * @type {Texture|null}
     */
    set colorRamp(value: Texture | null);
    /**
     * Gets the color ramp texture for overdraw visualization.
     *
     * @type {import('../../platform/graphics/texture.js').Texture|null}
     */
    get colorRamp(): Texture | null;
    /**
     * Intensity multiplier for overdraw visualization mode. Value of 1 uses alpha of 1/32,
     * allowing approximately 32 overdraws to reach full brightness with additive blending.
     * Higher values increase brightness per splat. Defaults to 1.
     *
     * @type {number}
     */
    colorRampIntensity: number;
    /**
     * Whether to apply scene fog to Gaussian splats. When false, splats ignore fog settings
     * even if the scene or camera has fog configured. Defaults to true.
     *
     * @type {boolean}
     */
    useFog: boolean;
    /**
     * @deprecated Use {@link GSplatParams#debug} with {@link GSPLAT_DEBUG_SH_UPDATE} instead.
     * @param {boolean} value - Whether SH update colorization is enabled.
     */
    set colorizeColorUpdate(value: boolean);
    /**
     * @deprecated Use {@link GSplatParams#debug} with {@link GSPLAT_DEBUG_SH_UPDATE} instead.
     * @returns {boolean} Whether SH update colorization is enabled.
     */
    get colorizeColorUpdate(): boolean;
    /**
     * Viewing angle threshold in degrees for triggering spherical harmonics color updates.
     * When the camera translates enough to change the viewing angle to an octree node or
     * splat by this amount, its SH colors are re-evaluated. Distant nodes naturally update
     * less frequently since they require more camera movement to reach the angle threshold.
     * Set to 0 to update every frame where camera moves. Defaults to 10.
     *
     * @type {number}
     */
    colorUpdateAngle: number;
    /** @ignore */
    set colorUpdateDistance(value: number);
    /** @ignore */
    get colorUpdateDistance(): number;
    /** @ignore */
    set colorUpdateDistanceLodScale(value: number);
    /** @ignore */
    get colorUpdateDistanceLodScale(): number;
    /** @ignore */
    set colorUpdateAngleLodScale(value: number);
    /** @ignore */
    get colorUpdateAngleLodScale(): number;
    /**
     * Sets the alpha threshold below which splats are discarded during shadow, pick, and prepass
     * rendering. Higher values create more aggressive clipping, while lower values preserve more
     * translucent splats. Defaults to 0.3.
     *
     * @type {number}
     */
    set alphaClip(value: number);
    /**
     * Gets the alpha clip threshold.
     *
     * @type {number}
     */
    get alphaClip(): number;
    /**
     * Sets the minimum screen-space pixel size below which splats are discarded. Defaults to 2.
     *
     * @type {number}
     */
    set minPixelSize(value: number);
    /**
     * Gets the minimum pixel size threshold.
     *
     * @type {number}
     */
    get minPixelSize(): number;
    /**
     * Sets the minimum visual contribution threshold for the {@link GSPLAT_RENDERER_COMPUTE} renderer.
     * Splats whose total screen contribution (opacity * projected area) falls below this value are
     * discarded. Higher values cull more aggressively, improving performance at the cost of quality.
     * Set to 0 to disable contribution culling. Defaults to 3.
     *
     * @type {number}
     */
    set minContribution(value: number);
    /**
     * Gets the minimum contribution threshold.
     *
     * @type {number}
     */
    get minContribution(): number;
    /**
     * Enables anti-aliasing compensation for Gaussian splats. Defaults to false.
     *
     * This option is intended for splat data that was generated with anti-aliasing
     * enabled during training/export. It improves visual stability and reduces
     * flickering for very small or distant splats.
     *
     * If the source splats were generated without anti-aliasing, enabling this
     * option may slightly soften the image or alter opacity.
     * @type {boolean}
     */
    set antiAlias(value: boolean);
    /**
     * Gets whether anti-aliasing compensation is enabled.
     *
     * @type {boolean}
     */
    get antiAlias(): boolean;
    /**
     * Enables 2D Gaussian Splatting mode. Defaults to false.
     *
     * Renders splats as oriented 2D surface elements instead of volumetric 3D Gaussians.
     * This provides a more surface-accurate appearance but requires splat data that
     * was generated for 2D Gaussian Splatting.
     *
     * Enabling this with standard 3D splat data may produce incorrect results.
     * @type {boolean}
     */
    set twoDimensional(value: boolean);
    /**
     * Gets whether 2D Gaussian Splatting mode is enabled.
     *
     * @type {boolean}
     */
    get twoDimensional(): boolean;
    /**
     * @type {number}
     * @private
     */
    private _fisheye;
    /**
     * Controls the fisheye projection strength for Gaussian splats. The value is in the
     * range [0, 1]:
     *
     * - 0: Standard rectilinear (perspective) projection.
     * - (0, 1]: Increasing barrel distortion, producing a wider field of view with a
     *   "little planet" effect at higher values.
     *
     * Enabling fisheye for the first time has a small one-off cost as new shaders are
     * compiled. Subsequent switches between 0 and non-zero are instantaneous.
     *
     * Only supported with perspective cameras. Has no effect with orthographic projection.
     *
     * Note: This only affects Gaussian splat rendering. Other objects in the scene (meshes,
     * sprites, etc.) continue to use the standard camera projection and are not distorted.
     *
     * For best results, enable {@link GSplatParams#radialSorting} when using fisheye projection
     * to avoid sorting artifacts caused by the wide field of view.
     *
     * Defaults to 0.
     *
     * @type {number}
     */
    set fisheye(value: number);
    /**
     * Gets the fisheye projection strength.
     *
     * @type {number}
     */
    get fisheye(): number;
    /**
     * Number of update ticks before unloading unused streamed resources. When a streamed resource's
     * reference count reaches zero, it enters a cooldown period before being unloaded. This allows
     * recently used data to remain in memory for quick reuse if needed again soon. Set to 0 to
     * unload immediately when unused. Defaults to 100.
     *
     * @type {number}
     */
    cooldownTicks: number;
    /**
     * Work buffer data format. Controls the precision and bandwidth of the intermediate work buffer
     * used during unified GSplat rendering. Can be set to {@link GSPLATDATA_COMPACT} (20 bytes/splat)
     * or {@link GSPLATDATA_LARGE} (32 bytes/splat). Defaults to {@link GSPLATDATA_COMPACT}.
     *
     * @type {string}
     */
    set dataFormat(value: string);
    /**
     * Gets the work buffer data format.
     *
     * @type {string}
     */
    get dataFormat(): string;
    /**
     * A material template that can be customized by the user. Any defines, parameters, or shader
     * chunks set on this material will be automatically applied to all GSplat components rendered
     * in unified mode. After making changes, call {@link Material#update} for the changes to be
     * applied on the next frame.
     *
     * @type {ShaderMaterial}
     * @example
     * // Set a custom parameter on all GSplat materials
     * app.scene.gsplat.material.setParameter('myCustomParam', 1.0);
     * app.scene.gsplat.material.update();
     */
    get material(): ShaderMaterial;
    /**
     * Format descriptor for work buffer streams. Describes the textures used by the work buffer
     * for intermediate storage during unified rendering. Users can add extra streams via
     * {@link GSplatFormat#addExtraStreams} for custom per-splat data.
     *
     * @type {GSplatFormat}
     * @example
     * // Add a custom stream to store per-splat component IDs
     * app.scene.gsplat.format.addExtraStreams([{
     *     name: 'splatId',
     *     format: pc.PIXELFORMAT_R32U
     * }]);
     */
    get format(): GSplatFormat;
    /**
     * Called at the end of the frame to clear dirty flags.
     *
     * @ignore
     */
    frameEnd(): void;
}

/**
 * @import { Asset } from '../asset/asset.js'
 * @import { AssetRegistry } from '../asset/asset-registry.js'
 */
/**
 * Keeps track of which assets are in bundles and loads files from bundles.
 *
 * @ignore
 */
declare class BundleRegistry {
    /**
     * Create a new BundleRegistry instance.
     *
     * @param {AssetRegistry} assets - The asset registry.
     */
    constructor(assets: AssetRegistry);
    /**
     * Index of bundle assets.
     * @type {Map<number, Asset>}
     * @private
     */
    private _idToBundle;
    /**
     * Index of asset id to set of bundle assets.
     * @type {Map<number, Set<Asset>>}
     * @private
     */
    private _assetToBundles;
    /**
     * Index of file url to set of bundle assets.
     * @type {Map<string, Set<Asset>>}
     * @private
     */
    private _urlsToBundles;
    /**
     * Index of file request to load callbacks.
     * @type {Map<string, function[]>}
     * @private
     */
    private _fileRequests;
    /**
     * The asset registry this bundle registry was constructed with.
     * @type {AssetRegistry}
     * @ignore
     */
    _assets: AssetRegistry;
    /**
     * Called when asset is added to AssetRegistry.
     *
     * @param {Asset} asset - The asset that has been added.
     * @private
     */
    private _onAssetAdd;
    /** Removes the event listeners bound for the asset with the given id. @ignore */
    _unbindAssetEvents(id: any): void;
    /** Records the asset with the given id as belonging to the given bundle. @ignore */
    _indexAssetInBundle(id: any, bundle: any): void;
    /** Indexes the asset's file URLs — NOTE(review): inferred from name, confirm against implementation. @ignore */
    _indexAssetFileUrls(asset: any): void;
    /** Returns the file URLs for the asset — NOTE(review): inferred from name, confirm against implementation. @ignore */
    _getAssetFileUrls(asset: any): any[];
    /** Handler invoked when an asset is removed from the registry. @ignore */
    _onAssetRemove(asset: any): void;
    /** Handler invoked when a bundle asset starts loading. @ignore */
    _onBundleLoadStart(asset: any): void;
    /** Handler invoked when a bundle asset finishes loading. @ignore */
    _onBundleLoad(asset: any): void;
    /** Handler invoked when a bundle asset fails to load. @ignore */
    _onBundleError(err: any): void;
    /** Finds a bundle containing the URL that is loaded or currently loading, if any. @ignore */
    _findLoadedOrLoadingBundleForUrl(url: any): any;
    /**
     * Lists all of the available bundles that reference the specified asset.
     *
     * @param {Asset} asset - The asset to search by.
     * @returns {Asset[]|null} An array of bundle assets or null if the
     * asset is not in any bundle.
     */
    listBundlesForAsset(asset: Asset): Asset[] | null;
    /**
     * Lists all bundle assets.
     *
     * @returns {Asset[]} An array of bundle assets.
     */
    list(): Asset[];
    /**
     * Returns true if there is a bundle that contains the specified URL.
     *
     * @param {string} url - The url.
     * @returns {boolean} True or false.
     */
    hasUrl(url: string): boolean;
    /**
     * Returns true if there is a bundle that contains the specified URL and that bundle is either
     * loaded or currently being loaded.
     *
     * @param {string} url - The url.
     * @returns {boolean} True or false.
     */
    urlIsLoadedOrLoading(url: string): boolean;
    /**
     * Loads the specified file URL from a bundle that is either loaded or currently being loaded.
     *
     * @param {string} url - The URL. Make sure you are using a relative URL that does not contain
     * any query parameters.
     * @param {Function} callback - The callback is called when the file has been loaded or if an
     * error occurs. The callback expects the first argument to be the error message (if any) and
     * the second argument is the file blob URL.
     * @example
     * const url = asset.getFileUrl().split('?')[0]; // get normalized asset URL
     * this.app.bundles.loadUrl(url, function (err, data) {
     *     // do something with the data
     * });
     */
    loadUrl(url: string, callback: Function): void;
    /**
     * Destroys the registry, and releases its resources. Does not unload bundle assets as these
     * should be unloaded by the {@link AssetRegistry}.
     */
    destroy(): void;
}

/**
 * Callback used by {@link ResourceLoader#load} when a resource is loaded (or an error occurs).
 *
 * @param err - The error message if the load fails, or null on success.
 * @param resource - The resource that has been successfully loaded, if any.
 */
type ResourceLoaderCallback = (err: string | null, resource?: any) => void;
/**
 * @import { AppBase } from '../app-base.js'
 * @import { AssetRegistry } from '../asset/asset-registry.js'
 * @import { Asset } from '../asset/asset.js'
 * @import { BundlesFilterCallback } from '../asset/asset-registry.js'
 * @import { ResourceHandler } from './handler.js'
 */
/**
 * @callback ResourceLoaderCallback
 * Callback used by {@link ResourceLoader#load} when a resource is loaded (or an error occurs).
 * @param {string|null} err - The error message in the case where the load fails.
 * @param {any} [resource] - The resource that has been successfully loaded.
 * @returns {void}
 */
/**
 * Load resource data, potentially from remote sources. Caches resource on load to prevent multiple
 * requests. Add ResourceHandlers to handle different types of resources.
 */
declare class ResourceLoader {
    /** Creates a lookup key from a URL and type — NOTE(review): presumably keys the request/cache maps; confirm. @ignore */
    static makeKey(url: any, type: any): string;
    /**
     * Create a new ResourceLoader instance.
     *
     * @param {AppBase} app - The application.
     */
    constructor(app: AppBase);
    /** Registered resource handlers, keyed by resource type. @ignore */
    _handlers: {};
    /** In-flight load requests — NOTE(review): presumably keyed via makeKey(url, type); confirm. @ignore */
    _requests: {};
    /** Cache of loaded resources — see {@link ResourceLoader#getFromCache}. @ignore */
    _cache: {};
    /** The application this loader belongs to. @ignore */
    _app: AppBase;
    /**
     * Add a {@link ResourceHandler} for a resource type. Handler should support at least `load()`
     * and `open()`. Handlers can optionally support patch(asset, assets) to handle dependencies on
     * other assets.
     *
     * @param {string} type - The name of the resource type that the handler will be registered
     * with. Can be:
     *
     * - {@link ASSET_ANIMATION}
     * - {@link ASSET_AUDIO}
     * - {@link ASSET_IMAGE}
     * - {@link ASSET_JSON}
     * - {@link ASSET_MODEL}
     * - {@link ASSET_MATERIAL}
     * - {@link ASSET_TEXT}
     * - {@link ASSET_TEXTURE}
     * - {@link ASSET_CUBEMAP}
     * - {@link ASSET_SHADER}
     * - {@link ASSET_CSS}
     * - {@link ASSET_HTML}
     * - {@link ASSET_SCRIPT}
     * - {@link ASSET_CONTAINER}
     *
     * @param {ResourceHandler} handler - An instance of a resource handler
     * supporting at least `load()` and `open()`.
     * @example
     * const loader = new ResourceLoader(app);
     * loader.addHandler("json", new pc.JsonHandler());
     */
    addHandler(type: string, handler: ResourceHandler): void;
    /**
     * Remove a {@link ResourceHandler} for a resource type.
     *
     * @param {string} type - The name of the resource type for which the handler will be removed.
     */
    removeHandler(type: string): void;
    /**
     * Get a {@link ResourceHandler} for a resource type.
     *
     * @param {string} type - The name of the resource type that the handler is registered with.
     * @returns {ResourceHandler|undefined} The registered handler, or
     * undefined if the requested handler is not registered.
     */
    getHandler(type: string): ResourceHandler | undefined;
    /**
     * Make a request for a resource from a remote URL. Parse the returned data using the handler
     * for the specified type. When loaded and parsed, use the callback to return an instance of
     * the resource.
     *
     * @param {string} url - The URL of the resource to load.
     * @param {string} type - The type of resource expected.
     * @param {ResourceLoaderCallback} callback - The callback used when the resource is loaded or
     * an error occurs. Passed (err, resource) where err is null if there are no errors.
     * @param {Asset} [asset] - Optional asset that is passed into
     * handler.
     * @param {object} [options] - Additional options for loading.
     * @param {boolean} [options.bundlesIgnore] - If set to true, then asset will not try to load
     * from a bundle. Defaults to false.
     * @param {BundlesFilterCallback} [options.bundlesFilter] - A callback that will be called
     * when loading an asset that is contained in any of the bundles. It provides an array of
     * bundles and will ensure asset is loaded from bundle returned from a callback. By default,
     * the smallest filesize bundle is chosen.
     * @example
     * app.loader.load("../path/to/texture.png", "texture", function (err, texture) {
     *     // use texture here
     * });
     */
    load(url: string, type: string, callback: ResourceLoaderCallback, asset?: Asset, options?: {
        bundlesIgnore?: boolean;
        bundlesFilter?: BundlesFilterCallback;
    }): void;
    /** Handles assets with no file — NOTE(review): inferred from name, confirm against implementation. @ignore */
    _loadNull(handler: any, callback: any, asset: any): void;
    /** Resolves the request for the given key with a successful result. @ignore */
    _onSuccess(key: any, result: any, extra: any): void;
    /** Resolves the request for the given key with an error. @ignore */
    _onFailure(key: any, err: any): void;
    /**
     * Convert raw resource data into a resource instance. E.g. Take 3D model format JSON and
     * return a {@link Model}.
     *
     * @param {string} type - The type of resource.
     * @param {*} data - The raw resource data.
     * @returns {*} The parsed resource data.
     */
    open(type: string, data: any): any;
    /**
     * Perform any operations on a resource, that requires a dependency on its asset data or any
     * other asset data.
     *
     * @param {Asset} asset - The asset to patch.
     * @param {AssetRegistry} assets - The asset registry.
     */
    patch(asset: Asset, assets: AssetRegistry): void;
    /**
     * Remove resource from cache.
     *
     * @param {string} url - The URL of the resource.
     * @param {string} type - The type of resource.
     */
    clearCache(url: string, type: string): void;
    /**
     * Check cache for resource from a URL. If present, return the cached value.
     *
     * @param {string} url - The URL of the resource to get from the cache.
     * @param {string} type - The type of the resource.
     * @returns {*} The resource loaded from the cache.
     */
    getFromCache(url: string, type: string): any;
    /**
     * Enables retrying of failed requests when loading assets.
     *
     * @param {number} maxRetries - The maximum number of times to retry loading an asset. Defaults
     * to 5.
     * @ignore
     */
    enableRetry(maxRetries?: number): void;
    /**
     * Disables retrying of failed requests when loading assets.
     *
     * @ignore
     */
    disableRetry(): void;
    /**
     * Destroys the resource loader.
     */
    destroy(): void;
}

/**
 * Callback used by {@link AssetRegistry#filter} to filter assets.
 *
 * @param asset - The current asset to filter.
 * @returns Return `true` to include the asset in the result list.
 */
type FilterAssetCallback = (asset: Asset) => boolean;
/**
 * Callback used by {@link AssetRegistry#loadFromUrl} and called when an asset is loaded (or an
 * error occurs).
 *
 * @param err - The error message, or null if no errors were encountered.
 * @param asset - The loaded asset, if no errors were encountered.
 */
type LoadAssetCallback = (err: string | null, asset?: Asset) => void;
/**
 * Callback used by {@link ResourceLoader#load} and called when an asset is choosing a bundle
 * to load from. Return a single bundle to ensure asset is loaded from it.
 *
 * @param bundles - List of bundle assets which contain the asset.
 * @returns A single bundle asset to ensure the asset is loaded from it.
 */
type BundlesFilterCallback = (bundles: Asset[]) => Asset;
/**
 * @import { BundleRegistry } from '../bundle/bundle-registry.js'
 * @import { ResourceLoader } from '../handlers/loader.js'
 */
/**
 * @callback FilterAssetCallback
 * Callback used by {@link AssetRegistry#filter} to filter assets.
 * @param {Asset} asset - The current asset to filter.
 * @returns {boolean} Return `true` to include asset to result list.
 */
/**
 * @callback LoadAssetCallback
 * Callback used by {@link AssetRegistry#loadFromUrl} and called when an asset is loaded (or an
 * error occurs).
 * @param {string|null} err - The error message is null if no errors were encountered.
 * @param {Asset} [asset] - The loaded asset if no errors were encountered.
 * @returns {void}
 */
/**
 * @callback BundlesFilterCallback
 * Callback used by {@link ResourceLoader#load} and called when an asset is choosing a bundle
 * to load from. Return a single bundle to ensure asset is loaded from it.
 * @param {Asset[]} bundles - List of bundle assets which contain the asset.
 * @returns {Asset} Return a single bundle asset to ensure asset is loaded from it.
 */
/**
 * Container for all assets that are available to this application. Note that PlayCanvas scripts
 * are provided with an AssetRegistry instance as `app.assets`.
 *
 * @category Asset
 */
declare class AssetRegistry extends EventHandler {
    /**
     * Fired when an asset completes loading. This event is available in three forms. They are as
     * follows:
     *
     * 1. `load` - Fired when any asset finishes loading.
     * 2. `load:[id]` - Fired when a specific asset has finished loading, where `[id]` is the
     * unique id of the asset.
     * 3. `load:url:[url]` - Fired when an asset finishes loading whose URL matches `[url]`, where
     * `[url]` is the URL of the asset.
     *
     * @event
     * @param {Asset} asset - The asset that has finished loading.
     * @example
     * app.assets.on('load', (asset) => {
     *     console.log(`Asset loaded: ${asset.name}`);
     * });
     * @example
     * const id = 123456;
     * const asset = app.assets.get(id);
     * app.assets.on('load:' + id, (asset) => {
     *     console.log(`Asset loaded: ${asset.name}`);
     * });
     * app.assets.load(asset);
     * @example
     * const id = 123456;
     * const asset = app.assets.get(id);
     * app.assets.on('load:url:' + asset.file.url, (asset) => {
     *     console.log(`Asset loaded: ${asset.name}`);
     * });
     * app.assets.load(asset);
     */
    static EVENT_LOAD: string;
    /**
     * Fired when an asset is added to the registry. This event is available in three forms. They
     * are as follows:
     *
     * 1. `add` - Fired when any asset is added to the registry.
     * 2. `add:[id]` - Fired when an asset is added to the registry, where `[id]` is the unique id
     * of the asset.
     * 3. `add:url:[url]` - Fired when an asset is added to the registry and matches the URL
     * `[url]`, where `[url]` is the URL of the asset.
     *
     * @event
     * @param {Asset} asset - The asset that was added.
     * @example
     * app.assets.on('add', (asset) => {
     *    console.log(`Asset added: ${asset.name}`);
     * });
     * @example
     * const id = 123456;
     * app.assets.on('add:' + id, (asset) => {
     *    console.log(`Asset added: ${asset.name}`);
     * });
     * @example
     * const id = 123456;
     * const asset = app.assets.get(id);
     * app.assets.on('add:url:' + asset.file.url, (asset) => {
     *    console.log(`Asset added: ${asset.name}`);
     * });
     */
    static EVENT_ADD: string;
    /**
     * Fired when an asset is removed from the registry. This event is available in three forms.
     * They are as follows:
     *
     * 1. `remove` - Fired when any asset is removed from the registry.
     * 2. `remove:[id]` - Fired when an asset is removed from the registry, where `[id]` is the
     * unique id of the asset.
     * 3. `remove:url:[url]` - Fired when an asset is removed from the registry and matches the
     * URL `[url]`, where `[url]` is the URL of the asset.
     *
     * @event
     * @param {Asset} asset - The asset that was removed.
     * @example
     * app.assets.on('remove', (asset) => {
     *    console.log(`Asset removed: ${asset.name}`);
     * });
     * @example
     * const id = 123456;
     * app.assets.on('remove:' + id, (asset) => {
     *    console.log(`Asset removed: ${asset.name}`);
     * });
     * @example
     * const id = 123456;
     * const asset = app.assets.get(id);
     * app.assets.on('remove:url:' + asset.file.url, (asset) => {
     *    console.log(`Asset removed: ${asset.name}`);
     * });
     */
    static EVENT_REMOVE: string;
    /**
     * Fired when an error occurs during asset loading. This event is available in two forms. They
     * are as follows:
     *
     * 1. `error` - Fired when any asset reports an error in loading.
     * 2. `error:[id]` - Fired when an asset reports an error in loading, where `[id]` is the
     * unique id of the asset.
     *
     * @event
     * @param {string} err - The error message.
     * @param {Asset} asset - The asset that failed to load.
     * @example
     * const id = 123456;
     * const asset = app.assets.get(id);
     * app.assets.on('error', (err, asset) => {
     *     console.error(err);
     * });
     * app.assets.load(asset);
     * @example
     * const id = 123456;
     * const asset = app.assets.get(id);
     * app.assets.on('error:' + id, (err, asset) => {
     *     console.error(err);
     * });
     * app.assets.load(asset);
     */
    static EVENT_ERROR: string;
    /**
     * Create an instance of an AssetRegistry.
     *
     * @param {ResourceLoader} loader - The ResourceLoader used to load the asset files.
     */
    constructor(loader: ResourceLoader);
    /**
     * @type {Set<Asset>}
     * @private
     */
    private _assets;
    /**
     * @type {ResourceLoader}
     * @private
     */
    private _loader;
    /**
     * @type {Map<number, Asset>}
     * @private
     */
    private _idToAsset;
    /**
     * @type {Map<string, Asset>}
     * @private
     */
    private _urlToAsset;
    /**
     * @type {Map<string, Set<Asset>>}
     * @private
     */
    private _nameToAsset;
    /**
     * Index for looking up by tags.
     *
     * @private
     */
    private _tags;
    /**
     * A URL prefix that will be added to all asset loading requests.
     *
     * @type {string|null}
     */
    prefix: string | null;
    /**
     * BundleRegistry
     *
     * @type {BundleRegistry|null}
     */
    bundles: BundleRegistry | null;
    /**
     * The ResourceLoader used to load asset files.
     *
     * @type {ResourceLoader}
     * @ignore
     */
    get loader(): ResourceLoader;
    /**
     * Create a filtered list of assets from the registry.
     *
     * @param {object} [filters] - Filter options.
     * @param {boolean} [filters.preload] - Filter by preload setting.
     * @returns {Asset[]} The filtered list of assets.
     */
    list(filters?: {
        preload?: boolean;
    }): Asset[];
    /**
     * Add an asset to the registry. If {@link Asset#preload} is `true`, it will also get loaded.
     *
     * @param {Asset} asset - The asset to add.
     * @example
     * const asset = new pc.Asset("My Asset", "texture", {
     *     url: "../path/to/image.jpg"
     * });
     * app.assets.add(asset);
     */
    add(asset: Asset): void;
    /**
     * Remove an asset from the registry.
     *
     * @param {Asset} asset - The asset to remove.
     * @returns {boolean} True if the asset was successfully removed and false otherwise.
     * @example
     * const asset = app.assets.get(100);
     * app.assets.remove(asset);
     */
    remove(asset: Asset): boolean;
    /**
     * Retrieve an asset from the registry by its id field.
     *
     * @param {number} id - The id of the asset to get.
     * @returns {Asset|undefined} The asset.
     * @example
     * const asset = app.assets.get(100);
     */
    get(id: number): Asset | undefined;
    /**
     * Retrieve an asset from the registry by its file's URL field.
     *
     * @param {string} url - The url of the asset to get.
     * @returns {Asset|undefined} The asset.
     * @example
     * const asset = app.assets.getByUrl("../path/to/image.jpg");
     */
    getByUrl(url: string): Asset | undefined;
    /**
     * Load the asset's file from a remote source. Listen for `load` events on the asset to find
     * out when it is loaded.
     *
     * @param {Asset} asset - The asset to load.
     * @param {object} [options] - Options for asset loading.
     * @param {boolean} [options.bundlesIgnore] - If set to true, then asset will not try to load
     * from a bundle. Defaults to false.
     * @param {boolean} [options.force] - If set to true, then the check of asset being loaded or
     * is already loaded is bypassed, which forces loading of asset regardless.
     * @param {BundlesFilterCallback} [options.bundlesFilter] - A callback that will be called
     * when loading an asset that is contained in any of the bundles. It provides an array of
     * bundles and will ensure asset is loaded from bundle returned from a callback. By default,
     * the smallest filesize bundle is chosen.
     * @example
     * // load some assets
     * const assetsToLoad = [
     *     app.assets.find("My Asset"),
     *     app.assets.find("Another Asset")
     * ];
     * let count = 0;
     * assetsToLoad.forEach((assetToLoad) => {
     *     assetToLoad.ready((asset) => {
     *         count++;
     *         if (count === assetsToLoad.length) {
     *             // done
     *         }
     *     });
     *     app.assets.load(assetToLoad);
     * });
     */
    load(asset: Asset, options?: {
        bundlesIgnore?: boolean;
        force?: boolean;
        bundlesFilter?: BundlesFilterCallback;
    }): void;
    /**
     * Use this to load and create an asset if you don't have assets created. Usually you would
     * only use this if you are not integrated with the PlayCanvas Editor.
     *
     * @param {string} url - The url to load.
     * @param {string} type - The type of asset to load.
     * @param {LoadAssetCallback} callback - Function called when asset is loaded, passed (err,
     * asset), where err is null if no errors were encountered.
     * @example
     * app.assets.loadFromUrl("../path/to/texture.jpg", "texture", function (err, asset) {
     *     const texture = asset.resource;
     * });
     */
    loadFromUrl(url: string, type: string, callback: LoadAssetCallback): void;
    /**
     * Use this to load and create an asset when both the URL and filename are required. For
     * example, use this function when loading BLOB assets, where the URL does not adequately
     * identify the file.
     *
     * @param {string} url - The url to load.
     * @param {string} filename - The filename of the asset to load.
     * @param {string} type - The type of asset to load.
     * @param {LoadAssetCallback} callback - Function called when asset is loaded, passed (err,
     * asset), where err is null if no errors were encountered.
     * @example
     * const file = magicallyObtainAFile();
     * app.assets.loadFromUrlAndFilename(URL.createObjectURL(file), "texture.png", "texture", function (err, asset) {
     *     const texture = asset.resource;
     * });
     */
    loadFromUrlAndFilename(url: string, filename: string, type: string, callback: LoadAssetCallback): void;
    /** NOTE(review): appears to hold an error produced during loadFromUrl — confirm against implementation. @ignore */
    loadFromUrlError: any;
    /** Loads a model asset before invoking continuation — NOTE(review): inferred from name, confirm. @ignore */
    _loadModel(modelAsset: any, continuation: any): void;
    /** Loads the materials referenced by a model's mapping — NOTE(review): inferred from name, confirm. @ignore */
    _loadMaterials(modelAsset: any, mapping: any, callback: any): void;
    /** Loads the textures referenced by a material asset — NOTE(review): inferred from name, confirm. @ignore */
    _loadTextures(materialAsset: any, callback: any): void;
    /** Handler invoked when a tag is added to an asset; keeps the tag index in sync. @ignore */
    _onTagAdd(tag: any, asset: any): void;
    /** Handler invoked when a tag is removed from an asset; keeps the tag index in sync. @ignore */
    _onTagRemove(tag: any, asset: any): void;
    /** Handler invoked when an asset is renamed; keeps the name index in sync. @ignore */
    _onNameChange(asset: any, name: any, nameOld: any): void;
    /**
     * Return all Assets that satisfy the search query. Query can be simply a string, or comma
     * separated strings, to have inclusive results of assets that match at least one query. A
     * query that consists of an array of tags can be used to match assets that have each tag of
     * array.
     *
     * @param {...*} query - Name of a tag or array of tags.
     * @returns {Asset[]} A list of all Assets matched query.
     * @example
     * const assets = app.assets.findByTag("level-1");
     * // returns all assets that tagged by `level-1`
     * @example
     * const assets = app.assets.findByTag("level-1", "level-2");
     * // returns all assets that tagged by `level-1` OR `level-2`
     * @example
     * const assets = app.assets.findByTag(["level-1", "monster"]);
     * // returns all assets that tagged by `level-1` AND `monster`
     * @example
     * const assets = app.assets.findByTag(["level-1", "monster"], ["level-2", "monster"]);
     * // returns all assets that tagged by (`level-1` AND `monster`) OR (`level-2` AND `monster`)
     */
    findByTag(...query: any[]): Asset[];
    /**
     * Return all Assets that satisfy a filter callback.
     *
     * @param {FilterAssetCallback} callback - The callback function that is used to filter assets.
     * Return `true` to include an asset in the returned array.
     * @returns {Asset[]} A list of all Assets found.
     * @example
     * const assets = app.assets.filter(asset => asset.name.includes('monster'));
     * console.log(`Found ${assets.length} assets with a name containing 'monster'`);
     */
    filter(callback: FilterAssetCallback): Asset[];
    /**
     * Return the first Asset with the specified name and type found in the registry.
     *
     * @param {string} name - The name of the Asset to find.
     * @param {string} [type] - The type of the Asset to find.
     * @returns {Asset|null} A single Asset or null if no Asset is found.
     * @example
     * const asset = app.assets.find("myTextureAsset", "texture");
     */
    find(name: string, type?: string): Asset | null;
    /**
     * Return all Assets with the specified name and type found in the registry.
     *
     * @param {string} name - The name of the Assets to find.
     * @param {string} [type] - The type of the Assets to find.
     * @returns {Asset[]} A list of all Assets found.
     * @example
     * const assets = app.assets.findAll('brick', 'texture');
     * console.log(`Found ${assets.length} texture assets named 'brick'`);
     */
    findAll(name: string, type?: string): Asset[];
    /**
     * Logs all assets in the registry to the console. Used for debugging with TRACEID_ASSETS.
     *
     * @ignore
     */
    log(): void;
}

/**
 * Callback used by {@link Asset#ready} and called when an asset is ready.
 *
 * @param asset - The ready asset.
 */
type AssetReadyCallback = (asset: Asset) => void;
/**
 * @callback AssetReadyCallback
 * Callback used by {@link Asset#ready} and called when an asset is ready.
 * @param {Asset} asset - The ready asset.
 * @returns {void}
 */
/**
 * An asset record of a file or data resource that can be loaded by the engine. The asset contains
 * four important fields:
 *
 * - `file`: contains the details of a file (filename, url) which contains the resource data, e.g.
 * an image file for a texture asset.
 * - `data`: contains a JSON blob which contains either the resource data for the asset (e.g.
 * material data) or additional data for the file (e.g. material mappings for a model).
 * - `options`: contains a JSON blob with handler-specific load options.
 * - `resource`: contains the final resource when it is loaded. (e.g. a {@link StandardMaterial} or
 * a {@link Texture}).
 *
 * See the {@link AssetRegistry} for details on loading resources from assets.
 *
 * @category Asset
 */
declare class Asset extends EventHandler {
    /**
     * Fired when the asset has completed loading.
     *
     * @event
     * @example
     * asset.on('load', (asset) => {
     *     console.log(`Asset loaded: ${asset.name}`);
     * });
     */
    static EVENT_LOAD: string;
    /**
     * Fired just before the asset unloads the resource. This allows for the opportunity to prepare
     * for an asset that will be unloaded. E.g. Changing the texture of a model to a default before
     * the one it was using is unloaded.
     *
     * @event
     * @example
     * asset.on('unload', (asset) => {
     *    console.log(`Asset about to unload: ${asset.name}`);
     * });
     */
    static EVENT_UNLOAD: string;
    /**
     * Fired when the asset is removed from the asset registry.
     *
     * @event
     * @example
     * asset.on('remove', (asset) => {
     *    console.log(`Asset removed: ${asset.name}`);
     * });
     */
    static EVENT_REMOVE: string;
    /**
     * Fired if the asset encounters an error while loading.
     *
     * @event
     * @example
     * asset.on('error', (err, asset) => {
     *    console.error(`Error loading asset ${asset.name}: ${err}`);
     * });
     */
    static EVENT_ERROR: string;
    /**
     * Fired when one of the asset properties `file`, `data`, `resource` or `resources` is changed.
     *
     * @event
     * @example
     * asset.on('change', (asset, property, newValue, oldValue) => {
     *    console.log(`Asset ${asset.name} has property ${property} changed from ${oldValue} to ${newValue}`);
     * });
     */
    static EVENT_CHANGE: string;
    /**
     * Fired when the asset's stream download progresses.
     *
     * Please note:
     * - only gsplat assets currently emit this event
     * - totalBytes may not be reliable as it is based on the content-length header of the response
     *
     * @event
     * @example
     * asset.on('progress', (receivedBytes, totalBytes) => {
     *    console.log(`Asset ${asset.name} progress ${receivedBytes / totalBytes}`);
     * });
     */
    static EVENT_PROGRESS: string;
    /**
     * Fired when we add a new localized asset id to the asset.
     *
     * @event
     * @example
     * asset.on('add:localized', (locale, assetId) => {
     *    console.log(`Asset ${asset.name} has added localized asset ${assetId} for locale ${locale}`);
     * });
     */
    static EVENT_ADDLOCALIZED: string;
    /**
     * Fired when we remove a localized asset id from the asset.
     *
     * @event
     * @example
     * asset.on('remove:localized', (locale, assetId) => {
     *   console.log(`Asset ${asset.name} has removed localized asset ${assetId} for locale ${locale}`);
     * });
     */
    static EVENT_REMOVELOCALIZED: string;
    /**
     * Helper function to resolve asset file data and return the contents as an ArrayBuffer. If the
     * asset file contents are present, that is returned. Otherwise the file data is downloaded
     * via http.
     *
     * @param {string} loadUrl - The URL as passed into the handler.
     * @param {ResourceLoaderCallback} callback - The callback function to receive results.
     * @param {Asset} [asset] - The asset.
     * @param {number} [maxRetries] - Number of retries if http download is required.
     * @ignore
     */
    static fetchArrayBuffer(loadUrl: string, callback: ResourceLoaderCallback, asset?: Asset, maxRetries?: number): void;
    /**
     * Create a new Asset record. Generally, Assets are created in the loading process and you
     * won't need to create them by hand.
     *
     * @param {string} name - A non-unique but human-readable name which can be later used to
     * retrieve the asset.
     * @param {"animation"|"audio"|"binary"|"container"|"cubemap"|"css"|"font"|"gsplat"|"json"|"html"|"material"|"model"|"render"|"script"|"shader"|"sprite"|"template"|"text"|"texture"|"textureatlas"} type - Type of asset.
     * @param {object} [file] - Details about the file the asset is made from. At the least must
     * contain the 'url' field. For assets that don't contain file data use null.
     * @param {string} [file.url] - The URL of the resource file that contains the asset data.
     * @param {string} [file.filename] - The filename of the resource file or null if no filename
     * was set (e.g. from using {@link AssetRegistry#loadFromUrl}).
     * @param {number} [file.size] - The size of the resource file or null if no size was set
     * (e.g. from using {@link AssetRegistry#loadFromUrl}).
     * @param {string} [file.hash] - The MD5 hash of the resource file data and the Asset data
     * field or null if no hash was set (e.g. from using {@link AssetRegistry#loadFromUrl}).
     * @param {ArrayBuffer} [file.contents] - Optional file contents. This is faster than wrapping
     * the data in a (base64 encoded) blob. Currently only used by container assets.
     * @param {object|string} [data] - JSON object or string with additional data about the asset.
     * (e.g. for texture and model assets) or contains the asset data itself (e.g. in the case of
     * materials).
     * @param {object} [options] - The asset handler options. For container options see
     * {@link ContainerHandler}.
     * @param {'anonymous'|'use-credentials'|null} [options.crossOrigin] - For use with texture assets
     * that are loaded using the browser. This setting overrides the default crossOrigin specifier.
     * For more details on crossOrigin and its use, see
     * https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement/crossOrigin.
     * @example
     * const asset = new pc.Asset("a texture", "texture", {
     *     url: "http://example.com/my/assets/here/texture.png"
     * });
     */
    constructor(name: string, type: "animation" | "audio" | "binary" | "container" | "cubemap" | "css" | "font" | "gsplat" | "json" | "html" | "material" | "model" | "render" | "script" | "shader" | "sprite" | "template" | "text" | "texture" | "textureatlas", file?: {
        url?: string;
        filename?: string;
        size?: number;
        hash?: string;
        contents?: ArrayBuffer;
    }, data?: object | string, options?: {
        crossOrigin?: "anonymous" | "use-credentials" | null;
    });
    /**
     * @type {AssetFile | null}
     * @private
     */
    private _file;
    /**
     * A string-assetId dictionary that maps locale to asset id.
     *
     * @type {object}
     * @private
     */
    private _i18n;
    /**
     * Whether to preload the asset.
     *
     * @type {boolean}
     * @private
     */
    private _preload;
    /**
     * This is where the loaded resource(s) are stored.
     *
     * @type {object[]}
     * @private
     */
    private _resources;
    /**
     * The asset id.
     *
     * @type {number}
     */
    id: number;
    /**
     * True if the asset has finished attempting to load the resource. It is not guaranteed
     * that the resources are available as there could have been a network error.
     *
     * @type {boolean}
     */
    loaded: boolean;
    /**
     * True if the resource is currently being loaded.
     *
     * @type {boolean}
     */
    loading: boolean;
    /**
     * Optional JSON data that contains the asset handler options.
     *
     * @type {object}
     */
    options: object;
    /**
     * The asset registry that this Asset belongs to.
     *
     * @type {AssetRegistry|null}
     */
    registry: AssetRegistry | null;
    /**
     * Asset tags. Enables finding of assets by tags using the {@link AssetRegistry#findByTag} method.
     *
     * @type {Tags}
     */
    tags: Tags;
    /**
     * The type of the asset.
     *
     * @type {"animation"|"audio"|"binary"|"container"|"cubemap"|"css"|"font"|"gsplat"|"json"|"html"|"material"|"model"|"render"|"script"|"shader"|"sprite"|"template"|"text"|"texture"|"textureatlas"}
     */
    type: "animation" | "audio" | "binary" | "container" | "cubemap" | "css" | "font" | "gsplat" | "json" | "html" | "material" | "model" | "render" | "script" | "shader" | "sprite" | "template" | "text" | "texture" | "textureatlas";
    /**
     * The URL object.
     *
     * @type {string | null}
     * @ignore
     */
    urlObject: string | null;
    _name: string;
    _data: any;
    /**
     * Sets the file details or null if no file.
     *
     * @type {object}
     */
    set file(value: object);
    /**
     * Gets the file details or null if no file.
     *
     * @type {object}
     */
    get file(): object;
    /**
     * Sets the asset name.
     *
     * @type {string}
     */
    set name(value: string);
    /**
     * Gets the asset name.
     *
     * @type {string}
     */
    get name(): string;
    /**
     * Sets optional asset JSON data. This contains either the complete resource data (such as in
     * the case of a material) or additional data (such as in the case of a model which contains
     * mappings from mesh to material).
     *
     * @type {object}
     */
    set data(value: object);
    /**
     * Gets optional asset JSON data.
     *
     * @type {object}
     */
    get data(): object;
    /**
     * Sets the asset resource. For example, a {@link StandardMaterial} or a {@link Texture}.
     *
     * @type {object}
     */
    set resource(value: object);
    /**
     * Gets the asset resource.
     *
     * @type {object}
     */
    get resource(): object;
    /**
     * Sets the asset resources. Some assets can hold more than one runtime resource (cube maps,
     * for example).
     *
     * @type {object[]}
     */
    set resources(value: object[]);
    /**
     * Gets the asset resources.
     *
     * @type {object[]}
     */
    get resources(): object[];
    /**
     * Sets whether to preload an asset. If true, the asset will be loaded during the preload phase
     * of application initialization or when calling {@link AssetRegistry#add}.
     *
     * @type {boolean}
     */
    set preload(value: boolean);
    /**
     * Gets whether to preload an asset.
     *
     * @type {boolean}
     */
    get preload(): boolean;
    set loadFaces(value: any);
    get loadFaces(): any;
    _loadFaces: any;
    /**
     * Return the URL required to fetch the file for this asset.
     *
     * @returns {string|null} The URL. Returns null if the asset has no associated file.
     * @example
     * const assets = app.assets.find("My Image", "texture");
     * const img = "&lt;img src='" + assets[0].getFileUrl() + "'&gt;";
     */
    getFileUrl(): string | null;
    /**
     * Construct an asset URL from this asset's location and a relative path. If the relativePath
     * is a blob or Base64 URI, then return that instead.
     *
     * @param {string} relativePath - The relative path to be concatenated to this asset's base url.
     * @returns {string} Resulting URL of the asset.
     * @ignore
     */
    getAbsoluteUrl(relativePath: string): string;
    /**
     * Returns the asset id of the asset that corresponds to the specified locale.
     *
     * @param {string} locale - The desired locale e.g. Ar-AR.
     * @returns {number} An asset id or null if there is no asset specified for the desired locale.
     * @ignore
     */
    getLocalizedAssetId(locale: string): number;
    /**
     * Adds a replacement asset id for the specified locale. When the locale in
     * {@link AppBase#i18n} changes then references to this asset will be replaced with the
     * specified asset id. (Currently only supported by the {@link ElementComponent}).
     *
     * @param {string} locale - The locale e.g. Ar-AR.
     * @param {number} assetId - The asset id.
     * @ignore
     */
    addLocalizedAssetId(locale: string, assetId: number): void;
    /**
     * Removes a localized asset.
     *
     * @param {string} locale - The locale e.g. Ar-AR.
     * @ignore
     */
    removeLocalizedAssetId(locale: string): void;
    /**
     * Take a callback which is called as soon as the asset is loaded. If the asset is already
     * loaded the callback is called straight away.
     *
     * @param {AssetReadyCallback} callback - The function called when the asset is ready. Passed
     * the (asset) arguments.
     * @param {object} [scope] - Scope object to use when calling the callback.
     * @example
     * const asset = app.assets.find("My Asset");
     * asset.ready((asset) => {
     *     // asset loaded
     * });
     * app.assets.load(asset);
     */
    ready(callback: AssetReadyCallback, scope?: object): void;
    reload(): void;
    /**
     * Destroys the associated resource and marks asset as unloaded.
     *
     * @example
     * const asset = app.assets.find("My Asset");
     * asset.unload();
     * // asset.resource is null
     */
    unload(): void;
}

/**
 * Callback used by {@link ResourceHandler#load} when a resource is loaded (or an error occurs).
 */
type ResourceHandlerCallback = (err: string | null, response?: any) => void;
/**
 * @import { AppBase } from '../app-base.js'
 * @import { AssetRegistry } from '../asset/asset-registry.js'
 * @import { Asset } from '../asset/asset.js'
 */
/**
 * @callback ResourceHandlerCallback
 * Callback used by {@link ResourceHandler#load} when a resource is loaded (or an error occurs).
 * @param {string|null} err - The error message in the case where the load fails.
 * @param {any} [response] - The raw data that has been successfully loaded.
 * @returns {void}
 */
/**
 * Base class for ResourceHandlers used by {@link ResourceLoader}.
 */
declare class ResourceHandler {
    /**
     * @param {AppBase} app - The running {@link AppBase}.
     * @param {string} handlerType - The type of the resource the handler handles.
     */
    constructor(app: AppBase, handlerType: string);
    /**
     * Type of the resource the handler handles.
     *
     * @type {string}
     */
    handlerType: string;
    /**
     * The running app instance.
     *
     * @type {AppBase}
     */
    _app: AppBase;
    /** @private */
    private _maxRetries;
    /**
     * Sets the number of times to retry a failed request for the resource.
     *
     * @type {number}
     */
    set maxRetries(value: number);
    /**
     * Gets the number of times to retry a failed request for the resource.
     *
     * @type {number}
     */
    get maxRetries(): number;
    /**
     * Load a resource from a remote URL. The base implementation does nothing.
     *
     * @param {string|object} url - Either the URL of the resource to load or a structure
     * containing the load and original URL.
     * @param {string} [url.load] - The URL to be used for loading the resource.
     * @param {string} [url.original] - The original URL to be used for identifying the resource
     * format. This is necessary when loading, for example from blob.
     * @param {ResourceHandlerCallback} callback - The callback used when the resource is loaded or
     * an error occurs.
     * @param {Asset} [asset] - Optional asset that is passed by ResourceLoader.
     */
    load(url: string | object, callback: ResourceHandlerCallback, asset?: Asset): void;
    /**
     * The open function is passed the raw resource data. The handler can then process the data
     * into a format that can be used at runtime. The base implementation simply returns the data.
     *
     * @param {string} url - The URL of the resource to open.
     * @param {*} data - The raw resource data passed by callback from {@link ResourceHandler#load}.
     * @param {Asset} [asset] - Optional asset that is passed by ResourceLoader.
     * @returns {*} The parsed resource data.
     */
    open(url: string, data: any, asset?: Asset): any;
    /**
     * The patch function performs any operations on a resource that requires a dependency on its
     * asset data or any other asset data. The base implementation does nothing.
     *
     * @param {Asset} asset - The asset to patch.
     * @param {AssetRegistry} assets - The asset registry.
     */
    patch(asset: Asset, assets: AssetRegistry): void;
}

/**
 * Render resource wrapping uncompressed {@link GSplatData} for a graphics device.
 *
 * @ignore
 */
declare class GSplatResource extends GSplatResourceBase {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {GSplatData} gsplatData - The splat data.
     */
    constructor(device: GraphicsDevice, gsplatData: GSplatData);
    /**
     * The number of spherical harmonics bands present in the data.
     *
     * @type {0 | 1 | 2 | 3}
     */
    shBands: 0 | 1 | 2 | 3;
    /**
     * Configures material defines for rendering this resource. NOTE(review): semantics presumed
     * from naming — confirm against implementation.
     *
     * @param {object} defines - The defines to configure.
     */
    configureMaterialDefines(defines: any): void;
    /**
     * Updates pixel data of splatColor texture based on the supplied color components and opacity.
     * Assumes that the texture is using an RGBA format where RGB are color components influenced
     * by SH spherical harmonics and A is opacity after a sigmoid transformation.
     *
     * @param {GSplatData} gsplatData - The source data.
     */
    updateColorData(gsplatData: GSplatData): void;
    /**
     * Updates transform-related texture data from the supplied source. NOTE(review): presumed
     * from naming — confirm against implementation.
     *
     * @param {GSplatData} gsplatData - The source data.
     */
    updateTransformData(gsplatData: GSplatData): void;
    /**
     * Updates spherical harmonics texture data from the supplied source. NOTE(review): presumed
     * from naming — confirm against implementation.
     *
     * @param {GSplatData} gsplatData - The source data.
     */
    updateSHData(gsplatData: GSplatData): void;
}

type DataType = Int8Array | Uint8Array | Int16Array | Uint16Array | Int32Array | Uint32Array | Float32Array | Float64Array;
type PlyProperty = {
    /**
     * - E.g. 'float'.
     */
    type: string;
    /**
     * - E.g. 'x', 'y', 'z', 'f_dc_0' etc.
     */
    name: string;
    /**
     * - Data type, e.g. instance of Float32Array.
     */
    storage: DataType;
    /**
     * - BYTES_PER_ELEMENT of given data type.
     */
    byteSize: number;
};
type PlyElement = {
    /**
     * - E.g. 'vertex'.
     */
    name: string;
    /**
     * - Given count.
     */
    count: number;
    /**
     * - The properties.
     */
    properties: PlyProperty[];
};
declare class PlyParser {
    /**
     * @param {AppBase} app - The app instance.
     * @param {number} maxRetries - Maximum amount of retries.
     */
    constructor(app: AppBase, maxRetries: number);
    /** @type {AppBase} */
    app: AppBase;
    /** @type {number} */
    maxRetries: number;
    /**
     * @param {object} url - The URL of the resource to load.
     * @param {string} url.load - The URL to use for loading the resource.
     * @param {string} url.original - The original URL useful for identifying the resource type.
     * @param {ResourceHandlerCallback} callback - The callback used when
     * the resource is loaded or an error occurs.
     * @param {Asset} asset - Container asset.
     */
    load(url: {
        load: string;
        original: string;
    }, callback: ResourceHandlerCallback, asset: Asset): Promise<void>;
    /**
     * @param {string} url - The URL.
     * @param {GSplatResource} data - The data.
     * @returns {GSplatResource} Return the data.
     */
    open(url: string, data: GSplatResource): GSplatResource;
}

/**
 * Gaussian splat data backed by PLY file elements. Provides property access, iteration and
 * bounds-calculation utilities over the raw splat attributes.
 */
declare class GSplatData {
    /**
     * Calculates the bounding box of a single splat from its transform.
     *
     * @param {BoundingBox} result - Bounding box instance holding calculated result.
     * @param {Vec3} p - The splat position.
     * @param {Quat} r - The splat rotation.
     * @param {Vec3} s - The splat scale.
     */
    static calcSplatAabb(result: BoundingBox, p: Vec3, r: Quat, s: Vec3): void;
    /**
     * @param {PlyElement[]} elements - The PLY elements making up the splat data.
     * @param {string[]} [comments] - File header comments.
     */
    constructor(elements: PlyElement[], comments?: string[]);
    /** @type {PlyElement[]} */
    elements: PlyElement[];
    /**
     * The number of splats.
     *
     * @type {number}
     */
    numSplats: number;
    /**
     * File header comments.
     *
     * @type { string[] }
     */
    comments: string[];
    /**
     * Returns the storage array of the named property.
     *
     * @param {string} name - The property name, e.g. 'x' or 'f_dc_0'.
     * @param {string} [elementName] - The name of the element to search. NOTE(review): presumably
     * defaults to 'vertex' — confirm against implementation.
     * @returns {DataType} The property's storage array.
     */
    getProp(name: any, elementName?: string): DataType;
    /**
     * Returns the element with the given name.
     *
     * @param {string} name - The element name, e.g. 'vertex'.
     * @returns {PlyElement} The matching element.
     */
    getElement(name: any): PlyElement;
    /**
     * Adds a property with the supplied storage array. NOTE(review): presumably added to the
     * 'vertex' element — confirm against implementation.
     *
     * @param {string} name - The property name.
     * @param {DataType} storage - The property's storage array.
     */
    addProp(name: any, storage: any): void;
    /**
     * Create an iterator for accessing splat data.
     *
     * @param {Vec3|null} [p] - The vector to receive splat position.
     * @param {Quat|null} [r] - The quaternion to receive splat rotation.
     * @param {Vec3|null} [s] - The vector to receive splat scale.
     * @param {Vec4|null} [c] - The vector to receive splat color.
     * @returns {SplatIterator} - The iterator.
     */
    createIter(p?: Vec3 | null, r?: Quat | null, s?: Vec3 | null, c?: Vec4 | null): SplatIterator;
    /**
     * Calculate pessimistic scene aabb taking into account splat size. This is faster than
     * calculating an exact aabb.
     *
     * @param {BoundingBox} result - Where to store the resulting bounding box.
     * @param {(i: number) => boolean} [pred] - Optional predicate function to filter splats.
     * @returns {boolean} - Whether the calculation was successful.
     */
    calcAabb(result: BoundingBox, pred?: (i: number) => boolean): boolean;
    /**
     * Calculate exact scene aabb taking into account splat size.
     *
     * @param {BoundingBox} result - Where to store the resulting bounding box.
     * @param {(i: number) => boolean} [pred] - Optional predicate function to filter splats.
     * @returns {boolean} - Whether the calculation was successful.
     */
    calcAabbExact(result: BoundingBox, pred?: (i: number) => boolean): boolean;
    /**
     * Returns a new Float32Array of centers (x, y, z per splat).
     *
     * @returns {Float32Array} Centers buffer.
     */
    getCenters(): Float32Array;
    /**
     * Calculates the focal point of the splat data.
     *
     * @param {Vec3} result - The result.
     * @param {Function} [pred] - Predicate given index for skipping.
     */
    calcFocalPoint(result: Vec3, pred?: Function): void;
    /**
     * Renders wireframe bounds for debugging.
     *
     * @param {Scene} scene - The application's scene.
     * @param {Mat4} worldMat - The world matrix.
     */
    renderWireframeBounds(scene: Scene, worldMat: Mat4): void;
    /**
     * Gets whether the splat data is stored in a compressed form.
     *
     * @type {boolean}
     */
    get isCompressed(): boolean;
    /**
     * Gets the number of spherical harmonics bands present in the data.
     *
     * @type {number}
     */
    get shBands(): any;
    /**
     * Calculates a Morton order over the splats. NOTE(review): presumably intended to be passed
     * to {@link GSplatData#reorder} for spatial-locality sorting — confirm.
     *
     * @returns {Uint32Array} The calculated splat order.
     */
    calcMortonOrder(): Uint32Array<ArrayBuffer>;
    /**
     * Reorders the splat data according to the supplied order.
     *
     * @param {Uint32Array} order - Indices specifying the new splat order.
     */
    reorder(order: any): void;
    /**
     * NOTE(review): appears to reorder the underlying data storage — confirm exact semantics
     * against implementation.
     */
    reorderData(): void;
}

/**
 * Iterator for reading uncompressed gsplat data one splat at a time. Created via
 * {@link GSplatData#createIter}, which documents the output containers.
 */
declare class SplatIterator {
    /**
     * @param {GSplatData} gsplatData - The splat data to iterate over.
     * @param {Vec3|null} p - Container receiving splat position, or null to skip.
     * @param {Quat|null} r - Container receiving splat rotation, or null to skip.
     * @param {Vec3|null} s - Container receiving splat scale, or null to skip.
     * @param {Vec4|null} c - Container receiving splat color, or null to skip.
     */
    constructor(gsplatData: any, p: any, r: any, s: any, c: any);
    /**
     * Reads the splat at index `i` into the containers supplied to the constructor.
     *
     * @param {number} i - The splat index.
     */
    read: (i: any) => void;
}

declare class GSplatCompressedData {
    numSplats: any;
    /**
     * File header comments.
     *
     * @type { string[] }
     */
    comments: string[];
    /**
     * Contains either 12 or 18 floats per chunk:
     *      min_x, min_y, min_z,
     *      max_x, max_y, max_z,
     *      min_scale_x, min_scale_y, min_scale_z,
     *      max_scale_x, max_scale_y, max_scale_z
     *      min_r, min_g, min_b,
     *      max_r, max_g, max_b
     * @type {Float32Array}
     */
    chunkData: Float32Array;
    /**
     * Contains 4 uint32 per vertex:
     *      packed_position
     *      packed_rotation
     *      packed_scale
     *      packed_color
     * @type {Uint32Array}
     */
    vertexData: Uint32Array;
    /**
     * Contains optional quantized spherical harmonic data.
     * @type {Uint8Array}
     */
    shData0: Uint8Array;
    /**
     * Contains optional quantized spherical harmonic data.
     * @type {Uint8Array}
     */
    shData1: Uint8Array;
    /**
     * Contains optional quantized spherical harmonic data.
     * @type {Uint8Array}
     */
    shData2: Uint8Array;
    /**
     * Contains the number of bands of spherical harmonics data.
     * @type {number}
     */
    shBands: number;
    /**
     * Create an iterator for accessing splat data
     *
     * @param {Vec3|null} [p] - the vector to receive splat position
     * @param {Quat|null} [r] - the quaternion to receive splat rotation
     * @param {Vec3|null} [s] - the vector to receive splat scale
     * @param {Vec4|null} [c] - the vector to receive splat color
     * @param {Float32Array|null} [sh] - the array to receive spherical harmonics data
     * @returns {SplatCompressedIterator} - The iterator
     */
    createIter(p?: Vec3 | null, r?: Quat | null, s?: Vec3 | null, c?: Vec4 | null, sh?: Float32Array | null): SplatCompressedIterator;
    /**
     * Calculate pessimistic scene aabb taking into account splat size. This is faster than
     * calculating an exact aabb.
     *
     * @param {BoundingBox} result - Where to store the resulting bounding box.
     * @returns {boolean} - Whether the calculation was successful.
     */
    calcAabb(result: BoundingBox): boolean;
    /**
     * Returns a new Float32Array of centers (x, y, z per splat).
     * @returns {Float32Array} Centers buffer
     */
    getCenters(): Float32Array;
    getChunks(result: any): void;
    /**
     * @param {Vec3} result - The result.
     */
    calcFocalPoint(result: Vec3): void;
    get isCompressed(): boolean;
    get numChunks(): number;
    get chunkSize(): number;
    decompress(): GSplatData;
}

/**
 * Iterator for reading compressed gsplat data one splat at a time. Created via
 * {@link GSplatCompressedData#createIter}, which documents the output containers.
 */
declare class SplatCompressedIterator {
    /**
     * @param {GSplatCompressedData} gsplatData - The compressed splat data to iterate over.
     * @param {Vec3|null} p - Container receiving splat position, or null to skip.
     * @param {Quat|null} r - Container receiving splat rotation, or null to skip.
     * @param {Vec3|null} s - Container receiving splat scale, or null to skip.
     * @param {Vec4|null} c - Container receiving splat color, or null to skip.
     * @param {Float32Array|null} sh - Array receiving spherical harmonics data, or null to skip.
     */
    constructor(gsplatData: any, p: any, r: any, s: any, c: any, sh: any);
    /**
     * Reads the splat at index `i` into the containers supplied to the constructor.
     *
     * @param {number} i - The splat index.
     */
    read: (i: any) => void;
}

/**
 * Gaussian splat data in SOG (compressed, texture-based) format, with GPU packing and
 * decompression utilities.
 */
declare class GSplatSogData {
    /**
     * Derives the number of spherical harmonics bands from the width of the SH centroids
     * texture. NOTE(review): presumed from naming — confirm against implementation.
     *
     * @param {number} centroidsWidth - The width of the centroids texture.
     * @returns {number} The number of SH bands.
     */
    static calcBands(centroidsWidth: any): any;
    // Raw SOG source data. Field names mirror the SOG format fields; exact semantics are
    // presumed from naming (NOTE(review): confirm against implementation).
    meta: any;
    numSplats: any;
    means_l: any;
    means_u: any;
    quats: any;
    scales: any;
    sh0: any;
    sh_centroids: any;
    sh_labels: any;
    // GPU-packed data — see packGpuMemory/packShMemory.
    packedTexture: any;
    packedSh0: any;
    packedShN: any;
    /**
     * URL of the asset, used for debugging texture names.
     *
     * @type {string}
     */
    url: string;
    /**
     * Whether to use minimal memory mode (releases source textures after packing).
     *
     * @type {boolean}
     */
    minimalMemory: boolean;
    /**
     * Event handle for devicerestored listener (when minimalMemory is false).
     *
     * @type {EventHandle|null}
     */
    deviceRestoredEvent: EventHandle | null;
    /**
     * Cached centers array (x, y, z per splat), length = numSplats * 3.
     *
     * @type {Float32Array | null}
     * @private
     */
    private _centers;
    // True once this instance has been destroyed (NOTE(review): presumed — confirm).
    destroyed: boolean;
    /**
     * Number of spherical harmonics bands.
     *
     * @type {number}
     */
    shBands: number;
    /** Releases GPU-side resources owned by this instance. */
    _destroyGpuResources(): void;
    /** Destroys this instance and releases its resources. */
    destroy(): void;
    /**
     * Create an iterator for accessing splat data. NOTE(review): parameter semantics appear to
     * mirror {@link GSplatCompressedData#createIter} (position/rotation/scale/color/SH) — confirm.
     *
     * @param {Vec3|null} p - Container receiving splat position, or null to skip.
     * @param {Quat|null} r - Container receiving splat rotation, or null to skip.
     * @param {Vec3|null} s - Container receiving splat scale, or null to skip.
     * @param {Vec4|null} c - Container receiving splat color, or null to skip.
     * @param {Float32Array|null} sh - Array receiving spherical harmonics data, or null to skip.
     * @returns {GSplatSogIterator} The iterator.
     */
    createIter(p: any, r: any, s: any, c: any, sh: any): GSplatSogIterator;
    /**
     * Calculates the scene aabb.
     *
     * @param {BoundingBox} result - Where to store the resulting bounding box.
     */
    calcAabb(result: any): void;
    /**
     * Returns the centers array (x, y, z per splat). Cached in {@link GSplatSogData#_centers}.
     *
     * @returns {Float32Array} Centers buffer.
     */
    getCenters(): Float32Array<ArrayBufferLike>;
    /**
     * Calculates the focal point of the splat data.
     *
     * @param {Vec3} result - The result.
     * @param {Function} [pred] - Predicate given index for skipping.
     */
    calcFocalPoint(result: any, pred: any): void;
    /**
     * Gets whether this data is in SOG format.
     *
     * @type {boolean}
     */
    get isSog(): boolean;
    /**
     * Decompresses this SOG data into uncompressed form.
     *
     * @returns {Promise<GSplatData>} The decompressed splat data.
     */
    decompress(): Promise<GSplatData>;
    /** Generates the splat centers. NOTE(review): presumably fills the _centers cache — confirm. */
    generateCenters(): Promise<void>;
    /** Packs splat data into GPU memory. NOTE(review): presumed from naming — confirm. */
    packGpuMemory(): void;
    /** Packs spherical harmonics data into GPU memory. NOTE(review): presumed from naming — confirm. */
    packShMemory(): void;
    /** Prepares data for GPU use. NOTE(review): presumed from naming — confirm. */
    prepareGpuData(): Promise<void>;
    /** NOTE(review): appears to reorder the underlying data — confirm exact semantics. */
    reorderData(): Promise<void>;
}

/**
 * Iterator for reading SOG-format gsplat data one splat at a time. Created via
 * {@link GSplatSogData#createIter}. NOTE(review): parameter semantics appear to mirror
 * {@link SplatCompressedIterator} (position/rotation/scale/color/SH) — confirm.
 */
declare class GSplatSogIterator {
    /**
     * @param {GSplatSogData} data - The SOG splat data to iterate over.
     * @param {Vec3|null} p - Container receiving splat position, or null to skip.
     * @param {Quat|null} r - Container receiving splat rotation, or null to skip.
     * @param {Vec3|null} s - Container receiving splat scale, or null to skip.
     * @param {Vec4|null} c - Container receiving splat color, or null to skip.
     * @param {Float32Array|null} sh - Array receiving spherical harmonics data, or null to skip.
     */
    constructor(data: any, p: any, r: any, s: any, c: any, sh: any);
    /**
     * Reads the splat at index `i` into the containers supplied to the constructor.
     *
     * @param {number} i - The splat index.
     */
    read: (i: any) => void;
}

/**
 * @import { GraphicsDevice } from '../../platform/graphics/graphics-device.js'
 * @import { GSplatFormat } from './gsplat-format.js'
 */
/**
 * Manages textures for a GSplatFormat, creating them from stream definitions.
 *
 * @ignore
 */
declare class GSplatStreams {
    /**
     * Creates a new GSplatStreams instance.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {boolean} [isInstance] - Whether this manages instance-level textures (true) or
     * resource-level textures (false). Defaults to false.
     */
    constructor(device: GraphicsDevice, isInstance?: boolean);
    /**
     * The graphics device.
     *
     * @type {GraphicsDevice}
     */
    device: GraphicsDevice;
    /**
     * The format defining the streams.
     *
     * @type {GSplatFormat|null}
     */
    format: GSplatFormat | null;
    /**
     * Map of texture names to Texture instances.
     *
     * @type {Map<string, Texture>}
     */
    textures: Map<string, Texture>;
    /**
     * Texture dimensions (width and height).
     *
     * @type {Vec2}
     * @private
     */
    private _textureDimensions;
    /**
     * Whether this manages instance-level textures (true) or resource-level textures (false).
     *
     * @type {boolean}
     * @private
     */
    private _isInstance;
    /**
     * The format version at last sync.
     *
     * @type {number}
     * @private
     */
    private _formatVersion;
    /**
     * Gets the texture dimensions (width and height).
     *
     * @type {Vec2}
     */
    get textureDimensions(): Vec2;
    /**
     * Destroys all managed textures.
     */
    destroy(): void;
    /**
     * Initialize with format and create textures for all streams.
     *
     * @param {GSplatFormat} format - The format defining streams.
     * @param {number} numElements - Number of elements (splats) to size textures for.
     */
    init(format: GSplatFormat, numElements: number): void;
    /**
     * Gets a texture by name.
     *
     * @param {string} name - Texture name.
     * @returns {Texture|undefined} The texture, or undefined if not found.
     */
    getTexture(name: string): Texture | undefined;
    /**
     * Gets all textures in format order (streams followed by extraStreams).
     *
     * @returns {Texture[]} Array of textures in format order.
     * @ignore
     */
    getTexturesInOrder(): Texture[];
    /**
     * Synchronizes textures with the format's stream definitions.
     * Creates new textures for added streams. Textures are never destroyed here -
     * streams can only be added, not removed (see GSplatFormat._extraStreams for rationale).
     *
     * @param {GSplatFormat|null} format - The format to sync with, or null to skip.
     * @ignore
     */
    syncWithFormat(format: GSplatFormat | null): void;
    /**
     * Resizes all managed textures to the specified dimensions. This assumes all textures
     * have uniform dimensions (e.g. work buffer textures). Do not use on resources with
     * mixed-size textures (e.g. SOG with differently-sized SH textures).
     *
     * @param {number} width - The new width.
     * @param {number} height - The new height.
     */
    resize(width: number, height: number): void;
    /**
     * Creates a new texture with the specified parameters.
     *
     * @param {string} name - The name of the texture to be created.
     * @param {number} format - The pixel format of the texture.
     * @param {Vec2} size - The size of the texture in a Vec2 object, containing width (x) and height (y).
     * @param {Uint8Array|Uint16Array|Uint32Array|Float32Array} [data] - The initial data to fill the texture with.
     * @returns {Texture} The created texture instance.
     */
    createTexture(name: string, format: number, size: Vec2, data?: Uint8Array | Uint16Array | Uint32Array | Float32Array): Texture;
}

/**
 * @import { GraphicsDevice } from './graphics-device.js'
 * @import { StorageBuffer } from './storage-buffer.js'
 * @import { Texture } from './texture.js'
 * @import { EventHandle } from '../../core/event-handle.js'
 */
/**
 * Manages non-blocking uploads of data to GPU resources (textures or storage buffers).
 * Internally pools staging resources (PBOs on WebGL, staging buffers on WebGPU) to avoid blocking
 * when the GPU is busy with previous uploads.
 *
 * Important: Create one UploadStream per target resource.
 *
 * @category Graphics
 * @ignore
 */
declare class UploadStream {
    /**
     * Create a new UploadStream instance.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {boolean} [useSingleBuffer] - If true, uses simple direct uploads (single texture on
     * WebGL, direct write on WebGPU). If false (default), uses optimized multi-buffer strategy (PBOs
     * with orphaning on WebGL, staging buffers on WebGPU) for potentially non-blocking uploads.
     */
    constructor(device: GraphicsDevice, useSingleBuffer?: boolean);
    /**
     * Event handle for device lost event.
     *
     * @type {EventHandle|null}
     * @protected
     */
    protected _deviceLostEvent: EventHandle | null;
    /**
     * The graphics device.
     *
     * @type {GraphicsDevice}
     */
    device: GraphicsDevice;
    /**
     * True when the simple single-buffer upload path is used (see constructor).
     *
     * @type {boolean}
     */
    useSingleBuffer: boolean;
    /**
     * Platform-specific implementation (WebGL PBO path or WebGPU staging-buffer path).
     *
     * @type {object}
     */
    impl: any;
    /**
     * Destroy the upload stream and clean up all pooled resources.
     */
    destroy(): void;
    /**
     * Upload data to a texture (WebGL path) or storage buffer (WebGPU path).
     * For WebGL textures, both offset and size must be multiples of the texture width (aligned to
     * full rows).
     * For WebGPU storage buffers, both offset and size byte values must be multiples of 4.
     *
     * @param {Uint8Array|Uint32Array|Float32Array} data - The data to upload. Must contain at least
     * `size` elements.
     * @param {Texture|StorageBuffer} target - The target resource (texture for WebGL, storage
     * buffer for WebGPU).
     * @param {number} [offset] - The element offset in the target where upload starts. Defaults to 0.
     * For WebGL textures, must be a multiple of texture width. For WebGPU, the byte offset must be
     * a multiple of 4.
     * @param {number} [size] - The number of elements to upload. Defaults to data.length.
     * For WebGL textures, must be a multiple of texture width. For WebGPU, the byte size must be
     * a multiple of 4.
     */
    upload(data: Uint8Array | Uint32Array | Float32Array, target: Texture | StorageBuffer, offset?: number, size?: number): void;
    /**
     * Handles device lost event. Override in platform implementations.
     *
     * @private
     */
    private _onDeviceLost;
}

/**
 * @import { GraphicsDevice } from '../graphics/graphics-device.js'
 */
/**
 * A frame pass represents a node in the frame graph. It encapsulates a unit of work that
 * executes during frame rendering. Subclasses include {@link RenderPass} for GPU render passes
 * with render targets, and non-rendering passes for compute dispatches or other tasks.
 *
 * @ignore
 */
declare class FramePass {
    /**
     * Creates an instance of the FramePass.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device.
     */
    constructor(graphicsDevice: GraphicsDevice);
    /**
     * Internal storage for the pass name.
     *
     * @type {string}
     */
    _name: string;
    /**
     * The graphics device.
     *
     * @type {GraphicsDevice}
     */
    device: GraphicsDevice;
    /**
     * True if the frame pass is enabled.
     *
     * @type {boolean}
     * @private
     */
    private _enabled;
    /**
     * True if the render pass start is skipped. This means the render pass is merged into the
     * previous one. Used by FrameGraph.compile() for pass merging.
     *
     * @type {boolean}
     * @private
     */
    private _skipStart;
    /**
     * True if the render pass end is skipped. This means the following render pass is merged into
     * this one. Used by FrameGraph.compile() for pass merging.
     *
     * @type {boolean}
     * @private
     */
    private _skipEnd;
    /**
     * True if the frame pass is enabled and execute function will be called. Note that before and
     * after functions are called regardless of this flag.
     */
    executeEnabled: boolean;
    /**
     * If true, this pass might use dynamically rendered cubemaps. Defaults to false for non-render
     * passes (RenderPass overrides to true).
     *
     * @type {boolean}
     */
    requiresCubemaps: boolean;
    /**
     * Frame passes which need to be executed before this pass.
     *
     * @type {FramePass[]}
     */
    beforePasses: FramePass[];
    /**
     * Frame passes which need to be executed after this pass.
     *
     * @type {FramePass[]}
     */
    afterPasses: FramePass[];
    /**
     * The name of the frame pass, used for debugging.
     *
     * @type {string}
     */
    set name(value: string);
    get name(): string;
    /**
     * Enables or disables the frame pass. NOTE(review): transitions presumably trigger
     * {@link FramePass#onEnable} / {@link FramePass#onDisable} — confirm in the implementation.
     *
     * @type {boolean}
     */
    set enabled(value: boolean);
    get enabled(): boolean;
    /** Called when the pass becomes enabled. Override to react to the transition. */
    onEnable(): void;
    /** Called when the pass becomes disabled. Override to react to the transition. */
    onDisable(): void;
    /** Called once per frame to update the pass state before rendering. */
    frameUpdate(): void;
    /** Called before {@link FramePass#execute}, regardless of the executeEnabled flag. */
    before(): void;
    /** Executes the pass's main work. Only called when the pass is enabled and executeEnabled is true. */
    execute(): void;
    /** Called after {@link FramePass#execute}, regardless of the executeEnabled flag. */
    after(): void;
    /** Releases resources held by the pass. */
    destroy(): void;
    /** Runs the pass: the before, execute and after stages. */
    render(): void;
    /**
     * Logs debug information about this pass (see the RenderPass trace channels).
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {number} [index] - Index of the pass within the frame.
     */
    log(device: any, index?: number): void;
}

/**
 * A render pass represents a node in the frame graph that renders to a render target using a GPU
 * render pass. It extends {@link FramePass} with render target management, color/depth/stencil
 * attachment operations, and GPU render pass lifecycle (start/end).
 *
 * @ignore
 */
declare class RenderPass extends FramePass {
    /**
     * The render target for this render pass:
     *
     * - `undefined`: render pass does not render to any render target
     * - `null`: render pass renders to the backbuffer
     * - Otherwise, renders to the provided RT.
     *
     * @type {RenderTarget|null|undefined}
     */
    renderTarget: RenderTarget | null | undefined;
    /**
     * The options specified when the render pass was initialized (see {@link RenderPass#init}).
     */
    _options: any;
    /**
     * Number of samples. 0 if no render target, otherwise number of samples from the render target,
     * or the main framebuffer if render target is null.
     *
     * @type {number}
     */
    samples: number;
    /**
     * Array of color attachment operations. The first element corresponds to the color attachment
     * 0, and so on.
     *
     * @type {Array<ColorAttachmentOps>}
     */
    colorArrayOps: Array<ColorAttachmentOps>;
    /**
     * Color attachment operations for the first color attachment.
     *
     * @type {ColorAttachmentOps}
     */
    get colorOps(): ColorAttachmentOps;
    /** @type {DepthStencilAttachmentOps} */
    depthStencilOps: DepthStencilAttachmentOps;
    /**
     * True if the render pass uses the full viewport / scissor for rendering into the render target.
     *
     * @type {boolean}
     */
    fullSizeClearRect: boolean;
    /**
     * Scale factor for the render target width, used by the automatic resize operation
     * (see the scaleX option of {@link RenderPass#init}).
     *
     * @type {number}
     */
    set scaleX(value: any);
    get scaleX(): any;
    /**
     * Scale factor for the render target height, used by the automatic resize operation
     * (see the scaleY option of {@link RenderPass#init}).
     *
     * @type {number}
     */
    set scaleY(value: any);
    get scaleY(): any;
    /**
     * The options for this render pass (see {@link RenderPass#init}).
     *
     * @type {object}
     */
    set options(value: any);
    get options(): any;
    /**
     * @param {RenderTarget|null} [renderTarget] - The render target to render into (output). This
     * function should be called only for render passes which use render target, or passes which
     * render directly into the default framebuffer, in which case a null or undefined render
     * target is expected.
     * @param {object} [options] - Object for passing optional arguments.
     * @param {Texture} [options.resizeSource] - A texture to use as a source for the automatic
     * render target resize operation. If not provided, no automatic resizing takes place.
     * @param {number} [options.scaleX] - The scale factor for the render target width. Defaults to 1.
     * @param {number} [options.scaleY] - The scale factor for the render target height. Defaults to 1.
     */
    init(renderTarget?: RenderTarget | null, options?: {
        resizeSource?: Texture;
        scaleX?: number;
        scaleY?: number;
    }): void;
    /** Allocates attachment resources for the render pass. NOTE(review): inferred from name — confirm. */
    allocateAttachments(): void;
    /** Called after {@link RenderPass#init}; allows subclasses to perform additional setup. */
    postInit(): void;
    /**
     * Mark render pass as clearing the full color buffer.
     *
     * @param {Color|undefined} color - The color to clear to, or undefined to preserve the existing
     * content.
     */
    setClearColor(color: Color | undefined): void;
    /**
     * Mark render pass as clearing the full depth buffer.
     *
     * @param {number|undefined} depthValue - The depth value to clear to, or undefined to preserve
     * the existing content.
     */
    setClearDepth(depthValue: number | undefined): void;
    /**
     * Mark render pass as clearing the full stencil buffer.
     *
     * @param {number|undefined} stencilValue - The stencil value to clear to, or undefined to
     * preserve the existing content.
     */
    setClearStencil(stencilValue: number | undefined): void;
}

/**
 * @import { RenderTarget } from '../graphics/render-target.js'
 * @import { Texture } from './texture.js'
 */
declare class ColorAttachmentOps {
    /**
     * A color used to clear the color attachment when the clear is enabled, specified in sRGB space.
     *
     * @type {Color}
     */
    clearValue: Color;
    /**
     * A color used to clear the color attachment when the clear is enabled, specified in linear
     * space.
     *
     * @type {Color}
     */
    clearValueLinear: Color;
    /**
     * True if the attachment should be cleared before rendering, false to preserve
     * the existing content.
     *
     * @type {boolean}
     */
    clear: boolean;
    /**
     * True if the attachment needs to be stored after the render pass. False if it can be
     * discarded. Note: This relates to the surface that is getting rendered to, and can be either
     * single or multi-sampled. Further, if a multi-sampled surface is used, the resolve flag
     * further specifies if this gets resolved to a single-sampled surface. This behavior matches
     * the WebGPU specification.
     *
     * @type {boolean}
     */
    store: boolean;
    /**
     * True if the attachment needs to be resolved.
     *
     * @type {boolean}
     */
    resolve: boolean;
    /**
     * True if the attachment needs to have mipmaps generated.
     *
     * @type {boolean}
     */
    genMipmaps: boolean;
}
declare class DepthStencilAttachmentOps {
    /**
     * A depth value used to clear the depth attachment when the clear is enabled.
     *
     * @type {number}
     */
    clearDepthValue: number;
    /**
     * A stencil value used to clear the stencil attachment when the clear is enabled.
     *
     * @type {number}
     */
    clearStencilValue: number;
    /**
     * True if the depth attachment should be cleared before rendering, false to preserve
     * the existing content.
     *
     * @type {boolean}
     */
    clearDepth: boolean;
    /**
     * True if the stencil attachment should be cleared before rendering, false to preserve
     * the existing content.
     *
     * @type {boolean}
     */
    clearStencil: boolean;
    /**
     * True if the depth attachment needs to be stored after the render pass. False
     * if it can be discarded.
     *
     * @type {boolean}
     */
    storeDepth: boolean;
    /**
     * True if the depth attachment needs to be resolved.
     *
     * @type {boolean}
     */
    resolveDepth: boolean;
    /**
     * True if the stencil attachment needs to be stored after the render pass. False
     * if it can be discarded.
     *
     * @type {boolean}
     */
    storeStencil: boolean;
}

/**
 * Describes one LOD entry of an octree node: the file that stores the data and the range of
 * items within that file.
 */
type GSplatOctreeNodeLod = {
    /**
     * - The file path
     */
    file: string;
    /**
     * - The index of the file in the octree's files array
     */
    fileIndex: number;
    /**
     * - The offset of this node's data in the file
     */
    offset: number;
    /**
     * - The count of items
     */
    count: number;
};
/**
 * A node of a gsplat octree, storing per-LOD file references and precomputed bounds used for
 * culling.
 */
declare class GSplatOctreeNode {
    /**
     * @param {GSplatOctreeNodeLod[]} lods - The LOD data for this node
     * @param {Object} [boundData] - The bounding box data with min and max arrays
     */
    constructor(lods: GSplatOctreeNodeLod[], boundData?: any);
    /**
     * The LOD entries for this node, one per LOD level.
     *
     * @type {GSplatOctreeNodeLod[]}
     */
    lods: GSplatOctreeNodeLod[];
    /**
     * Axis-aligned bounding box of the node.
     *
     * @type {BoundingBox}
     */
    bounds: BoundingBox;
    /**
     * Precomputed bounding sphere derived from the AABB. Stored as (center.x, center.y,
     * center.z, radius) for efficient GPU frustum culling.
     *
     * @type {Vec4}
     */
    boundingSphere: Vec4;
}

/**
 * Base class for GSplat asset loaders. This provides the interface that all
 * GSplat asset loaders must implement. Concrete loaders override the abstract
 * methods declared here.
 *
 * @category Asset
 * @ignore
 */
declare class GSplatAssetLoaderBase {
    /**
     * Initiates loading of a gsplat asset. This is a fire-and-forget operation that starts
     * the loading process.
     *
     * @param {string} url - The URL of the gsplat file to load.
     * @abstract
     */
    load(url: string): void;
    /**
     * Unloads an asset that was previously loaded by this loader.
     *
     * @param {string} url - The URL of the asset to unload.
     * @abstract
     */
    unload(url: string): void;
    /**
     * Gets the resource for a given URL if it has been loaded by this loader.
     *
     * @param {string} url - The URL of the asset to retrieve the resource from.
     * @returns {object|undefined} The loaded resource if found and loaded, undefined otherwise.
     * @abstract
     */
    getResource(url: string): object | undefined;
    /**
     * Destroys the loader and cleans up any resources it holds.
     *
     * @abstract
     */
    destroy(): void;
}

/**
 * @import { GSplatResource } from '../gsplat/gsplat-resource.js'
 * @import { GSplatOctreeNodeLod } from './gsplat-octree-node.js'
 * @import { GSplatAssetLoaderBase } from './gsplat-asset-loader-base.js'
 */
/**
 * Runtime representation of a gsplat octree container: holds the parsed node hierarchy and the
 * list of per-LOD files, and manages loading/unloading of individual file resources using
 * reference counting with an optional cooldown before unload.
 */
declare class GSplatOctree {
    /**
     * @param {string} assetFileUrl - The file URL of the container asset.
     * @param {Object} data - The parsed JSON data containing info, filenames and tree.
     */
    constructor(assetFileUrl: string, data: any);
    /**
     * The octree nodes extracted from the tree data.
     *
     * @type {GSplatOctreeNode[]}
     */
    nodes: GSplatOctreeNode[];
    /**
     * The files referenced by the octree, each with its URL and LOD level.
     *
     * @type {{ url: string, lodLevel: number }[]}
     */
    files: {
        url: string;
        lodLevel: number;
    }[];
    /**
     * The number of LOD levels in the octree.
     *
     * @type {number}
     */
    lodLevels: number;
    /**
     * The file URL of the container asset, used as the base for resolving relative URLs.
     *
     * @type {string}
     */
    assetFileUrl: string;
    /**
     * Resources of individual files, identified by their file index.
     *
     * @type {Map<number, GSplatResource>}
     */
    fileResources: Map<number, GSplatResource>;
    /**
     * Reference counts for each file by file index. Index is fileIndex, value is reference count.
     * When a file reaches zero references, it is scheduled for cooldown and unload.
     *
     * @type {Int32Array}
     */
    fileRefCounts: Int32Array;
    /**
     * Cooldown timers for files that reached zero references. Key is fileIndex, value is ticks
     * remaining.
     *
     * @type {Map<number, number>}
     */
    cooldowns: Map<number, number>;
    /**
     * Optional environment asset URL.
     *
     * @type {string|null}
     */
    environmentUrl: string | null;
    /**
     * Loaded environment resource.
     *
     * @type {GSplatResource|null}
     */
    environmentResource: GSplatResource | null;
    /**
     * Reference count for environment usage.
     *
     * @type {number}
     */
    environmentRefCount: number;
    /**
     * Asset loader used for loading/unloading resources.
     *
     * @type {GSplatAssetLoaderBase|null}
     */
    assetLoader: GSplatAssetLoaderBase | null;
    /**
     * Whether this octree has been destroyed.
     *
     * @type {boolean}
     */
    destroyed: boolean;
    /**
     * Number of update ticks before unloading unused file resources. Set from GSplatParams.
     *
     * @type {number}
     * @private
     */
    private cooldownTicks;
    /**
     * Destroys the octree and clears internal state. Does not force-unload resources as they may
     * still be referenced by managers. Resources will be cleaned up when their reference counts
     * reach zero through the normal cleanup mechanisms.
     */
    destroy(): void;
    /**
     * Trace out per-LOD counts of currently loaded file resources.
     * @private
     */
    private _traceLodCounts;
    /**
     * Recursively extracts leaf nodes (nodes with 'lods' property) from the hierarchical tree.
     *
     * @param {Object} node - The current tree node to process.
     * @param {Array} leafNodes - Array to collect leaf nodes.
     * @private
     */
    private _extractLeafNodes;
    /**
     * Gets the loaded resource for a file by index.
     *
     * @param {number} fileIndex - Index of the file in `files` array.
     * @returns {GSplatResource} The loaded resource. NOTE(review): presumably undefined when the
     * file is not loaded — confirm against the implementation.
     */
    getFileResource(fileIndex: any): GSplatResource;
    /**
     * Increments reference count for a file by index and cancels any pending cooldown.
     *
     * @param {number} fileIndex - Index of the file in `files` array.
     */
    incRefCount(fileIndex: number): void;
    /**
     * Decrements reference count for a file by index. When it reaches zero, either unload
     * immediately (if cooldownTicks is 0) or schedule for cooldown.
     *
     * @param {number} fileIndex - Index of the file in `files` array.
     * @param {number} cooldownTicks - Number of update ticks before unloading when unused. If 0,
     * unload immediately.
     */
    decRefCount(fileIndex: number, cooldownTicks: number): void;
    /**
     * Unloads a resource for a file index if currently loaded.
     *
     * @param {number} fileIndex - Index of the file in `files` array.
     */
    unloadResource(fileIndex: number): void;
    /**
     * Advances cooldowns for zero-ref files and unloads those whose timers expired.
     *
     * @param {number} cooldownTicks - Number of ticks for new cooldowns, synced from GSplatParams.
     */
    updateCooldownTick(cooldownTicks: number): void;
    /**
     * Ensures a file resource is loaded and available. This function:
     * - Starts loading if not already started
     * - Checks if loading completed and stores the resource if available
     *
     * @param {number} fileIndex - The index of the file in the `files` array.
     */
    ensureFileResource(fileIndex: number): void;
    /**
     * Increments reference count for environment.
     */
    incEnvironmentRefCount(): void;
    /**
     * Decrements reference count for environment. When it reaches zero, immediately unload.
     */
    decEnvironmentRefCount(): void;
    /**
     * Ensures environment resource is loaded and available.
     */
    ensureEnvironmentResource(): void;
    /**
     * Unloads environment resource if currently loaded.
     */
    unloadEnvironmentResource(): void;
}

/**
 * Resource wrapping a {@link GSplatOctree} together with its overall bounds, created from a
 * parsed container asset.
 */
declare class GSplatOctreeResource {
    /**
     * @param {string} assetFileUrl - The file URL of the container asset.
     * @param {object} data - Parsed JSON data.
     * @param {object} assetLoader - Asset loader instance (framework-level object).
     */
    constructor(assetFileUrl: string, data: object, assetLoader: object);
    /**
     * Axis-aligned bounding box of the whole octree.
     *
     * @type {BoundingBox}
     */
    aabb: BoundingBox;
    /**
     * Version counter for centers array changes. Always 0 for octree resources (static).
     *
     * @type {number}
     * @ignore
     */
    centersVersion: number;
    /** @type {GSplatOctree|null} */
    octree: GSplatOctree | null;
    /**
     * Destroys the octree resource and cleans up all associated resources.
     */
    destroy(): void;
}

/**
 * @import { BoundingBox } from '../../core/shape/bounding-box.js'
 * @import { GraphicsDevice } from '../../platform/graphics/graphics-device.js'
 * @import { GraphNode } from '../graph-node.js'
 * @import { GSplatResource } from '../gsplat/gsplat-resource.js'
 * @import { GSplatResourceBase } from '../gsplat/gsplat-resource-base.js'
 * @import { GSplatOctreeResource } from './gsplat-octree.resource.js'
 * @import { ScopeId } from '../../platform/graphics/scope-id.js'
 * @import { Texture } from '../../platform/graphics/texture.js'
 * @import { Vec2 } from '../../core/math/vec2.js'
 */
/**
 * Class representing a placement of a gsplat resource.
 *
 * @ignore
 */
declare class GSplatPlacement {
    /**
     * Create a new GSplatPlacement.
     *
     * @param {GSplatResource|null} resource - The resource of the splat.
     * @param {GraphNode} node - The node that the gsplat is linked to.
     * @param {number} [lodIndex] - The LOD index for this placement.
     * @param {Map<string, {scopeId: ScopeId, data: *}>|null} [parameters] - Per-instance shader parameters.
     * @param {GSplatPlacement|null} [parentPlacement] - Parent placement for shader config delegation.
     * @param {number|null} [id] - Unique identifier for picking. If not provided, inherits from parentPlacement.
     */
    constructor(resource: GSplatResource | null, node: GraphNode, lodIndex?: number, parameters?: Map<string, {
        scopeId: ScopeId;
        data: any;
    }> | null, parentPlacement?: GSplatPlacement | null, id?: number | null);
    /**
     * The resource of the splat.
     *
     * @type {GSplatResource|GSplatOctreeResource|null}
     */
    resource: GSplatResource | GSplatOctreeResource | null;
    /**
     * The node that the gsplat is linked to.
     *
     * @type {GraphNode}
     */
    node: GraphNode;
    /**
     * Map of intervals for octree nodes using this placement.
     * Key is octree node index, value is Vec2 representing start and end index (inclusive).
     *
     * @type {Map<number, Vec2>}
     */
    intervals: Map<number, Vec2>;
    /**
     * Unique identifier for this placement. Used by the picking system and available
     * for custom shader effects.
     *
     * @type {number}
     */
    id: number;
    /**
     * Unique allocation identifier for persistent work buffer allocation tracking.
     *
     * @type {number}
     */
    allocId: number;
    /**
     * The LOD index for this placement.
     *
     * @type {number}
     */
    lodIndex: number;
    /**
     * Base distance for the first LOD transition (LOD 0 to LOD 1).
     *
     * @type {number}
     * @private
     */
    private _lodBaseDistance;
    /**
     * Geometric multiplier between successive LOD distance thresholds.
     * Distance for LOD level i is: lodBaseDistance * lodMultiplier^i.
     *
     * @type {number}
     * @private
     */
    private _lodMultiplier;
    /**
     * Base distance for the first LOD transition (LOD 0 to LOD 1).
     *
     * @type {number}
     */
    set lodBaseDistance(value: number);
    get lodBaseDistance(): number;
    /**
     * Flag indicating LOD parameters have changed and LOD needs re-evaluation.
     *
     * @type {boolean}
     */
    lodDirty: boolean;
    /**
     * Geometric multiplier between successive LOD distance thresholds.
     *
     * @type {number}
     */
    set lodMultiplier(value: number);
    get lodMultiplier(): number;
    /**
     * The axis-aligned bounding box for this placement, in local space.
     * Null means use resource.aabb as fallback.
     *
     * @type {BoundingBox|null}
     */
    _aabb: BoundingBox | null;
    /**
     * Per-instance shader parameters. Reference to the component's parameters Map.
     *
     * @type {Map<string, {scopeId: ScopeId, data: *}>|null}
     */
    parameters: Map<string, {
        scopeId: ScopeId;
        data: any;
    }> | null;
    /**
     * Optional streams for instance-level textures.
     *
     * @type {GSplatStreams|null}
     * @private
     */
    private _streams;
    /**
     * Flag indicating the splat needs to be re-rendered to work buffer.
     *
     * @type {boolean}
     */
    renderDirty: boolean;
    /**
     * Work buffer update mode.
     *
     * @type {number}
     */
    workBufferUpdate: number;
    /**
     * Last seen format version for auto-detecting format changes.
     *
     * @type {number}
     * @private
     */
    private _lastFormatVersion;
    /**
     * Custom work buffer modifier code for this placement (object with code and pre-computed hash).
     *
     * @type {{ code: string, hash: number }|null}
     * @private
     */
    private _workBufferModifier;
    /**
     * Parent placement. Used by octree file placements to inherit workBufferModifier and
     * parameters from the component's placement.
     *
     * @type {GSplatPlacement|null}
     * @ignore
     */
    parentPlacement: GSplatPlacement | null;
    /**
     * Destroys this placement and releases all resources.
     */
    destroy(): void;
    /**
     * Sets the work buffer modifier for this placement. Triggers work buffer re-render.
     * Must provide all three functions: modifySplatCenter, modifySplatRotationScale, modifySplatColor.
     *
     * @type {{ code: string, hash: number }|null}
     */
    set workBufferModifier(value: {
        code: string;
        hash: number;
    } | null);
    /**
     * Gets the work buffer modifier for this placement.
     * Delegates to parent placement if available (for octree file placements).
     *
     * @type {{ code: string, hash: number }|null}
     */
    get workBufferModifier(): {
        code: string;
        hash: number;
    } | null;
    /**
     * Returns and clears the render dirty flag. Also checks for format version changes
     * and handles render mode.
     *
     * @returns {boolean} True if the splat needed re-rendering.
     */
    consumeRenderDirty(): boolean;
    /**
     * Sets a custom AABB for this placement. Pass null to use resource.aabb as fallback.
     *
     * @param {BoundingBox|null} aabb - The bounding box to set, or null to clear.
     */
    set aabb(aabb: BoundingBox | null);
    /**
     * Gets the AABB for this placement. Returns custom AABB if set, otherwise resource.aabb.
     *
     * @returns {BoundingBox} The bounding box.
     */
    get aabb(): BoundingBox;
    /**
     * Computes the LOD distance threshold for a given level using the geometric progression.
     *
     * @param {number} level - The LOD level index.
     * @returns {number} The distance threshold for the given LOD level.
     */
    getLodDistance(level: number): number;
    /**
     * Gets an instance-level texture by name. Creates the streams container on first access
     * if the format has instance streams defined.
     *
     * @param {string} name - The name of the texture to get.
     * @param {GraphicsDevice} device - The graphics device (required for lazy initialization).
     * @returns {Texture|undefined} The texture, or undefined if not found.
     */
    getInstanceTexture(name: string, device: GraphicsDevice): Texture | undefined;
    /**
     * Gets the instance streams container, or null if not initialized.
     * Delegates to parent placement if available (for octree file placements).
     *
     * @type {GSplatStreams|null}
     * @ignore
     */
    get streams(): GSplatStreams | null;
    /**
     * Ensures instance streams container exists if format has instance streams.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @ignore
     */
    ensureInstanceStreams(device: GraphicsDevice): void;
}

declare class GSplatOctreeInstance {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {GSplatOctree} octree - The octree.
     * @param {GSplatPlacement} placement - The placement.
     */
    constructor(device: GraphicsDevice, octree: GSplatOctree, placement: GSplatPlacement);
    /** @type {GSplatOctree} */
    octree: GSplatOctree;
    /** @type {GSplatPlacement} */
    placement: GSplatPlacement;
    /** @type {Set<GSplatPlacement>} */
    activePlacements: Set<GSplatPlacement>;
    /** @type {boolean} */
    dirtyModifiedPlacements: boolean;
    /**
     * Set to true when placements are added or removed, signaling that the manager needs to
     * create a new world state and trigger a full work buffer rebuild.
     *
     * @type {boolean}
     */
    dirtyPlacementSetChanged: boolean;
    /** @type {GraphicsDevice} */
    device: GraphicsDevice;
    /**
     * Array of NodeInfo instances, one per octree node.
     * @type {NodeInfo[]}
     */
    nodeInfos: NodeInfo[];
    /**
     * Array of current placements per file. Index is fileIndex, value is GSplatPlacement or null.
     * Value null indicates file is not used / no placement.
     * @type {(GSplatPlacement|null)[]}
     */
    filePlacements: (GSplatPlacement | null)[];
    /**
     * Set of pending file loads (file indices).
     * @type {Set<number>}
     */
    pending: Set<number>;
    /**
     * Map of nodeIndex -> { oldFileIndex, newFileIndex } that needs to be decremented when the
     * new LOD resource loads. This ensures we decrement even if the node switches LOD again
     * before the new resource arrives.
     *
     * @type {Map<number, { oldFileIndex: number, newFileIndex: number }>}
     */
    pendingDecrements: Map<number, {
        oldFileIndex: number;
        newFileIndex: number;
    }>;
    /**
     * Files that became unused by this instance this update. Each entry represents a single decRef.
     *
     * @type {Set<number>}
     */
    removedCandidates: Set<number>;
    /**
     * Minimum allowed LOD index for this instance, clamped to valid octree bounds.
     *
     * @type {number}
     */
    rangeMin: number;
    /**
     * Maximum allowed LOD index for this instance, clamped to valid octree bounds.
     *
     * @type {number}
     */
    rangeMax: number;
    /**
     * Previous node position at which LOD was last updated. This is used to determine if LOD needs
     * to be updated as the octree splat moves.
     *
     * @type {Vec3}
     */
    previousPosition: Vec3;
    /**
     * Set when a resource has completed loading and LOD should be re-evaluated.
     *
     * @type {boolean}
     */
    needsLodUpdate: boolean;
    /**
     * Tracks prefetched file indices that are being loaded without active placements.
     * When any completes, we trigger LOD re-evaluation to allow promotion.
     *
     * @type {Set<number>}
     */
    prefetchPending: Set<number>;
    /**
     * Tracks invisible->visible pending adds per node: nodeIndex -> fileIndex.
     * Ensures only a single pending placement exists for a node while it's not yet displayed.
     * @type {Map<number, number>}
     */
    pendingVisibleAdds: Map<number, number>;
    /**
     * Returns the count of resources pending load or prefetch, including environment if loading.
     *
     * @type {number}
     */
    get pendingLoadCount(): number;
    /**
     * Environment placement.
     * @type {GSplatPlacement|null}
     */
    environmentPlacement: GSplatPlacement | null;
    /**
     * Event handle for device lost event.
     *
     * @type {EventHandle|null}
     * @private
     */
    private _deviceLostEvent;
    /**
     * Destroys this octree instance and clears internal references.
     *
     * @param {boolean} [skipRefCounting] - When true, skip decrementing file ref counts
     * on the octree. Used when the caller handles ref counting externally via pendingReleases
     * (e.g. during world state updates where decrements must be deferred).
     */
    destroy(skipRefCounting?: boolean): void;
    /**
     * Handles device lost event by releasing all loaded resources.
     *
     * @private
     */
    private _onDeviceLost;
    /**
     * Returns the file indices currently referenced by this instance that should be decremented
     * when the instance is destroyed.
     *
     * @returns {number[]} Array of file indices to decRef.
     */
    getFileDecrements(): number[];
    /**
     * Selects desired LOD index for a node using the underfill strategy. When underfill is enabled,
     * it prefers already-loaded LODs within [optimalLodIndex .. optimalLodIndex + lodUnderfillLimit].
     * If none are loaded, it selects the coarsest available LOD within the range.
     *
     * @param {import('./gsplat-octree-node.js').GSplatOctreeNode} node - The octree node.
     * @param {number} optimalLodIndex - Optimal LOD index based on camera/distance.
     * @param {number} maxLod - Maximum LOD index.
     * @param {number} lodUnderfillLimit - Allowed coarse range above optimal.
     * @returns {number} Desired LOD index to display.
     */
    selectDesiredLodIndex(node: GSplatOctreeNode, optimalLodIndex: number, maxLod: number, lodUnderfillLimit: number): number;
    /**
     * Prefetch only the next-better LOD toward optimal. This stages loading in steps across all
     * nodes, avoiding intermixing requests before coarse is present.
     *
     * @param {import('./gsplat-octree-node.js').GSplatOctreeNode} node - The octree node.
     * @param {number} desiredLodIndex - Currently selected LOD for display (may be coarser than optimal).
     * @param {number} optimalLodIndex - Target optimal LOD.
     */
    prefetchNextLod(node: GSplatOctreeNode, desiredLodIndex: number, optimalLodIndex: number): void;
    /**
     * Updates the octree instance when LOD needs to be updated.
     *
     * @param {GraphNode} cameraNode - The camera node.
     * @param {import('./gsplat-params.js').GSplatParams} params - Global gsplat parameters.
     */
    updateLod(cameraNode: GraphNode, params: GSplatParams): void;
    /**
     * Evaluates optimal LOD indices for all nodes based on camera position and parameters.
     * This is Pass 1 of the LOD update process. Results are stored in nodeInfos array.
     *
     * Uses geometric LOD distances (lodBaseDistance * lodMultiplier^i) with FOV compensation
     * so that LOD transitions are perceptually uniform under perspective projection.
     *
     * @param {GraphNode} cameraNode - The camera node.
     * @param {number} maxLod - Maximum LOD index (lodLevels - 1).
     * @param {number} lodBaseDistance - Base distance for first LOD transition.
     * @param {number} lodMultiplier - Geometric ratio between successive LOD thresholds.
     * @param {number} rangeMin - Minimum allowed LOD index.
     * @param {number} rangeMax - Maximum allowed LOD index.
     * @param {import('./gsplat-params.js').GSplatParams} params - Global gsplat parameters.
     * @param {number} uniformScale - Uniform scale of the octree transform for world-space conversion.
     * @returns {number} Total number of splats that would be used by optimal LODs.
     * @private
     */
    private evaluateNodeLods;
    /**
     * Evaluates optimal LOD for all nodes without applying changes.
     * Called by GSplatManager during phased global budget enforcement.
     *
     * @param {GraphNode} cameraNode - The camera node.
     * @param {import('./gsplat-params.js').GSplatParams} params - Global gsplat parameters.
     * @param {number} [budgetScale] - Dynamic scale applied to LOD parameters to shift
     * boundaries closer to the budget target. Applied to lodBaseDistance directly, and
     * gently to lodMultiplier via pow(budgetScale, -0.2). Defaults to 1.
     * @returns {number} Total optimal splat count.
     */
    evaluateOptimalLods(cameraNode: GraphNode, params: GSplatParams, budgetScale?: number): number;
    /**
     * Applies calculated LOD changes and manages file placements.
     * This is Pass 2 of the LOD update process. Reads from nodeInfos array populated by evaluateNodeLods().
     *
     * @param {number} maxLod - Maximum LOD index (lodLevels - 1).
     * @param {import('./gsplat-params.js').GSplatParams} params - Global gsplat parameters.
     */
    applyLodChanges(maxLod: number, params: GSplatParams): void;
    /**
     * Increments reference count for a file and creates placement immediately.
     *
     * @param {number} fileIndex - The file index.
     * @param {number} nodeIndex - The octree node index.
     * @param {number} lodIndex - The LOD index for this node.
     */
    incrementFileRef(fileIndex: number, nodeIndex: number, lodIndex: number): void;
    /**
     * Decrements reference count for a file and removes placement if needed.
     *
     * @param {number} fileIndex - The file index.
     * @param {number} nodeIndex - The octree node index.
     */
    decrementFileRef(fileIndex: number, nodeIndex: number): void;
    /**
     * Updates existing placement with loaded resource and adds to manager.
     *
     * @param {number} fileIndex - The file index.
     * @returns {boolean} True if placement was updated and added to manager, false otherwise.
     */
    addFilePlacement(fileIndex: number): boolean;
    /**
     * Tests if the octree instance has moved by more than the provided LOD update distance.
     *
     * @param {number} threshold - Distance threshold to trigger an update.
     * @returns {boolean} True if the octree instance has moved by more than the threshold, false otherwise.
     */
    testMoved(threshold: number): boolean;
    /**
     * Updates the previous position of the octree instance.
     */
    updateMoved(): void;
    /**
     * Updates the octree instance each frame.
     *
     * @returns {boolean} True if octree instance is dirty, false otherwise.
     */
    update(): boolean;
    /**
     * Consumes and returns whether the active placement set membership changed (add/remove).
     *
     * @returns {boolean} True if placements were added or removed since last call.
     */
    consumePlacementSetChanged(): boolean;
    /**
     * Renders debug visualization for this octree instance.
     *
     * @param {*} scene - The scene to render debug primitives into. NOTE(review): parameter is
     * untyped (any) — presumably a Scene instance; confirm against the implementation.
     */
    debugRender(scene: any): void;
    /**
     * Returns true if this instance requests LOD re-evaluation and resets the flag.
     *
     * @returns {boolean} True if LOD should be re-evaluated.
     */
    consumeNeedsLodUpdate(): boolean;
    /**
     * Polls prefetched file indices for completion and updates state.
     */
    pollPrefetchCompletions(): void;
}
/**
 * Stores LOD state for a single octree node. Instances are owned by a
 * {@link GSplatOctreeInstance} (see {@link NodeInfo#inst}) and are read during
 * LOD evaluation and budget balancing.
 *
 * @ignore
 */
declare class NodeInfo {
    /**
     * Current LOD index being rendered. -1 indicates node is not visible.
     * @type {number}
     */
    currentLod: number;
    /**
     * Optimal LOD index based on distance/visibility (before underfill).
     * @type {number}
     */
    optimalLod: number;
    /**
     * World-space distance from camera to this node.
     * Used for non-linear bucket mapping in budget enforcement.
     * @type {number}
     */
    worldDistance: number;
    /**
     * Accumulated camera translation for SH color update threshold tracking.
     * @type {number}
     */
    colorAccumulatedTranslation: number;
    /**
     * Back-reference to owning GSplatOctreeInstance.
     * @type {GSplatOctreeInstance|null}
     */
    inst: GSplatOctreeInstance | null;
    /**
     * Cached reference to this node's LOD array for fast budget balancing.
     * @type {Array|null}
     */
    lods: any[] | null;
    /**
     * Unique allocation identifier for persistent work buffer allocation tracking.
     * @type {number}
     */
    allocId: number;
    /**
     * Resets all LOD values to -1 (invisible/uninitialized).
     */
    resetLod(): void;
}

/**
 * Represents a snapshot of gsplat state for rendering. This class captures all necessary data
 * at a point in time and should not hold references back to the source placement. All required
 * data should be copied or referenced, allowing placement to be modified without affecting the info.
 *
 * @ignore
 */
declare class GSplatInfo {
    /**
     * Create a new GSplatInfo.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {GSplatResourceBase} resource - The splat resource.
     * @param {GSplatPlacement} placement - The placement of the splat.
     * @param {Function|null} [consumeRenderDirty] - Callback to consume render dirty flag.
     * @param {GSplatOctreeNode[]|null} [octreeNodes] - Octree nodes for bounds lookup.
     * @param {NodeInfo[]|null} [nodeInfos] - Per-node info array from octree instance.
     */
    constructor(device: GraphicsDevice, resource: GSplatResourceBase, placement: GSplatPlacement, consumeRenderDirty?: Function | null, octreeNodes?: GSplatOctreeNode[] | null, nodeInfos?: NodeInfo[] | null);
    /** @type {GraphicsDevice} */
    device: GraphicsDevice;
    /** @type {GSplatResourceBase} */
    resource: GSplatResourceBase;
    /** @type {GraphNode} */
    node: GraphNode;
    /** @type {number} */
    lodIndex: number;
    /**
     * Unique identifier from the placement, used for picking.
     *
     * @type {number}
     */
    placementId: number;
    /**
     * Unique allocation identifier for persistent work buffer allocation tracking.
     * Copied from the source placement.
     *
     * @type {number}
     */
    allocId: number;
    /**
     * Identifies the bounds group this splat belongs to. All file placements from the same
     * octree instance share the parent placement's allocId. Non-octree placements use their
     * own allocId. Used to deduplicate bounds and transform texture entries.
     *
     * @type {number}
     */
    parentPlacementId: number;
    /** @type {number} */
    numSplats: number;
    /** @type {number} */
    activeSplats: number;
    /**
     * Array of intervals for remapping of indices, each two consecutive numbers represent
     * start and end of a range of splats.
     *
     * @type {number[]}
     */
    intervals: number[];
    /**
     * Per-interval pixel offsets in the work buffer. For non-octree splats this has one entry.
     * For octree splats each entry corresponds to one interval in this.intervals.
     *
     * @type {number[]}
     */
    intervalOffsets: number[];
    /**
     * Per-interval allocation IDs for persistent tracking. Parallel to intervals: for octree
     * splats each entry is the NodeInfo.allocId for that interval's node; for non-octree
     * splats this has one entry equal to this.allocId.
     *
     * @type {number[]}
     */
    intervalAllocIds: number[];
    /**
     * Per-interval octree node indices. Parallel to intervals: for octree splats each entry
     * is the nodeIndex for that interval. Empty for non-octree splats.
     *
     * @type {number[]}
     */
    intervalNodeIndices: number[];
    /** @type {Mat4} */
    previousWorldTransform: Mat4;
    /** @type {BoundingBox} */
    aabb: BoundingBox;
    /**
     * Small RGBA32U texture storing per-sub-draw data for instanced interval rendering.
     * Each texel: R = rowStart | (numRows << 16), G = colStart, B = colEnd, A = sourceBase.
     * Created lazily by {@link ensureSubDrawTexture} when needed for rendering.
     *
     * @type {Texture|null}
     */
    subDrawTexture: Texture | null;
    /**
     * Number of sub-draw instances for instanced interval rendering.
     *
     * @type {number}
     */
    subDrawCount: number;
    /**
     * Number of bounding sphere entries this GSplatInfo contributes to the shared bounds texture.
     *
     * @type {number}
     */
    numBoundsEntries: number;
    /**
     * Base index into the shared bounds sphere texture for this GSplatInfo's entries.
     *
     * @type {number}
     */
    boundsBaseIndex: number;
    /**
     * Octree nodes array reference for writing bounding sphere data. Set when the GSplatInfo
     * is created from an octree placement.
     *
     * @type {GSplatOctreeNode[]|null}
     */
    octreeNodes: GSplatOctreeNode[] | null;
    /**
     * Per-node info array from the octree instance, providing allocId for each node.
     * Indexed by nodeIndex. Null for non-octree splats.
     *
     * @type {NodeInfo[]|null}
     */
    nodeInfos: NodeInfo[] | null;
    /** @type {number} */
    colorAccumulatedTranslation: number;
    /**
     * Per-instance shader parameters. Reference to the component's parameters Map.
     *
     * @type {Map<string, {scopeId: ScopeId, data: *}>|null}
     */
    parameters: Map<string, {
        scopeId: ScopeId;
        data: any;
    }> | null;
    /**
     * Function to get current work buffer modifier from source placement.
     * Retrieved live (not snapshotted) to ensure shader configuration stays current.
     *
     * @type {(() => ({ code: string, hash: number }|null))|null}
     */
    getWorkBufferModifier: (() => ({
        code: string;
        hash: number;
    } | null)) | null;
    /**
     * Function to get current instance streams from source placement.
     * Retrieved live (not snapshotted) to ensure streams are available after lazy creation.
     *
     * @type {(() => GSplatStreams|null)|null}
     */
    getInstanceStreams: (() => GSplatStreams | null) | null;
    /**
     * Callback to consume render dirty flag from the source placement.
     *
     * @type {Function|null}
     * @private
     */
    private _consumeRenderDirty;
    /**
     * Destroys this snapshot and releases GPU resources it owns (e.g. the lazily created
     * {@link subDrawTexture}).
     */
    destroy(): void;
    /**
     * Sets per-interval pixel offsets for this splat. Sub-draw computation and GPU texture
     * creation are deferred to {@link ensureSubDrawTexture} to avoid work for splats that
     * may never be rendered (e.g. intermediate world states or unchanged splats).
     *
     * @param {number[]} intervalOffsets - Per-interval pixel offsets in the work buffer.
     */
    setLayout(intervalOffsets: number[]): void;
    /**
     * Ensures the sub-draw texture exists, computing sub-draw data and creating the GPU texture
     * on first call. Must be called outside a render pass (e.g. in the render pass update method)
     * since WebGPU does not allow texture creation inside a render pass.
     *
     * @param {number} textureWidth - The work buffer texture width.
     */
    ensureSubDrawTexture(textureWidth: number): void;
    /**
     * Updates the flattened intervals array from placement intervals. Intervals are sorted and
     * stored as half-open pairs [start, end). Called once from the constructor; sub-draw data
     * is built later in setLayout when the work buffer texture width is known.
     *
     * @param {Map<number, Vec2>} intervals - Map of node index to inclusive [x, y] intervals.
     */
    updateIntervals(intervals: Map<number, Vec2>): void;
    /**
     * Splits an interval at row boundaries into sub-draws (partial first row, full middle rows,
     * partial last row) and appends them to the sub-draw data array.
     *
     * @param {Uint32Array} subDrawData - The output array to append sub-draw entries to.
     * @param {number} subDrawCount - Current number of sub-draws already in the array.
     * @param {number} sourceBase - Source splat index for this interval.
     * @param {number} size - Number of splats in this interval.
     * @param {number} targetOffset - Pixel offset in the work buffer texture.
     * @param {number} textureWidth - Width of the work buffer texture.
     * @returns {number} Updated sub-draw count.
     */
    appendSubDraws(subDrawData: Uint32Array, subDrawCount: number, sourceBase: number, size: number, targetOffset: number, textureWidth: number): number;
    /**
     * Builds the sub-draw data texture from the current intervals (or a synthetic full-range
     * interval when none exist). Each interval is split at row boundaries of the work buffer
     * texture to produce axis-aligned rectangles stored as a small RGBA32U texture.
     *
     * @param {number} textureWidth - The work buffer texture width.
     */
    updateSubDraws(textureWidth: number): void;
    /**
     * Per-frame update of this snapshot. NOTE(review): the return value is untyped —
     * presumably a dirty/changed indicator consumed by the caller; confirm against the
     * implementation.
     *
     * @returns {*} Update result.
     */
    update(): any;
    /**
     * Writes bounding sphere data for this GSplatInfo into a shared Float32Array.
     * For octree resources, writes spheres for ALL nodes (indexed by nodeIndex) to keep
     * boundsBaseIndex stable across LOD changes.
     * For non-octree resources, computes a single sphere from the resource AABB.
     *
     * @param {Float32Array} data - The shared bounds sphere data array.
     * @param {number} offset - The float offset to start writing at.
     */
    writeBoundsSpheres(data: Float32Array, offset: number): void;
    /**
     * True if this splat's data includes spherical harmonics.
     *
     * @type {boolean}
     */
    get hasSphericalHarmonics(): boolean;
}

/**
 * A render pass used to render multiple gsplats to a work buffer render target.
 *
 * @ignore
 */
declare class GSplatWorkBufferRenderPass extends RenderPass {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {GSplatWorkBuffer} workBuffer - The work buffer this pass renders into.
     * @param {boolean} [colorOnly] - When true, the pass renders only the color stream
     * (see {@link colorOnly}).
     */
    constructor(device: any, workBuffer: any, colorOnly?: boolean);
    /**
     * Array of GSplatInfo objects to render in this pass.
     *
     * @type {GSplatInfo[]}
     */
    splats: GSplatInfo[];
    /** @type {number[][]|undefined} */
    colorsByLod: number[][] | undefined;
    /**
     * The camera node used for rendering.
     *
     * @type {GraphNode}
     */
    cameraNode: GraphNode;
    /** @type {GSplatWorkBuffer} */
    workBuffer: GSplatWorkBuffer;
    /** @type {boolean} */
    colorOnly: boolean;
    /** @type {Float32Array} */
    _modelScaleData: Float32Array;
    /** @type {Float32Array} */
    _modelRotationData: Float32Array;
    /** @type {Int32Array} */
    _textureSize: Int32Array;
    /**
     * Shared grow-only texture holding packed sub-draw data for all partial renders in a frame.
     *
     * @type {Texture}
     */
    _subDrawTexture: Texture;
    /**
     * Flat array of interleaved [baseOffset, count] pairs, parallel to this.splats.
     * For splat at index i: _partialData[i*2] = base offset into _subDrawTexture,
     * _partialData[i*2+1] = sub-draw count (0 means use splat's own sub-draws).
     *
     * @type {number[]}
     */
    _partialData: number[];
    /**
     * Initialize the render pass with the specified render target.
     *
     * @param {RenderTarget} renderTarget - The target to render to.
     */
    init(renderTarget: RenderTarget): void;
    /**
     * Update the render pass with splats to render and camera.
     *
     * @param {GSplatInfo[]} splats - Array of GSplatInfo objects to render.
     * @param {GraphNode} cameraNode - The camera node for rendering.
     * @param {number[][]|undefined} colorsByLod - Optional array of RGB colors per LOD index.
     * @param {Set<number>|null} [changedAllocIds] - Set of changed allocIds for partial render.
     * @returns {boolean} True if there are splats to render, false otherwise.
     */
    update(splats: GSplatInfo[], cameraNode: GraphNode, colorsByLod: number[][] | undefined, changedAllocIds?: Set<number> | null): boolean;
    /**
     * Render a single splat info object. Optionally renders only a subset of sub-draws
     * using an override texture and count (for partial work buffer updates).
     *
     * @param {GSplatInfo} splatInfo - The splat info to render.
     * @param {Texture} [overrideSubDrawTexture] - Override sub-draw texture for partial renders.
     * @param {number} [overrideSubDrawCount] - Override sub-draw count for partial renders.
     * @param {number} [subDrawBase] - Base offset into the sub-draw texture.
     */
    renderSplat(splatInfo: GSplatInfo, overrideSubDrawTexture?: Texture, overrideSubDrawCount?: number, subDrawBase?: number): void;
}

/**
 * Frustum culling data for GSplat octree nodes. Manages bounding-sphere and
 * transform storage buffers and computes frustum planes from camera matrices.
 * The actual culling test is performed inline by the interval compaction compute shader.
 *
 * @ignore
 */
declare class GSplatFrustumCuller {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     */
    constructor(device: GraphicsDevice);
    /** @type {GraphicsDevice} */
    device: GraphicsDevice;
    /**
     * Storage buffer holding interleaved BoundsEntry structs (center.xyz, radius,
     * transformIndex, pad x3). 32 bytes per entry.
     *
     * @type {StorageBuffer|null}
     */
    boundsBuffer: StorageBuffer | null;
    /**
     * Total number of bounds entries across all GSplatInfos.
     *
     * @type {number}
     */
    totalBoundsEntries: number;
    /** @type {number} */
    _allocatedBoundsEntries: number;
    /** @type {Float32Array|null} */
    _boundsFloatView: Float32Array | null;
    /** @type {Uint32Array|null} */
    _boundsUintView: Uint32Array | null;
    /** @type {Float32Array|null} */
    _tmpSpheres: Float32Array | null;
    /**
     * Storage buffer holding world matrices as vec4f triplets (3 vec4f per matrix,
     * rows of a 4x3 affine matrix). 48 bytes per matrix.
     *
     * @type {StorageBuffer|null}
     */
    transformsBuffer: StorageBuffer | null;
    /** @type {number} */
    _allocatedTransformCount: number;
    /** @type {Float32Array|null} */
    _transformsData: Float32Array | null;
    /**
     * Packed frustum planes (6 planes x 4 floats: nx, ny, nz, distance).
     * Updated by {@link computeFrustumPlanes} and consumed by the interval cull shader.
     *
     * @type {Float32Array}
     */
    frustumPlanes: Float32Array;
    /**
     * Camera world position for fisheye cone culling (xyz).
     *
     * @type {Float32Array}
     */
    fisheyeCameraPos: Float32Array;
    /**
     * Camera forward direction (normalized) for fisheye cone culling (xyz).
     *
     * @type {Float32Array}
     */
    fisheyeCameraForward: Float32Array;
    /**
     * Maximum visible angle from forward direction for fisheye cone culling.
     *
     * @type {number}
     */
    fisheyeMaxTheta: number;
    /**
     * Destroys the culler, releasing the bounds and transforms storage buffers.
     */
    destroy(): void;
    /**
     * Updates the bounds buffer with local-space bounding spheres and transform
     * indices from pre-built bounds groups.
     *
     * @param {Array<{splat: GSplatInfo, boundsBaseIndex: number, numBoundsEntries: number}>} boundsGroups - Pre-built bounds groups.
     */
    updateBoundsData(boundsGroups: Array<{
        splat: GSplatInfo;
        boundsBaseIndex: number;
        numBoundsEntries: number;
    }>): void;
    /**
     * Updates the transforms buffer with one world matrix per bounds group.
     * Each matrix is stored as 3 vec4f (rows of a 4x3 affine matrix).
     *
     * @param {Array<{splat: GSplatInfo, boundsBaseIndex: number, numBoundsEntries: number}>} boundsGroups - Pre-built bounds groups.
     */
    updateTransformsData(boundsGroups: Array<{
        splat: GSplatInfo;
        boundsBaseIndex: number;
        numBoundsEntries: number;
    }>): void;
    /**
     * Computes frustum planes from camera matrices and stores them in
     * {@link frustumPlanes} for use by the interval cull compute shader.
     *
     * @param {Mat4} projectionMatrix - The camera projection matrix.
     * @param {Mat4} viewMatrix - The camera view matrix.
     */
    computeFrustumPlanes(projectionMatrix: Mat4, viewMatrix: Mat4): void;
    /**
     * Sets fisheye cone culling data for the interval cull shader.
     *
     * @param {import('../../core/math/vec3.js').Vec3} cameraPos - Camera world position.
     * @param {import('../../core/math/vec3.js').Vec3} cameraForward - Camera forward direction (normalized).
     * @param {number} maxTheta - Maximum visible angle from forward direction in radians.
     */
    setFisheyeData(cameraPos: Vec3, cameraForward: Vec3, maxTheta: number): void;
}

/**
 * An object that renders a quad using a {@link Shader}.
 *
 * Note: QuadRender does not modify render states. Before calling {@link QuadRender#render},
 * you should set up the required states using {@link GraphicsDevice#setDrawStates}, or the
 * individual setters ({@link GraphicsDevice#setBlendState}, {@link GraphicsDevice#setCullMode},
 * {@link GraphicsDevice#setFrontFace}, {@link GraphicsDevice#setDepthState},
 * {@link GraphicsDevice#setStencilState}). Otherwise previously set states will be used.
 *
 * Example:
 *
 * ```javascript
 * const shader = pc.ShaderUtils.createShader(app.graphicsDevice, {
 *     uniqueName: 'MyShader',
 *     attributes: { aPosition: SEMANTIC_POSITION },
 *     vertexGLSL: '// vertex shader code',
 *     fragmentGLSL: '// fragment shader code'
 * });
 * const quad = new QuadRender(shader);
 *
 * // Set up render states before rendering (defaults are suitable for full-screen quads)
 * app.graphicsDevice.setDrawStates();
 *
 * quad.render();
 * quad.destroy();
 * ```
 *
 * @category Graphics
 */
declare class QuadRender {
    /**
     * Create a new QuadRender instance.
     *
     * @param {Shader} shader - The shader to be used to render the quad.
     */
    constructor(shader: Shader);
    /**
     * @type {UniformBuffer}
     * @ignore
     */
    uniformBuffer: UniformBuffer;
    /**
     * @type {BindGroup}
     * @ignore
     */
    bindGroup: BindGroup;
    /**
     * The shader used to render the quad.
     *
     * @type {Shader}
     */
    shader: Shader;
    /**
     * Destroys the resources associated with this instance.
     */
    destroy(): void;
    /**
     * Renders the quad. If the viewport is provided, the original viewport and scissor is restored
     * after the rendering.
     *
     * @param {Vec4} [viewport] - The viewport rectangle of the quad, in pixels. The viewport is
     * not changed if not provided.
     * @param {Vec4} [scissor] - The scissor rectangle of the quad, in pixels. Used only if the
     * viewport is provided.
     * @param {number} [numInstances] - Number of instances to draw. When provided, renders
     * multiple quads using instanced drawing. Each instance can use the instance index
     * (`gl_InstanceID` in GLSL, `pcInstanceIndex` in WGSL) to fetch per-quad data from
     * a texture or buffer, allowing each quad to be parameterized independently.
     */
    render(viewport?: Vec4, scissor?: Vec4, numInstances?: number): void;
}

/**
 * @ignore
 */
declare class GSplatWorkBuffer {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {GSplatFormat} format - The work buffer format descriptor.
     */
    constructor(device: GraphicsDevice, format: GSplatFormat);
    /** @type {GraphicsDevice} */
    device: GraphicsDevice;
    /** @type {GSplatFormat} */
    format: GSplatFormat;
    /** @type {number} */
    id: number;
    /**
     * Manages textures for format streams.
     *
     * @type {GSplatStreams}
     */
    streams: GSplatStreams;
    /**
     * Main MRT render target for all work buffer streams.
     *
     * @type {RenderTarget}
     */
    renderTarget: RenderTarget;
    /**
     * Color-only render target for updating just the dataColor stream.
     *
     * @type {RenderTarget}
     */
    colorRenderTarget: RenderTarget;
    /** @type {Texture|undefined} */
    orderTexture: Texture | undefined;
    /** @type {StorageBuffer|undefined} */
    orderBuffer: StorageBuffer | undefined;
    /** @type {UploadStream} */
    uploadStream: UploadStream;
    /** @type {GSplatWorkBufferRenderPass} */
    renderPass: GSplatWorkBufferRenderPass;
    /** @type {GSplatWorkBufferRenderPass} */
    colorRenderPass: GSplatWorkBufferRenderPass;
    /**
     * GPU frustum culler for octree node visibility.
     *
     * @type {GSplatFrustumCuller}
     */
    frustumCuller: GSplatFrustumCuller;
    /**
     * Creates or recreates render targets from current textures.
     *
     * @private
     */
    private _createRenderTargets;
    /**
     * Syncs textures and render targets with the format when extra streams are added.
     * Call this before rendering to ensure all streams have textures.
     */
    syncWithFormat(): void;
    /**
     * Gets a texture by name.
     *
     * @param {string} name - The texture name.
     * @returns {Texture|undefined} The texture, or undefined if not found.
     */
    getTexture(name: string): Texture | undefined;
    /**
     * Destroys the work buffer and releases the GPU resources it owns (render targets,
     * stream textures, order data, render passes and the frustum culler).
     */
    destroy(): void;
    /**
     * The current size of the work buffer textures (see {@link resize}).
     *
     * @type {number}
     */
    get textureSize(): number;
    /**
     * Uploads splat order data to the order texture/buffer.
     *
     * @param {*} data - The order data. NOTE(review): untyped — presumably a typed array of
     * splat indices; confirm against the implementation.
     */
    setOrderData(data: any): void;
    /**
     * @param {number} textureSize - The texture size to resize to.
     */
    resize(textureSize: number): void;
    /**
     * Render given splats to the work buffer.
     *
     * @param {GSplatInfo[]} splats - The splats to render.
     * @param {GraphNode} cameraNode - The camera node.
     * @param {number[][]|undefined} colorsByLod - Array of RGB colors per LOD. Index by lodIndex; if a
     * shorter array is provided, index 0 will be reused as fallback.
     * @param {Set<number>|null} [changedAllocIds] - When provided, only render sub-draws for intervals
     * whose allocIds are in this set (per-node partial update).
     */
    render(splats: GSplatInfo[], cameraNode: GraphNode, colorsByLod: number[][] | undefined, changedAllocIds?: Set<number> | null): void;
    /**
     * Render only the color data to the work buffer (not geometry/covariance).
     *
     * @param {GSplatInfo[]} splats - The splats to render.
     * @param {GraphNode} cameraNode - The camera node.
     * @param {number[][]|undefined} colorsByLod - Array of RGB colors per LOD. Index by lodIndex; if a
     * shorter array is provided, index 0 will be reused as fallback.
     * @param {Set<number>|null} [changedAllocIds] - Set of changed allocIds for partial render.
     */
    renderColor(splats: GSplatInfo[], cameraNode: GraphNode, colorsByLod: number[][] | undefined, changedAllocIds?: Set<number> | null): void;
}
/**
 * @import { GSplatFormat } from '../gsplat/gsplat-format.js'
 * @import { GSplatInfo } from './gsplat-info.js'
 * @import { GraphicsDevice } from '../../platform/graphics/graphics-device.js'
 * @import { GraphNode } from '../graph-node.js'
 * @import { ShaderMaterial } from '../materials/shader-material.js'
 */
/**
 * A helper class to cache quad renders for work buffer rendering.
 *
 * @ignore
 */
declare class WorkBufferRenderInfo {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {string} key - Cache key for this render info.
     * @param {ShaderMaterial} material - The material to use.
     * @param {boolean} colorOnly - Whether to render only color (not full MRT).
     * @param {GSplatFormat} format - The work buffer format descriptor.
     */
    constructor(device: GraphicsDevice, key: string, material: ShaderMaterial, colorOnly: boolean, format: GSplatFormat);
    /** @type {ShaderMaterial} */
    material: ShaderMaterial;
    /** @type {QuadRender} */
    quadRender: QuadRender;
    /**
     * Releases resources owned by this render info (the cached quad render).
     * NOTE(review): presumably the material is released here as well — confirm against
     * the implementation.
     */
    destroy(): void;
}

/**
 * Base class for a GSplat resource and defines common properties.
 *
 * @ignore
 */
declare class GSplatResourceBase {
    static createMesh(device: any): Mesh;
    static get instanceSize(): number;
    constructor(device: any, gsplatData: any);
    /**
     * @type {GraphicsDevice}
     * @ignore
     */
    device: GraphicsDevice;
    /**
     * @type {GSplatData | GSplatCompressedData | GSplatSogData}
     * @ignore
     */
    gsplatData: GSplatData | GSplatCompressedData | GSplatSogData;
    /** @type {Float32Array} */
    centers: Float32Array;
    /**
     * Version counter for centers array changes. Remains 0 for static resources.
     * Only GSplatContainer increments this via its update() method.
     *
     * @type {number}
     * @ignore
     */
    centersVersion: number;
    /** @type {BoundingBox} */
    aabb: BoundingBox;
    /**
     * @type {Mesh|null}
     * @ignore
     */
    mesh: Mesh | null;
    /**
     * @type {number}
     * @ignore
     */
    id: number;
    /**
     * Cache for work buffer render materials/shaders. Keyed by configuration hash.
     * Stored per-resource because materials depend on resource-specific configuration
     * (SH bands, textures, defines). Cleaned up when resource is destroyed.
     *
     * @type {Map<string, WorkBufferRenderInfo>}
     * @ignore
     */
    workBufferRenderInfos: Map<string, WorkBufferRenderInfo>;
    /**
     * Format descriptor for this resource. Assigned by derived classes.
     *
     * @type {GSplatFormat}
     * @protected
     */
    protected _format: GSplatFormat;
    /**
     * Manages textures for this resource based on format streams.
     *
     * @type {GSplatStreams}
     * @ignore
     */
    streams: GSplatStreams;
    /**
     * Non-texture uniform parameters required by this resource's format.
     * This is the single source of truth for format-specific uniforms (e.g., dequantization
     * parameters) used by both material configuration and processing.
     *
     * @type {Map<string, any>}
     * @ignore
     */
    parameters: Map<string, any>;
    /**
     * @type {number}
     * @private
     */
    private _refCount;
    /**
     * @type {number}
     * @private
     */
    private _meshRefCount;
    /**
     * Destroys this resource. If the resource is still in use by the sorter, destruction is
     * automatically deferred until it's safe.
     */
    destroy(): void;
    /**
     * Actually destroys this resource and releases all GPU resources.
     * Derived classes should override this method instead of destroy().
     *
     * @protected
     */
    protected _actualDestroy(): void;
    /**
     * Increments the reference count.
     *
     * @ignore
     */
    incRefCount(): void;
    /**
     * Decrements the reference count.
     *
     * @ignore
     */
    decRefCount(): void;
    /**
     * Gets the current reference count. This represents how many times this resource is currently
     * being used internally by the engine. For {@link GSplatComponent#asset|assets} assigned to
     * {@link GSplatComponent#unified|unified} gsplat components, this tracks active usage during
     * rendering and sorting operations.
     *
     * Resources should not be unloaded while the reference count is non-zero, as they are still
     * in use by the rendering pipeline.
     *
     * @type {number}
     * @ignore
     */
    get refCount(): number;
    /**
     * Ensures mesh and instanceIndices exist. Creates them lazily on first call. Must be paired
     * with a call to releaseMesh() when done.
     *
     * @ignore
     */
    ensureMesh(): void;
    /**
     * Releases reference to mesh. When all references are released, cleans up instanceIndices.
     * The mesh itself is destroyed by MeshInstance when its internal refCount reaches zero.
     *
     * @ignore
     */
    releaseMesh(): void;
    /**
     * Get or create a QuadRender for rendering to work buffer.
     *
     * @param {boolean} colorOnly - Whether to render only color (not full MRT).
     * @param {{ code: string, hash: number }|null} workBufferModifier - Optional custom modifier (object with code and pre-computed hash).
     * @param {number} formatHash - Captured format hash for shader caching.
     * @param {string} formatDeclarations - Captured format declarations for shader compilation.
     * @param {GSplatFormat} workBufferFormat - The work buffer format descriptor.
     * @returns {WorkBufferRenderInfo} The WorkBufferRenderInfo instance.
     * @ignore
     */
    getWorkBufferRenderInfo(colorOnly: boolean, workBufferModifier: {
        code: string;
        hash: number;
    } | null, formatHash: number, formatDeclarations: string, workBufferFormat: GSplatFormat): WorkBufferRenderInfo;
    /**
     * The number of splats in this resource. NOTE(review): declared `any` in the generated
     * types — presumably a number; confirm against the implementation.
     */
    get numSplats(): any;
    /**
     * Gets the format descriptor for this resource. The format defines texture streams and
     * shader code for reading splat data. Use this to add extra streams.
     *
     * @type {GSplatFormat}
     */
    get format(): GSplatFormat;
    /**
     * Gets a texture by name.
     *
     * @param {string} name - The name of the texture.
     * @returns {Texture|null} The texture, or null if not found.
     */
    getTexture(name: string): Texture | null;
    /**
     * Gets the texture dimensions (width and height) used by this resource's data textures.
     *
     * @type {Vec2}
     */
    get textureDimensions(): Vec2;
    /**
     * Configures a material to use this resource's data. Base implementation injects format's
     * shader chunks and binds textures from the streams.
     *
     * @param {ShaderMaterial} material - The material to configure.
     * @param {{ code: string, hash: number }|null} workBufferModifier - Optional custom modifier (object with code and pre-computed hash).
     * @param {string} formatDeclarations - Captured format declarations for shader compilation.
     * @ignore
     */
    configureMaterial(material: ShaderMaterial, workBufferModifier: {
        code: string;
        hash: number;
    } | null, formatDeclarations: string): void;
    /**
     * Configures material defines for this resource. Derived classes should override this.
     *
     * @param {Map<string, string|number|boolean>} defines - The defines map to configure.
     * @ignore
     */
    configureMaterialDefines(defines: Map<string, string | number | boolean>): void;
    /** NOTE(review): undocumented — see the implementation for the contract of this method. */
    instantiate(): void;
}

/**
 * @import { GraphicsDevice } from '../../platform/graphics/graphics-device.js'
 * @import { StorageBuffer } from '../../platform/graphics/storage-buffer.js'
 * @import { Texture } from '../../platform/graphics/texture.js'
 */
declare class GSplatSorter extends EventHandler {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {import('../scene.js').Scene} [scene] - The scene to fire sort timing events on.
     */
    constructor(device: GraphicsDevice, scene?: Scene);
    /** Web worker that produces sorted results (see {@link GSplatSorter#pendingSorted}). */
    worker: Worker;
    /**
     * GPU destination for order data uploads (see init()).
     *
     * @type {Texture|StorageBuffer}
     */
    target: Texture | StorageBuffer;
    /**
     * CPU-side buffer of order data. NOTE(review): usage inferred from the name — confirm.
     *
     * @type {ArrayBuffer}
     */
    orderData: ArrayBuffer;
    /** Splat center positions passed to init(). NOTE(review): declared `any`; presumably a Float32Array — confirm. */
    centers: any;
    /** Scene used to fire sort timing events, when supplied to the constructor. */
    scene: Scene;
    /** @type {UploadStream} */
    uploadStream: UploadStream;
    /**
     * Pending sorted result from the worker, applied on the next applyPendingSorted() call.
     * When multiple results arrive between frames, only the latest is kept.
     *
     * @type {{ count: number, data: Uint32Array }|null}
     */
    pendingSorted: {
        count: number;
        data: Uint32Array;
    } | null;
    /** Shuts the sorter down and releases its resources. NOTE(review): specifics inferred — confirm. */
    destroy(): void;
    /**
     * @param {Texture|StorageBuffer} target - The GPU target for order data uploads.
     * @param {number} numSplats - The number of splats.
     * @param {Float32Array} centers - The splat center positions.
     * @param {Uint32Array} [chunks] - Optional chunk data.
     */
    init(target: Texture | StorageBuffer, numSplats: number, centers: Float32Array, chunks?: Uint32Array): void;
    /**
     * Applies the most recent pending sorted result (if any), uploading order data to the GPU.
     * Call once per frame from the instance's update().
     *
     * @returns {number} The splat count from the applied result, or -1 if nothing was pending.
     */
    applyPendingSorted(): number;
    /** Updates the splat index mapping used by the sort. NOTE(review): untyped — confirm the expected shape. */
    setMapping(mapping: any): void;
    /** Supplies the camera position and direction that drive the sort order. NOTE(review): untyped — presumably Vec3-like. */
    setCamera(pos: any, dir: any): void;
}

/**
 * NOTE(review): undocumented in source. The name and members suggest it resolves spherical
 * harmonic data for a gsplat instance into a texture via a quad render pass — confirm against
 * the implementation.
 */
declare class GSplatResolveSH {
    constructor(device: any, gsplatInstance: any);
    /** Camera direction from the previous render() call. NOTE(review): inferred from the name — confirm. */
    prevDir: Vec3;
    updateMode: string;
    device: any;
    gsplatInstance: any;
    shader: Shader;
    texture: any;
    renderTarget: RenderTarget;
    renderPass: CustomRenderPass;
    quadRender: QuadRender;
    /** Releases resources owned by this object. NOTE(review): specifics inferred — confirm. */
    destroy(): void;
    /** Executes the resolve pass for the given camera and model matrix. NOTE(review): parameters untyped — confirm. */
    render(camera: any, modelMat: any): void;
}

/**
 * A render pass that runs an externally assigned callback. NOTE(review): behavior inferred from
 * the member name — confirm against the implementation.
 */
declare class CustomRenderPass extends RenderPass {
    /**
     * Callback invoked by the pass, or null when none is assigned.
     *
     * The original declaration read `() => void | null`, which — due to arrow-type precedence —
     * is a function returning `void | null`, not a nullable function. The intended type is a
     * parenthesized union: a zero-argument void function, or null.
     *
     * @type {(() => void)|null}
     */
    executeCallback: (() => void) | null;
}

/**
 * Class responsible for management of shader passes, associated with a device.
 *
 * @ignore
 */
declare class ShaderPass {
    /**
     * Get access to the shader pass instance for the specified device.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @returns { ShaderPass } The shader pass instance for the specified device.
     */
    static get(device: GraphicsDevice): ShaderPass;
    /**
     * Allocated shader passes, map of a shader pass name to info.
     *
     * @type {Map<string, ShaderPassInfo>}
     */
    passesNamed: Map<string, ShaderPassInfo>;
    /**
     * Allocated shader passes, indexed by their index.
     *
     * @type {Array<ShaderPassInfo>}
     */
    passesIndexed: Array<ShaderPassInfo>;
    /** Next available index */
    nextIndex: number;
    /**
     * Allocates a shader pass with the specified name and options.
     *
     * @param {string} name - A name of the shader pass.
     * @param {object} [options] - Options for the shader pass, which are added as properties to the
     * shader pass info.
     * @returns {ShaderPassInfo} The allocated shader pass info.
     */
    allocate(name: string, options?: object): ShaderPassInfo;
    /**
     * Return the shader pass info for the specified index.
     *
     * @param {number} index - The shader pass index.
     * @returns {ShaderPassInfo} - The shader pass info.
     */
    getByIndex(index: number): ShaderPassInfo;
    /**
     * Return the shader pass info for the specified name.
     *
     * @param {string} name - The shader pass name (a key of passesNamed). NOTE(review): declared
     * `any` in the signature — presumably a string; confirm.
     * @returns {ShaderPassInfo} - The shader pass info.
     */
    getByName(name: any): ShaderPassInfo;
}
/**
 * Info about a shader pass. Shader pass is represented by a unique index and a name, and the
 * index is used to access the shader required for the pass, from an array stored in the
 * material or mesh instance.
 *
 * @ignore
 */
declare class ShaderPassInfo {
    /**
     * @param {string} name - The name, for example 'depth'. Must contain only letters, numbers,
     * and underscores, and start with a letter.
     * @param {number} index - Index from ShaderPass#nextIndex.
     * @param {object} [options] - Options for additional configuration of the shader pass.
     * @param {boolean} [options.isForward] - Whether the pass is forward.
     * @param {boolean} [options.isShadow] - Whether the pass is shadow.
     * @param {number} [options.lightType] - Type of light, for example `pc.LIGHTTYPE_DIRECTIONAL`.
     * @param {number} [options.shadowType] - Type of shadow, for example `pc.SHADOW_PCF3_32F`.
     */
    constructor(name: string, index: number, options?: {
        isForward?: boolean;
        isShadow?: boolean;
        // lightType/shadowType were declared boolean, contradicting their own descriptions —
        // LIGHTTYPE_* and SHADOW_* are numeric engine constants, so the correct type is number.
        lightType?: number;
        shadowType?: number;
    });
    /** @type {number} */
    index: number;
    /** @type {string} */
    name: string;
    /** @type {Map<string, string>} */
    defines: Map<string, string>;
    /** Rebuilds the defines map from the pass name and options. NOTE(review): inferred from the name — confirm. */
    buildShaderDefines(): void;
}

/**
 * A render pass implementing grab of a color buffer.
 *
 * @ignore
 */
declare class FramePassColorGrab extends FramePass {
    /** Render target holding the grabbed color copy. NOTE(review): untyped — confirm. */
    colorRenderTarget: any;
    /**
     * The source render target to grab the color from.
     *
     * @type {RenderTarget|null}
     */
    source: RenderTarget | null;
    /** Returns true when the grab target must be reallocated. NOTE(review): inferred from the name — confirm. */
    shouldReallocate(targetRT: any, sourceTexture: any, sourceFormat: any): boolean;
    /** Allocates (or reuses) the render target that stores the grabbed color. NOTE(review): inferred from the name — confirm. */
    allocateRenderTarget(renderTarget: any, sourceRenderTarget: any, device: any, format: any): any;
    /** Releases a previously allocated grab render target. NOTE(review): inferred from the name — confirm. */
    releaseRenderTarget(rt: any): void;
}

/**
 * A render pass implementing grab of a depth buffer, used on WebGL 2 and WebGPU devices.
 *
 * @ignore
 */
declare class FramePassDepthGrab extends FramePass {
    constructor(device: any, camera: any);
    /** Render target holding the grabbed depth copy. NOTE(review): untyped — confirm. */
    depthRenderTarget: any;
    /** Camera this pass grabs depth for. NOTE(review): untyped — confirm. */
    camera: any;
    /** Returns true when the grab target must be reallocated. NOTE(review): inferred from the name — confirm. */
    shouldReallocate(targetRT: any, sourceTexture: any): boolean;
    /** Allocates (or reuses) the render target that stores the grabbed depth. NOTE(review): inferred from the name — confirm. */
    allocateRenderTarget(renderTarget: any, sourceRenderTarget: any, device: any, format: any, isDepth: any): any;
    /** Releases a previously allocated grab render target. NOTE(review): inferred from the name — confirm. */
    releaseRenderTarget(rt: any): void;
}

/**
 * Fog parameters. Controls the fog type, its color, the exponential density and the linear
 * start/end distances.
 *
 * @category Graphics
 */
declare class FogParams {
    /**
     * The type of fog used by the scene. Can be:
     *
     * - {@link FOG_NONE}
     * - {@link FOG_LINEAR}
     * - {@link FOG_EXP}
     * - {@link FOG_EXP2}
     *
     * Defaults to {@link FOG_NONE}.
     *
     * @type {string}
     */
    type: string;
    /**
     * The color of the fog (if enabled), specified in sRGB color space. Defaults to black (0, 0, 0).
     *
     * @type {Color}
     */
    color: Color;
    /**
     * The density of the fog (if enabled). This property is only valid if the fog property is set
     * to {@link FOG_EXP} or {@link FOG_EXP2}. Defaults to 0.
     *
     * @type {number}
     */
    density: number;
    /**
     * The distance from the viewpoint where linear fog begins. This property is only valid if the
     * fog property is set to {@link FOG_LINEAR}. Defaults to 1.
     *
     * @type {number}
     */
    start: number;
    /**
     * The distance from the viewpoint where linear fog reaches its maximum. This property is only
     * valid if the fog property is set to {@link FOG_LINEAR}. Defaults to 1000.
     *
     * @type {number}
     */
    end: number;
}

/**
 * Internal camera shader parameters, used to generate and use matching shaders.
 *
 * @ignore
 */
declare class CameraShaderParams {
    /** @private */
    private _gammaCorrection;
    /** @private */
    private _toneMapping;
    /** @private */
    private _srgbRenderTarget;
    /** @private */
    private _ssaoEnabled;
    /** @private */
    private _fog;
    /** @private */
    private _sceneDepthMapLinear;
    /**
     * The hash of the rendering parameters, or undefined if the hash has not been computed yet.
     *
     * @type {number|undefined}
     * @private
     */
    private _hash;
    /**
     * Content of this class relevant to shader generation, which is supplied as defines for the
     * shader.
     *
     * @type {Map<string, string>}
     * @private
     */
    private _defines;
    /** True when _defines must be rebuilt before use. NOTE(review): inferred from the name — confirm. */
    _definesDirty: boolean;
    /**
     * The hash of the rendering parameters.
     *
     * @type {number}
     * @ignore
     */
    get hash(): number;
    /** The defines generated from the current parameters, supplied to shader generation. */
    get defines(): Map<string, string>;
    /** Invalidates cached state so the hash/defines are recomputed. NOTE(review): inferred from the name — confirm. */
    markDirty(): void;
    set fog(type: string);
    get fog(): string;
    set ssaoEnabled(value: boolean);
    get ssaoEnabled(): boolean;
    set gammaCorrection(value: number);
    get gammaCorrection(): number;
    /** True once gammaCorrection has been explicitly assigned. NOTE(review): inferred from the name — confirm. */
    _gammaCorrectionAssigned: boolean;
    set toneMapping(value: number);
    get toneMapping(): number;
    set srgbRenderTarget(value: boolean);
    get srgbRenderTarget(): boolean;
    set sceneDepthMapLinear(value: boolean);
    get sceneDepthMapLinear(): boolean;
    /**
     * Returns {@link GAMMA_SRGB} if the shader code needs to output gamma corrected color, otherwise
     * returns {@link GAMMA_NONE}.
     *
     * @type {number}
     * @ignore
     */
    get shaderOutputGamma(): number;
}

/**
 * @import { Ray } from './ray.js'
 */
/**
 * An infinite plane. Internally, it's represented in a parametric equation form:
 * `ax + by + cz + distance = 0`.
 *
 * @category Math
 */
declare class Plane {
    /**
     * Create a new Plane instance.
     *
     * @param {Vec3} [normal] - Normal of the plane. The constructor copies this parameter. Defaults
     * to {@link Vec3.UP}.
     * @param {number} [distance] - The distance from the plane to the origin, along its normal.
     * Defaults to 0.
     */
    constructor(normal?: Vec3, distance?: number);
    /**
     * The normal of the plane.
     *
     * @type {Vec3}
     */
    normal: Vec3;
    /**
     * The distance from the plane to the origin, along its normal.
     *
     * @type {number}
     */
    distance: number;
    /**
     * Returns a clone of the specified plane.
     *
     * @returns {this} A duplicate plane.
     */
    clone(): this;
    /**
     * Copies the contents of a source plane to a destination plane.
     *
     * @param {Plane} src - A source plane to copy to the destination plane.
     * @returns {Plane} Self for chaining.
     */
    copy(src: Plane): Plane;
    /**
     * Test if the plane intersects between two points.
     *
     * @param {Vec3} start - Start position of line.
     * @param {Vec3} end - End position of line.
     * @param {Vec3} [point] - If there is an intersection, the intersection point will be copied
     * into here.
     * @returns {boolean} True if there is an intersection.
     */
    intersectsLine(start: Vec3, end: Vec3, point?: Vec3): boolean;
    /**
     * Test if a ray intersects with the infinite plane.
     *
     * @param {Ray} ray - Ray to test against (direction must be normalized).
     * @param {Vec3} [point] - If there is an intersection, the intersection point will be copied
     * into here.
     * @returns {boolean} True if there is an intersection.
     */
    intersectsRay(ray: Ray, point?: Vec3): boolean;
    /**
     * Normalize the plane.
     *
     * NOTE(review): presumably scales both the normal and the distance so the normal becomes
     * unit length — confirm against the implementation.
     *
     * @returns {Plane} Self for chaining.
     */
    normalize(): Plane;
    /**
     * Sets the plane based on a normal and a distance from the origin.
     *
     * @param {number} nx - The x-component of the normal.
     * @param {number} ny - The y-component of the normal.
     * @param {number} nz - The z-component of the normal.
     * @param {number} d - The distance from the origin.
     * @returns {Plane} Self for chaining.
     */
    set(nx: number, ny: number, nz: number, d: number): Plane;
    /**
     * Sets the plane based on a specified normal and a point on the plane.
     *
     * @param {Vec3} point - The point on the plane.
     * @param {Vec3} normal - The normal of the plane.
     * @returns {Plane} Self for chaining.
     */
    setFromPointNormal(point: Vec3, normal: Vec3): Plane;
}

/**
 * @import { BoundingSphere } from './bounding-sphere.js'
 * @import { Mat4 } from '../math/mat4.js'
 * @import { Vec3 } from '../math/vec3.js'
 */
/**
 * A frustum is a shape that defines the viewing space of a camera. It can be used to determine
 * visibility of points and bounding spheres. Typically, you would not create a Frustum shape
 * directly, but instead query {@link CameraComponent#frustum}.
 *
 * @category Math
 */
declare class Frustum {
    /**
     * The six planes that make up the frustum.
     *
     * NOTE(review): the order and facing of the planes are not documented here — see the
     * setFromMat4() implementation before indexing this array directly.
     *
     * @type {Plane[]}
     */
    planes: Plane[];
    /**
     * Returns a clone of the specified frustum.
     *
     * @returns {Frustum} A duplicate frustum.
     * @example
     * const frustum = new pc.Frustum();
     * const clone = frustum.clone();
     */
    clone(): Frustum;
    /**
     * Copies the contents of a source frustum to a destination frustum.
     *
     * @param {Frustum} src - A source frustum to copy to the destination frustum.
     * @returns {Frustum} Self for chaining.
     * @example
     * const src = entity.camera.frustum;
     * const dst = new pc.Frustum();
     * dst.copy(src);
     */
    copy(src: Frustum): Frustum;
    /**
     * Updates the frustum shape based on the supplied 4x4 matrix.
     *
     * @param {Mat4} matrix - The matrix describing the shape of the frustum.
     * @example
     * // Create a perspective projection matrix
     * const projection = new pc.Mat4();
     * projection.setPerspective(45, 16 / 9, 1, 1000);
     *
     * // Create a frustum shape that is represented by the matrix
     * const frustum = new pc.Frustum();
     * frustum.setFromMat4(projection);
     */
    setFromMat4(matrix: Mat4): void;
    /**
     * Tests whether a point is inside the frustum. Note that points lying in a frustum plane are
     * considered to be outside the frustum.
     *
     * @param {Vec3} point - The point to test.
     * @returns {boolean} True if the point is inside the frustum, false otherwise.
     */
    containsPoint(point: Vec3): boolean;
    /**
     * Expands this frustum to also contain another frustum. For each of the 6 planes, the plane
     * that is further out (larger distance) is kept, creating a combined frustum that encompasses
     * both. This is useful for multi-view rendering such as stereo XR where culling should keep
     * objects visible in any view.
     *
     * Note: This method assumes both frustums have similar orientation (parallel views). This is
     * valid for WebXR stereo rendering where eyes use parallel projection with only a horizontal
     * offset, not toe-in convergence.
     *
     * @param {Frustum} other - The other frustum to add.
     * @returns {Frustum} Self for chaining.
     */
    add(other: Frustum): Frustum;
    /**
     * Tests whether a bounding sphere intersects the frustum. If the sphere is outside the
     * frustum, zero is returned. If the sphere intersects the frustum, 1 is returned. If the
     * sphere is completely inside the frustum, 2 is returned. Note that a sphere touching a
     * frustum plane from the outside is considered to be outside the frustum.
     *
     * @param {BoundingSphere} sphere - The sphere to test.
     * @returns {number} 0 if the bounding sphere is outside the frustum, 1 if it intersects the
     * frustum and 2 if it is contained by the frustum.
     */
    containsSphere(sphere: BoundingSphere): number;
}

/**
 * A camera.
 *
 * @ignore
 */
declare class Camera {
    /**
     * @type {ShaderPassInfo|null}
     */
    shaderPassInfo: ShaderPassInfo | null;
    /**
     * @type {FramePassColorGrab|null}
     */
    renderPassColorGrab: FramePassColorGrab | null;
    /**
     * @type {FramePassDepthGrab|null}
     */
    renderPassDepthGrab: FramePassDepthGrab | null;
    /**
     * The fog parameters.
     *
     * @type {FogParams|null}
     */
    fogParams: FogParams | null;
    /**
     * Shader parameters used to generate and use matching shaders.
     *
     * @type {CameraShaderParams}
     */
    shaderParams: CameraShaderParams;
    /**
     * Frame passes used to render this camera. If empty, the camera will render using the default
     * frame passes.
     *
     * @type {FramePass[]}
     */
    framePasses: FramePass[];
    /**
     * Frame passes that execute before this camera's main scene rendering. Entries are picked up
     * by the RenderPassForward that renders this camera's layers.
     *
     * @type {FramePass[]}
     */
    beforePasses: FramePass[];
    /** @type {number} */
    jitter: number;
    // Backing fields for the accessors declared further below.
    _aspectRatio: number;
    _aspectRatioMode: number;
    _calculateProjection: any;
    _calculateTransform: any;
    _clearColor: Color;
    _clearColorBuffer: boolean;
    _clearDepth: number;
    _clearDepthBuffer: boolean;
    _clearStencil: number;
    _clearStencilBuffer: boolean;
    _cullFaces: boolean;
    _farClip: number;
    _flipFaces: boolean;
    _fov: number;
    _frustumCulling: boolean;
    _horizontalFov: boolean;
    _layers: number[];
    _layersSet: Set<number>;
    _nearClip: number;
    _node: any;
    _orthoHeight: number;
    _projection: number;
    _rect: Vec4;
    _renderTarget: any;
    _scissorRect: Vec4;
    _scissorRectClear: boolean;
    _aperture: number;
    _shutter: number;
    _sensitivity: number;
    // Cached matrices and dirty flags. NOTE(review): invalidation rules are in the implementation.
    _projMat: Mat4;
    _projMatDirty: boolean;
    _projMatSkybox: Mat4;
    _viewMat: Mat4;
    _viewMatDirty: boolean;
    _viewProjMat: Mat4;
    _viewProjMatDirty: boolean;
    _shaderMatricesVersion: number;
    _viewProjInverse: Mat4;
    _viewProjCurrent: any;
    _viewProjPrevious: Mat4;
    _jitters: number[];
    frustum: Frustum;
    _xr: any;
    _xrProperties: {
        horizontalFov: boolean;
        fov: number;
        aspectRatio: number;
        farClip: number;
        nearClip: number;
    };
    destroy(): void;
    /**
     * Store camera matrices required by TAA. Only update them once per frame.
     */
    _storeShaderMatrices(viewProjMat: any, jitterX: any, jitterY: any, renderVersion: any): void;
    /**
     * True if the camera clears the full render target. (viewport / scissor are full size)
     */
    get fullSizeClearRect(): boolean;
    set aspectRatio(newValue: number);
    get aspectRatio(): number;
    set aspectRatioMode(newValue: number);
    get aspectRatioMode(): number;
    set calculateProjection(newValue: any);
    get calculateProjection(): any;
    set calculateTransform(newValue: any);
    get calculateTransform(): any;
    set clearColor(newValue: Color);
    get clearColor(): Color;
    set clearColorBuffer(newValue: boolean);
    get clearColorBuffer(): boolean;
    set clearDepth(newValue: number);
    get clearDepth(): number;
    set clearDepthBuffer(newValue: boolean);
    get clearDepthBuffer(): boolean;
    set clearStencil(newValue: number);
    get clearStencil(): number;
    set clearStencilBuffer(newValue: boolean);
    get clearStencilBuffer(): boolean;
    set cullFaces(newValue: boolean);
    get cullFaces(): boolean;
    set farClip(newValue: number);
    get farClip(): number;
    set flipFaces(newValue: boolean);
    get flipFaces(): boolean;
    set fov(newValue: number);
    get fov(): number;
    set frustumCulling(newValue: boolean);
    get frustumCulling(): boolean;
    set horizontalFov(newValue: boolean);
    get horizontalFov(): boolean;
    set layers(newValue: number[]);
    get layers(): number[];
    get layersSet(): Set<number>;
    set nearClip(newValue: number);
    get nearClip(): number;
    set node(newValue: any);
    get node(): any;
    set orthoHeight(newValue: number);
    get orthoHeight(): number;
    set projection(newValue: number);
    get projection(): number;
    get projectionMatrix(): Mat4;
    set rect(newValue: Vec4);
    get rect(): Vec4;
    set renderTarget(newValue: any);
    get renderTarget(): any;
    set scissorRect(newValue: Vec4);
    get scissorRect(): Vec4;
    get viewMatrix(): Mat4;
    set aperture(newValue: number);
    get aperture(): number;
    set sensitivity(newValue: number);
    get sensitivity(): number;
    set shutter(newValue: number);
    get shutter(): number;
    set xr(newValue: any);
    get xr(): any;
    /**
     * Creates a duplicate of the camera.
     *
     * @returns {Camera} A cloned Camera.
     */
    clone(): Camera;
    /**
     * Copies one camera to another.
     *
     * @param {Camera} other - Camera to copy.
     * @returns {Camera} Self for chaining.
     */
    copy(other: Camera): Camera;
    /** Enables or disables the color grab pass for this camera. NOTE(review): inferred from the name — confirm. */
    _enableRenderPassColorGrab(device: any, enable: any): void;
    /** Enables or disables the depth grab pass for this camera. NOTE(review): inferred from the name — confirm. */
    _enableRenderPassDepthGrab(device: any, renderer: any, enable: any): void;
    /** Rebuilds the cached view-projection matrix when dirty. NOTE(review): inferred from the name — confirm. */
    _updateViewProjMat(): void;
    /**
     * Convert a point from 3D world space to 2D canvas pixel space based on the camera's rect.
     *
     * @param {Vec3} worldCoord - The world space coordinate to transform.
     * @param {number} cw - The width of PlayCanvas' canvas element.
     * @param {number} ch - The height of PlayCanvas' canvas element.
     * @param {Vec3} [screenCoord] - 3D vector to receive screen coordinate result.
     * @returns {Vec3} The screen space coordinate.
     */
    worldToScreen(worldCoord: Vec3, cw: number, ch: number, screenCoord?: Vec3): Vec3;
    /**
     * Convert a point from 2D canvas pixel space to 3D world space based on the camera's rect.
     *
     * @param {number} x - X coordinate on PlayCanvas' canvas element.
     * @param {number} y - Y coordinate on PlayCanvas' canvas element.
     * @param {number} z - The distance from the camera in world space to create the new point.
     * @param {number} cw - The width of PlayCanvas' canvas element.
     * @param {number} ch - The height of PlayCanvas' canvas element.
     * @param {Vec3} [worldCoord] - 3D vector to receive world coordinate result.
     * @returns {Vec3} The world space coordinate.
     */
    screenToWorld(x: number, y: number, z: number, cw: number, ch: number, worldCoord?: Vec3): Vec3;
    /** Recomputes the projection matrices when dirty. NOTE(review): inferred from the name — confirm. */
    _evaluateProjectionMatrix(): void;
    /** Returns the projection matrix variant used for skybox rendering. NOTE(review): inferred from the name — confirm. */
    getProjectionMatrixSkybox(): Mat4;
    /** Computes exposure. NOTE(review): presumably from aperture, shutter and sensitivity — confirm. */
    getExposure(): number;
    /** Estimates the projected screen-space size of a bounding sphere. NOTE(review): inferred from the name — confirm. */
    getScreenSize(sphere: any): number;
    /**
     * Returns an array of corners of the frustum of the camera in the local coordinate system of the camera.
     *
     * @param {number} [near] - Near distance for the frustum points. Defaults to the near clip distance of the camera.
     * @param {number} [far] - Far distance for the frustum points. Defaults to the far clip distance of the camera.
     * @returns {Vec3[]} - An array of corners, using a global storage space.
     */
    getFrustumCorners(near?: number, far?: number): Vec3[];
    /**
     * Sets XR camera properties that should be derived from the physical camera in {@link XrManager}.
     *
     * @param {object} [properties] - Properties object.
     * @param {number} [properties.aspectRatio] - Aspect ratio.
     * @param {number} [properties.farClip] - Far clip.
     * @param {number} [properties.fov] - Field of view.
     * @param {boolean} [properties.horizontalFov] - Enable horizontal field of view.
     * @param {number} [properties.nearClip] - Near clip.
     */
    setXrProperties(properties?: {
        aspectRatio?: number;
        farClip?: number;
        fov?: number;
        horizontalFov?: boolean;
        nearClip?: number;
    }): void;
    /**
     * Fills the provided array with camera parameters for use in shaders.
     * The array format is: [1/far, far, near, isOrtho].
     *
     * @param {Float32Array} output - Array to fill with camera parameters.
     * @returns {Float32Array} The output array.
     * @ignore
     */
    fillShaderParams(output: Float32Array): Float32Array;
}

/** @ignore */
declare class GSplatInstance {
    /**
     * @param {GSplatResourceBase} resource - The splat instance.
     * @param {object} [options] - Options for the instance.
     * @param {ShaderMaterial|null} [options.material] - The material instance.
     * @param {boolean} [options.highQualitySH] - Whether to use the high quality or the approximate spherical harmonic calculation. Only applies to SOG data.
     * @param {import('../scene.js').Scene} [options.scene] - The scene to fire sort timing events on.
     */
    constructor(resource: GSplatResourceBase, options?: {
        material?: ShaderMaterial | null;
        highQualitySH?: boolean;
        scene?: Scene;
    });
    /** @type {GSplatResourceBase} */
    resource: GSplatResourceBase;
    /** @type {Texture|undefined} */
    orderTexture: Texture | undefined;
    /** @type {StorageBuffer|undefined} */
    orderBuffer: StorageBuffer | undefined;
    /** @type {ShaderMaterial} */
    _material: ShaderMaterial;
    /** @type {MeshInstance} */
    meshInstance: MeshInstance;
    /** Options this instance was constructed with. NOTE(review): declared as `{}` — confirm shape. */
    options: {};
    /** @type {GSplatSorter|null} */
    sorter: GSplatSorter | null;
    /** Camera position used by the most recent sort. NOTE(review): inferred from the name — confirm. */
    lastCameraPosition: Vec3;
    /** Camera direction used by the most recent sort. NOTE(review): inferred from the name — confirm. */
    lastCameraDirection: Vec3;
    /** @type {GSplatResolveSH|null} */
    resolveSH: GSplatResolveSH | null;
    /**
     * List of cameras this instance is visible for. Updated every frame by the renderer.
     *
     * @type {Camera[]}
     * @ignore
     */
    cameras: Camera[];
    /** Releases resources owned by this instance. NOTE(review): specifics inferred — confirm. */
    destroy(): void;
    /**
     * Set order data parameters on the material.
     *
     * @param {ShaderMaterial} material - The material to configure.
     */
    setMaterialOrderData(material: ShaderMaterial): void;
    /**
     * @param {ShaderMaterial} value - The material instance.
     */
    set material(value: ShaderMaterial);
    get material(): ShaderMaterial;
    /**
     * Configure the material with gsplat instance and resource properties.
     *
     * @param {ShaderMaterial} material - The material to configure.
     * @param {object} [options] - Object for passing optional arguments.
     * @param {boolean} [options.dither] - Specify true to configure the material for dithered rendering (stochastic alpha).
     */
    configureMaterial(material: ShaderMaterial, options?: {
        dither?: boolean;
    }): void;
    /**
     * Sorts the GS vertices based on the given camera.
     * @param {GraphNode} cameraNode - The camera node used for sorting.
     */
    sort(cameraNode: GraphNode): void;
    /** Per-frame update; the sorter docs state its pending results are applied from here (see GSplatSorter#applyPendingSorted). */
    update(): void;
    /** Switches between the high quality and approximate spherical harmonic calculation (see constructor options.highQualitySH). */
    setHighQualitySH(value: any): void;
}

/**
 * A skin instance is responsible for generating the matrix palette that is used to skin vertices
 * from object space to world space.
 *
 * @category Graphics
 */
declare class SkinInstance {
    /**
     * Create a new SkinInstance instance.
     *
     * @param {Skin} skin - The skin that will provide the inverse bind pose
     * matrices to generate the final matrix palette.
     */
    constructor(skin: Skin);
    /**
     * An array of nodes representing each bone in this skin instance.
     *
     * @type {GraphNode[]}
     */
    bones: GraphNode[];
    /** True when the palette needs rebuilding. NOTE(review): inferred from the name — confirm. */
    _dirty: boolean;
    /** Backing field for the rootBone accessor below. */
    _rootBone: any;
    /** NOTE(review): appears to track the last skin update pass (see updateMatrices parameters) — confirm. */
    _skinUpdateIndex: number;
    _updateBeforeCull: boolean;
    set rootBone(rootBone: any);
    get rootBone(): any;
    /** Allocates GPU-side skinning state for numBones bones. NOTE(review): inferred from the members below — confirm. */
    init(device: any, numBones: any): void;
    boneTexture: Texture;
    matrixPalette: Uint8Array<ArrayBufferLike> | Uint16Array<ArrayBufferLike> | Uint32Array<ArrayBufferLike> | Float32Array<ArrayBufferLike>;
    destroy(): void;
    /**
     * Resolves skin bones to a hierarchy with the rootBone at its root.
     *
     * @param {Entity} rootBone - A reference to the entity to be used as the root bone.
     * @param {Entity} entity - Specifies the entity used if the bone match is not found in the
     * hierarchy - usually the entity the render component is attached to.
     * @ignore
     */
    resolve(rootBone: Entity, entity: Entity): void;
    /**
     * @param {Skin} skin - The skin.
     */
    initSkin(skin: Skin): void;
    skin: Skin;
    matrices: any[];
    /** Uploads the matrix palette to the bone texture on the device. NOTE(review): inferred from the name — confirm. */
    uploadBones(device: any): void;
    _updateMatrices(rootNode: any, skinUpdateIndex: any): void;
    updateMatrices(rootNode: any, skinUpdateIndex: any): void;
    updateMatrixPalette(rootNode: any, skinUpdateIndex: any): void;
}

/**
 * @import { Morph } from './morph.js'
 * @import { Shader } from '../platform/graphics/shader.js'
 */
/**
 * An instance of {@link Morph}. Contains weights to assign to every {@link MorphTarget}, manages
 * selection of active morph targets.
 *
 * @category Graphics
 */
declare class MorphInstance {
    /**
     * Create a new MorphInstance instance.
     *
     * @param {Morph} morph - The {@link Morph} to instance.
     */
    constructor(morph: Morph);
    /**
     * The morph with its targets, which is being instanced.
     *
     * @type {Morph}
     */
    morph: Morph;
    device: GraphicsDevice;
    shader: Shader;
    /** Current weight values. NOTE(review): untyped elements — presumably numbers; confirm. */
    _weights: any[];
    /** Maps weight names to indices. NOTE(review): inferred from _getWeightIndex and the name — confirm. */
    _weightMap: Map<any, any>;
    _shaderMorphWeights: Float32Array<ArrayBuffer>;
    _shaderMorphIndex: Uint32Array<ArrayBuffer>;
    rtPositions: RenderTarget;
    rtNormals: RenderTarget;
    _textureParams: Float32Array<ArrayBuffer>;
    _aabbSize: Float32Array<ArrayBuffer>;
    _aabbMin: Float32Array<ArrayBuffer>;
    _aabbNrmSize: Float32Array<ArrayBuffer>;
    _aabbNrmMin: Float32Array<ArrayBuffer>;
    aabbSizeId: ScopeId;
    aabbMinId: ScopeId;
    morphTextureId: ScopeId;
    morphFactor: ScopeId;
    morphIndex: ScopeId;
    countId: ScopeId;
    zeroTextures: boolean;
    /**
     * Frees video memory allocated by this object.
     */
    destroy(): void;
    /** NOTE(review): untyped and undocumented — presumably the morphed positions texture; confirm. */
    texturePositions: any;
    /** NOTE(review): untyped and undocumented — presumably the morphed normals texture; confirm. */
    textureNormals: any;
    /**
     * Clones a MorphInstance. The returned clone uses the same {@link Morph} and weights are set
     * to defaults.
     *
     * @returns {MorphInstance} A clone of the specified MorphInstance.
     */
    clone(): MorphInstance;
    /** Resolves a weight name or index key to the internal weight index (see getWeight/setWeight). */
    _getWeightIndex(key: any): any;
    /**
     * Gets current weight of the specified morph target.
     *
     * @param {string|number} key - An identifier for the morph target. Either the weight index or
     * the weight name.
     * @returns {number} Weight.
     */
    getWeight(key: string | number): number;
    /**
     * Sets weight of the specified morph target.
     *
     * @param {string|number} key - An identifier for the morph target. Either the weight index or
     * the weight name.
     * @param {number} weight - Weight.
     */
    setWeight(key: string | number, weight: number): void;
    /** True when weights changed and active targets must be re-selected. NOTE(review): inferred — confirm. */
    _dirty: boolean;
    /**
     * Create the shader for texture based morphing.
     *
     * @param {number} maxCount - Maximum number of textures to blend.
     * @returns {Shader} Shader.
     * @private
     */
    private _createShader;
    _updateTextureRenderTarget(renderTarget: any, activeCount: any, isPos: any): void;
    _updateTextureMorph(activeCount: any): void;
    setAabbUniforms(isPos?: boolean): void;
    prepareRendering(device: any): void;
    /**
     * Selects active morph targets and prepares morph for rendering. Called automatically by
     * renderer.
     */
    update(): void;
}

/**
 * Base class for all post effects. Post effects take a render target as input, apply effects to
 * it and then render the result to an output render target or the screen if no output is
 * specified.
 *
 * @category Graphics
 */
declare class PostEffect {
    /**
     * A simple vertex shader used to render a quad, which requires 'vec2 aPosition' in the vertex
     * buffer, and generates uv coordinates vUv0 for use in the fragment shader.
     *
     * @type {string}
     */
    static quadVertexShader: string;
    /**
     * Create a new PostEffect instance.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device of the application.
     */
    constructor(graphicsDevice: GraphicsDevice);
    /**
     * The graphics device of the application.
     *
     * @type {GraphicsDevice}
     */
    device: GraphicsDevice;
    /**
     * The property that should be set to `true` (by the custom post effect) if a depth map
     * is necessary (default is false).
     *
     * @type {boolean}
     */
    needsDepthBuffer: boolean;
    /**
     * Render the post effect using the specified inputTarget to the specified outputTarget.
     *
     * @param {RenderTarget} inputTarget - The input render target.
     * @param {RenderTarget} outputTarget - The output render target. If null then this will be the
     * screen.
     * @param {Vec4} [rect] - The rect of the current camera. If not specified, it will default to
     * `[0, 0, 1, 1]`.
     */
    render(inputTarget: RenderTarget, outputTarget: RenderTarget, rect?: Vec4): void;
    /**
     * Draw a screen-space rectangle in a render target, using a specified shader.
     *
     * @param {RenderTarget|null} target - The output render target.
     * @param {Shader} shader - The shader to be used for drawing the rectangle.
     * @param {Vec4} [rect] - The normalized screen-space position (rect.x, rect.y) and size (rect.z,
     * rect.w) of the rectangle. Default is `[0, 0, 1, 1]`.
     */
    drawQuad(target: RenderTarget | null, shader: Shader, rect?: Vec4): void;
}

/**
 * Used to manage multiple post effects for a camera.
 *
 * @category Graphics
 */
declare class PostEffectQueue {
    /**
     * Create a new PostEffectQueue instance.
     *
     * @param {AppBase} app - The application.
     * @param {CameraComponent} camera - The camera component.
     */
    constructor(app: AppBase, camera: CameraComponent);
    /** The application this queue belongs to. */
    app: AppBase;
    /** The camera component the effects are applied to. */
    camera: CameraComponent;
    /**
     * Render target where the postprocessed image needs to be rendered to. Defaults to null,
     * which is the main framebuffer.
     *
     * @type {RenderTarget}
     * @ignore
     */
    destinationRenderTarget: RenderTarget;
    /**
     * All of the post effects in the queue.
     *
     * @type {PostEffectEntry[]}
     * @ignore
     */
    effects: PostEffectEntry[];
    /**
     * If the queue is enabled it will render all of its effects, otherwise it will not render
     * anything.
     *
     * @type {boolean}
     * @ignore
     */
    enabled: boolean;
    // Depth render target requested for effects that need a depth map — see
    // _requestDepthMaps()/_releaseDepthMaps(); presumably shared by all effects (confirm).
    depthTarget: any;
    /**
     * Allocate a color buffer texture.
     *
     * @param {number} format - The format of the color buffer.
     * @param {string} name - The name of the color buffer.
     * @returns {Texture} The color buffer texture.
     * @private
     */
    private _allocateColorBuffer;
    /**
     * Creates a render target with the dimensions of the canvas, with an optional depth buffer.
     *
     * @param {boolean} useDepth - Set to true to create a render target with a depth buffer.
     * @param {boolean} hdr - Use HDR render target format.
     * @returns {RenderTarget} The render target.
     * @private
     */
    private _createOffscreenTarget;
    /** Resizes the given offscreen render target to the current canvas dimensions. */
    _resizeOffscreenTarget(rt: any): void;
    /** Destroys the given offscreen render target and its buffers. */
    _destroyOffscreenTarget(rt: any): void;
    /**
     * Adds a post effect to the queue. If the queue is disabled adding a post effect will
     * automatically enable the queue.
     *
     * @param {PostEffect} effect - The post effect to add to the queue.
     */
    addEffect(effect: PostEffect): void;
    // Offscreen render target the scene is rendered into before effects run —
    // NOTE(review): confirm exact role in the implementation.
    _sourceTarget: any;
    // The most recently added post effect — set in addEffect(); confirm lifetime.
    _newPostEffect: PostEffect;
    /**
     * Removes a post effect from the queue. If the queue becomes empty it will be disabled
     * automatically.
     *
     * @param {PostEffect} effect - The post effect to remove.
     */
    removeEffect(effect: PostEffect): void;
    // Request/release the depth map(s) used by effects with needsDepthBuffer set.
    _requestDepthMaps(): void;
    _releaseDepthMaps(): void;
    _requestDepthMap(): void;
    _releaseDepthMap(): void;
    /**
     * Removes all the effects from the queue and disables it.
     */
    destroy(): void;
    /**
     * Enables the queue and all of its effects. If there are no effects then the queue will not be
     * enabled.
     */
    enable(): void;
    /**
     * Disables the queue and all of its effects.
     */
    disable(): void;
    /**
     * Handler called when the application's canvas element is resized.
     *
     * @param {number} width - The new width of the canvas.
     * @param {number} height - The new height of the canvas.
     * @private
     */
    private _onCanvasResized;
    /** Resizes the queue's render targets (see _onCanvasResized and onCameraRectChanged). */
    resizeRenderTargets(): void;
    /** Handler called when the camera's rect property changes. */
    onCameraRectChanged(name: any, oldValue: any, newValue: any): void;
}

/**
 * @import { AppBase } from '../../app-base.js'
 * @import { CameraComponent } from './component.js'
 * @import { PostEffect } from '../../../scene/graphics/post-effect.js'
 */
/**
 * Internal entry of a {@link PostEffectQueue}, pairing a post effect with the render targets it
 * reads from and writes to.
 *
 * @ignore
 */
declare class PostEffectEntry {
    constructor(effect: any, inputTarget: any);
    /** The {@link PostEffect} this entry wraps. */
    effect: any;
    /** Render target the effect reads from. */
    inputTarget: any;
    /** Render target the effect writes to — presumably null means the screen (confirm). */
    outputTarget: any;
    /** Name of the effect — NOTE(review): presumably derived from its constructor; verify. */
    name: any;
}

/**
 * @import { XrManager } from './xr-manager.js'
 */
/**
 * DOM Overlay provides the ability to use DOM elements as an overlay in a WebXR AR session. It
 * requires that the root DOM element is provided for session start. That way, input source
 * `select` events are first tested against DOM Elements and then propagated down to the XR
 * Session. If this propagation is not desirable, use the `beforexrselect` event on a DOM element
 * and the `preventDefault` function to stop propagation.
 *
 * ```javascript
 * app.xr.domOverlay.root = element;
 * app.xr.start(camera, pc.XRTYPE_AR, pc.XRSPACE_LOCALFLOOR);
 * ```
 *
 * ```javascript
 * // Disable input source firing `select` event when some descendant element of DOM overlay root
 * // is touched/clicked. This is useful when the user interacts with UI elements and there should
 * // not be `select` events behind UI.
 * someElement.addEventListener('beforexrselect', (evt) => {
 *     evt.preventDefault();
 * });
 * ```
 *
 * @category XR
 */
declare class XrDomOverlay {
    /**
     * Create a new XrDomOverlay instance.
     *
     * @param {XrManager} manager - WebXR Manager.
     * @ignore
     */
    constructor(manager: XrManager);
    /**
     * The WebXR manager this DOM Overlay interface belongs to.
     *
     * @type {XrManager}
     * @private
     */
    private _manager;
    /**
     * True if DOM Overlay is supported.
     *
     * @type {boolean}
     * @private
     */
    private _supported;
    /**
     * Root DOM element used for the overlay, or null if not set.
     *
     * @type {Element|null}
     * @private
     */
    private _root;
    /**
     * True if DOM Overlay is supported.
     *
     * @type {boolean}
     */
    get supported(): boolean;
    /**
     * True if DOM Overlay is available. This information becomes available only when the session has
     * started and a valid root DOM element has been provided.
     *
     * @type {boolean}
     */
    get available(): boolean;
    /**
     * State of the DOM Overlay, which defines how the root DOM element is rendered. Can be:
     *
     * - `screen` - indicates that the DOM element is covering the whole physical screen, matching
     * XR viewports.
     * - `floating` - indicates that the underlying platform renders the DOM element as floating in
     * space, which can move during the WebXR session or allow the application to move the element.
     * - `head-locked` - indicates that the DOM element follows the user's head movement
     * consistently, appearing similar to a helmet heads-up display.
     *
     * @type {"screen"|"floating"|"head-locked"|null}
     */
    get state(): "screen" | "floating" | "head-locked" | null;
    /**
     * Sets the DOM element to be used as the root for DOM Overlay. Can be changed only when the XR
     * session is not running.
     *
     * @type {Element|null}
     * @example
     * app.xr.domOverlay.root = element;
     * app.xr.start(camera, pc.XRTYPE_AR, pc.XRSPACE_LOCALFLOOR);
     */
    set root(value: Element | null);
    /**
     * Gets the DOM element to be used as the root for DOM Overlay.
     *
     * @type {Element|null}
     */
    get root(): Element | null;
}

/**
 * @import { XrHand } from './xr-hand.js'
 * @import { XrJoint } from './xr-joint.js'
 */
/**
 * Represents a finger of a tracked {@link XrHand} with related joints and index.
 *
 * @category XR
 */
declare class XrFinger {
    /**
     * Create a new XrFinger instance.
     *
     * @param {number} index - Index of the finger.
     * @param {XrHand} hand - Hand that the finger belongs to.
     * @ignore
     */
    constructor(index: number, hand: XrHand);
    /**
     * Index of the finger (enumeration: thumb, index, middle, ring, little).
     *
     * @type {number}
     * @private
     */
    private _index;
    /**
     * Hand that the finger belongs to.
     *
     * @type {XrHand}
     * @private
     */
    private _hand;
    /**
     * Joints of this finger, from closest to the wrist to the tip.
     *
     * @type {XrJoint[]}
     * @private
     */
    private _joints;
    /**
     * Tip joint of the finger, or null if not available.
     *
     * @type {XrJoint|null}
     * @private
     */
    private _tip;
    /**
     * Gets the index of the finger. Enumeration is: thumb, index, middle, ring, little.
     *
     * @type {number}
     */
    get index(): number;
    /**
     * Gets the hand that the finger belongs to.
     *
     * @type {XrHand}
     */
    get hand(): XrHand;
    /**
     * Array of joints that belong to this finger, starting from joint closest to wrist all the way
     * to the tip of a finger.
     *
     * @type {XrJoint[]}
     */
    get joints(): XrJoint[];
    /**
     * Tip joint of the finger, or null if not available.
     *
     * @type {XrJoint|null}
     */
    get tip(): XrJoint | null;
}

/**
 * Represents the joint of a finger.
 *
 * @category XR
 */
declare class XrJoint {
    /**
     * Create an XrJoint instance.
     *
     * @param {number} index - Index of a joint within a finger.
     * @param {XRHandJoint} id - Id of a joint based on WebXR Hand Input Specs.
     * @param {XrHand} hand - Hand that joint relates to.
     * @param {XrFinger|null} finger - Finger that joint is related to. Can be null in the case of
     * the wrist joint.
     * @ignore
     */
    constructor(index: number, id: XRHandJoint, hand: XrHand, finger?: XrFinger | null);
    /**
     * Index of the joint within its finger.
     *
     * @type {number}
     * @private
     */
    private _index;
    /**
     * Id of the joint based on WebXR Hand Input Specs.
     *
     * @type {XRHandJoint}
     * @private
     */
    private _id;
    /**
     * Hand that the joint relates to.
     *
     * @type {XrHand}
     * @private
     */
    private _hand;
    /**
     * Finger that the joint relates to, or null for the wrist joint.
     *
     * @type {XrFinger|null}
     * @private
     */
    private _finger;
    /**
     * True if this joint is the wrist.
     *
     * @type {boolean}
     * @private
     */
    private _wrist;
    /**
     * True if this joint is a fingertip.
     *
     * @type {boolean}
     * @private
     */
    private _tip;
    /**
     * Joint radius, or null if not yet available.
     *
     * @type {number|null}
     * @private
     */
    private _radius;
    /**
     * Cached local transform, rebuilt from the XRJointPose — see update().
     *
     * @type {Mat4}
     * @private
     */
    private _localTransform;
    /**
     * Cached world transform — see _updateTransforms.
     *
     * @type {Mat4}
     * @private
     */
    private _worldTransform;
    /**
     * Local space position from the last pose update.
     *
     * @type {Vec3}
     * @private
     */
    private _localPosition;
    /**
     * Local space rotation from the last pose update.
     *
     * @type {Quat}
     * @private
     */
    private _localRotation;
    /**
     * World space position — presumably lazily recomputed when dirty (confirm).
     *
     * @type {Vec3}
     * @private
     */
    private _position;
    /**
     * World space rotation — presumably lazily recomputed when dirty (confirm).
     *
     * @type {Quat}
     * @private
     */
    private _rotation;
    /**
     * True when cached world transform data is stale — see _updateTransforms.
     *
     * @type {boolean}
     * @private
     */
    private _dirtyLocal;
    /**
     * @param {XRJointPose} pose - XRJointPose of this joint.
     * @ignore
     */
    update(pose: XRJointPose): void;
    /** @private */
    private _updateTransforms;
    /**
     * Get the world space position of a joint.
     *
     * @returns {Vec3} The world space position of a joint.
     */
    getPosition(): Vec3;
    /**
     * Get the world space rotation of a joint.
     *
     * @returns {Quat} The world space rotation of a joint.
     */
    getRotation(): Quat;
    /**
     * Id of a joint based on WebXR Hand Input Specs.
     *
     * @type {XRHandJoint}
     */
    get id(): XRHandJoint;
    /**
     * Index of a joint within a finger, starting from 0 (root of a finger) all the way to tip of
     * the finger.
     *
     * @type {number}
     */
    get index(): number;
    /**
     * Hand that joint relates to.
     *
     * @type {XrHand}
     */
    get hand(): XrHand;
    /**
     * Finger that joint relates to.
     *
     * @type {XrFinger|null}
     */
    get finger(): XrFinger | null;
    /**
     * True if joint is a wrist.
     *
     * @type {boolean}
     */
    get wrist(): boolean;
    /**
     * True if joint is a tip of a finger.
     *
     * @type {boolean}
     */
    get tip(): boolean;
    /**
     * The radius of a joint, which is a distance from joint to the edge of a skin.
     *
     * @type {number}
     */
    get radius(): number;
}

/**
 * Represents a hand with fingers and joints.
 *
 * @category XR
 */
declare class XrHand extends EventHandler {
    /**
     * Fired when tracking becomes available.
     *
     * @event
     * @example
     * hand.on('tracking', () => {
     *     console.log('Hand tracking is available');
     * });
     */
    static EVENT_TRACKING: string;
    /**
     * Fired when tracking is lost.
     *
     * @event
     * @example
     * hand.on('trackinglost', () => {
     *     console.log('Hand tracking is lost');
     * });
     */
    static EVENT_TRACKINGLOST: string;
    /**
     * Represents a hand with fingers and joints.
     *
     * @param {XrInputSource} inputSource - Input Source that hand is related to.
     * @ignore
     */
    constructor(inputSource: XrInputSource);
    /**
     * The WebXR manager.
     *
     * @type {XrManager}
     * @private
     */
    private _manager;
    /**
     * Input source that this hand is related to.
     *
     * @type {XrInputSource}
     * @private
     */
    private _inputSource;
    /**
     * True if tracking is currently available.
     *
     * @type {boolean}
     * @private
     */
    private _tracking;
    /**
     * Fingers of the hand.
     *
     * @type {XrFinger[]}
     * @private
     */
    private _fingers;
    /**
     * All joints of the hand.
     *
     * @type {XrJoint[]}
     * @private
     */
    private _joints;
    /**
     * Joints indexed by their XRHandJoint id — see getJointById().
     *
     * @type {Object<string, XrJoint>}
     * @private
     */
    private _jointsById;
    /**
     * Joints that are fingertips.
     *
     * @type {XrJoint[]}
     * @private
     */
    private _tips;
    /**
     * Wrist joint, or null if not available.
     *
     * @type {XrJoint|null}
     * @private
     */
    private _wrist;
    /**
     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
     * @ignore
     */
    update(frame: XRFrame): void;
    /**
     * @param {number} index - Finger index.
     * @returns {boolean} True if finger is closed and false otherwise.
     * @private
     */
    private _fingerIsClosed;
    /**
     * Returns joint by its XRHand id.
     *
     * @param {string} id - Id of a joint based on the spec IDs in XRHand: https://immersive-web.github.io/webxr-hand-input/#skeleton-joints-section.
     * @returns {XrJoint|null} Joint or null if not available.
     */
    getJointById(id: string): XrJoint | null;
    /**
     * Array of fingers of the hand.
     *
     * @type {XrFinger[]}
     */
    get fingers(): XrFinger[];
    /**
     * Array of joints in the hand.
     *
     * @type {XrJoint[]}
     */
    get joints(): XrJoint[];
    /**
     * Array of joints that are fingertips.
     *
     * @type {XrJoint[]}
     */
    get tips(): XrJoint[];
    /**
     * Wrist of a hand, or null if it is not available by WebXR underlying system.
     *
     * @type {XrJoint|null}
     */
    get wrist(): XrJoint | null;
    /**
     * True if tracking is available, otherwise tracking might be lost.
     *
     * @type {boolean}
     */
    get tracking(): boolean;
}

/**
 * Represents XR input source, which is any input mechanism which allows the user to perform
 * targeted actions in the same virtual space as the viewer. Example XR input sources include, but
 * are not limited to: handheld controllers, optically tracked hands, touch screen taps, and
 * gaze-based input methods that operate on the viewer's pose.
 *
 * @category XR
 */
declare class XrInputSource extends EventHandler {
    /**
     * Fired when {@link XrInputSource} is removed.
     *
     * @event
     * @example
     * inputSource.once('remove', () => {
     *     // input source is not available anymore
     * });
     */
    static EVENT_REMOVE: string;
    /**
     * Fired when input source has triggered primary action. This could be pressing a trigger
     * button, or touching a screen. The handler is passed an
     * [XRInputSourceEvent](https://developer.mozilla.org/en-US/docs/Web/API/XRInputSourceEvent)
     * object from the WebXR API.
     *
     * @event
     * @example
     * const ray = new pc.Ray();
     * inputSource.on('select', (evt) => {
     *     ray.set(inputSource.getOrigin(), inputSource.getDirection());
     *     if (obj.intersectsRay(ray)) {
     *         // selected an object with input source
     *     }
     * });
     */
    static EVENT_SELECT: string;
    /**
     * Fired when input source has started to trigger primary action. The handler is passed an
     * [XRInputSourceEvent](https://developer.mozilla.org/en-US/docs/Web/API/XRInputSourceEvent)
     * object from the WebXR API.
     *
     * @event
     * @example
     * inputSource.on('selectstart', (evt) => {
     *     console.log('Select started');
     * });
     */
    static EVENT_SELECTSTART: string;
    /**
     * Fired when input source has ended triggering primary action. The handler is passed an
     * [XRInputSourceEvent](https://developer.mozilla.org/en-US/docs/Web/API/XRInputSourceEvent)
     * object from the WebXR API.
     *
     * @event
     * @example
     * inputSource.on('selectend', (evt) => {
     *     console.log('Select ended');
     * });
     */
    static EVENT_SELECTEND: string;
    /**
     * Fired when input source has triggered squeeze action. This is associated with "grabbing"
     * action on the controllers. The handler is passed an
     * [XRInputSourceEvent](https://developer.mozilla.org/en-US/docs/Web/API/XRInputSourceEvent)
     * object from the WebXR API.
     *
     * @event
     * @example
     * inputSource.on('squeeze', (evt) => {
     *     console.log('Squeeze');
     * });
     */
    static EVENT_SQUEEZE: string;
    /**
     * Fired when input source has started to trigger squeeze action. The handler is passed an
     * [XRInputSourceEvent](https://developer.mozilla.org/en-US/docs/Web/API/XRInputSourceEvent)
     * object from the WebXR API.
     *
     * @event
     * @example
     * inputSource.on('squeezestart', (evt) => {
     *     if (obj.containsPoint(inputSource.getPosition())) {
     *         // grabbed an object
     *     }
     * });
     */
    static EVENT_SQUEEZESTART: string;
    /**
     * Fired when input source has ended triggering squeeze action. The handler is passed an
     * [XRInputSourceEvent](https://developer.mozilla.org/en-US/docs/Web/API/XRInputSourceEvent)
     * object from the WebXR API.
     *
     * @event
     * @example
     * inputSource.on('squeezeend', (evt) => {
     *     console.log('Squeeze ended');
     * });
     */
    static EVENT_SQUEEZEEND: string;
    /**
     * Fired when new {@link XrHitTestSource} is added to the input source. The handler is passed
     * the {@link XrHitTestSource} object that has been added.
     *
     * @event
     * @example
     * inputSource.on('hittest:add', (hitTestSource) => {
     *     // new hit test source is added
     * });
     */
    static EVENT_HITTESTADD: string;
    /**
     * Fired when {@link XrHitTestSource} is removed from the input source. The handler is passed
     * the {@link XrHitTestSource} object that has been removed.
     *
     * @event
     * @example
     * inputSource.on('hittest:remove', (hitTestSource) => {
     *     // hit test source is removed
     * });
     */
    static EVENT_HITTESTREMOVE: string;
    /**
     * Fired when hit test source receives new results. It provides transform information that
     * tries to match real world picked geometry. The handler is passed the {@link XrHitTestSource}
     * object that produced the hit result, the {@link Vec3} position, the {@link Quat}
     * rotation and the [XRHitTestResult](https://developer.mozilla.org/en-US/docs/Web/API/XRHitTestResult)
     * object that is created by the WebXR API.
     *
     * @event
     * @example
     * inputSource.on('hittest:result', (hitTestSource, position, rotation, hitTestResult) => {
     *     target.setPosition(position);
     *     target.setRotation(rotation);
     * });
     */
    static EVENT_HITTESTRESULT: string;
    /**
     * Create a new XrInputSource instance.
     *
     * @param {XrManager} manager - WebXR Manager.
     * @param {XRInputSource} xrInputSource - A WebXR input source.
     * @ignore
     */
    constructor(manager: XrManager, xrInputSource: XRInputSource);
    /**
     * Unique id of this input source instance.
     *
     * @type {number}
     * @private
     */
    private _id;
    /**
     * The WebXR manager.
     *
     * @type {XrManager}
     * @private
     */
    private _manager;
    /**
     * Underlying WebXR input source object.
     *
     * @type {XRInputSource}
     * @private
     */
    private _xrInputSource;
    /**
     * World space target ray — see getOrigin()/getDirection().
     *
     * @type {Ray}
     * @private
     */
    private _ray;
    /**
     * Local space target ray.
     *
     * @type {Ray}
     * @private
     */
    private _rayLocal;
    /**
     * True if the input source provides a grip pose (can be held).
     *
     * @type {boolean}
     * @private
     */
    private _grip;
    /**
     * Tracked hand, or null if this source is not a hand.
     *
     * @type {XrHand|null}
     * @private
     */
    private _hand;
    // Cached pose/velocity state below, updated each frame in update() —
    // see _updateTransforms()/_updateRayTransforms().
    /**
     * @type {boolean}
     * @private
     */
    private _velocitiesAvailable;
    /**
     * @type {number}
     * @private
     */
    private _velocitiesTimestamp;
    /**
     * @type {Mat4|null}
     * @private
     */
    private _localTransform;
    /**
     * @type {Mat4|null}
     * @private
     */
    private _worldTransform;
    /**
     * @type {Vec3}
     * @private
     */
    private _position;
    /**
     * @type {Quat}
     * @private
     */
    private _rotation;
    /**
     * @type {Vec3|null}
     * @private
     */
    private _localPosition;
    /**
     * @type {Vec3|null}
     * @private
     */
    private _localPositionLast;
    /**
     * @type {Quat|null}
     * @private
     */
    private _localRotation;
    /**
     * @type {Vec3|null}
     * @private
     */
    private _linearVelocity;
    /**
     * @type {boolean}
     * @private
     */
    private _dirtyLocal;
    /**
     * @type {boolean}
     * @private
     */
    private _dirtyRay;
    /**
     * True while a primary (select) action is active.
     *
     * @type {boolean}
     * @private
     */
    private _selecting;
    /**
     * True while a squeeze action is active.
     *
     * @type {boolean}
     * @private
     */
    private _squeezing;
    /**
     * True if this input source can interact with {@link ElementComponent}s.
     *
     * @type {boolean}
     * @private
     */
    private _elementInput;
    /**
     * Entity with an Element component currently hovered, or null.
     *
     * @type {Entity|null}
     * @private
     */
    private _elementEntity;
    /**
     * Active hit test sources associated with this input source.
     *
     * @type {XrHitTestSource[]}
     * @private
     */
    private _hitTestSources;
    /**
     * Unique number associated with instance of input source. Same physical devices when
     * reconnected will not share this ID.
     *
     * @type {number}
     */
    get id(): number;
    /**
     * XRInputSource object that is associated with this input source.
     *
     * @type {XRInputSource}
     */
    get inputSource(): XRInputSource;
    /**
     * Type of ray Input Device is based on. Can be one of the following:
     *
     * - {@link XRTARGETRAY_GAZE}: Gaze - indicates the target ray will originate at the viewer and
     * follow the direction it is facing. This is commonly referred to as a "gaze input" device in
     * the context of head-mounted displays.
     * - {@link XRTARGETRAY_SCREEN}: Screen - indicates that the input source was an interaction
     * with the canvas element associated with an inline session's output context, such as a mouse
     * click or touch event.
     * - {@link XRTARGETRAY_POINTER}: Tracked Pointer - indicates that the target ray originates
     * from either a handheld device or other hand-tracking mechanism and represents that the user
     * is using their hands or the held device for pointing.
     *
     * @type {string}
     */
    get targetRayMode(): string;
    /**
     * Describes which hand input source is associated with. Can be one of the following:
     *
     * - {@link XRHAND_NONE}: None - input source is not meant to be held in hands.
     * - {@link XRHAND_LEFT}: Left - indicates that input source is meant to be held in left hand.
     * - {@link XRHAND_RIGHT}: Right - indicates that input source is meant to be held in right
     * hand.
     *
     * @type {string}
     */
    get handedness(): string;
    /**
     * List of input profile names indicating both the preferred visual representation and behavior
     * of the input source.
     *
     * @type {string[]}
     */
    get profiles(): string[];
    /**
     * If input source can be held, then it will have a node with its world transformation, that can
     * be used to position and rotate visual object based on it.
     *
     * @type {boolean}
     */
    get grip(): boolean;
    /**
     * If input source is a tracked hand, then it will point to {@link XrHand} otherwise it is
     * null.
     *
     * @type {XrHand|null}
     */
    get hand(): XrHand | null;
    /**
     * If input source has buttons, triggers, thumbstick or touchpad, then this object provides
     * access to its states.
     *
     * @type {Gamepad|null}
     */
    get gamepad(): Gamepad | null;
    /**
     * True if input source is in active primary action between selectstart and selectend events.
     *
     * @type {boolean}
     */
    get selecting(): boolean;
    /**
     * True if input source is in active squeeze action between squeezestart and squeezeend events.
     *
     * @type {boolean}
     */
    get squeezing(): boolean;
    /**
     * Sets whether the input source can interact with {@link ElementComponent}s. Defaults to true.
     *
     * @type {boolean}
     */
    set elementInput(value: boolean);
    /**
     * Gets whether the input source can interact with {@link ElementComponent}s.
     *
     * @type {boolean}
     */
    get elementInput(): boolean;
    /**
     * If {@link XrInputSource#elementInput} is true, this property will hold entity with Element
     * component at which this input source is hovering, or null if not hovering over any element.
     *
     * @type {Entity|null}
     */
    get elementEntity(): Entity | null;
    /**
     * List of active {@link XrHitTestSource} instances associated with this input source.
     *
     * @type {XrHitTestSource[]}
     */
    get hitTestSources(): XrHitTestSource[];
    /**
     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
     * @ignore
     */
    update(frame: XRFrame): void;
    /** @private */
    private _updateTransforms;
    /** @private */
    private _updateRayTransforms;
    /**
     * Get the world space position of input source if it is handheld ({@link XrInputSource#grip}
     * is true). Otherwise it will return null.
     *
     * @returns {Vec3|null} The world space position of handheld input source.
     */
    getPosition(): Vec3 | null;
    /**
     * Get the local space position of input source if it is handheld ({@link XrInputSource#grip}
     * is true). Local space is relative to parent of the XR camera. Otherwise it will return null.
     *
     * @returns {Vec3|null} The local space position of handheld input source.
     */
    getLocalPosition(): Vec3 | null;
    /**
     * Get the world space rotation of input source if it is handheld ({@link XrInputSource#grip}
     * is true). Otherwise it will return null.
     *
     * @returns {Quat|null} The world space rotation of handheld input source.
     */
    getRotation(): Quat | null;
    /**
     * Get the local space rotation of input source if it is handheld ({@link XrInputSource#grip}
     * is true). Local space is relative to parent of the XR camera. Otherwise it will return null.
     *
     * @returns {Quat|null} The local space rotation of handheld input source.
     */
    getLocalRotation(): Quat | null;
    /**
     * Get the linear velocity (units per second) of the input source if it is handheld
     * ({@link XrInputSource#grip} is true). Otherwise it will return null.
     *
     * @returns {Vec3|null} The world space linear velocity of the handheld input source.
     */
    getLinearVelocity(): Vec3 | null;
    /**
     * Get the world space origin of input source ray.
     *
     * @returns {Vec3} The world space origin of input source ray.
     */
    getOrigin(): Vec3;
    /**
     * Get the world space direction of input source ray.
     *
     * @returns {Vec3} The world space direction of input source ray.
     */
    getDirection(): Vec3;
    /**
     * Attempts to start hit test source based on this input source.
     *
     * @param {object} [options] - Object for passing optional arguments.
     * @param {string[]} [options.entityTypes] - Optional list of underlying entity types against
     * which hit tests will be performed. Defaults to [{@link XRTRACKABLE_PLANE}]. Can be any
     * combination of the following:
     *
     * - {@link XRTRACKABLE_POINT}: Point - indicates that the hit test results will be computed
     * based on the feature points detected by the underlying Augmented Reality system.
     * - {@link XRTRACKABLE_PLANE}: Plane - indicates that the hit test results will be computed
     * based on the planes detected by the underlying Augmented Reality system.
     * - {@link XRTRACKABLE_MESH}: Mesh - indicates that the hit test results will be computed
     * based on the meshes detected by the underlying Augmented Reality system.
     *
     * @param {Ray} [options.offsetRay] - Optional ray by which hit test ray can be offset.
     * @param {XrHitTestStartCallback} [options.callback] - Optional callback function called once
     * hit test source is created or failed.
     * @example
     * app.xr.input.on('add', (inputSource) => {
     *     inputSource.hitTestStart({
     *         callback: (err, hitTestSource) => {
     *             if (err) return;
     *             hitTestSource.on('result', (position, rotation, inputSource, hitTestResult) => {
     *                 // position and rotation of hit test result
     *                 // that will be created from touch on mobile devices
     *             });
     *         }
     *     });
     * });
     */
    hitTestStart(options?: {
        entityTypes?: string[];
        offsetRay?: Ray;
        callback?: XrHitTestStartCallback;
    }): void;
    /**
     * @param {XrHitTestSource} hitTestSource - Hit test source to be added.
     * @private
     */
    private onHitTestSourceAdd;
    /**
     * @param {XrHitTestSource} hitTestSource - Hit test source to be removed.
     * @private
     */
    private onHitTestSourceRemove;
}

/**
 * Represents XR hit test source, which provides access to hit results of real world geometry from
 * AR session.
 *
 * ```javascript
 * // start a hit test from a viewer origin forward
 * app.xr.hitTest.start({
 *     spaceType: pc.XRSPACE_VIEWER,
 *     callback: (err, hitTestSource) => {
 *         if (err) return;
 *         // subscribe to hit test results
 *         hitTestSource.on('result', (position, rotation, inputSource, hitTestResult) => {
 *             // position and rotation of hit test result
 *         });
 *     }
 * });
 * ```
 *
 * @category XR
 */
declare class XrHitTestSource extends EventHandler {
    /**
     * Fired when {@link XrHitTestSource} is removed.
     *
     * @event
     * @example
     * hitTestSource.once('remove', () => {
     *     // hit test source has been removed
     * });
     */
    static EVENT_REMOVE: string;
    /**
     * Fired when the hit test source receives new results. It provides transform information that
     * tries to match real world geometry. Callback provides the {@link Vec3} position, the
     * {@link Quat} rotation, the {@link XrInputSource} (if it is a transient hit test source)
     * and the [XRHitTestResult](https://developer.mozilla.org/en-US/docs/Web/API/XRHitTestResult)
     * object that is created by WebXR API.
     *
     * @event
     * @example
     * hitTestSource.on('result', (position, rotation, inputSource, hitTestResult) => {
     *     target.setPosition(position);
     *     target.setRotation(rotation);
     * });
     */
    static EVENT_RESULT: string;
    /**
     * Create a new XrHitTestSource instance.
     *
     * @param {XrManager} manager - WebXR Manager.
     * @param {XRHitTestSource} xrHitTestSource - XRHitTestSource object that is created by WebXR API.
     * @param {boolean} transient - True if XRHitTestSource created for input source profile.
     * @param {null|XrInputSource} inputSource - Input Source for which the hit test is created, or null.
     * @ignore
     */
    constructor(manager: XrManager, xrHitTestSource: XRHitTestSource, transient: boolean, inputSource?: null | XrInputSource);
    /**
     * The WebXR manager.
     *
     * @type {XrManager}
     * @private
     */
    private manager;
    /**
     * Underlying XRHitTestSource object created by the WebXR API.
     *
     * @type {XRHitTestSource}
     * @private
     */
    private _xrHitTestSource;
    /**
     * True if this source was created for an input source profile (transient).
     *
     * @type {boolean}
     * @private
     */
    private _transient;
    /**
     * Input source this hit test source belongs to, or null.
     *
     * @type {null|XrInputSource}
     * @private
     */
    private _inputSource;
    /**
     * Stop and remove hit test source.
     */
    remove(): void;
    /** @ignore */
    onStop(): void;
    /**
     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
     * @ignore
     */
    update(frame: XRFrame): void;
    /**
     * @param {XRTransientInputHitTestResult[]} results - Hit test results.
     * @param {null|XrInputSource} inputSource - Input source.
     * @private
     */
    private updateHitResults;
}

/**
 * Callback used by {@link XrHitTest#start} and {@link XrInputSource#hitTestStart}. Receives an
 * error (or null on success) and the created {@link XrHitTestSource} (or null on failure).
 */
type XrHitTestStartCallback = (err: Error | null, hitTestSource: XrHitTestSource | null) => void;
/**
 * @import { Ray } from '../../core/shape/ray.js'
 * @import { XrInputSource } from './xr-input-source.js'
 * @import { XrManager } from './xr-manager.js'
 */
/**
 * @callback XrHitTestStartCallback
 * Callback used by {@link XrHitTest#start} and {@link XrInputSource#hitTestStart}.
 * @param {Error|null} err - The Error object if failed to create hit test source or null.
 * @param {XrHitTestSource|null} hitTestSource - Object that provides access to hit results against
 * real world geometry.
 * @returns {void}
 */
/**
 * The Hit Test interface allows initiating hit testing against real-world geometry from various
 * sources: the view, input sources, or an arbitrary ray in space. Results reflect the underlying
 * AR system's understanding of the real world.
 *
 * @category XR
 */
declare class XrHitTest extends EventHandler {
    /**
     * Fired when hit test becomes available.
     *
     * @event
     * @example
     * app.xr.hitTest.on('available', () => {
     *     console.log('Hit Testing is available');
     * });
     */
    static EVENT_AVAILABLE: string;
    /**
     * Fired when hit test becomes unavailable.
     *
     * @event
     * @example
     * app.xr.hitTest.on('unavailable', () => {
     *     console.log('Hit Testing is unavailable');
     * });
     */
    static EVENT_UNAVAILABLE: string;
    /**
     * Fired when new {@link XrHitTestSource} is added to the list. The handler is passed the
     * {@link XrHitTestSource} object that has been added.
     *
     * @event
     * @example
     * app.xr.hitTest.on('add', (hitTestSource) => {
     *     // new hit test source is added
     * });
     */
    static EVENT_ADD: string;
    /**
     * Fired when {@link XrHitTestSource} is removed from the list. The handler is passed the
     * {@link XrHitTestSource} object that has been removed.
     *
     * @event
     * @example
     * app.xr.hitTest.on('remove', (hitTestSource) => {
     *     // hit test source is removed
     * });
     */
    static EVENT_REMOVE: string;
    /**
     * Fired when hit test source receives new results. It provides transform information that
     * tries to match real world picked geometry. The handler is passed the {@link XrHitTestSource}
     * that produced the hit result, the {@link Vec3} position, the {@link Quat} rotation and the
     * {@link XrInputSource} (if it is a transient hit test source).
     *
     * @event
     * @example
     * app.xr.hitTest.on('result', (hitTestSource, position, rotation, inputSource) => {
     *     target.setPosition(position);
     *     target.setRotation(rotation);
     * });
     */
    static EVENT_RESULT: string;
    /**
     * Fired when failing to create a hit test source. The handler is passed the Error object.
     *
     * @event
     * @example
     * app.xr.hitTest.on('error', (err) => {
     *     console.error(err.message);
     * });
     */
    static EVENT_ERROR: string;
    /**
     * Create a new XrHitTest instance.
     *
     * @param {XrManager} manager - WebXR Manager.
     * @ignore
     */
    constructor(manager: XrManager);
    /**
     * @type {XrManager}
     * @private
     */
    private manager;
    /**
     * @type {boolean}
     * @private
     */
    private _supported;
    /**
     * @type {boolean}
     * @private
     */
    private _available;
    /**
     * @type {boolean}
     * @private
     */
    private _checkingAvailability;
    /**
     * List of active {@link XrHitTestSource}.
     *
     * @type {XrHitTestSource[]}
     */
    sources: XrHitTestSource[];
    /** @private */
    private _onSessionStart;
    /** @private */
    private _onSessionEnd;
    /**
     * Attempts to start hit test with provided reference space.
     *
     * @param {object} [options] - Optional object for passing arguments.
     * @param {string} [options.spaceType] - Reference space type. Defaults to
     * {@link XRSPACE_VIEWER}. Can be one of the following:
     *
     * - {@link XRSPACE_VIEWER}: Viewer - hit test will be facing relative to viewers space.
     * - {@link XRSPACE_LOCAL}: Local - represents a tracking space with a native origin near the
     * viewer at the time of creation.
     * - {@link XRSPACE_LOCALFLOOR}: Local Floor - represents a tracking space with a native origin
     * at the floor in a safe position for the user to stand. The y axis equals 0 at floor level.
     * Floor level value might be estimated by the underlying platform.
     * - {@link XRSPACE_BOUNDEDFLOOR}: Bounded Floor - represents a tracking space with its native
     * origin at the floor, where the user is expected to move within a pre-established boundary.
     * - {@link XRSPACE_UNBOUNDED}: Unbounded - represents a tracking space where the user is
     * expected to move freely around their environment, potentially long distances from their
     * starting point.
     *
     * @param {string} [options.profile] - If the hit test source is meant to match an input
     * source instead of a reference space, then the name of the profile of the
     * {@link XrInputSource} should be provided.
     * @param {string[]} [options.entityTypes] - Optional list of underlying entity types against
     * which hit tests will be performed. Defaults to [ {@link XRTRACKABLE_PLANE} ]. Can be any
     * combination of the following:
     *
     * - {@link XRTRACKABLE_POINT}: Point - indicates that the hit test results will be computed
     * based on the feature points detected by the underlying Augmented Reality system.
     * - {@link XRTRACKABLE_PLANE}: Plane - indicates that the hit test results will be computed
     * based on the planes detected by the underlying Augmented Reality system.
     * - {@link XRTRACKABLE_MESH}: Mesh - indicates that the hit test results will be computed
     * based on the meshes detected by the underlying Augmented Reality system.
     *
     * @param {Ray} [options.offsetRay] - Optional ray by which the
     * hit test ray can be offset.
     * @param {XrHitTestStartCallback} [options.callback] - Optional callback function called once
     * hit test source is created or failed.
     * @example
     * // start hit testing from viewer position facing forwards
     * app.xr.hitTest.start({
     *     spaceType: pc.XRSPACE_VIEWER,
     *     callback: (err, hitTestSource) => {
     *         if (err) return;
     *         hitTestSource.on('result', (position, rotation) => {
     *             // position and rotation of hit test result
     *         });
     *     }
     * });
     * @example
     * // start hit testing using an arbitrary ray
     * const ray = new pc.Ray(new pc.Vec3(0, 0, 0), new pc.Vec3(0, -1, 0));
     * app.xr.hitTest.start({
     *     spaceType: pc.XRSPACE_LOCAL,
     *     offsetRay: ray,
     *     callback: (err, hitTestSource) => {
     *         // hit test source that will sample real world geometry straight down
     *         // from the position where AR session started
     *     }
     * });
     * @example
     * // start hit testing for touch screen taps
     * app.xr.hitTest.start({
     *     profile: 'generic-touchscreen',
     *     callback: (err, hitTestSource) => {
     *         if (err) return;
     *         hitTestSource.on('result', (position, rotation, inputSource) => {
     *             // position and rotation of hit test result
     *             // that will be created from touch on mobile devices
     *         });
     *     }
     * });
     */
    start(options?: {
        spaceType?: string;
        profile?: string;
        entityTypes?: string[];
        offsetRay?: Ray;
        callback?: XrHitTestStartCallback;
    }): void;
    /**
     * @param {XRHitTestSource} xrHitTestSource - Hit test source.
     * @param {boolean} transient - True if hit test source is created from transient input source.
     * @param {XrInputSource|null} inputSource - Input Source with which hit test source is associated with.
     * @param {Function} callback - Callback called once hit test source is created.
     * @private
     */
    private _onHitTestSource;
    /**
     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
     * @ignore
     */
    update(frame: XRFrame): void;
    /**
     * True if AR Hit Test is supported.
     *
     * @type {boolean}
     */
    get supported(): boolean;
    /**
     * True if Hit Test is available. This information is available only when the session has started.
     *
     * @type {boolean}
     */
    get available(): boolean;
}

/**
 * The tracked image interface that is created by the Image Tracking system and is provided as a
 * list from {@link XrImageTracking#images}. It contains information about the tracking state as
 * well as the position and rotation of the tracked image.
 *
 * @category XR
 */
declare class XrTrackedImage extends EventHandler {
    /**
     * Fired when image becomes actively tracked.
     *
     * @event
     * @example
     * trackedImage.on('tracked', () => {
     *     console.log('Image is now tracked');
     * });
     */
    static EVENT_TRACKED: string;
    /**
     * Fired when image is no longer actively tracked.
     *
     * @event
     * @example
     * trackedImage.on('untracked', () => {
     *     console.log('Image is no longer tracked');
     * });
     */
    static EVENT_UNTRACKED: string;
    /**
     * Create a new XrTrackedImage instance.
     *
     * @param {HTMLCanvasElement|HTMLImageElement|SVGImageElement|HTMLVideoElement|Blob|ImageData|ImageBitmap} image - Image
     * that is matching the real world image as closely as possible. Resolution of images should be
     * at least 300x300. High resolution does NOT improve tracking performance. Color of image is
     * irrelevant, so grayscale images can be used. Images with too few geometric features or
     * repeating patterns will reduce tracking stability.
     * @param {number} width - Width (in meters) of image in real world. Providing this value as
     * close to the real value will improve tracking quality.
     * @ignore
     */
    constructor(image: HTMLCanvasElement | HTMLImageElement | SVGImageElement | HTMLVideoElement | Blob | ImageData | ImageBitmap, width: number);
    /**
     * @type {HTMLCanvasElement|HTMLImageElement|SVGImageElement|HTMLVideoElement|Blob|ImageData|ImageBitmap}
     * @private
     */
    private _image;
    /**
     * @type {number}
     * @private
     */
    private _width;
    /**
     * @type {ImageBitmap|null}
     * @private
     */
    private _bitmap;
    /**
     * @type {number}
     * @ignore
     */
    _measuredWidth: number;
    /**
     * @type {boolean}
     * @private
     */
    private _trackable;
    /**
     * @type {boolean}
     * @private
     */
    private _tracking;
    /**
     * @type {boolean}
     * @private
     */
    private _emulated;
    /**
     * @type {XRPose|null}
     * @ignore
     */
    _pose: XRPose | null;
    /**
     * @type {Vec3}
     * @private
     */
    private _position;
    /**
     * @type {Quat}
     * @private
     */
    private _rotation;
    /**
     * Image that is used for tracking.
     *
     * @type {HTMLCanvasElement|HTMLImageElement|SVGImageElement|HTMLVideoElement|Blob|ImageData|ImageBitmap}
     */
    get image(): HTMLCanvasElement | HTMLImageElement | SVGImageElement | HTMLVideoElement | Blob | ImageData | ImageBitmap;
    /**
     * Width that is provided to assist tracking performance. This property can be updated only
     * when the AR session is not running.
     *
     * @type {number}
     */
    set width(value: number);
    /**
     * Get the width (in meters) of image in real world.
     *
     * @type {number}
     */
    get width(): number;
    /**
     * True if image is trackable. A too small resolution or invalid images can be untrackable by
     * the underlying AR system.
     *
     * @type {boolean}
     */
    get trackable(): boolean;
    /**
     * True if image is in tracking state and being tracked in real world by the underlying AR
     * system.
     *
     * @type {boolean}
     */
    get tracking(): boolean;
    /**
     * True if image was recently tracked but currently is not actively tracked due to inability of
     * identifying the image by the underlying AR system. Position and rotation will be based on
     * the previously known transformation assuming the tracked image has not moved.
     *
     * @type {boolean}
     */
    get emulated(): boolean;
    /**
     * @returns {Promise<ImageBitmap>} Promise that resolves to an image bitmap.
     * @ignore
     */
    prepare(): Promise<ImageBitmap>;
    /**
     * Destroys the tracked image.
     *
     * @ignore
     */
    destroy(): void;
    /**
     * Get the world position of the tracked image.
     *
     * @returns {Vec3} Position in world space.
     * @example
     * // update entity position to match tracked image position
     * entity.setPosition(trackedImage.getPosition());
     */
    getPosition(): Vec3;
    /**
     * Get the world rotation of the tracked image.
     *
     * @returns {Quat} Rotation in world space.
     * @example
     * // update entity rotation to match tracked image rotation
     * entity.setRotation(trackedImage.getRotation());
     */
    getRotation(): Quat;
}

/**
 * @import { XrManager } from './xr-manager.js'
 */
/**
 * Image Tracking provides the ability to track real world images using provided image samples and
 * their estimated sizes. The underlying system will assume that the tracked image can move and
 * rotate in the real world and will try to provide transformation estimates and its tracking
 * state.
 *
 * @category XR
 */
declare class XrImageTracking extends EventHandler {
    /**
     * Fired when the XR session is started, but image tracking failed to process the provided
     * images. The handler is passed the Error object.
     *
     * @event
     * @example
     * app.xr.imageTracking.on('error', (err) => {
     *     console.error(err.message);
     * });
     */
    static EVENT_ERROR: string;
    /**
     * Create a new XrImageTracking instance.
     *
     * @param {XrManager} manager - WebXR Manager.
     * @ignore
     */
    constructor(manager: XrManager);
    /**
     * @type {XrManager}
     * @private
     */
    private _manager;
    /**
     * @type {boolean}
     * @private
     */
    private _supported;
    /**
     * @type {boolean}
     * @private
     */
    private _available;
    /**
     * @type {XrTrackedImage[]}
     * @private
     */
    private _images;
    /**
     * Add an image for image tracking. A width can also be provided to help the underlying system
     * estimate the appropriate transformation. Modifying the tracked images list is only possible
     * before an AR session is started.
     *
     * @param {HTMLCanvasElement|HTMLImageElement|SVGImageElement|HTMLVideoElement|Blob|ImageData|ImageBitmap} image -
     * Image that matches the real world image as closely as possible. Resolution of images should
     * be at least 300x300. High resolution does _not_ improve tracking performance. The color of
     * the image is irrelevant, so grayscale images can be used. Images with too few geometric
     * features or repeating patterns will reduce tracking stability.
     * @param {number} width - Width (in meters) of image in the real world. Providing this value
     * as close to the real value will improve tracking quality.
     * @returns {XrTrackedImage|null} Tracked image object that will contain tracking information.
     * Returns null if image tracking is not supported or if the XR manager is not active.
     * @example
     * // image of a book cover that has width of 20cm (0.2m)
     * app.xr.imageTracking.add(bookCoverImg, 0.2);
     */
    add(image: HTMLCanvasElement | HTMLImageElement | SVGImageElement | HTMLVideoElement | Blob | ImageData | ImageBitmap, width: number): XrTrackedImage | null;
    /**
     * Remove an image from image tracking.
     *
     * @param {XrTrackedImage} trackedImage - Tracked image to be removed. Modifying the tracked
     * images list is only possible before an AR session is started.
     */
    remove(trackedImage: XrTrackedImage): void;
    /** @private */
    private _onSessionStart;
    /** @private */
    private _onSessionEnd;
    /**
     * @param {Function} callback - Function to call when all images have been prepared as image
     * bitmaps.
     * @ignore
     */
    prepareImages(callback: Function): void;
    /**
     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
     * @ignore
     */
    update(frame: XRFrame): void;
    /**
     * True if Image Tracking is supported.
     *
     * @type {boolean}
     */
    get supported(): boolean;
    /**
     * True if Image Tracking is available. This information is only available when the
     * XR session has started, and will be true if image tracking is supported and
     * images were provided and they have been processed successfully.
     *
     * @type {boolean}
     */
    get available(): boolean;
    /**
     * List of {@link XrTrackedImage} that contain tracking information.
     *
     * @type {XrTrackedImage[]}
     */
    get images(): XrTrackedImage[];
}

/**
 * Represents a detected plane in the real world, providing its position, rotation, polygon points,
 * and semantic label. The plane data may change over time as the system updates its understanding
 * of the environment. Instances of this class are created and managed by the
 * {@link XrPlaneDetection} system.
 *
 * @category XR
 */
declare class XrPlane extends EventHandler {
    /**
     * Fired when an {@link XrPlane} is removed.
     *
     * @event
     * @example
     * plane.once('remove', () => {
     *     // plane is not available anymore
     * });
     */
    static EVENT_REMOVE: string;
    /**
     * Fired when {@link XrPlane} attributes such as: orientation and/or points have been changed.
     * Position and rotation can change at any time without triggering a `change` event.
     *
     * @event
     * @example
     * plane.on('change', () => {
     *     // plane has been changed
     * });
     */
    static EVENT_CHANGE: string;
    /**
     * Create a new XrPlane instance.
     *
     * @param {XrPlaneDetection} planeDetection - Plane detection system.
     * @param {*} xrPlane - XRPlane that is instantiated by WebXR system.
     * @ignore
     */
    constructor(planeDetection: XrPlaneDetection, xrPlane: any);
    /**
     * @type {number}
     * @private
     */
    private _id;
    /**
     * @type {XrPlaneDetection}
     * @private
     */
    private _planeDetection;
    /**
     * @type {XRPlane}
     * @private
     */
    private _xrPlane;
    /**
     * @type {number}
     * @private
     */
    private _lastChangedTime;
    /**
     * @type {"horizontal"|"vertical"|null}
     * @private
     */
    private _orientation;
    /**
     * @type {Vec3}
     * @private
     */
    private _position;
    /**
     * @type {Quat}
     * @private
     */
    private _rotation;
    /** @ignore */
    destroy(): void;
    /**
     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
     * @ignore
     */
    update(frame: XRFrame): void;
    /**
     * Get the world space position of a plane.
     *
     * @returns {Vec3} The world space position of a plane.
     */
    getPosition(): Vec3;
    /**
     * Get the world space rotation of a plane.
     *
     * @returns {Quat} The world space rotation of a plane.
     */
    getRotation(): Quat;
    /**
     * Unique identifier of a plane.
     *
     * @type {number}
     */
    get id(): number;
    /**
     * Gets the plane's specific orientation. This can be "horizontal" for planes that are parallel
     * to the ground, "vertical" for planes that are perpendicular to the ground, or `null` if the
     * orientation is different or unknown.
     *
     * @type {"horizontal"|"vertical"|null}
     * @example
     * if (plane.orientation === 'horizontal') {
     *     console.log('This plane is horizontal.');
     * } else if (plane.orientation === 'vertical') {
     *     console.log('This plane is vertical.');
     * } else {
     *     console.log('Orientation of this plane is unknown or different.');
     * }
     */
    get orientation(): "horizontal" | "vertical" | null;
    /**
     * Gets the array of points that define the polygon of the plane in its local coordinate space.
     * Each point is represented as a `DOMPointReadOnly` object with `x`, `y`, and `z` properties.
     * These points can be transformed to world coordinates using the plane's position and
     * rotation.
     *
     * @type {DOMPointReadOnly[]}
     * @example
     * // prepare reusable objects
     * const transform = new pc.Mat4();
     * const vecA = new pc.Vec3();
     * const vecB = new pc.Vec3();
     *
     * // update Mat4 to plane position and rotation
     * transform.setTRS(plane.getPosition(), plane.getRotation(), pc.Vec3.ONE);
     *
     * // draw lines between points
     * for (let i = 0; i < plane.points.length; i++) {
     *     vecA.copy(plane.points[i]);
     *     vecB.copy(plane.points[(i + 1) % plane.points.length]);
     *
     *     // transform points to world space
     *     transform.transformPoint(vecA, vecA);
     *     transform.transformPoint(vecB, vecB);
     *
     *     // render line
     *     app.drawLine(vecA, vecB, pc.Color.WHITE);
     * }
     */
    get points(): DOMPointReadOnly[];
    /**
     * Gets the semantic label of the plane provided by the underlying system. The label describes
     * the type of surface the plane represents, such as "floor", "wall", "ceiling", etc. The list
     * of possible labels can be found in the [semantic labels repository](https://github.com/immersive-web/semantic-labels).
     *
     * @type {string}
     * @example
     * if (plane.label === 'floor') {
     *     console.log('This plane represents the floor.');
     * } else if (plane.label === 'wall') {
     *     console.log('This plane represents a wall.');
     * }
     */
    get label(): string;
}

/**
 * @import { XrManager } from './xr-manager.js'
 */
/**
 * Plane Detection provides the ability to detect real world surfaces based on estimations of the
 * underlying AR system.
 *
 * ```javascript
 * // start session with plane detection enabled
 * app.xr.start(camera, pc.XRTYPE_VR, pc.XRSPACE_LOCALFLOOR, {
 *     planeDetection: true
 * });
 * ```
 *
 * ```javascript
 * app.xr.planeDetection.on('add', (plane) => {
 *     // new plane has been added
 * });
 * ```
 *
 * @category XR
 */
declare class XrPlaneDetection extends EventHandler {
    /**
     * Fired when plane detection becomes available.
     *
     * @event
     * @example
     * app.xr.planeDetection.on('available', () => {
     *     console.log('Plane detection is available');
     * });
     */
    static EVENT_AVAILABLE: string;
    /**
     * Fired when plane detection becomes unavailable.
     *
     * @event
     * @example
     * app.xr.planeDetection.on('unavailable', () => {
     *     console.log('Plane detection is unavailable');
     * });
     */
    static EVENT_UNAVAILABLE: string;
    /**
     * Fired when new {@link XrPlane} is added to the list. The handler is passed the
     * {@link XrPlane} instance that has been added.
     *
     * @event
     * @example
     * app.xr.planeDetection.on('add', (plane) => {
     *     // new plane is added
     * });
     */
    static EVENT_ADD: string;
    /**
     * Fired when a {@link XrPlane} is removed from the list. The handler is passed the
     * {@link XrPlane} instance that has been removed.
     *
     * @event
     * @example
     * app.xr.planeDetection.on('remove', (plane) => {
     *     // plane has been removed
     * });
     */
    static EVENT_REMOVE: string;
    /**
     * Create a new XrPlaneDetection instance.
     *
     * @param {XrManager} manager - WebXR Manager.
     * @ignore
     */
    constructor(manager: XrManager);
    /**
     * @type {XrManager}
     * @private
     */
    private _manager;
    /**
     * @type {boolean}
     * @private
     */
    private _supported;
    /**
     * @type {boolean}
     * @private
     */
    private _available;
    /**
     * @type {Map<XRPlane, XrPlane>}
     * @private
     */
    private _planesIndex;
    /**
     * @type {XrPlane[]}
     * @private
     */
    private _planes;
    /** @private */
    private _onSessionStart;
    /** @private */
    private _onSessionEnd;
    /**
     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
     * @ignore
     */
    update(frame: XRFrame): void;
    /**
     * True if Plane Detection is supported.
     *
     * @type {boolean}
     */
    get supported(): boolean;
    /**
     * True if Plane Detection is available. This information is available only when the session has started.
     *
     * @type {boolean}
     */
    get available(): boolean;
    /**
     * Array of {@link XrPlane} instances that contain individual plane information.
     *
     * @type {XrPlane[]}
     */
    get planes(): XrPlane[];
}

/**
 * @import { XrMeshDetection } from './xr-mesh-detection.js'
 */
/**
 * Detected Mesh instance that provides its transform (position, rotation), triangles (vertices,
 * indices) and its semantic label. Any of its properties can change during its lifetime.
 *
 * @category XR
 */
declare class XrMesh extends EventHandler {
    /**
     * Fired when an {@link XrMesh} is removed.
     *
     * @event
     * @example
     * mesh.once('remove', () => {
     *     // mesh is no longer available
     * });
     */
    static EVENT_REMOVE: string;
    /**
     * Fired when {@link XrMesh} attributes such as vertices, indices and/or label have been
     * changed. Position and rotation can change at any time without triggering a `change` event.
     *
     * @event
     * @example
     * mesh.on('change', () => {
     *     // mesh attributes have been changed
     * });
     */
    static EVENT_CHANGE: string;
    /**
     * Create a new XrMesh instance.
     *
     * @param {XrMeshDetection} meshDetection - Mesh Detection
     * interface.
     * @param {XRMesh} xrMesh - XRMesh that is instantiated by WebXR system.
     * @ignore
     */
    constructor(meshDetection: XrMeshDetection, xrMesh: XRMesh);
    /**
     * @type {XrMeshDetection}
     * @private
     */
    private _meshDetection;
    /**
     * @type {XRMesh}
     * @private
     */
    private _xrMesh;
    /**
     * @type {number}
     * @private
     */
    private _lastChanged;
    /**
     * @type {Vec3}
     * @private
     */
    private _position;
    /**
     * @type {Quat}
     * @private
     */
    private _rotation;
    /**
     * @type {XRMesh}
     * @ignore
     */
    get xrMesh(): XRMesh;
    /**
     * Semantic label of a mesh that is provided by the underlying system. The current list
     * includes (but is not limited to): https://github.com/immersive-web/semantic-labels/blob/master/labels.json
     *
     * @type {string}
     */
    get label(): string;
    /**
     * Array of mesh vertices. This array contains 3 components per vertex (`x, y, z`).
     *
     * @type {Float32Array}
     */
    get vertices(): Float32Array;
    /**
     * Array of mesh indices.
     *
     * @type {Uint32Array}
     */
    get indices(): Uint32Array;
    /** @ignore */
    destroy(): void;
    /**
     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
     * @ignore
     */
    update(frame: XRFrame): void;
    /**
     * Get the world space position of a mesh.
     *
     * @returns {Vec3} The world space position of a mesh.
     */
    getPosition(): Vec3;
    /**
     * Get the world space rotation of a mesh.
     *
     * @returns {Quat} The world space rotation of a mesh.
     */
    getRotation(): Quat;
}

/**
 * @import { XrManager } from './xr-manager.js'
 */
/**
 * Mesh Detection provides the ability to detect real world meshes based on the
 * scanning and reconstruction by the underlying AR system.
 *
 * ```javascript
 * // start session with mesh detection enabled
 * app.xr.start(camera, pc.XRTYPE_AR, pc.XRSPACE_LOCALFLOOR, {
 *     meshDetection: true
 * });
 * ```
 *
 * ```javascript
 * app.xr.meshDetection.on('add', (mesh) => {
 *     // new mesh has been added
 * });
 * ```
 *
 * @category XR
 */
declare class XrMeshDetection extends EventHandler {
    /**
     * Fired when mesh detection becomes available.
     *
     * @event
     * @example
     * app.xr.meshDetection.on('available', () => {
     *     console.log('Mesh detection is available');
     * });
     */
    static EVENT_AVAILABLE: string;
    /**
     * Fired when mesh detection becomes unavailable.
     *
     * @event
     * @example
     * app.xr.meshDetection.on('unavailable', () => {
     *     console.log('Mesh detection is unavailable');
     * });
     */
    static EVENT_UNAVAILABLE: string;
    /**
     * Fired when new {@link XrMesh} is added to the list. The handler is passed the {@link XrMesh}
     * instance that has been added.
     *
     * @event
     * @example
     * app.xr.meshDetection.on('add', (mesh) => {
     *     // a new XrMesh has been added
     * });
     */
    static EVENT_ADD: string;
    /**
     * Fired when a {@link XrMesh} is removed from the list. The handler is passed the
     * {@link XrMesh} instance that has been removed.
     *
     * @event
     * @example
     * app.xr.meshDetection.on('remove', (mesh) => {
     *     // XrMesh has been removed
     * });
     */
    static EVENT_REMOVE: string;
    /**
     * Create a new XrMeshDetection instance.
     *
     * @param {XrManager} manager - WebXR Manager.
     * @ignore
     */
    constructor(manager: XrManager);
    /**
     * @type {XrManager}
     * @private
     */
    private _manager;
    /**
     * @type {boolean}
     * @private
     */
    private _supported;
    /**
     * @type {boolean}
     * @private
     */
    private _available;
    /**
     * @type {Map<XRMesh, XrMesh>}
     * @private
     */
    private _index;
    /**
     * @type {XrMesh[]}
     * @private
     */
    private _list;
    /**
     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
     * @ignore
     */
    update(frame: XRFrame): void;
    /**
     * @param {XrMesh} mesh - XrMesh to remove.
     * @private
     */
    private _removeMesh;
    /** @private */
    private _onSessionStart;
    /** @private */
    private _onSessionEnd;
    /**
     * True if Mesh Detection is supported.
     *
     * @type {boolean}
     */
    get supported(): boolean;
    /**
     * True if Mesh Detection is available. This information is available only when session has started.
     *
     * @type {boolean}
     */
    get available(): boolean;
    /**
     * Array of {@link XrMesh} instances that contain transform, vertices and label information.
     *
     * @type {XrMesh[]}
     */
    get meshes(): XrMesh[];
}

/**
 * @import { XrManager } from './xr-manager.js'
 */
/**
 * Provides access to input sources for WebXR.
 *
 * Input sources represent:
 *
 * - hand held controllers - and their optional capabilities: gamepad and vibration
 * - hands - with their individual joints
 * - transient sources - such as touch screen taps and voice commands
 *
 * @category XR
 */
declare class XrInput extends EventHandler {
    /**
     * Fired when a new {@link XrInputSource} is added to the list. The handler is passed the
     * {@link XrInputSource} that has been added.
     *
     * @event
     * @example
     * app.xr.input.on('add', (inputSource) => {
     *     // new input source is added
     * });
     */
    static EVENT_ADD: string;
    /**
     * Fired when an {@link XrInputSource} is removed from the list. The handler is passed the
     * {@link XrInputSource} that has been removed.
     *
     * @event
     * @example
     * app.xr.input.on('remove', (inputSource) => {
     *     // input source is removed
     * });
     */
    static EVENT_REMOVE: string;
    /**
     * Fired when {@link XrInputSource} has triggered primary action. This could be pressing a
     * trigger button, or touching a screen. The handler is passed the {@link XrInputSource} that
     * triggered the `select` event and the XRInputSourceEvent event from the WebXR API.
     *
     * @event
     * @example
     * const ray = new pc.Ray();
     * app.xr.input.on('select', (inputSource, evt) => {
     *     ray.set(inputSource.getOrigin(), inputSource.getDirection());
     *     if (obj.intersectsRay(ray)) {
     *         // selected an object with input source
     *     }
     * });
     */
    static EVENT_SELECT: string;
    /**
     * Fired when {@link XrInputSource} has started to trigger primary action. The handler is
     * passed the {@link XrInputSource} that triggered the `selectstart` event and the
     * XRInputSourceEvent event from the WebXR API.
     *
     * @event
     * @example
     * app.xr.input.on('selectstart', (inputSource, evt) => {
     *     console.log('Select started');
     * });
     */
    static EVENT_SELECTSTART: string;
    /**
     * Fired when {@link XrInputSource} has ended triggering primary action. The handler is passed
     * the {@link XrInputSource} that triggered the `selectend` event and the XRInputSourceEvent
     * event from the WebXR API.
     *
     * @event
     * @example
     * app.xr.input.on('selectend', (inputSource, evt) => {
     *     console.log('Select ended');
     * });
     */
    static EVENT_SELECTEND: string;
    /**
     * Fired when {@link XrInputSource} has triggered squeeze action. This is associated with
     * "grabbing" action on the controllers. The handler is passed the {@link XrInputSource} that
     * triggered the `squeeze` event and the XRInputSourceEvent event from the WebXR API.
     *
     * @event
     * @example
     * app.xr.input.on('squeeze', (inputSource, evt) => {
     *     console.log('Squeeze');
     * });
     */
    static EVENT_SQUEEZE: string;
    /**
     * Fired when {@link XrInputSource} has started to trigger squeeze action. The handler is
     * passed the {@link XrInputSource} that triggered the `squeezestart` event and the
     * XRInputSourceEvent event from the WebXR API.
     *
     * @event
     * @example
     * app.xr.input.on('squeezestart', (inputSource, evt) => {
     *     if (obj.containsPoint(inputSource.getPosition())) {
     *         // grabbed an object
     *     }
     * });
     */
    static EVENT_SQUEEZESTART: string;
    /**
     * Fired when {@link XrInputSource} has ended triggering squeeze action. The handler is passed
     * the {@link XrInputSource} that triggered the `squeezeend` event and the XRInputSourceEvent
     * event from the WebXR API.
     *
     * @event
     * @example
     * app.xr.input.on('squeezeend', (inputSource, evt) => {
     *     console.log('Squeeze ended');
     * });
     */
    static EVENT_SQUEEZEEND: string;
    /**
     * Create a new XrInput instance.
     *
     * @param {XrManager} manager - WebXR Manager.
     * @ignore
     */
    constructor(manager: XrManager);
    /**
     * @type {XrManager}
     * @private
     */
    private manager;
    /**
     * @type {XrInputSource[]}
     * @private
     */
    private _inputSources;
    /**
     * Listener subscribed to the XRSession's input sources change event. NOTE(review):
     * presumably a bound wrapper around {@link XrInput#_onInputSourcesChange} — confirm against
     * the implementation.
     *
     * @type {Function}
     * @private
     */
    private _onInputSourcesChangeEvt;
    /**
     * True if the underlying system reports velocities for input sources. NOTE(review):
     * inferred from the name — confirm against the implementation.
     *
     * @type {boolean}
     * @ignore
     */
    velocitiesSupported: boolean;
    /** @private */
    private _onSessionStart;
    /** @private */
    private _onSessionEnd;
    /**
     * @param {XRInputSourcesChangeEvent} evt - WebXR input sources change event.
     * @private
     */
    private _onInputSourcesChange;
    /**
     * @param {XRInputSource} xrInputSource - Input source to search for.
     * @returns {XrInputSource|null} The input source that matches the given WebXR input source or
     * null if no match is found.
     * @private
     */
    private _getByInputSource;
    /**
     * @param {XRInputSource} xrInputSource - Input source to add.
     * @private
     */
    private _addInputSource;
    /**
     * @param {XRInputSource} xrInputSource - Input source to remove.
     * @private
     */
    private _removeInputSource;
    /**
     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
     * @ignore
     */
    update(frame: XRFrame): void;
    /**
     * List of active {@link XrInputSource} instances.
     *
     * @type {XrInputSource[]}
     */
    get inputSources(): XrInputSource[];
}

/**
 * Light Estimation provides illumination data from the real world, which is estimated by the
 * underlying AR system. It provides a reflection Cube Map, that represents the reflection
 * estimation from the viewer position. A more simplified approximation of light is provided by L2
 * Spherical Harmonics data. And the most simple level of light estimation is the most prominent
 * directional light, its rotation, intensity and color.
 *
 * @category XR
 */
declare class XrLightEstimation extends EventHandler {
    /**
     * Fired when light estimation data becomes available.
     *
     * @event
     * @example
     * app.xr.lightEstimation.on('available', () => {
     *     console.log('Light estimation is available');
     * });
     */
    static EVENT_AVAILABLE: string;
    /**
     * Fired when light estimation has failed to start. The handler is passed the Error object
     * related to failure of light estimation start.
     *
     * @event
     * @example
     * app.xr.lightEstimation.on('error', (error) => {
     *     console.error(error.message);
     * });
     */
    static EVENT_ERROR: string;
    /**
     * Create a new XrLightEstimation instance.
     *
     * @param {XrManager} manager - WebXR Manager.
     * @ignore
     */
    constructor(manager: XrManager);
    /**
     * @type {XrManager}
     * @private
     */
    private _manager;
    /**
     * @type {boolean}
     * @private
     */
    private _supported;
    /**
     * @type {boolean}
     * @private
     */
    private _available;
    /**
     * True once a light probe has been requested via {@link XrLightEstimation#start}.
     * NOTE(review): inferred from the name — confirm against the implementation.
     *
     * @type {boolean}
     * @private
     */
    private _lightProbeRequested;
    /**
     * @type {XRLightProbe|null}
     * @private
     */
    private _lightProbe;
    /**
     * @type {number}
     * @private
     */
    private _intensity;
    /**
     * @type {Quat}
     * @private
     */
    private _rotation;
    /**
     * @type {Color}
     * @private
     */
    private _color;
    /**
     * L2 spherical harmonics coefficients of the estimated ambient light (see class
     * description).
     *
     * @type {Float32Array}
     * @private
     */
    private _sphericalHarmonics;
    /** @private */
    private _onSessionStart;
    /** @private */
    private _onSessionEnd;
    /**
     * Start estimation of illumination data. Availability of such data will come later and an
     * `available` event will be fired. If it failed to start estimation, an `error` event will be
     * fired.
     *
     * @example
     * app.xr.on('start', () => {
     *     if (app.xr.lightEstimation.supported) {
     *         app.xr.lightEstimation.start();
     *     }
     * });
     */
    start(): void;
    /**
     * End estimation of illumination data.
     */
    end(): void;
    /**
     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
     * @ignore
     */
    update(frame: XRFrame): void;
    /**
     * True if Light Estimation is supported. This information is available only during an active AR
     * session.
     *
     * @type {boolean}
     */
    get supported(): boolean;
    /**
     * True if estimated light information is available.
     *
     * @type {boolean}
     * @example
     * if (app.xr.lightEstimation.available) {
     *     entity.light.intensity = app.xr.lightEstimation.intensity;
     * }
     */
    get available(): boolean;
    /**
     * Intensity of what is estimated to be the most prominent directional light. Or null if data
     * is not available.
     *
     * @type {number|null}
     */
    get intensity(): number | null;
    /**
     * Color of what is estimated to be the most prominent directional light. Or null if data is
     * not available.
     *
     * @type {Color|null}
     */
    get color(): Color | null;
    /**
     * Rotation of what is estimated to be the most prominent directional light. Or null if data is
     * not available.
     *
     * @type {Quat|null}
     */
    get rotation(): Quat | null;
    /**
     * Spherical harmonic coefficients of estimated ambient light. Or null if data is not available.
     *
     * @type {Float32Array|null}
     */
    get sphericalHarmonics(): Float32Array | null;
}

/**
 * @import { XrManager } from './xr-manager.js'
 */
/**
 * Represents an XR View which represents a screen (monoscopic scenario such as a mobile phone) or an eye
 * (stereoscopic scenario such as an HMD context). It provides access to the view's color and depth information
 * based on the capabilities of underlying AR system.
 *
 * @category XR
 */
declare class XrView extends EventHandler {
    /**
     * Fired when the depth sensing texture has been resized. The {@link XrView#depthUvMatrix} needs
     * to be updated for relevant shaders. The handler is passed the new width and height of the
     * depth texture in pixels.
     *
     * @event
     * @example
     * view.on('depth:resize', () => {
     *     material.setParameter('matrix_depth_uv', view.depthUvMatrix);
     * });
     */
    static EVENT_DEPTHRESIZE: string;
    /**
     * Create a new XrView instance.
     *
     * @param {XrManager} manager - WebXR Manager.
     * @param {XRView} xrView - XRView object that is created by WebXR API.
     * @param {number} viewsCount - Number of views available for the session.
     * @ignore
     */
    constructor(manager: XrManager, xrView: XRView, viewsCount: number);
    /**
     * @type {XrManager}
     * @private
     */
    private _manager;
    /**
     * @type {XRView}
     * @private
     */
    private _xrView;
    /**
     * @type {Float32Array}
     * @private
     */
    private _positionData;
    /**
     * @type {Vec4}
     * @private
     */
    private _viewport;
    /**
     * @type {Mat4}
     * @private
     */
    private _projMat;
    /**
     * @type {Mat4}
     * @private
     */
    private _projViewOffMat;
    /**
     * @type {Mat4}
     * @private
     */
    private _viewMat;
    /**
     * @type {Mat4}
     * @private
     */
    private _viewOffMat;
    /**
     * @type {Mat3}
     * @private
     */
    private _viewMat3;
    /**
     * @type {Mat4}
     * @private
     */
    private _viewInvMat;
    /**
     * @type {Mat4}
     * @private
     */
    private _viewInvOffMat;
    /**
     * @type {XRCamera}
     * @private
     */
    private _xrCamera;
    /**
     * @type {Texture|null}
     * @private
     */
    private _textureColor;
    /**
     * @type {Texture|null}
     * @private
     */
    private _textureDepth;
    /**
     * @type {XRDepthInformation|null}
     * @private
     */
    private _depthInfo;
    /**
     * @type {Uint8Array}
     * @private
     */
    private _emptyDepthBuffer;
    /**
     * @type {Mat4}
     * @private
     */
    private _depthMatrix;
    /**
     * Texture associated with this view's camera color. Equals to null if camera color is
     * not available or is not supported.
     *
     * @type {Texture|null}
     */
    get textureColor(): Texture | null;
    /**
     * Texture that contains packed depth information which is reconstructed using the underlying
     * AR system. This texture can be used (not limited to) for reconstructing real world
     * geometry, virtual object placement, occlusion of virtual object by the real world geometry,
     * and more.
     * The format of this texture is any of {@link PIXELFORMAT_LA8}, {@link PIXELFORMAT_DEPTH}, or
     * {@link PIXELFORMAT_R32F} based on {@link XrViews#depthPixelFormat}. It is UV transformed
     * based on the underlying AR system which can be normalized using {@link XrView#depthUvMatrix}.
     * Equals to null if camera depth is not supported.
     *
     * @type {Texture|null}
     * @example
     * // GPU path, attaching texture to material
     * material.setParameter('texture_depthSensingMap', view.textureDepth);
     * material.setParameter('matrix_depth_uv', view.depthUvMatrix.data);
     * material.setParameter('depth_to_meters', view.depthValueToMeters);
     * @example
     * // GLSL shader to unpack depth texture
     * // when depth information is provided in form of LA8
     * varying vec2 vUv0;
     *
     * uniform sampler2D texture_depthSensingMap;
     * uniform mat4 matrix_depth_uv;
     * uniform float depth_to_meters;
     *
     * void main(void) {
     *     // transform UVs using depth matrix
     *     vec2 texCoord = (matrix_depth_uv * vec4(vUv0.xy, 0.0, 1.0)).xy;
     *
     *     // get luminance alpha components from depth texture
     *     vec2 packedDepth = texture2D(texture_depthSensingMap, texCoord).ra;
     *
     *     // unpack into single value in millimeters
     *     float depth = dot(packedDepth, vec2(255.0, 256.0 * 255.0)) * depth_to_meters; // m
     *
     *     // normalize: 0m to 8m distance
     *     depth = min(depth / 8.0, 1.0); // 0..1 = 0m..8m
     *
     *     // paint scene from black to white based on distance
     *     gl_FragColor = vec4(depth, depth, depth, 1.0);
     * }
     */
    get textureDepth(): Texture | null;
    /**
     * 4x4 matrix that should be used to transform depth texture UVs to normalized UVs in a shader.
     * It is updated when the depth texture is resized. Refer to {@link EVENT_DEPTHRESIZE}.
     *
     * @type {Mat4}
     * @example
     * material.setParameter('matrix_depth_uv', view.depthUvMatrix.data);
     */
    get depthUvMatrix(): Mat4;
    /**
     * Multiply this coefficient number by raw depth value to get depth in meters.
     *
     * @type {number}
     * @example
     * material.setParameter('depth_to_meters', view.depthValueToMeters);
     */
    get depthValueToMeters(): number;
    /**
     * An eye with which this view is associated. Can be any of:
     *
     * - {@link XREYE_NONE}: None - indicates a monoscopic view (likely mobile phone screen).
     * - {@link XREYE_LEFT}: Left - indicates left eye view.
     * - {@link XREYE_RIGHT}: Right - indicates a right eye view.
     *
     * @type {string}
     */
    get eye(): string;
    /**
     * A Vec4 (x, y, width, height) that represents a view's viewport. For a monoscopic screen,
     * it will define fullscreen view. But for stereoscopic views (left/right eye), it will define
     * a part of a whole screen that view is occupying.
     *
     * @type {Vec4}
     */
    get viewport(): Vec4;
    /**
     * @type {Mat4}
     * @ignore
     */
    get projMat(): Mat4;
    /**
     * @type {Mat4}
     * @ignore
     */
    get projViewOffMat(): Mat4;
    /**
     * @type {Mat4}
     * @ignore
     */
    get viewOffMat(): Mat4;
    /**
     * @type {Mat4}
     * @ignore
     */
    get viewInvOffMat(): Mat4;
    /**
     * @type {Mat3}
     * @ignore
     */
    get viewMat3(): Mat3;
    /**
     * @type {Float32Array}
     * @ignore
     */
    get positionData(): Float32Array;
    /**
     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
     * @param {XRView} xrView - XRView from WebXR API.
     * @ignore
     */
    update(frame: XRFrame, xrView: XRView): void;
    /**
     * @private
     */
    private _updateTextureColor;
    // Framebuffer handles used when updating the camera color texture. NOTE(review): presumably
    // set by _updateTextureColor — confirm against the implementation.
    _frameBufferSource: any;
    _frameBuffer: any;
    /**
     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
     * @private
     */
    private _updateDepth;
    /**
     * @param {Mat4|null} transform - World Transform of a parents GraphNode.
     * @ignore
     */
    updateTransforms(transform: Mat4 | null): void;
    /**
     * Handler for graphics device loss. NOTE(review): presumably releases cached textures and
     * framebuffer handles — confirm against the implementation.
     *
     * @ignore
     */
    _onDeviceLost(): void;
    /**
     * Get a depth value from depth information in meters. The specified UV is in the range 0..1,
     * with the origin in the top-left corner of the depth texture.
     *
     * @param {number} u - U coordinate of pixel in depth texture, which is in range from 0.0 to
     * 1.0 (left to right).
     * @param {number} v - V coordinate of pixel in depth texture, which is in range from 0.0 to
     * 1.0 (top to bottom).
     * @returns {number|null} Depth in meters or null if depth information is currently not
     * available.
     * @example
     * const depth = view.getDepth(u, v);
     * if (depth !== null) {
     *     // depth in meters
     * }
     */
    getDepth(u: number, v: number): number | null;
    /** @ignore */
    destroy(): void;
}

/**
 * @import { XrManager } from './xr-manager.js'
 */
/**
 * Provides access to list of {@link XrView}s and information about their capabilities, such as
 * support and availability of view's camera color texture, depth texture and other parameters.
 *
 * @category XR
 */
declare class XrViews extends EventHandler {
    /**
     * Fired when a view has been added. Views are not available straight away on session start and
     * are added mid-session. They can be added/removed mid session by the underlying system. The
     * handler is passed the {@link XrView} that has been added.
     *
     * @event
     * @example
     * xr.views.on('add', (view) => {
     *     console.log('View added');
     * });
     */
    static EVENT_ADD: string;
    /**
     * Fired when a view has been removed. They can be added/removed mid session by the underlying
     * system. The handler is passed the {@link XrView} that has been removed.
     *
     * @event
     * @example
     * xr.views.on('remove', (view) => {
     *     console.log('View removed');
     * });
     */
    static EVENT_REMOVE: string;
    /**
     * Create a new XrViews instance.
     *
     * @param {XrManager} manager - WebXR Manager.
     * @ignore
     */
    constructor(manager: XrManager);
    /**
     * @type {XrManager}
     * @private
     */
    private _manager;
    /**
     * @type {Map<string,XrView>}
     * @private
     */
    private _index;
    /**
     * @type {Map<string,XrView>}
     * @private
     */
    private _indexTmp;
    /**
     * @type {XrView[]}
     * @private
     */
    private _list;
    /**
     * @type {boolean}
     * @private
     */
    private _supportedColor;
    /**
     * @type {boolean}
     * @private
     */
    private _supportedDepth;
    /**
     * @type {boolean}
     * @private
     */
    private _availableColor;
    /**
     * @type {boolean}
     * @private
     */
    private _availableDepth;
    /**
     * @type {string}
     * @private
     */
    private _depthUsage;
    /**
     * @type {string}
     * @private
     */
    private _depthFormat;
    /**
     * @type {object}
     * @private
     */
    private _depthFormats;
    /**
     * An array of {@link XrView}s of this session. Views are not available straight away on
     * session start, and can be added/removed mid-session. So use of `add`/`remove` events is
     * required for accessing views.
     *
     * @type {XrView[]}
     */
    get list(): XrView[];
    /**
     * Check if Camera Color is supported. It might be still unavailable even if requested,
     * based on hardware capabilities and granted permissions.
     *
     * @type {boolean}
     */
    get supportedColor(): boolean;
    /**
     * Check if Camera Depth is supported. It might be still unavailable even if requested,
     * based on hardware capabilities and granted permissions.
     *
     * @type {boolean}
     */
    get supportedDepth(): boolean;
    /**
     * Check if Camera Color is available. This information becomes available only after
     * session has started.
     *
     * @type {boolean}
     */
    get availableColor(): boolean;
    /**
     * Check if Camera Depth is available. This information becomes available only after
     * session has started.
     *
     * @type {boolean}
     */
    get availableDepth(): boolean;
    /**
     * @type {string}
     * @ignore
     */
    get depthUsage(): string;
    /**
     * Whether the depth sensing is GPU optimized.
     *
     * @type {boolean}
     */
    get depthGpuOptimized(): boolean;
    /**
     * @type {string}
     * @ignore
     */
    get depthFormat(): string;
    /**
     * The depth sensing pixel format. Can be:
     *
     * - {@link PIXELFORMAT_LA8}
     * - {@link PIXELFORMAT_R32F}
     *
     * @type {PIXELFORMAT_LA8|PIXELFORMAT_R32F|null}
     */
    get depthPixelFormat(): 2 | 15 | null;
    /**
     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
     * @param {XRView[]} xrViews - Views from the WebXR API. NOTE(review): the signature types
     * this parameter as `any`; presumably an array of XRView — confirm against implementation.
     * @ignore
     */
    update(frame: XRFrame, xrViews: any): void;
    /**
     * Get an {@link XrView} by its associated eye constant.
     *
     * @param {string} eye - An XREYE_* view is associated with. Can be 'none' for monoscope views.
     * @returns {XrView|null} View or null if view of such eye is not available.
     */
    get(eye: string): XrView | null;
    /**
     * @private
     */
    private _onSessionStart;
    /**
     * @private
     */
    private _onSessionEnd;
}

/**
 * Callback used by {@link XrAnchor#persist}. Receives an Error (or null on success) and the
 * generated persistent UUID string (or null on failure).
 */
type XrAnchorPersistCallback = (err: Error | null, uuid: string | null) => void;
/**
 * Callback used by {@link XrAnchor#forget}. Receives an Error if forgetting failed, or null on
 * success.
 */
type XrAnchorForgetCallback = (err: Error | null) => void;
/**
 * @import { XrAnchors } from './xr-anchors.js'
 */
/**
 * @callback XrAnchorPersistCallback
 * Callback used by {@link XrAnchor#persist}.
 * @param {Error|null} err - The Error object if failed to persist an anchor or null.
 * @param {string|null} uuid - Unique string that can be used to restore an {@link XrAnchor} in
 * another session.
 * @returns {void}
 */
/**
 * @callback XrAnchorForgetCallback
 * Callback used by {@link XrAnchor#forget}.
 * @param {Error|null} err - The Error object if failed to forget an {@link XrAnchor} or null if
 * succeeded.
 * @returns {void}
 */
/**
 * An anchor keeps track of a position and rotation that is fixed relative to the real world. This
 * allows the application to adjust the location of virtual objects placed in the scene in a way
 * that helps with maintaining the illusion that the placed objects are really present in the
 * user's environment.
 *
 * @category XR
 */
declare class XrAnchor extends EventHandler {
    /**
     * Fired when an anchor is destroyed.
     *
     * @event
     * @example
     * // once anchor is destroyed
     * anchor.once('destroy', () => {
     *     // destroy its related entity
     *     entity.destroy();
     * });
     */
    static EVENT_DESTROY: string;
    /**
     * Fired when an anchor's position and/or rotation is changed.
     *
     * @event
     * @example
     * anchor.on('change', () => {
     *     // anchor has been updated
     *     entity.setPosition(anchor.getPosition());
     *     entity.setRotation(anchor.getRotation());
     * });
     */
    static EVENT_CHANGE: string;
    /**
     * Fired when an anchor has been persisted. The handler is passed the UUID string that can
     * be used to restore this anchor.
     *
     * @event
     * @example
     * anchor.on('persist', (uuid) => {
     *     // anchor has been persisted
     * });
     */
    static EVENT_PERSIST: string;
    /**
     * Fired when an anchor has been forgotten.
     *
     * @event
     * @example
     * anchor.on('forget', () => {
     *     // anchor has been forgotten
     * });
     */
    static EVENT_FORGET: string;
    /**
     * @param {XrAnchors} anchors - Anchor manager.
     * @param {object} xrAnchor - Native XRAnchor object that is provided by WebXR API.
     * @param {string|null} uuid - ID string associated with a persistent anchor.
     * @ignore
     */
    constructor(anchors: XrAnchors, xrAnchor: object, uuid?: string | null);
    /**
     * @type {Vec3}
     * @private
     */
    private _position;
    /**
     * @type {Quat}
     * @private
     */
    private _rotation;
    /**
     * @type {string|null}
     * @private
     */
    private _uuid;
    /**
     * @type {XrAnchorPersistCallback[]|null}
     * @private
     */
    private _uuidRequests;
    // The owning anchor manager (see constructor).
    _anchors: XrAnchors;
    // Native XRAnchor object provided by the WebXR API (see constructor).
    _xrAnchor: any;
    /**
     * Destroy an anchor.
     */
    destroy(): void;
    /**
     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
     * @ignore
     */
    update(frame: XRFrame): void;
    /**
     * Get the world space position of an anchor.
     *
     * @returns {Vec3} The world space position of an anchor.
     */
    getPosition(): Vec3;
    /**
     * Get the world space rotation of an anchor.
     *
     * @returns {Quat} The world space rotation of an anchor.
     */
    getRotation(): Quat;
    /**
     * Persists the anchor between WebXR sessions by generating a universally unique identifier
     * (UUID) for the anchor. This UUID can be used later to restore the anchor from the underlying
     * system. Note that the underlying system may have a limit on the number of anchors that can
     * be persisted per origin.
     *
     * @param {XrAnchorPersistCallback} [callback] - Optional callback function to be called when
     * the persistent UUID has been generated or if an error occurs.
     * @example
     * // Persist the anchor and log the UUID or error
     * anchor.persist((err, uuid) => {
     *     if (err) {
     *         console.error('Failed to persist anchor:', err);
     *     } else {
     *         console.log('Anchor persisted with UUID:', uuid);
     *     }
     * });
     */
    persist(callback?: XrAnchorPersistCallback): void;
    /**
     * Removes the persistent UUID of an anchor from the underlying system. This effectively makes
     * the anchor non-persistent, so it will not be restored in future WebXR sessions.
     *
     * @param {XrAnchorForgetCallback} [callback] - Optional callback function to be called when
     * the anchor has been forgotten or if an error occurs.
     * @example
     * // Forget the anchor and log the result or error
     * anchor.forget((err) => {
     *     if (err) {
     *         console.error('Failed to forget anchor:', err);
     *     } else {
     *         console.log('Anchor has been forgotten');
     *     }
     * });
     */
    forget(callback?: XrAnchorForgetCallback): void;
    /**
     * Gets the UUID string of a persisted anchor or null if the anchor is not persisted.
     *
     * @type {null|string}
     */
    get uuid(): null | string;
    /**
     * Gets whether an anchor is persistent.
     *
     * @type {boolean}
     */
    get persistent(): boolean;
}

/**
 * Callback used by {@link XrAnchors#create}. Receives an Error (or null on success) and the
 * created {@link XrAnchor} (or null on failure).
 */
type XrAnchorCreateCallback = (err: Error | null, anchor: XrAnchor | null) => void;
/**
 * @import { Quat } from '../../core/math/quat.js'
 * @import { Vec3 } from '../../core/math/vec3.js'
 * @import { XrAnchorForgetCallback } from './xr-anchor.js'
 * @import { XrManager } from './xr-manager.js'
 */
/**
 * @callback XrAnchorCreateCallback
 * Callback used by {@link XrAnchors#create}.
 * @param {Error|null} err - The Error object if failed to create an anchor or null.
 * @param {XrAnchor|null} anchor - The anchor that is tracked against real world geometry.
 * @returns {void}
 */
/**
 * Anchors provide an ability to specify a point in the world that needs to be updated to
 * correctly reflect the evolving understanding of the world by the underlying AR system,
 * such that the anchor remains aligned with the same place in the physical world.
 * Anchors tend to persist better relative to the real world, especially during a longer
 * session with lots of movement.
 *
 * ```javascript
 * app.xr.start(camera, pc.XRTYPE_AR, pc.XRSPACE_LOCALFLOOR, {
 *     anchors: true
 * });
 * ```
 *
 * @category XR
 */
declare class XrAnchors extends EventHandler {
    /**
     * Fired when anchors become available.
     *
     * @event
     * @example
     * app.xr.anchors.on('available', () => {
     *     console.log('Anchors are available');
     * });
     */
    static EVENT_AVAILABLE: string;
    /**
     * Fired when anchors become unavailable.
     *
     * @event
     * @example
     * app.xr.anchors.on('unavailable', () => {
     *     console.log('Anchors are unavailable');
     * });
     */
    static EVENT_UNAVAILABLE: string;
    /**
     * Fired when an anchor failed to be created. The handler is passed an Error object.
     *
     * @event
     * @example
     * app.xr.anchors.on('error', (err) => {
     *     console.error(err.message);
     * });
     */
    static EVENT_ERROR: string;
    /**
     * Fired when a new {@link XrAnchor} is added. The handler is passed the {@link XrAnchor} that
     * was added.
     *
     * @event
     * @example
     * app.xr.anchors.on('add', (anchor) => {
     *     console.log('Anchor added');
     * });
     */
    static EVENT_ADD: string;
    /**
     * Fired when an {@link XrAnchor} is destroyed. The handler is passed the {@link XrAnchor} that
     * was destroyed.
     *
     * @event
     * @example
     * app.xr.anchors.on('destroy', (anchor) => {
     *     console.log('Anchor destroyed');
     * });
     */
    static EVENT_DESTROY: string;
    /**
     * Create a new XrAnchors instance.
     *
     * @param {XrManager} manager - WebXR Manager.
     * @ignore
     */
    constructor(manager: XrManager);
    /**
     * @type {XrManager}
     * @ignore
     */
    manager: XrManager;
    /**
     * @type {boolean}
     * @private
     */
    private _supported;
    /**
     * @type {boolean}
     * @private
     */
    private _available;
    /**
     * True while an asynchronous anchors availability check is in progress. NOTE(review):
     * inferred from the name — confirm against the implementation.
     *
     * @type {boolean}
     * @private
     */
    private _checkingAvailability;
    /**
     * @type {boolean}
     * @private
     */
    private _persistence;
    /**
     * List of anchor creation requests.
     *
     * @type {object[]}
     * @private
     */
    private _creationQueue;
    /**
     * Index of XrAnchors, with XRAnchor (native handle) used as a key.
     *
     * @type {Map<XRAnchor,XrAnchor>}
     * @private
     */
    private _index;
    /**
     * Index of XrAnchors, with UUID (persistent string) used as a key.
     *
     * @type {Map<string,XrAnchor>}
     * @private
     */
    private _indexByUuid;
    /**
     * @type {XrAnchor[]}
     * @private
     */
    private _list;
    /**
     * Map of callbacks to XRAnchors so that we can call its callback once an anchor is updated
     * with a pose for the first time.
     *
     * @type {Map<XrAnchor, XrAnchorCreateCallback>}
     * @private
     */
    private _callbacksAnchors;
    /** @private */
    private _onSessionStart;
    /** @private */
    private _onSessionEnd;
    /**
     * @param {XRAnchor} xrAnchor - XRAnchor that has been added.
     * @param {string|null} [uuid] - UUID string associated with persistent anchor.
     * @returns {XrAnchor} new instance of XrAnchor.
     * @private
     */
    private _createAnchor;
    /**
     * @param {XRAnchor} xrAnchor - XRAnchor that has been destroyed.
     * @param {XrAnchor} anchor - Anchor that has been destroyed.
     * @private
     */
    private _onAnchorDestroy;
    /**
     * Create an anchor using position and rotation, or from hit test result.
     *
     * @param {Vec3|XRHitTestResult} position - Position for an anchor or a hit test result.
     * @param {Quat|XrAnchorCreateCallback} [rotation] - Rotation for an anchor or a callback if
     * creating from a hit test result.
     * @param {XrAnchorCreateCallback} [callback] - Callback to fire when anchor was created or
     * failed to be created.
     * @example
     * // create an anchor using a position and rotation
     * app.xr.anchors.create(position, rotation, (err, anchor) => {
     *     if (!err) {
     *         // new anchor has been created
     *     }
     * });
     * @example
     * // create an anchor from a hit test result
     * hitTestSource.on('result', (position, rotation, inputSource, hitTestResult) => {
     *     app.xr.anchors.create(hitTestResult, (err, anchor) => {
     *         if (!err) {
     *             // new anchor has been created
     *         }
     *     });
     * });
     */
    create(position: Vec3 | XRHitTestResult, rotation?: Quat | XrAnchorCreateCallback, callback?: XrAnchorCreateCallback): void;
    /**
     * Restore anchor using persistent UUID.
     *
     * @param {string} uuid - UUID string associated with persistent anchor.
     * @param {XrAnchorCreateCallback} [callback] - Callback to fire when anchor was created or
     * failed to be created.
     * @example
     * // restore an anchor using uuid string
     * app.xr.anchors.restore(uuid, (err, anchor) => {
     *     if (!err) {
     *         // new anchor has been created
     *     }
     * });
     * @example
     * // restore all available persistent anchors
     * const uuids = app.xr.anchors.uuids;
     * for (let i = 0; i < uuids.length; i++) {
     *     app.xr.anchors.restore(uuids[i]);
     * }
     */
    restore(uuid: string, callback?: XrAnchorCreateCallback): void;
    /**
     * Forget an anchor by removing its UUID from underlying systems.
     *
     * @param {string} uuid - UUID string associated with persistent anchor.
     * @param {XrAnchorForgetCallback} [callback] - Callback to fire when anchor persistent data
     * was removed or error if failed.
     * @example
     * // forget all available anchors
     * const uuids = app.xr.anchors.uuids;
     * for (let i = 0; i < uuids.length; i++) {
     *     app.xr.anchors.forget(uuids[i]);
     * }
     */
    forget(uuid: string, callback?: XrAnchorForgetCallback): void;
    /**
     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
     * @ignore
     */
    update(frame: XRFrame): void;
    /**
     * True if Anchors are supported.
     *
     * @type {boolean}
     */
    get supported(): boolean;
    /**
     * True if Anchors are available. This information is available only when session has started.
     *
     * @type {boolean}
     */
    get available(): boolean;
    /**
     * True if Anchors support persistence.
     *
     * @type {boolean}
     */
    get persistence(): boolean;
    /**
     * Array of UUID strings of persistent anchors, or null if not available.
     *
     * @type {null|string[]}
     */
    get uuids(): null | string[];
    /**
     * List of available {@link XrAnchor}s.
     *
     * @type {XrAnchor[]}
     */
    get list(): XrAnchor[];
}

/**
 * Callback used by {@link XrManager#start} and {@link XrManager#end}. Receives an Error, or null
 * if the operation was successful.
 */
type XrErrorCallback = (err: Error | null) => void;
/**
 * Callback used by {@link XrManager#initiateRoomCapture}.
 */
type XrRoomCaptureCallback = (err: Error | null) => void;
/**
 * @import { AppBase } from '../app-base.js'
 * @import { CameraComponent } from '../components/camera/component.js'
 * @import { Entity } from '../entity.js'
 */
/**
 * @callback XrErrorCallback
 * Callback used by {@link XrManager#start} and {@link XrManager#end}.
 * @param {Error|null} err - The Error object or null if operation was successful.
 * @returns {void}
 */
/**
 * @callback XrRoomCaptureCallback
 * Callback used by {@link XrManager#initiateRoomCapture}.
 * @param {Error|null} err - The Error object or null if manual room capture was successful.
 * @returns {void}
 */
/**
 * XrManager provides a comprehensive interface for WebXR integration in PlayCanvas applications.
 * It manages the full lifecycle of XR sessions (VR/AR), handles device capabilities, and provides
 * access to various XR features through specialized subsystems.
 *
 * In order for XR to be available, ensure that your application is served over HTTPS or localhost.
 *
 * The {@link AppBase} class automatically creates an instance of this class and makes it available
 * as {@link AppBase#xr}.
 *
 * @category XR
 */
declare class XrManager extends EventHandler {
    /**
     * Fired when availability of the XR type is changed. This event is available in two
     * forms. They are as follows:
     *
     * 1. `available` - Fired when availability of any XR type is changed. The handler is passed
     * the session type that has changed availability and a boolean representing the availability.
     * 2. `available:[type]` - Fired when availability of specific XR type is changed. The handler
     * is passed a boolean representing the availability.
     *
     * @event
     * @example
     * app.xr.on('available', (type, available) => {
     *     console.log(`XR type ${type} is now ${available ? 'available' : 'unavailable'}`);
     * });
     * @example
     * app.xr.on(`available:${pc.XRTYPE_VR}`, (available) => {
     *     console.log(`XR type VR is now ${available ? 'available' : 'unavailable'}`);
     * });
     */
    static EVENT_AVAILABLE: string;
    /**
     * Fired when XR session is started.
     *
     * @event
     * @example
     * app.xr.on('start', () => {
     *     // XR session has started
     * });
     */
    static EVENT_START: string;
    /**
     * Fired when XR session is ended.
     *
     * @event
     * @example
     * app.xr.on('end', () => {
     *     // XR session has ended
     * });
     */
    static EVENT_END: string;
    /**
     * Fired when XR session is updated, providing relevant XRFrame object. The handler is passed
     * [XRFrame](https://developer.mozilla.org/en-US/docs/Web/API/XRFrame) object that can be used
     * for interfacing directly with WebXR APIs.
     *
     * @event
     * @example
     * app.xr.on('update', (frame) => {
     *     console.log('XR frame updated');
     * });
     */
    static EVENT_UPDATE: string;
    /**
     * Fired when XR session is failed to start or failed to check for session type support. The handler
     * is passed the [Error](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error)
     * object related to failure of session start or check of session type support.
     *
     * @event
     * @example
     * app.xr.on('error', (error) => {
     *     console.error(error.message);
     * });
     */
    static EVENT_ERROR: string;
    /**
     * Create a new XrManager instance.
     *
     * @param {AppBase} app - The main application.
     * @ignore
     */
    constructor(app: AppBase);
    /**
     * @type {AppBase}
     * @ignore
     */
    app: AppBase;
    /**
     * @type {boolean}
     * @private
     */
    private _supported;
    /**
     * @type {Object<string, boolean>}
     * @private
     */
    private _available;
    /**
     * @type {string|null}
     * @private
     */
    private _type;
    /**
     * @type {string|null}
     * @private
     */
    private _spaceType;
    /**
     * @type {XRSession|null}
     * @private
     */
    private _session;
    /**
     * @type {XRWebGLLayer|null}
     * @private
     */
    private _baseLayer;
    /**
     * @type {XRWebGLBinding|null}
     * @ignore
     */
    webglBinding: XRWebGLBinding | null;
    /**
     * @type {XRReferenceSpace|null}
     * @ignore
     */
    _referenceSpace: XRReferenceSpace | null;
    /**
     * Provides access to DOM overlay capabilities.
     *
     * @type {XrDomOverlay}
     */
    domOverlay: XrDomOverlay;
    /**
     * Provides the ability to perform hit tests on the representation of real world geometry
     * of the underlying AR system.
     *
     * @type {XrHitTest}
     */
    hitTest: XrHitTest;
    /**
     * Provides access to image tracking capabilities.
     *
     * @type {XrImageTracking}
     */
    imageTracking: XrImageTracking;
    /**
     * Provides access to plane detection capabilities.
     *
     * @type {XrPlaneDetection}
     */
    planeDetection: XrPlaneDetection;
    /**
     * Provides access to mesh detection capabilities.
     *
     * @type {XrMeshDetection}
     */
    meshDetection: XrMeshDetection;
    /**
     * Provides access to Input Sources.
     *
     * @type {XrInput}
     */
    input: XrInput;
    /**
     * Provides access to light estimation capabilities.
     *
     * @type {XrLightEstimation}
     */
    lightEstimation: XrLightEstimation;
    /**
     * Provides access to views and their capabilities.
     *
     * @type {XrViews}
     */
    views: XrViews;
    /**
     * Provides access to Anchors.
     *
     * @type {XrAnchors}
     */
    anchors: XrAnchors;
    /**
     * @type {CameraComponent|null}
     * @private
     */
    private _camera;
    /**
     * @type {Vec3}
     * @private
     */
    private _localPosition;
    /**
     * @type {Quat}
     * @private
     */
    private _localRotation;
    /**
     * @type {number}
     * @private
     */
    private _depthNear;
    /**
     * @type {number}
     * @private
     */
    private _depthFar;
    /**
     * @type {number[]|null}
     * @private
     */
    private _supportedFrameRates;
    /**
     * @type {number}
     * @private
     */
    private _width;
    /**
     * @type {number}
     * @private
     */
    private _height;
    /**
     * @type {number}
     * @private
     */
    private _framebufferScaleFactor;
    /**
     * Destroys the XrManager instance.
     *
     * @ignore
     */
    destroy(): void;
    /**
     * Attempts to start XR session for provided {@link CameraComponent} and optionally fires
     * callback when session is created or failed to create. Integrated XR APIs need to be enabled
     * by providing relevant options.
     *
     * Note that the start method needs to be called in response to user action, such as a button
     * click. It will not work if called in response to a timer or other event.
     *
     * @param {CameraComponent} camera - It will be used to render XR session and manipulated based
     * on pose tracking.
     * @param {string} type - Session type. Can be one of the following:
     *
     * - {@link XRTYPE_INLINE}: Inline - always available type of session. It has limited features
     * availability and is rendered into HTML element.
     * - {@link XRTYPE_VR}: Immersive VR - session that provides exclusive access to VR device with
     * best available tracking features.
     * - {@link XRTYPE_AR}: Immersive AR - session that provides exclusive access to VR/AR device
     * that is intended to be blended with real-world environment.
     *
     * @param {string} spaceType - Reference space type. Can be one of the following:
     *
     * - {@link XRSPACE_VIEWER}: Viewer - always supported space with some basic tracking
     * capabilities.
     * - {@link XRSPACE_LOCAL}: Local - represents a tracking space with a native origin near the
     * viewer at the time of creation. It is meant for seated or basic local XR sessions.
     * - {@link XRSPACE_LOCALFLOOR}: Local Floor - represents a tracking space with a native origin
     * at the floor in a safe position for the user to stand. The y axis equals 0 at floor level.
     * Floor level value might be estimated by the underlying platform. It is meant for seated or
     * basic local XR sessions.
     * - {@link XRSPACE_BOUNDEDFLOOR}: Bounded Floor - represents a tracking space with its native
     * origin at the floor, where the user is expected to move within a pre-established boundary.
     * - {@link XRSPACE_UNBOUNDED}: Unbounded - represents a tracking space where the user is
     * expected to move freely around their environment, potentially long distances from their
     * starting point.
     *
     * @param {object} [options] - Object with additional options for XR session initialization.
     * @param {number} [options.framebufferScaleFactor] - Framebuffer scale factor should
     * be higher than 0.0, by default 1.0 (no scaling). A value of 0.5 will reduce the resolution
     * of an XR session in half, and a value of 2.0 will double the resolution.
     * @param {string[]} [options.optionalFeatures] - Optional features for XRSession start. It is
     * used for getting access to additional WebXR spec extensions.
     * @param {boolean} [options.anchors] - Set to true to attempt to enable
     * {@link XrAnchors}.
     * @param {boolean} [options.imageTracking] - Set to true to attempt to enable
     * {@link XrImageTracking}.
     * @param {boolean} [options.planeDetection] - Set to true to attempt to enable
     * {@link XrPlaneDetection}.
     * @param {boolean} [options.meshDetection] - Set to true to attempt to enable
     * {@link XrMeshDetection}.
     * @param {XrErrorCallback} [options.callback] - Optional callback function called once session
     * is started. The callback has one argument Error - it is null if successfully started XR
     * session.
     * @param {object} [options.depthSensing] - Optional object with parameters to attempt to enable
     * depth sensing.
     * @param {string} [options.depthSensing.usagePreference] - Optional usage preference for depth
     * sensing, can be 'cpu-optimized' or 'gpu-optimized' (XRDEPTHSENSINGUSAGE_*), defaults to
     * 'cpu-optimized'. Most preferred and supported will be chosen by the underlying depth sensing
     * system.
     * @param {string} [options.depthSensing.dataFormatPreference] - Optional data format
     * preference for depth sensing, can be 'luminance-alpha' or 'float32'
     * (XRDEPTHSENSINGFORMAT_*), defaults to 'luminance-alpha'. Most preferred and supported will
     * be chosen by the underlying depth sensing system.
     * @example
     * button.on('click', () => {
     *     app.xr.start(camera, pc.XRTYPE_VR, pc.XRSPACE_LOCALFLOOR);
     * });
     * @example
     * button.on('click', () => {
     *     app.xr.start(camera, pc.XRTYPE_AR, pc.XRSPACE_LOCALFLOOR, {
     *         anchors: true,
     *         imageTracking: true,
     *         depthSensing: { }
     *     });
     * });
     */
    start(camera: CameraComponent, type: string, spaceType: string, options?: {
        framebufferScaleFactor?: number;
        optionalFeatures?: string[];
        anchors?: boolean;
        imageTracking?: boolean;
        planeDetection?: boolean;
        meshDetection?: boolean;
        callback?: XrErrorCallback;
        depthSensing?: {
            usagePreference?: string;
            dataFormatPreference?: string;
        };
    }): void;
    /**
     * @param {string} type - Session type.
     * @param {string} spaceType - Reference space type.
     * @param {*} options - Session options.
     * @param {XrErrorCallback} callback - Error callback.
     * @private
     */
    private _onStartOptionsReady;
    /**
     * Attempts to end XR session and optionally fires callback when session is ended or failed to
     * end.
     *
     * @param {XrErrorCallback} [callback] - Optional callback function called once session is
     * ended. The callback has one argument Error - it is null if successfully ended XR session.
     * @example
     * app.keyboard.on('keydown', (evt) => {
     *     if (evt.key === pc.KEY_ESCAPE && app.xr.active) {
     *         app.xr.end();
     *     }
     * });
     */
    end(callback?: XrErrorCallback): void;
    /**
     * Check if the specified type of session is available.
     *
     * @param {string} type - Session type. Can be one of the following:
     *
     * - {@link XRTYPE_INLINE}: Inline - always available type of session. It has limited features
     * availability and is rendered into HTML element.
     * - {@link XRTYPE_VR}: Immersive VR - session that provides exclusive access to VR device with
     * best available tracking features.
     * - {@link XRTYPE_AR}: Immersive AR - session that provides exclusive access to VR/AR device
     * that is intended to be blended with real-world environment.
     *
     * @example
     * if (app.xr.isAvailable(pc.XRTYPE_VR)) {
     *     // VR is available
     * }
     * @returns {boolean} True if the specified session type is available.
     */
    isAvailable(type: string): boolean;
    /** @private */
    private _deviceAvailabilityCheck;
    /**
     * Initiate manual room capture. If the underlying XR system supports manual capture of the
     * room, it will start the capturing process, which can affect plane and mesh detection,
     * and improve hit-test quality against real-world geometry.
     *
     * @param {XrRoomCaptureCallback} callback - Callback that will be fired once capture is complete
     * or failed.
     *
     * @example
     * this.app.xr.initiateRoomCapture((err) => {
     *     if (err) {
     *         // capture failed
     *         return;
     *     }
     *     // capture was successful
     * });
     */
    initiateRoomCapture(callback: XrRoomCaptureCallback): void;
    /**
     * Update target frame rate of an XR session to one of supported value provided by
     * supportedFrameRates list.
     *
     * @param {number} frameRate - Target frame rate. It should be any value from the list
     * of supportedFrameRates.
     * @param {Function} [callback] - Callback that will be called when frameRate has been
     * updated or failed to update with error provided.
     */
    updateTargetFrameRate(frameRate: number, callback?: Function): void;
    /**
     * @param {string} type - Session type.
     * @private
     */
    private _sessionSupportCheck;
    /**
     * @param {XRSession} session - XR session.
     * @param {string} spaceType - Space type to request for the session.
     * @param {Function} callback - Callback to call when session is started.
     * @private
     */
    private _onSessionStart;
    /**
     * @param {number} near - Near plane distance.
     * @param {number} far - Far plane distance.
     * @private
     */
    private _setClipPlanes;
    _createBaseLayer(): void;
    /** @private */
    private _onDeviceLost;
    /** @private */
    private _onDeviceRestored;
    /**
     * @param {XRFrame} frame - XRFrame from requestAnimationFrame callback.
     * @returns {boolean} True if update was successful, false otherwise.
     * @ignore
     */
    update(frame: XRFrame): boolean;
    /**
     * True if XR is supported.
     *
     * @type {boolean}
     */
    get supported(): boolean;
    /**
     * True if XR session is running.
     *
     * @type {boolean}
     */
    get active(): boolean;
    /**
     * Returns type of currently running XR session or null if no session is running. Can be any of
     * XRTYPE_*.
     *
     * @type {string|null}
     */
    get type(): string | null;
    /**
     * Returns reference space type of currently running XR session or null if no session is
     * running. Can be any of XRSPACE_*.
     *
     * @type {string|null}
     */
    get spaceType(): string | null;
    /**
     * Provides access to XRSession of WebXR.
     *
     * @type {XRSession|null}
     */
    get session(): XRSession | null;
    /**
     * XR session frameRate or null if this information is not available. This value can change
     * during an active XR session.
     *
     * @type {number|null}
     */
    get frameRate(): number | null;
    /**
     * List of supported frame rates, or null if this data is not available.
     *
     * @type {number[]|null}
     */
    get supportedFrameRates(): number[] | null;
    /**
     * Framebuffer scale factor. This value is read-only and can only be set when starting a new
     * XR session.
     *
     * @type {number}
     */
    get framebufferScaleFactor(): number;
    /**
     * Set fixed foveation to the value between 0 and 1. Where 0 is no foveation and 1 is highest
     * foveation. It only can be set during an active XR session. Fixed foveation will reduce the
     * resolution of the back buffer at the edges of the screen, which can improve rendering
     * performance.
     *
     * @type {number}
     */
    set fixedFoveation(value: number | null);
    /**
     * Gets the current fixed foveation level, which is between 0 and 1. 0 is no forveation and 1
     * is highest foveation. If fixed foveation is not supported, this value returns null.
     *
     * @type {number|null}
     */
    get fixedFoveation(): number | null;
    /**
     * Active camera for which XR session is running or null.
     *
     * @type {Entity|null}
     */
    get camera(): Entity | null;
    /**
     * Indicates whether WebXR content is currently visible to the user, and if it is, whether it's
     * the primary focus. Can be 'hidden', 'visible' or 'visible-blurred'.
     *
     * @type {"hidden"|"visible"|"visible-blurred"|null}
     * @ignore
     */
    get visibilityState(): "hidden" | "visible" | "visible-blurred" | null;
}

declare class CameraComponentData {
    enabled: boolean;
}

/**
 * Used to add and remove {@link CameraComponent}s from Entities. It also holds an array of all
 * active cameras.
 *
 * @category Graphics
 */
declare class CameraComponentSystem extends ComponentSystem {
    /**
     * Holds all the active camera components.
     *
     * @type {CameraComponent[]}
     */
    cameras: CameraComponent[];
    id: string;
    ComponentType: typeof CameraComponent;
    DataType: typeof CameraComponentData;
    schema: string[];
    initializeComponentData(component: any, data: any, properties: any): void;
    cloneComponent(entity: any, clone: any): Component;
    onBeforeRemove(entity: any, component: any): void;
    onAppPrerender(): void;
    addCamera(camera: any): void;
    removeCamera(camera: any): void;
}

/**
 * Callback used by {@link CameraComponent#calculateTransform} and {@link CameraComponent#calculateProjection}.
 */
type CalculateMatrixCallback = (transformMatrix: Mat4, view: number) => void;
/**
 * @import { CameraComponentSystem } from './system.js'
 * @import { Color } from '../../../core/math/color.js'
 * @import { Entity } from '../../entity.js'
 * @import { EventHandle } from '../../../core/event-handle.js'
 * @import { Frustum } from '../../../core/shape/frustum.js'
 * @import { LayerComposition } from '../../../scene/composition/layer-composition.js'
 * @import { Layer } from '../../../scene/layer.js'
 * @import { Mat4 } from '../../../core/math/mat4.js'
 * @import { FramePass } from '../../../platform/graphics/frame-pass.js'
 * @import { RenderTarget } from '../../../platform/graphics/render-target.js'
 * @import { FogParams } from '../../../scene/fog-params.js'
 * @import { Vec3 } from '../../../core/math/vec3.js'
 * @import { Vec4 } from '../../../core/math/vec4.js'
 * @import { XrErrorCallback } from '../../xr/xr-manager.js'
 */
/**
 * @callback CalculateMatrixCallback
 * Callback used by {@link CameraComponent#calculateTransform} and {@link CameraComponent#calculateProjection}.
 * @param {Mat4} transformMatrix - Output of the function.
 * @param {number} view - Type of view. Can be {@link VIEW_CENTER}, {@link VIEW_LEFT} or
 * {@link VIEW_RIGHT}. Left and right are only used in stereo rendering.
 * @returns {void}
 */
/**
 * The CameraComponent enables an {@link Entity} to render the scene. A scene requires at least
 * one enabled camera component to be rendered. The camera's view direction is along the negative
 * z-axis of the owner entity.
 *
 * Note that multiple camera components can be enabled simultaneously (for split-screen or
 * offscreen rendering, for example).
 *
 * You should never need to use the CameraComponent constructor directly. To add a CameraComponent
 * to an {@link Entity}, use {@link Entity#addComponent}:
 *
 * ```javascript
 * const entity = new pc.Entity();
 * entity.addComponent('camera', {
 *     nearClip: 1,
 *     farClip: 100,
 *     fov: 55
 * });
 * ```
 *
 * Once the CameraComponent is added to the entity, you can access it via the {@link Entity#camera}
 * property:
 *
 * ```javascript
 * entity.camera.nearClip = 2; // Set the near clip of the camera
 *
 * console.log(entity.camera.nearClip); // Get the near clip of the camera
 * ```
 *
 * @hideconstructor
 * @category Graphics
 */
declare class CameraComponent extends Component {
    /**
     * Create a new CameraComponent instance.
     *
     * @param {CameraComponentSystem} system - The ComponentSystem that created this Component.
     * @param {Entity} entity - The Entity that this Component is attached to.
     */
    constructor(system: CameraComponentSystem, entity: Entity);
    /**
     * Custom function that is called when postprocessing should execute.
     *
     * @type {Function|null}
     * @ignore
     */
    onPostprocessing: Function | null;
    /**
     * A counter of requests of depth map rendering.
     *
     * @type {number}
     * @private
     */
    private _renderSceneDepthMap;
    /**
     * A counter of requests of color map rendering.
     *
     * @type {number}
     * @private
     */
    private _renderSceneColorMap;
    /** @private */
    private _sceneDepthMapRequested;
    /** @private */
    private _sceneColorMapRequested;
    /** @private */
    private _priority;
    /**
     * Layer id at which the postprocessing stops for the camera.
     *
     * @type {number}
     * @private
     */
    private _disablePostEffectsLayer;
    /** @private */
    private _camera;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayersChanged;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayerAdded;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayerRemoved;
    _postEffects: PostEffectQueue;
    /**
     * Sets the name of the shader pass the camera will use when rendering.
     *
     * In addition to existing names (see the parameter description), a new name can be specified,
     * which creates a new shader pass with the given name. The name provided can only use
     * alphanumeric characters and underscores. When a shader is compiled for the new pass, a define
     * is added to the shader. For example, if the name is 'custom_rendering', the define
     * 'CUSTOM_RENDERING_PASS' is added to the shader, allowing the shader code to conditionally
     * execute code only when that shader pass is active.
     *
     * Another instance where this approach may prove useful is when a camera needs to render a more
     * cost-effective version of shaders, such as when creating a reflection texture. To accomplish
     * this, a callback on the material that triggers during shader compilation can be used. This
     * callback can modify the shader generation options specifically for this shader pass.
     *
     * ```javascript
     * const shaderPassId = camera.setShaderPass('custom_rendering');
     *
     * material.onUpdateShader = function (options) {
     *     if (options.pass === shaderPassId) {
     *         options.litOptions.normalMapEnabled = false;
     *         options.litOptions.useSpecular = false;
     *     }
     *     return options;
     * };
     * ```
     *
     * @param {string} name - The name of the shader pass. Defaults to undefined, which is
     * equivalent to {@link SHADERPASS_FORWARD}. Can be:
     *
     * - {@link SHADERPASS_FORWARD}
     * - {@link SHADERPASS_ALBEDO}
     * - {@link SHADERPASS_OPACITY}
     * - {@link SHADERPASS_WORLDNORMAL}
     * - {@link SHADERPASS_SPECULARITY}
     * - {@link SHADERPASS_GLOSS}
     * - {@link SHADERPASS_METALNESS}
     * - {@link SHADERPASS_AO}
     * - {@link SHADERPASS_EMISSION}
     * - {@link SHADERPASS_LIGHTING}
     * - {@link SHADERPASS_UV0}
     *
     * @returns {number} The id of the shader pass.
     */
    setShaderPass(name: string): number;
    /**
     * Shader pass name.
     *
     * @returns {string|undefined} The name of the shader pass, or undefined if no shader pass is set.
     */
    getShaderPass(): string | undefined;
    /**
     * Sets the frame passes the camera uses for rendering, instead of its default rendering.
     * Set this to null to return to the default behavior.
     *
     * @type {FramePass[]|null}
     * @ignore
     */
    set framePasses(passes: FramePass[]);
    /**
     * Gets the frame passes the camera uses for rendering, instead of its default rendering.
     *
     * @type {FramePass[]}
     * @ignore
     */
    get framePasses(): FramePass[];
    /**
     * @type {FramePass[]|null}
     * @deprecated Use {@link CameraComponent#framePasses} instead.
     * @ignore
     */
    set renderPasses(passes: FramePass[]);
    /**
     * @type {FramePass[]}
     * @deprecated Use {@link CameraComponent#framePasses} instead.
     * @ignore
     */
    get renderPasses(): FramePass[];
    get shaderParams(): CameraShaderParams;
    /**
     * Sets the gamma correction to apply when rendering the scene. Can be:
     *
     * - {@link GAMMA_SRGB}: Output is gamma-encoded for standard sRGB displays. This is the
     *   default and recommended setting for all normal rendering.
     * - {@link GAMMA_NONE}: Output remains in linear space. This is only intended for advanced
     *   HDR pipelines where the output is rendered to an intermediate HDR texture that will be
     *   tonemapped and gamma-corrected in a subsequent pass.
     *
     * **Warning**: Setting `GAMMA_NONE` will cause the entire scene (including UI) to appear
     * too dark on standard displays, as linear values are written directly without gamma
     * encoding. For HDR rendering with post-processing, use {@link CameraFrame} which handles
     * this automatically.
     *
     * Defaults to {@link GAMMA_SRGB}.
     *
     * @type {number}
     */
    set gammaCorrection(value: number);
    /**
     * Gets the gamma correction used when rendering the scene.
     *
     * @type {number}
     */
    get gammaCorrection(): number;
    /**
     * Sets the tonemapping transform to apply to the rendered color buffer. Can be:
     *
     * - {@link TONEMAP_LINEAR}
     * - {@link TONEMAP_FILMIC}
     * - {@link TONEMAP_HEJL}
     * - {@link TONEMAP_ACES}
     * - {@link TONEMAP_ACES2}
     * - {@link TONEMAP_NEUTRAL}
     *
     * Defaults to {@link TONEMAP_LINEAR}.
     *
     * @type {number}
     */
    set toneMapping(value: number);
    /**
     * Gets the tonemapping transform applied to the rendered color buffer.
     *
     * @type {number}
     */
    get toneMapping(): number;
    /**
     * Sets the fog parameters. If this is not null, the camera will use these fog parameters
     * instead of those specified on the {@link Scene#fog}.
     *
     * @type {FogParams|null}
     */
    set fog(value: FogParams | null);
    /**
     * Gets a {@link FogParams} that defines fog parameters, or null if those are not set.
     *
     * @type {FogParams|null}
     */
    get fog(): FogParams | null;
    /**
     * Sets the camera aperture in f-stops. Default is 16. Higher value means less exposure. Used
     * if {@link Scene#physicalUnits} is true.
     *
     * @type {number}
     */
    set aperture(value: number);
    /**
     * Gets the camera aperture in f-stops.
     *
     * @type {number}
     */
    get aperture(): number;
    /**
     * Sets the aspect ratio (width divided by height) of the camera. If {@link aspectRatioMode} is
     * {@link ASPECT_AUTO}, then this value will be automatically calculated every frame, and you
     * can only read it. If it's {@link ASPECT_MANUAL}, you can set the value.
     *
     * @type {number}
     */
    set aspectRatio(value: number);
    /**
     * Gets the aspect ratio (width divided by height) of the camera.
     *
     * @type {number}
     */
    get aspectRatio(): number;
    /**
     * Sets the aspect ratio mode of the camera. Can be:
     *
     * - {@link ASPECT_AUTO}: aspect ratio will be calculated from the current render
     * target's width divided by height.
     * - {@link ASPECT_MANUAL}: use the aspectRatio value.
     *
     * Defaults to {@link ASPECT_AUTO}.
     *
     * @type {number}
     */
    set aspectRatioMode(value: number);
    /**
     * Gets the aspect ratio mode of the camera.
     *
     * @type {number}
     */
    get aspectRatioMode(): number;
    /**
     * Sets the custom function to calculate the camera projection matrix manually. Can be used for
     * complex effects like doing oblique projection. Function is called using component's scope.
     *
     * Arguments:
     *
     * - {@link Mat4} transformMatrix: output of the function
     * - view: Type of view. Can be {@link VIEW_CENTER}, {@link VIEW_LEFT} or {@link VIEW_RIGHT}.
     *
     * Left and right are only used in stereo rendering.
     *
     * @type {CalculateMatrixCallback}
     */
    set calculateProjection(value: CalculateMatrixCallback);
    /**
     * Gets the custom function to calculate the camera projection matrix manually.
     *
     * @type {CalculateMatrixCallback}
     */
    get calculateProjection(): CalculateMatrixCallback;
    /**
     * Sets the custom function to calculate the camera transformation matrix manually. Can be used
     * for complex effects like reflections. Function is called using component's scope. Arguments:
     *
     * - {@link Mat4} transformMatrix: output of the function.
     * - view: Type of view. Can be {@link VIEW_CENTER}, {@link VIEW_LEFT} or {@link VIEW_RIGHT}.
     *
     * Left and right are only used in stereo rendering.
     *
     * @type {CalculateMatrixCallback}
     */
    set calculateTransform(value: CalculateMatrixCallback);
    /**
     * Gets the custom function to calculate the camera transformation matrix manually.
     *
     * @type {CalculateMatrixCallback}
     */
    get calculateTransform(): CalculateMatrixCallback;
    /**
     * Gets the camera component's underlying Camera instance.
     *
     * @type {Camera}
     * @ignore
     */
    get camera(): Camera;
    /**
     * Sets the camera component's clear color. Defaults to `[0.75, 0.75, 0.75, 1]`.
     *
     * @type {Color}
     */
    set clearColor(value: Color);
    /**
     * Gets the camera component's clear color.
     *
     * @type {Color}
     */
    get clearColor(): Color;
    /**
     * Sets whether the camera will automatically clear the color buffer before rendering. Defaults to true.
     *
     * @type {boolean}
     */
    set clearColorBuffer(value: boolean);
    /**
     * Gets whether the camera will automatically clear the color buffer before rendering.
     *
     * @type {boolean}
     */
    get clearColorBuffer(): boolean;
    /**
     * Sets the depth value to clear the depth buffer to. Defaults to 1.
     *
     * @type {number}
     */
    set clearDepth(value: number);
    /**
     * Gets the depth value to clear the depth buffer to.
     *
     * @type {number}
     */
    get clearDepth(): number;
    /**
     * Sets whether the camera will automatically clear the depth buffer before rendering. Defaults to true.
     *
     * @type {boolean}
     */
    set clearDepthBuffer(value: boolean);
    /**
     * Gets whether the camera will automatically clear the depth buffer before rendering.
     *
     * @type {boolean}
     */
    get clearDepthBuffer(): boolean;
    /**
     * Sets whether the camera will automatically clear the stencil buffer before rendering. Defaults to true.
     *
     * @type {boolean}
     */
    set clearStencilBuffer(value: boolean);
    /**
     * Gets whether the camera will automatically clear the stencil buffer before rendering.
     *
     * @type {boolean}
     */
    get clearStencilBuffer(): boolean;
    /**
     * Sets whether the camera will cull triangle faces. If true, the camera will take
     * {@link Material#cull} into account. Otherwise both front and back faces will be rendered.
     * Defaults to true.
     *
     * @type {boolean}
     */
    set cullFaces(value: boolean);
    /**
     * Gets whether the camera will cull triangle faces.
     *
     * @type {boolean}
     */
    get cullFaces(): boolean;
    /**
     * Sets the layer id of the layer on which the post-processing of the camera stops being applied
     * to. Defaults to {@link LAYERID_UI}, which causes post-processing to not be applied to UI
     * layer and any following layers for the camera. Set to `undefined` for post-processing to be
     * applied to all layers of the camera.
     *
     * @type {number}
     */
    set disablePostEffectsLayer(layer: number);
    /**
     * Gets the layer id of the layer on which the post-processing of the camera stops being applied
     * to.
     *
     * @type {number}
     */
    get disablePostEffectsLayer(): number;
    /**
     * Sets the distance from the camera after which no rendering will take place. Defaults to 1000.
     *
     * @type {number}
     */
    set farClip(value: number);
    /**
     * Gets the distance from the camera after which no rendering will take place.
     *
     * @type {number}
     */
    get farClip(): number;
    /**
     * Sets whether the camera will flip the face direction of triangles. If set to true, the
     * camera will invert front and back faces. Can be useful for reflection rendering. Defaults to
     * false.
     *
     * @type {boolean}
     */
    set flipFaces(value: boolean);
    /**
     * Gets whether the camera will flip the face direction of triangles.
     *
     * @type {boolean}
     */
    get flipFaces(): boolean;
    /**
     * Sets the field of view of the camera in degrees. Usually this is the Y-axis field of view
     * (see {@link horizontalFov}). Used for {@link PROJECTION_PERSPECTIVE} cameras only. Defaults to
     * 45.
     *
     * @type {number}
     */
    set fov(value: number);
    /**
     * Gets the field of view of the camera in degrees.
     *
     * @type {number}
     */
    get fov(): number;
    /**
     * Gets the camera's frustum shape.
     *
     * @type {Frustum}
     */
    get frustum(): Frustum;
    /**
     * Sets whether frustum culling is enabled. This controls the culling of {@link MeshInstance}s
     * against the camera frustum, i.e. if objects outside of the camera's frustum should be
     * omitted from rendering. If false, all mesh instances in the scene are rendered by the
     * camera, regardless of visibility. Defaults to false.
     *
     * @type {boolean}
     */
    set frustumCulling(value: boolean);
    /**
     * Gets whether frustum culling is enabled.
     *
     * @type {boolean}
     */
    get frustumCulling(): boolean;
    /**
     * Sets whether the camera's field of view ({@link fov}) is horizontal or vertical. Defaults to
     * false (meaning it is vertical by default).
     *
     * @type {boolean}
     */
    set horizontalFov(value: boolean);
    /**
     * Gets whether the camera's field of view ({@link fov}) is horizontal or vertical.
     *
     * @type {boolean}
     */
    get horizontalFov(): boolean;
    /**
     * Sets the array of layer IDs ({@link Layer#id}) to which this camera should belong. Don't
     * push, pop, splice or modify this array. If you want to change it, set a new one instead.
     * Defaults to [{@link LAYERID_WORLD}, {@link LAYERID_DEPTH}, {@link LAYERID_SKYBOX},
     * {@link LAYERID_UI}, {@link LAYERID_IMMEDIATE}].
     *
     * @type {number[]}
     */
    set layers(newValue: number[]);
    /**
     * Gets the array of layer IDs ({@link Layer#id}) to which this camera belongs.
     *
     * @type {number[]}
     */
    get layers(): number[];
    get layersSet(): Set<number>;
    /**
     * Sets the jitter intensity applied in the projection matrix. Used for jittered sampling by TAA.
     * A value of 1 represents a jitter in the range of `[-1, 1]` of a pixel. Smaller values result
     * in a crisper yet more aliased outcome, whereas increased values produce a smoother but blurred
     * result. Defaults to 0, representing no jitter.
     *
     * @type {number}
     */
    set jitter(value: number);
    /**
     * Gets the jitter intensity applied in the projection matrix.
     *
     * @type {number}
     */
    get jitter(): number;
    /**
     * Sets the distance from the camera before which no rendering will take place. Defaults to 0.1.
     *
     * @type {number}
     */
    set nearClip(value: number);
    /**
     * Gets the distance from the camera before which no rendering will take place.
     *
     * @type {number}
     */
    get nearClip(): number;
    /**
     * Sets the half-height of the orthographic view window (in the Y-axis). Used for
     * {@link PROJECTION_ORTHOGRAPHIC} cameras only. Defaults to 10.
     *
     * @type {number}
     */
    set orthoHeight(value: number);
    /**
     * Gets the half-height of the orthographic view window (in the Y-axis).
     *
     * @type {number}
     */
    get orthoHeight(): number;
    /**
     * Gets the post effects queue for this camera. Use this to add or remove post effects from the
     * camera.
     *
     * @type {PostEffectQueue}
     */
    get postEffects(): PostEffectQueue;
    get postEffectsEnabled(): boolean;
    /**
     * Sets the priority to control the render order of this camera. Cameras with a smaller
     * priority value are rendered first. Defaults to 0.
     *
     * @type {number}
     */
    set priority(newValue: number);
    /**
     * Gets the priority to control the render order of this camera.
     *
     * @type {number}
     */
    get priority(): number;
    /**
     * Sets the type of projection used to render the camera. Can be:
     *
     * - {@link PROJECTION_PERSPECTIVE}: A perspective projection. The camera frustum
     * resembles a truncated pyramid.
     * - {@link PROJECTION_ORTHOGRAPHIC}: An orthographic projection. The camera
     * frustum is a cuboid.
     *
     * Defaults to {@link PROJECTION_PERSPECTIVE}.
     *
     * @type {number}
     */
    set projection(value: number);
    /**
     * Gets the type of projection used to render the camera.
     *
     * @type {number}
     */
    get projection(): number;
    /**
     * Gets the camera's projection matrix.
     *
     * @type {Mat4}
     */
    get projectionMatrix(): Mat4;
    /**
     * Sets the rendering rectangle for the camera. This controls where on the screen the camera
     * will render in normalized screen coordinates. Defaults to `[0, 0, 1, 1]`.
     *
     * @type {Vec4}
     */
    set rect(value: Vec4);
    /**
     * Gets the rendering rectangle for the camera.
     *
     * @type {Vec4}
     */
    get rect(): Vec4;
    set renderSceneColorMap(value: boolean);
    get renderSceneColorMap(): boolean;
    set renderSceneDepthMap(value: boolean);
    get renderSceneDepthMap(): boolean;
    /**
     * Sets the render target to which rendering of the camera is performed. If not set, it will
     * render simply to the screen.
     *
     * @type {RenderTarget}
     */
    set renderTarget(value: RenderTarget);
    /**
     * Gets the render target to which rendering of the camera is performed.
     *
     * @type {RenderTarget}
     */
    get renderTarget(): RenderTarget;
    /**
     * Sets the scissor rectangle for the camera. This clips all pixels which are not in the
     * rectangle. The order of the values is `[x, y, width, height]`. Defaults to `[0, 0, 1, 1]`.
     *
     * @type {Vec4}
     */
    set scissorRect(value: Vec4);
    /**
     * Gets the scissor rectangle for the camera.
     *
     * @type {Vec4}
     */
    get scissorRect(): Vec4;
    /**
     * Sets the camera sensitivity in ISO. Defaults to 1000. Higher value means more exposure. Used
     * if {@link Scene#physicalUnits} is true.
     *
     * @type {number}
     */
    set sensitivity(value: number);
    /**
     * Gets the camera sensitivity in ISO.
     *
     * @type {number}
     */
    get sensitivity(): number;
    /**
     * Sets the camera shutter speed in seconds. Defaults to 1/1000s. Longer shutter means more
     * exposure. Used if {@link Scene#physicalUnits} is true.
     *
     * @type {number}
     */
    set shutter(value: number);
    /**
     * Gets the camera shutter speed in seconds.
     *
     * @type {number}
     */
    get shutter(): number;
    /**
     * Gets the camera's view matrix.
     *
     * @type {Mat4}
     */
    get viewMatrix(): Mat4;
    /**
     * Based on the value, the depth layer's enable counter is incremented or decremented.
     *
     * @param {boolean} value - True to increment the counter, false to decrement it.
     * @returns {boolean} True if the counter was incremented or decremented, false if the depth
     * layer is not present.
     * @private
     */
    private _enableDepthLayer;
    /**
     * Request the scene to generate a texture containing the scene color map. Note that this call
     * is accumulative, and for each enable request, a disable request needs to be called. Note that
     * this setting is ignored when the {@link CameraComponent#renderPasses} is used.
     *
     * @param {boolean} enabled - True to request the generation, false to disable it.
     */
    requestSceneColorMap(enabled: boolean): void;
    /**
     * Request the scene to generate a texture containing the scene depth map. Note that this call
     * is accumulative, and for each enable request, a disable request needs to be called. Note that
     * this setting is ignored when the {@link CameraComponent#renderPasses} is used.
     *
     * @param {boolean} enabled - True to request the generation, false to disable it.
     */
    requestSceneDepthMap(enabled: boolean): void;
    dirtyLayerCompositionCameras(): void;
    /**
     * Convert a point from 2D screen space to 3D world space.
     *
     * @param {number} screenx - X coordinate on PlayCanvas' canvas element. Should be in the range
     * 0 to `canvas.offsetWidth` of the application's canvas element.
     * @param {number} screeny - Y coordinate on PlayCanvas' canvas element. Should be in the range
     * 0 to `canvas.offsetHeight` of the application's canvas element.
     * @param {number} cameraz - The distance from the camera in world space to create the new
     * point.
     * @param {Vec3} [worldCoord] - 3D vector to receive world coordinate result.
     * @example
     * // Get the start and end points of a 3D ray fired from a screen click position
     * const start = entity.camera.screenToWorld(clickX, clickY, entity.camera.nearClip);
     * const end = entity.camera.screenToWorld(clickX, clickY, entity.camera.farClip);
     *
     * // Use the ray coordinates to perform a raycast
     * app.systems.rigidbody.raycastFirst(start, end, function (result) {
     *     console.log("Entity " + result.entity.name + " was selected");
     * });
     * @returns {Vec3} The world space coordinate.
     */
    screenToWorld(screenx: number, screeny: number, cameraz: number, worldCoord?: Vec3): Vec3;
    /**
     * Convert a point from 3D world space to 2D screen space.
     *
     * @param {Vec3} worldCoord - The world space coordinate.
     * @param {Vec3} [screenCoord] - 3D vector to receive screen coordinate result.
     * @returns {Vec3} The screen space coordinate.
     */
    worldToScreen(worldCoord: Vec3, screenCoord?: Vec3): Vec3;
    /**
     * Called before application renders the scene.
     *
     * @ignore
     */
    onAppPrerender(): void;
    /** @private */
    private addCameraToLayers;
    /** @private */
    private removeCameraFromLayers;
    /**
     * @param {LayerComposition} oldComp - Old layer composition.
     * @param {LayerComposition} newComp - New layer composition.
     * @private
     */
    private onLayersChanged;
    /**
     * @param {Layer} layer - The layer to add the camera to.
     * @private
     */
    private onLayerAdded;
    /**
     * @param {Layer} layer - The layer to remove the camera from.
     * @private
     */
    private onLayerRemoved;
    onRemove(): void;
    /**
     * Calculates aspect ratio value for a given render target.
     *
     * @param {RenderTarget|null} [rt] - Optional
     * render target. If unspecified, the backbuffer is used.
     * @returns {number} The aspect ratio of the render target (or backbuffer).
     */
    calculateAspectRatio(rt?: RenderTarget | null): number;
    /**
     * Prepare the camera for frame rendering.
     *
     * @param {RenderTarget|null} [rt] - Render
     * target to which rendering will be performed. Will affect camera's aspect ratio, if
     * aspectRatioMode is {@link ASPECT_AUTO}.
     * @ignore
     */
    frameUpdate(rt?: RenderTarget | null): void;
    /**
     * Attempt to start XR session with this camera.
     *
     * @param {string} type - The type of session. Can be one of the following:
     *
     * - {@link XRTYPE_INLINE}: Inline - always available type of session. It has limited feature
     * availability and is rendered into HTML element.
     * - {@link XRTYPE_VR}: Immersive VR - session that provides exclusive access to the VR device
     * with the best available tracking features.
     * - {@link XRTYPE_AR}: Immersive AR - session that provides exclusive access to the VR/AR
     * device that is intended to be blended with the real-world environment.
     *
     * @param {string} spaceType - Reference space type. Can be one of the following:
     *
     * - {@link XRSPACE_VIEWER}: Viewer - always supported space with some basic tracking
     * capabilities.
     * - {@link XRSPACE_LOCAL}: Local - represents a tracking space with a native origin near the
     * viewer at the time of creation. It is meant for seated or basic local XR sessions.
     * - {@link XRSPACE_LOCALFLOOR}: Local Floor - represents a tracking space with a native origin
     * at the floor in a safe position for the user to stand. The y-axis equals 0 at floor level.
     * Floor level value might be estimated by the underlying platform. It is meant for seated or
     * basic local XR sessions.
     * - {@link XRSPACE_BOUNDEDFLOOR}: Bounded Floor - represents a tracking space with its native
     * origin at the floor, where the user is expected to move within a pre-established boundary.
     * - {@link XRSPACE_UNBOUNDED}: Unbounded - represents a tracking space where the user is
     * expected to move freely around their environment, potentially long distances from their
     * starting point.
     *
     * @param {object} [options] - Object with options for XR session initialization.
     * @param {string[]} [options.optionalFeatures] - Optional features for XRSession start. It is
     * used for getting access to additional WebXR spec extensions.
     * @param {boolean} [options.imageTracking] - Set to true to attempt to enable {@link XrImageTracking}.
     * @param {boolean} [options.planeDetection] - Set to true to attempt to enable {@link XrPlaneDetection}.
     * @param {XrErrorCallback} [options.callback] - Optional callback function called once the
     * session is started. The callback has one argument Error - it is null if the XR session
     * started successfully.
     * @param {boolean} [options.anchors] - Optional boolean to attempt to enable {@link XrAnchors}.
     * @param {object} [options.depthSensing] - Optional object with parameters to attempt to enable
     * depth sensing.
     * @param {string} [options.depthSensing.usagePreference] - Optional usage preference for depth
     * sensing, can be 'cpu-optimized' or 'gpu-optimized' (XRDEPTHSENSINGUSAGE_*), defaults to
     * 'cpu-optimized'. Most preferred and supported will be chosen by the underlying depth sensing
     * system.
     * @param {string} [options.depthSensing.dataFormatPreference] - Optional data format
     * preference for depth sensing. Can be 'luminance-alpha' or 'float32' (XRDEPTHSENSINGFORMAT_*),
     * defaults to 'luminance-alpha'. Most preferred and supported will be chosen by the underlying
     * depth sensing system.
     * @example
     * // On an entity with a camera component
     * this.entity.camera.startXr(pc.XRTYPE_VR, pc.XRSPACE_LOCAL, {
     *     callback: (err) => {
     *         if (err) {
     *             // failed to start XR session
     *         } else {
     *             // in XR
     *         }
     *     }
     * });
     */
    startXr(type: string, spaceType: string, options?: {
        optionalFeatures?: string[];
        imageTracking?: boolean;
        planeDetection?: boolean;
        callback?: XrErrorCallback;
        anchors?: boolean;
        depthSensing?: {
            usagePreference?: string;
            dataFormatPreference?: string;
        };
    }): void;
    /**
     * Attempt to end XR session of this camera.
     *
     * @param {XrErrorCallback} [callback] - Optional callback function called once session is
     * ended. The callback has one argument Error - it is null if successfully ended XR session.
     * @example
     * // On an entity with a camera component
     * this.entity.camera.endXr((err) => {
     *     // not anymore in XR
     * });
     */
    endXr(callback?: XrErrorCallback): void;
    /**
     * Function to copy properties from the source CameraComponent. Properties not copied:
     * postEffects. Inherited properties not copied (all): system, entity, enabled.
     *
     * @param {CameraComponent} source - The source component.
     * @ignore
     */
    copy(source: CameraComponent): void;
}

/**
 * Callback used by {@link Layer} to calculate the "sort distance" for a {@link MeshInstance},
 * which determines its place in the render order.
 */
type CalculateSortDistanceCallback = (meshInstance: MeshInstance, cameraPosition: Vec3, cameraForward: Vec3) => void;
/**
 * @callback CalculateSortDistanceCallback
 * Callback used by {@link Layer} to calculate the "sort distance" for a {@link MeshInstance},
 * which determines its place in the render order.
 * @param {MeshInstance} meshInstance - The mesh instance.
 * @param {Vec3} cameraPosition - The position of the camera.
 * @param {Vec3} cameraForward - The forward vector of the camera.
 * @returns {void}
 */
/**
 * An instance of a {@link Mesh}. A single mesh can be referenced by many mesh instances that can
 * have different transforms and materials.
 *
 * ### Instancing
 *
 * Hardware instancing lets the GPU draw many copies of the same geometry with a single draw call.
 * Use {@link setInstancing} to attach a vertex buffer that holds per-instance data
 * (for example a mat4 world-matrix for every instance). Set {@link instancingCount}
 * to control how many instances are rendered. Passing `null` to {@link setInstancing}
 * disables instancing once again.
 *
 * ```javascript
 * // vb is a vertex buffer with one 4×4 matrix per instance
 * meshInstance.setInstancing(vb);
 * meshInstance.instancingCount = numInstances;
 * ```
 *
 * **Examples**
 *
 * - {@link https://playcanvas.github.io/#graphics/instancing-basic graphics/instancing-basic}
 * - {@link https://playcanvas.github.io/#graphics/instancing-custom graphics/instancing-custom}
 *
 * ### GPU-Driven Indirect Rendering (WebGPU Only)
 *
 * Instead of issuing draw calls from the CPU, parameters are written into a GPU
 * storage buffer and executed via indirect draw commands. Allocate one or more slots with
 * `GraphicsDevice.getIndirectDrawSlot(count)`, then bind the mesh instance to those slots:
 *
 * ```javascript
 * const slot = app.graphicsDevice.getIndirectDrawSlot(count);
 * meshInstance.setIndirect(null, slot, count); // first arg can be a CameraComponent or null
 * ```
 *
 * **Example**
 *
 * - {@link https://playcanvas.github.io/#compute/indirect-draw compute/indirect-draw}
 *
 * ### Multi-draw
 *
 * Multi-draw lets the engine submit multiple sub-draws with a single API call. On WebGL2 this maps
 * to the `WEBGL_multi_draw` extension; on WebGPU, to indirect multi-draw. Use {@link setMultiDraw}
 * to allocate a {@link DrawCommands} container, fill it with sub-draws using
 * {@link DrawCommands#add} and finalize with {@link DrawCommands#update} whenever the data changes.
 *
 * Support: {@link GraphicsDevice#supportsMultiDraw} is true on WebGPU and commonly true on WebGL2
 * (high coverage). When not supported, the engine can still render by issuing a fast internal loop
 * of single draws using the multi-draw data.
 *
 * ```javascript
 * // two indexed sub-draws from a single mesh
 * const cmd = meshInstance.setMultiDraw(null, 2);
 * cmd.add(0, 36, 1, 0);
 * cmd.add(1, 60, 1, 36);
 * cmd.update(2);
 * ```
 *
 * @category Graphics
 */
declare class MeshInstance {
    static lightmapParamNames: string[];
    /**
     * Sets the render style for an array of mesh instances.
     *
     * @param {MeshInstance[]} meshInstances - The mesh instances to set the render style for.
     * @param {number} renderStyle - The render style to set.
     * @ignore
     */
    static _prepareRenderStyleForArray(meshInstances: MeshInstance[], renderStyle: number): void;
    /**
     * Create a new MeshInstance instance.
     *
     * @param {Mesh} mesh - The graphics mesh to instance.
     * @param {Material} material - The material to use for this mesh instance.
     * @param {GraphNode} [node] - The graph node defining the transform for this instance. This
     * parameter is optional when used with {@link RenderComponent} and will use the node the
     * component is attached to.
     * @example
     * // Create a mesh instance pointing to a 1x1x1 'cube' mesh
     * const mesh = pc.Mesh.fromGeometry(app.graphicsDevice, new pc.BoxGeometry());
     * const material = new pc.StandardMaterial();
     *
     * const meshInstance = new pc.MeshInstance(mesh, material);
     *
     * const entity = new pc.Entity();
     * entity.addComponent('render', {
     *     meshInstances: [meshInstance]
     * });
     *
     * // Add the entity to the scene hierarchy
     * this.app.scene.root.addChild(entity);
     */
    constructor(mesh: Mesh, material: Material, node?: GraphNode);
    /**
     * Enable shadow casting for this mesh instance. Use this property to enable/disable shadow
     * casting without overhead of removing from scene. Note that this property does not add the
     * mesh instance to appropriate list of shadow casters on a {@link Layer}, but allows mesh to
     * be skipped from shadow casting while it is in the list already. Defaults to false.
     *
     * @type {boolean}
     */
    castShadow: boolean;
    /**
     * Specifies a bitmask that controls which shadow cascades a mesh instance contributes
     * to when rendered with a {@link LIGHTTYPE_DIRECTIONAL} light source.
     * This setting is only effective if the {@link castShadow} property is enabled.
     * Defaults to {@link SHADOW_CASCADE_ALL}, which means the mesh casts shadows into all available cascades.
     *
     * @type {number}
     */
    shadowCascadeMask: number;
    /**
     * Controls whether the mesh instance can be culled by frustum culling (see
     * {@link CameraComponent#frustumCulling}). Defaults to true.
     *
     * @type {boolean}
     */
    cull: boolean;
    /**
     * Determines the rendering order of mesh instances. Only used when mesh instances are added to
     * a {@link Layer} with {@link Layer#opaqueSortMode} or {@link Layer#transparentSortMode}
     * (depending on the material) set to {@link SORTMODE_MANUAL}.
     *
     * @type {number}
     */
    drawOrder: number;
    /**
     * @type {number}
     * @ignore
     */
    _drawBucket: number;
    /**
     * The graph node defining the transform for this instance.
     *
     * @type {GraphNode}
     */
    node: GraphNode;
    /**
     * Enable rendering for this mesh instance. Use visible property to enable/disable rendering
     * without overhead of removing from scene. But note that the mesh instance is still in the
     * hierarchy and still in the draw call list.
     *
     * @type {boolean}
     */
    visible: boolean;
    /**
     * Read this value in {@link Scene.EVENT_POSTCULL} event to determine if the object is actually going
     * to be rendered.
     *
     * @type {boolean}
     */
    visibleThisFrame: boolean;
    /**
     * Negative scale batching support.
     *
     * @type {number}
     * @ignore
     */
    flipFacesFactor: number;
    /**
     * @type {GSplatInstance|null}
     * @ignore
     */
    gsplatInstance: GSplatInstance | null;
    /** @ignore */
    id: number;
    /**
     * Custom function used to customize culling (e.g. for 2D UI elements).
     *
     * @type {Function|null}
     * @ignore
     */
    isVisibleFunc: Function | null;
    /**
     * @type {InstancingData|null}
     * @ignore
     */
    instancingData: InstancingData | null;
    /**
     * @type {DrawCommands|null}
     * @ignore
     */
    indirectData: DrawCommands | null;
    /**
     * Map of camera to their corresponding indirect draw data. Lazily allocated.
     *
     * @type {Map<Camera|null, DrawCommands>|null}
     * @ignore
     */
    drawCommands: Map<Camera | null, DrawCommands> | null;
    /**
     * Stores mesh metadata used for indirect rendering. Lazily allocated on first access
     * via getIndirectMetaData().
     *
     * @type {Int32Array|null}
     * @ignore
     */
    meshMetaData: Int32Array | null;
    /**
     * @type {Record<string, {scopeId: ScopeId|null, data: any, passFlags: number}>}
     * @ignore
     */
    parameters: Record<string, {
        scopeId: ScopeId | null;
        data: any;
        passFlags: number;
    }>;
    /**
     * True if the mesh instance is pickable by the {@link Picker}. Defaults to true.
     *
     * @type {boolean}
     * @ignore
     */
    pick: boolean;
    /**
     * The stencil parameters for front faces or null if no stencil is enabled.
     *
     * @type {StencilParameters|null}
     * @ignore
     */
    stencilFront: StencilParameters | null;
    /**
     * The stencil parameters for back faces or null if no stencil is enabled.
     *
     * @type {StencilParameters|null}
     * @ignore
     */
    stencilBack: StencilParameters | null;
    /**
     * True if the material of the mesh instance is transparent. Optimization to avoid accessing
     * the material. Updated by the material instance itself.
     *
     * @ignore
     */
    transparent: boolean;
    /** @private */
    private _aabb;
    /** @private */
    private _aabbVer;
    /** @private */
    private _aabbMeshVer;
    /**
     * @type {BoundingBox|null}
     * @private
     */
    private _customAabb;
    /** @private */
    private _updateAabb;
    /** @private */
    private _updateAabbFunc;
    /**
     * The internal sorting key used by the shadow renderer.
     *
     * @ignore
     */
    _sortKeyShadow: number;
    /**
     * The internal sorting key used by the forward renderer, in case SORTMODE_MATERIALMESH sorting
     * is used.
     *
     * @private
     */
    private _sortKeyForward;
    /**
     * The internal sorting key used by the forward renderer, in case SORTMODE_BACK2FRONT or
     * SORTMODE_FRONT2BACK sorting is used.
     *
     * @ignore
     */
    _sortKeyDynamic: number;
    /** @private */
    private _layer;
    /**
     * @type {Material|null}
     * @private
     */
    private _material;
    /**
     * @type {SkinInstance|null}
     * @private
     */
    private _skinInstance;
    /**
     * @type {MorphInstance|null}
     * @private
     */
    private _morphInstance;
    /** @private */
    private _receiveShadow;
    /** @private */
    private _renderStyle;
    /** @private */
    private _screenSpace;
    /**
     * The cache of shaders, indexed by a hash value.
     *
     * @type {Map<number, ShaderInstance>}
     * @private
     */
    private _shaderCache;
    /**
     * 2 byte toggles, 2 bytes light mask; Default value is no toggles and mask = pc.MASK_AFFECT_DYNAMIC
     *
     * @private
     */
    private _shaderDefs;
    /**
     * @type {CalculateSortDistanceCallback|null}
     * @private
     */
    private _calculateSortDistance;
    _mesh: Mesh;
    /**
     * Sets the material used by this mesh instance.
     *
     * @type {Material}
     */
    set material(material: Material);
    /**
     * Gets the material used by this mesh instance.
     *
     * @type {Material}
     */
    get material(): Material;
    /**
     * Sets the draw bucket for mesh instances. The draw bucket, an integer from 0 to 255 (default
     * 127), serves as the primary sort key for mesh rendering. Meshes are sorted by draw bucket,
     * then by sort mode. This setting is only effective when mesh instances are added to a
     * {@link Layer} with its {@link Layer#opaqueSortMode} or {@link Layer#transparentSortMode}
     * (depending on the material) set to {@link SORTMODE_BACK2FRONT}, {@link SORTMODE_FRONT2BACK},
     * or {@link SORTMODE_MATERIALMESH}.
     *
     * Note: When {@link SORTMODE_BACK2FRONT} is used, a descending sort order is used; otherwise,
     * an ascending sort order is used.
     *
     * @type {number}
     */
    set drawBucket(bucket: number);
    /**
     * Gets the draw bucket for mesh instance.
     *
     * @type {number}
     */
    get drawBucket(): number;
    /**
     * Sets the render style of the mesh instance. Can be:
     *
     * - {@link RENDERSTYLE_SOLID}
     * - {@link RENDERSTYLE_WIREFRAME}
     * - {@link RENDERSTYLE_POINTS}
     *
     * Defaults to {@link RENDERSTYLE_SOLID}.
     *
     * @type {number}
     */
    set renderStyle(renderStyle: number);
    /**
     * Gets the render style of the mesh instance.
     *
     * @type {number}
     */
    get renderStyle(): number;
    /**
     * Sets the graphics mesh being instanced.
     *
     * @type {Mesh}
     */
    set mesh(mesh: Mesh);
    /**
     * Gets the graphics mesh being instanced.
     *
     * @type {Mesh}
     */
    get mesh(): Mesh;
    /**
     * Sets the world space axis-aligned bounding box for this mesh instance.
     *
     * @type {BoundingBox}
     */
    set aabb(aabb: BoundingBox);
    /**
     * Gets the world space axis-aligned bounding box for this mesh instance.
     *
     * @type {BoundingBox}
     */
    get aabb(): BoundingBox;
    /**
     * Clear the internal shader cache.
     *
     * @ignore
     */
    clearShaders(): void;
    /**
     * Returns the shader instance for the specified shader pass and light hash that is compatible
     * with this mesh instance.
     *
     * @param {number} shaderPass - The shader pass index.
     * @param {number} lightHash - The hash value of the lights that are affecting this mesh instance.
     * @param {Scene} scene - The scene.
     * @param {CameraShaderParams} cameraShaderParams - The camera shader parameters.
     * @param {UniformBufferFormat} [viewUniformFormat] - The format of the view uniform buffer.
     * @param {BindGroupFormat} [viewBindGroupFormat] - The format of the view bind group.
     * @param {any} [sortedLights] - Array of arrays of lights.
     * @returns {ShaderInstance} - the shader instance.
     * @ignore
     */
    getShaderInstance(shaderPass: number, lightHash: number, scene: Scene, cameraShaderParams: CameraShaderParams, viewUniformFormat?: UniformBufferFormat, viewBindGroupFormat?: BindGroupFormat, sortedLights?: any): ShaderInstance;
    /**
     * @param {number} shaderDefs - The shader definitions to set.
     * @private
     */
    private _updateShaderDefs;
    /**
     * Sets the callback to calculate sort distance. In some circumstances mesh instances are
     * sorted by a distance calculation to determine their rendering order. Set this callback to
     * override the default distance calculation, which gives the dot product of the camera forward
     * vector and the vector between the camera position and the center of the mesh instance's
     * axis-aligned bounding box. This option can be particularly useful for rendering transparent
     * meshes in a better order than the default.
     *
     * @type {CalculateSortDistanceCallback|null}
     */
    set calculateSortDistance(calculateSortDistance: CalculateSortDistanceCallback | null);
    /**
     * Gets the callback to calculate sort distance.
     *
     * @type {CalculateSortDistanceCallback|null}
     */
    get calculateSortDistance(): CalculateSortDistanceCallback | null;
    /**
     * Sets whether this mesh instance receives shadows cast by other objects.
     *
     * @type {boolean}
     */
    set receiveShadow(val: boolean);
    /**
     * Gets whether this mesh instance receives shadows cast by other objects.
     *
     * @type {boolean}
     */
    get receiveShadow(): boolean;
    /**
     * Sets the batching flag. NOTE(review): presumably marks this instance as managed by the
     * batcher — confirm against the implementation.
     *
     * @type {boolean}
     */
    set batching(val: boolean);
    /**
     * Gets the batching flag.
     *
     * @type {boolean}
     */
    get batching(): boolean;
    /**
     * Sets the skin instance managing skinning of this mesh instance. Set to null if skinning is
     * not used.
     *
     * @type {SkinInstance|null}
     */
    set skinInstance(val: SkinInstance | null);
    /**
     * Gets the skin instance managing skinning of this mesh instance.
     *
     * @type {SkinInstance|null}
     */
    get skinInstance(): SkinInstance | null;
    /**
     * Sets the morph instance managing morphing of this mesh instance. Set to null if morphing is
     * not used.
     *
     * @type {MorphInstance|null}
     */
    set morphInstance(val: MorphInstance | null);
    /**
     * Gets the morph instance managing morphing of this mesh instance.
     *
     * @type {MorphInstance|null}
     */
    get morphInstance(): MorphInstance | null;
    /**
     * Sets the screen-space flag. NOTE(review): presumably marks this instance for screen-space
     * rendering (e.g. UI) — confirm against the implementation.
     *
     * @type {boolean}
     */
    set screenSpace(val: boolean);
    /**
     * Gets the screen-space flag.
     *
     * @type {boolean}
     */
    get screenSpace(): boolean;
    /**
     * Sets the key for this mesh instance. NOTE(review): presumably a sort key recomputed by
     * {@link updateKey} — confirm against the implementation.
     *
     * @type {number}
     */
    set key(val: number);
    /**
     * Gets the key for this mesh instance.
     *
     * @type {number}
     */
    get key(): number;
    /**
     * Sets the mask controlling which {@link LightComponent}s light this mesh instance, which
     * {@link CameraComponent} sees it and in which {@link Layer} it is rendered. Defaults to 1.
     *
     * @type {number}
     */
    set mask(val: number);
    /**
     * Gets the mask controlling which {@link LightComponent}s light this mesh instance, which
     * {@link CameraComponent} sees it and in which {@link Layer} it is rendered.
     *
     * @type {number}
     */
    get mask(): number;
    /**
     * Sets the number of instances when using hardware instancing to render the mesh.
     *
     * @type {number}
     */
    set instancingCount(value: number);
    /**
     * Gets the number of instances when using hardware instancing to render the mesh.
     *
     * @type {number}
     */
    get instancingCount(): number;
    /**
     * Destroys this mesh instance, releasing the resources it holds.
     */
    destroy(): void;
    /**
     * Releases any stored draw commands (see {@link setMultiDraw} and {@link getDrawCommands}).
     */
    destroyDrawCommands(): void;
    /**
     * Test if meshInstance is visible by camera. It requires the frustum of the camera to be up to
     * date, which forward-renderer takes care of. This function should not be called elsewhere.
     *
     * @param {Camera} camera - The camera to test visibility against.
     * @returns {boolean} - True if the mesh instance is visible by the camera, false otherwise.
     * @ignore
     */
    _isVisible(camera: Camera): boolean;
    updateKey(): void;
    /**
     * Sets up {@link MeshInstance} to be rendered using Hardware Instancing.
     * Note that {@link instancingCount} is automatically set to the number of vertices of the
     * vertex buffer when it is provided.
     *
     * @param {VertexBuffer|true|null} vertexBuffer - Vertex buffer to hold per-instance vertex data
     * (usually world matrices). Pass `true` to enable attributeless instancing where the instance
     * index is derived from `gl_InstanceID` / `instance_index` builtins rather than a vertex
     * buffer attribute — the caller must set {@link instancingCount} manually. Pass null to turn
     * off hardware instancing.
     * @param {boolean} [cull] - Whether to perform frustum culling on this instance. If true, the
     * whole instance will be culled by the camera frustum. This often involves setting
     * {@link RenderComponent#customAabb} containing all instances. Defaults to false, which means
     * the whole instance is always rendered.
     */
    setInstancing(vertexBuffer: VertexBuffer | true | null, cull?: boolean): void;
    /**
     * Sets the {@link MeshInstance} to be rendered using indirect rendering, where the GPU,
     * typically using a Compute shader, stores draw call parameters in a buffer.
     * Note that this is only supported on WebGPU, and ignored on other platforms.
     *
     * @param {CameraComponent|null} camera - Camera component to set indirect data for, or
     * null if the indirect slot should be used for all cameras.
     * @param {number} slot - Slot in the buffer to set the draw call parameters. Allocate a slot
     * in the buffer by calling {@link GraphicsDevice#getIndirectDrawSlot}. Pass -1 to disable
     * indirect rendering for the specified camera (or the shared entry when camera is null).
     * @param {number} [count] - Optional number of consecutive slots to use. Defaults to 1.
     */
    setIndirect(camera: CameraComponent | null, slot: number, count?: number): void;
    /**
     * Sets the {@link MeshInstance} to be rendered using multi-draw, where multiple sub-draws are
     * executed with a single draw call.
     *
     * Note: Each call to this method invalidates any previously stored draw command data for the
     * specified camera.
     *
     * @param {CameraComponent|null} camera - Camera component to bind commands to, or null to share
     * across all cameras.
     * @param {number} [maxCount] - Maximum number of sub-draws to allocate. Defaults to 1. Pass 0
     * to disable multi-draw for the specified camera (or the shared entry when camera is null).
     * @returns {DrawCommands|undefined} The commands container to populate with sub-draw commands.
     */
    setMultiDraw(camera: CameraComponent | null, maxCount?: number): DrawCommands | undefined;
    _deleteDrawCommandsKey(key: any): void;
    /**
     * Retrieves the draw commands for a specific camera, or the default commands when none are
     * bound to that camera.
     *
     * @param {Camera} camera - The camera to retrieve commands for.
     * @returns {DrawCommands|undefined} - The draw commands, or undefined.
     * @ignore
     */
    getDrawCommands(camera: Camera): DrawCommands | undefined;
    /**
     * Retrieves the mesh metadata needed for indirect rendering.
     *
     * @returns {Int32Array} - A typed array with 4 elements representing the mesh metadata, which
     * is typically needed when generating indirect draw call parameters using Compute shader. These
     * can be provided to the Compute shader using vec4i uniform. The values are based on
     * {@link Mesh#primitive}, stored in this order: [count, base, baseVertex, 0]. The last value is
     * always zero and is reserved for future use.
     */
    getIndirectMetaData(): Int32Array;
    /**
     * Ensures a material is assigned to this mesh instance. NOTE(review): inferred from the name —
     * confirm against the implementation.
     *
     * @param {any} device - The graphics device.
     * @ignore
     */
    ensureMaterial(device: any): void;
    /**
     * Removes all shader parameters set on this mesh instance (see {@link setParameter}).
     */
    clearParameters(): void;
    /**
     * Returns the shader parameters set on this mesh instance, keyed by parameter name.
     *
     * @ignore
     */
    getParameters(): Record<string, {
        scopeId: ScopeId | null;
        data: any;
        passFlags: number;
    }>;
    /**
     * Retrieves the specified shader parameter from a mesh instance.
     *
     * @param {string} name - The name of the parameter to query.
     * @returns {object} The named parameter.
     */
    getParameter(name: string): object;
    /**
     * Sets a shader parameter on a mesh instance. Note that this parameter will take precedence
     * over parameter of the same name if set on Material this mesh instance uses for rendering.
     *
     * @param {string} name - The name of the parameter to set.
     * @param {number|number[]|Texture|Float32Array} data - The value for the specified parameter.
     * @param {number} [passFlags] - Mask describing which passes the material should be included
     * in. Defaults to 0xFFFFFFFF (all passes).
     */
    setParameter(name: string, data: number | number[] | Texture | Float32Array, passFlags?: number): void;
    /**
     * A wrapper over {@link setParameter} specifically for realtime baked lightmaps. This handles
     * reference counting of lightmaps and releases them when no longer referenced.
     *
     * @param {string} name - The name of the parameter to set.
     * @param {Texture|null} texture - The lightmap texture to set.
     * @ignore
     */
    setRealtimeLightmap(name: string, texture: Texture | null): void;
    /**
     * Deletes a shader parameter on a mesh instance.
     *
     * @param {string} name - The name of the parameter to delete.
     */
    deleteParameter(name: string): void;
    /**
     * Used to apply parameters from this mesh instance into scope of uniforms, called internally
     * by forward-renderer.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {number} passFlag - The pass flag for the current render pass.
     * @ignore
     */
    setParameters(device: GraphicsDevice, passFlag: number): void;
    /**
     * @param {boolean} value - True to enable lightmapped rendering, false to disable.
     * @ignore
     */
    setLightmapped(value: boolean): void;
    /**
     * @param {BoundingBox|null} aabb - The custom axis-aligned bounding box or null to reset to
     * the mesh's bounding box.
     * @ignore
     */
    setCustomAabb(aabb: BoundingBox | null): void;
    /** @private */
    private _setupSkinUpdate;
}

/**
 * Internal data structure used to store data used by hardware instancing.
 *
 * @ignore
 */
declare class InstancingData {
    /**
     * @param {number} numObjects - The number of objects instanced.
     */
    constructor(numObjects: number);
    /** @type {VertexBuffer|null} */
    vertexBuffer: VertexBuffer | null;
    /**
     * True if the vertex buffer is destroyed when the mesh instance is destroyed.
     *
     * @type {boolean}
     */
    _destroyVertexBuffer: boolean;
    /**
     * The number of objects instanced. NOTE(review): presumably initialized from the `numObjects`
     * constructor argument — confirm in the implementation.
     *
     * @type {number}
     */
    count: number;
    /**
     * Frees resources held by this instancing data.
     */
    destroy(): void;
}

/**
 * Internal helper class for storing the shader and related mesh bind group in the shader cache.
 *
 * @ignore
 */
declare class ShaderInstance {
    /**
     * A shader.
     *
     * @type {Shader|undefined}
     */
    shader: Shader | undefined;
    /**
     * A bind group storing mesh textures / samplers for the shader, but not the uniform buffer.
     *
     * @type {BindGroup|null}
     */
    bindGroup: BindGroup | null;
    /**
     * A uniform buffer storing mesh uniforms for the shader.
     *
     * @type {UniformBuffer|null}
     */
    uniformBuffer: UniformBuffer | null;
    /**
     * The full array of hashes used to lookup the pipeline, used in case of hash collision.
     *
     * @type {Uint32Array}
     */
    hashes: Uint32Array;
    /**
     * Returns the mesh bind group for the shader.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @returns {BindGroup} - The mesh bind group.
     */
    getBindGroup(device: GraphicsDevice): BindGroup;
    /**
     * Returns the uniform buffer for the shader.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @returns {UniformBuffer} - The uniform buffer.
     */
    getUniformBuffer(device: GraphicsDevice): UniformBuffer;
    /**
     * Destroys this shader instance, releasing the resources it holds (the bind group and
     * uniform buffer above). NOTE(review): inferred from the owned members — confirm against the
     * implementation.
     */
    destroy(): void;
}

/**
 * A light.
 *
 * @ignore
 */
declare class Light {
    /**
     * Get conversion factor for luminance -> light specific light unit.
     *
     * @param {number} type - The type of light.
     * @param {number} [outerAngle] - The outer angle of a spot light.
     * @param {number} [innerAngle] - The inner angle of a spot light.
     * @returns {number} The scaling factor to multiply with the luminance value.
     */
    static getLightUnitConversion(type: number, outerAngle?: number, innerAngle?: number): number;
    /**
     * @param {GraphicsDevice} graphicsDevice - The graphics device.
     * @param {boolean} clusteredLighting - True if the clustered lighting is enabled.
     */
    constructor(graphicsDevice: GraphicsDevice, clusteredLighting: boolean);
    /**
     * The Layers the light is on.
     *
     * @type {Set<Layer>}
     */
    layers: Set<Layer>;
    /**
     * True if the clustered lighting is enabled.
     *
     * @type {boolean}
     */
    clusteredLighting: boolean;
    /**
     * The depth state used when rendering the shadow map.
     *
     * @type {DepthState}
     */
    shadowDepthState: DepthState;
    /**
     * The flags used for clustered lighting. Stored as a bitfield, updated as properties change to
     * avoid those being updated each frame.
     *
     * @type {number}
     * @ignore
     */
    clusteredFlags: number;
    /**
     * Storage data for light properties encoded as a Uint32Array.
     *
     * @type {Uint32Array}
     * @ignore
     */
    clusteredData: Uint32Array;
    /**
     * Alias for clusteredData using 16bit unsigned integers.
     *
     * @type {Uint16Array}
     * @ignore
     */
    clusteredData16: Uint16Array;
    /**
     * Event handle for device restored event.
     *
     * @type {EventHandle|null}
     * @private
     */
    private _evtDeviceRestored;
    // Internal state. Underscore-prefixed members are implementation details surfaced by this
    // generated declaration; prefer the public accessors declared further below.
    device: GraphicsDevice;
    id: number;
    _type: number;
    _color: Color;
    _intensity: number;
    _affectSpecularity: boolean;
    _luminance: number;
    _castShadows: boolean;
    _enabled: boolean;
    _mask: number;
    isStatic: boolean;
    key: number;
    bakeDir: boolean;
    bakeNumSamples: number;
    bakeArea: number;
    attenuationStart: number;
    attenuationEnd: number;
    _falloffMode: number;
    _shadowType: number;
    _vsmBlurSize: number;
    vsmBlurMode: number;
    vsmBias: number;
    _cookie: any;
    cookieIntensity: number;
    _cookieFalloff: boolean;
    _cookieChannel: string;
    _cookieTransform: any;
    _cookieTransformUniform: Float32Array<ArrayBuffer>;
    _cookieOffset: any;
    _cookieOffsetUniform: Float32Array<ArrayBuffer>;
    _cookieTransformSet: boolean;
    _cookieOffsetSet: boolean;
    _innerConeAngle: number;
    _outerConeAngle: number;
    cascades: any;
    _shadowMatrixPalette: Float32Array<ArrayBuffer>;
    _shadowCascadeDistances: Float32Array<ArrayBuffer>;
    set numCascades(value: any);
    get numCascades(): any;
    _cascadeBlend: number;
    cascadeDistribution: number;
    _shape: number;
    _colorLinear: Float32Array<ArrayBuffer>;
    _position: Vec3;
    _direction: Vec3;
    _innerConeAngleCos: number;
    _usePhysicalUnits: any;
    _shadowMap: any;
    _shadowRenderParams: any[];
    _shadowCameraParams: any[];
    shadowDistance: number;
    _shadowResolution: number;
    _shadowBias: number;
    _shadowIntensity: number;
    _normalOffsetBias: number;
    shadowUpdateMode: number;
    shadowUpdateOverrides: any;
    _isVsm: boolean;
    _isPcf: boolean;
    _softShadowParams: Float32Array<ArrayBuffer>;
    set shadowSamples(value: number);
    get shadowSamples(): number;
    set shadowBlockerSamples(value: number);
    get shadowBlockerSamples(): number;
    set penumbraSize(value: any);
    get penumbraSize(): any;
    set penumbraFalloff(value: number);
    get penumbraFalloff(): number;
    _cookieMatrix: Mat4;
    _atlasViewport: Vec4;
    atlasViewportAllocated: boolean;
    atlasVersion: number;
    atlasSlotIndex: number;
    atlasSlotUpdated: boolean;
    _node: any;
    _renderData: any[];
    visibleThisFrame: boolean;
    maxScreenSize: number;
    /**
     * Destroys this light, releasing the resources it holds.
     */
    destroy(): void;
    onDeviceRestored(): void;
    releaseRenderData(): void;
    addLayer(layer: any): void;
    removeLayer(layer: any): void;
    set shadowBias(value: number);
    get shadowBias(): number;
    set cascadeBlend(value: number);
    get cascadeBlend(): number;
    set shadowMap(shadowMap: any);
    get shadowMap(): any;
    set mask(value: number);
    get mask(): number;
    get numShadowFaces(): any;
    set type(value: number);
    get type(): number;
    set shadowType(value: number);
    get shadowType(): number;
    set shape(value: number);
    get shape(): number;
    set usePhysicalUnits(value: any);
    get usePhysicalUnits(): any;
    set enabled(value: boolean);
    get enabled(): boolean;
    set castShadows(value: boolean);
    get castShadows(): boolean;
    set shadowIntensity(value: number);
    get shadowIntensity(): number;
    get bakeShadows(): boolean;
    set shadowResolution(value: number);
    get shadowResolution(): number;
    set vsmBlurSize(value: number);
    get vsmBlurSize(): number;
    set normalOffsetBias(value: number);
    get normalOffsetBias(): number;
    set falloffMode(value: number);
    get falloffMode(): number;
    set innerConeAngle(value: number);
    get innerConeAngle(): number;
    set outerConeAngle(value: number);
    get outerConeAngle(): number;
    _penumbraSize: any;
    _updateOuterAngle(angle: any): void;
    _outerConeAngleCos: number;
    _outerConeAngleSin: number;
    set intensity(value: number);
    get intensity(): number;
    set affectSpecularity(value: boolean);
    get affectSpecularity(): boolean;
    set luminance(value: number);
    get luminance(): number;
    get cookieMatrix(): Mat4;
    get atlasViewport(): Vec4;
    set cookie(value: any);
    get cookie(): any;
    set cookieFalloff(value: boolean);
    get cookieFalloff(): boolean;
    set cookieChannel(value: string);
    get cookieChannel(): string;
    set cookieTransform(value: any);
    get cookieTransform(): any;
    set cookieOffset(value: any);
    get cookieOffset(): any;
    beginFrame(): void;
    _destroyShadowMap(): void;
    getRenderData(camera: any, face: any): any;
    /**
     * Duplicates a light node but does not 'deep copy' the hierarchy.
     *
     * @returns {Light} A cloned Light.
     */
    clone(): Light;
    _getUniformBiasValues(lightRenderData: any): {
        bias: number;
        normalBias: number;
    };
    getColor(): Color;
    getBoundingSphere(sphere: any): void;
    getBoundingBox(box: any): void;
    _updateShadowBias(): void;
    _updateLinearColor(): void;
    setColor(...args: any[]): void;
    layersDirty(): void;
    /**
     * Updates an integer key for the light. The key is used to identify all shader related features
     * of the light, and so needs to have all properties that modify the generated shader encoded.
     * Properties without an effect on the shader (color, shadow intensity) should not be encoded.
     */
    updateKey(): void;
    /**
     * Updates 32bit flags used by the clustered lighting. This only stores constant data.
     * Note: this needs to match shader code in clusteredLight.js
     */
    updateClusteredFlags(): void;
    /**
     * Adds per-frame dynamic data to the 32bit flags used by the clustered lighting.
     */
    getClusteredFlags(castShadows: any, useCookie: any): number;
    updateClusterData(updateColor: any, updateAngles: any): void;
}

/**
 * @import { Light } from '../../../scene/light.js'
 */
/**
 * Plain data container backing a LightComponent. Fields mirror the identically named
 * LightComponent properties.
 *
 * @ignore
 */
declare class LightComponentData {
    enabled: boolean;
    /** @type {Light} */
    light: Light;
    type: string;
    color: Color;
    intensity: number;
    luminance: number;
    shape: number;
    affectSpecularity: boolean;
    castShadows: boolean;
    shadowDistance: number;
    shadowIntensity: number;
    shadowResolution: number;
    shadowBias: number;
    numCascades: number;
    cascadeBlend: number;
    bakeNumSamples: number;
    bakeArea: number;
    cascadeDistribution: number;
    normalOffsetBias: number;
    range: number;
    innerConeAngle: number;
    outerConeAngle: number;
    falloffMode: number;
    shadowType: number;
    vsmBlurSize: number;
    vsmBlurMode: number;
    vsmBias: number;
    cookieAsset: any;
    cookie: any;
    cookieIntensity: number;
    cookieFalloff: boolean;
    cookieChannel: string;
    cookieAngle: number;
    cookieScale: any;
    cookieOffset: any;
    shadowUpdateMode: number;
    mask: number;
    affectDynamic: boolean;
    affectLightmapped: boolean;
    bake: boolean;
    bakeDir: boolean;
    isStatic: boolean;
    layers: number[];
    penumbraSize: number;
    penumbraFalloff: number;
    shadowSamples: number;
    shadowBlockerSamples: number;
}

/**
 * @import { Color } from '../../../core/math/color.js'
 * @import { EventHandle } from '../../../core/event-handle.js'
 * @import { LightComponentData } from './data.js'
 * @import { Light } from '../../../scene/light.js'
 * @import { Texture } from '../../../platform/graphics/texture.js'
 * @import { Vec2 } from '../../../core/math/vec2.js'
 */
/**
 * The LightComponent enables an {@link Entity} to light the scene. There are three types of light:
 *
 * - `directional`: A global light that emits light in the direction of the negative y-axis of the
 * owner entity. Emulates light sources that appear to be infinitely far away such as the sun. The
 * owner entity's position is effectively ignored.
 * - `omni`: A local light that emits light in all directions from the owner entity's position.
 * Emulates candles, lamps, bulbs, etc.
 * - `spot`: A local light that emits light similarly to an omni light but is bounded by a cone
 * centered on the owner entity's negative y-axis. Emulates flashlights, spotlights, etc.
 *
 * You should never need to use the LightComponent constructor directly. To add a LightComponent
 * to an {@link Entity}, use {@link Entity#addComponent}:
 *
 * ```javascript
 * const entity = new pc.Entity();
 * entity.addComponent('light', {
 *     type: 'omni',
 *     color: new pc.Color(1, 0, 0),
 *     intensity: 2
 * });
 * ```
 *
 * Once the LightComponent is added to the entity, you can access it via the {@link Entity#light}
 * property:
 *
 * ```javascript
 * entity.light.intensity = 3; // Set the intensity of the light
 *
 * console.log(entity.light.intensity); // Get the intensity of the light
 * ```
 *
 * Relevant Engine API examples:
 *
 * - [Area Lights](https://playcanvas.github.io/#/graphics/area-lights)
 * - [Clustered Area Lights](https://playcanvas.github.io/#/graphics/clustered-area-lights)
 * - [Clustered Lighting](https://playcanvas.github.io/#/graphics/clustered-lighting)
 * - [Clustered Omni Shadows](https://playcanvas.github.io/#/graphics/clustered-omni-shadows)
 * - [Clustered Spot Shadows](https://playcanvas.github.io/#/graphics/clustered-spot-shadows)
 * - [Lights](https://playcanvas.github.io/#/graphics/lights)
 *
 * @hideconstructor
 * @category Graphics
 */
declare class LightComponent extends Component {
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayersChanged;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayerAdded;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayerRemoved;
    /** @private */
    private _cookieAsset;
    /** @private */
    private _cookieAssetId;
    /** @private */
    private _cookieAssetAdd;
    /** @private */
    private _cookieMatrix;
    /**
     * @type {LightComponentData}
     * @ignore
     */
    get data(): LightComponentData;
    /**
     * @type {Light}
     * @ignore
     */
    set light(arg: Light);
    /**
     * @type {Light}
     * @ignore
     */
    get light(): Light;
    /**
     * Sets the type of the light. Can be:
     *
     * - "directional": A light that is infinitely far away and lights the entire scene from one
     * direction.
     * - "omni": An omni-directional light that illuminates in all directions from the light source.
     * - "spot": An omni-directional light but is bounded by a cone.
     *
     * Defaults to "directional".
     *
     * @type {string}
     */
    set type(arg: string);
    /**
     * Gets the type of the light.
     *
     * @type {string}
     */
    get type(): string;
    /**
     * Sets the color of the light. The alpha component of the color is ignored. Defaults to white
     * (`[1, 1, 1]`).
     *
     * @type {Color}
     */
    set color(arg: Color);
    /**
     * Gets the color of the light.
     *
     * @type {Color}
     */
    get color(): Color;
    /**
     * Sets the brightness of the light. Defaults to 1.
     *
     * @type {number}
     */
    set intensity(arg: number);
    /**
     * Gets the brightness of the light.
     *
     * @type {number}
     */
    get intensity(): number;
    /**
     * Sets the physically-based luminance. Only used if `scene.physicalUnits` is true. Defaults to 0.
     *
     * @type {number}
     */
    set luminance(arg: number);
    /**
     * Gets the physically-based luminance.
     *
     * @type {number}
     */
    get luminance(): number;
    /**
     * Sets the light source shape. Can be:
     *
     * - {@link LIGHTSHAPE_PUNCTUAL}: Infinitesimally small point.
     * - {@link LIGHTSHAPE_RECT}: Rectangle shape.
     * - {@link LIGHTSHAPE_DISK}: Disk shape.
     * - {@link LIGHTSHAPE_SPHERE}: Sphere shape.
     *
     * Defaults to {@link LIGHTSHAPE_PUNCTUAL}.
     *
     * @type {number}
     */
    set shape(arg: number);
    /**
     * Gets the light source shape.
     *
     * @type {number}
     */
    get shape(): number;
    /**
     * Sets whether material specularity will be affected by this light. Ignored for lights other
     * than {@link LIGHTTYPE_DIRECTIONAL}. Defaults to true.
     *
     * @type {boolean}
     */
    set affectSpecularity(arg: boolean);
    /**
     * Gets whether material specularity will be affected by this light.
     *
     * @type {boolean}
     */
    get affectSpecularity(): boolean;
    /**
     * Sets whether the light will cast shadows. Defaults to false.
     *
     * @type {boolean}
     */
    set castShadows(arg: boolean);
    /**
     * Gets whether the light will cast shadows.
     *
     * @type {boolean}
     */
    get castShadows(): boolean;
    /**
     * Sets the distance from the viewpoint beyond which shadows are no longer rendered. Affects
     * directional lights only. Defaults to 40.
     *
     * @type {number}
     */
    set shadowDistance(arg: number);
    /**
     * Gets the distance from the viewpoint beyond which shadows are no longer rendered.
     *
     * @type {number}
     */
    get shadowDistance(): number;
    /**
     * Sets the intensity of the shadow darkening. 0 having no effect and 1 meaning shadows are
     * entirely black. Defaults to 1.
     *
     * @type {number}
     */
    set shadowIntensity(arg: number);
    /**
     * Gets the intensity of the shadow darkening.
     *
     * @type {number}
     */
    get shadowIntensity(): number;
    /**
     * Sets the size of the texture used for the shadow map. Valid sizes are 64, 128, 256, 512,
     * 1024, 2048. Defaults to 1024.
     *
     * @type {number}
     */
    set shadowResolution(arg: number);
    /**
     * Gets the size of the texture used for the shadow map.
     *
     * @type {number}
     */
    get shadowResolution(): number;
    /**
     * Set the depth bias for tuning the appearance of the shadow mapping generated by this light. Valid
     * range is 0 to 1. Defaults to 0.05.
     *
     * @type {number}
     */
    set shadowBias(arg: number);
    /**
     * Get the depth bias for tuning the appearance of the shadow mapping generated by this light.
     *
     * @type {number}
     */
    get shadowBias(): number;
    /**
     * Sets the number of shadow cascades. Can be 1, 2, 3 or 4. Defaults to 1, representing no
     * cascades.
     *
     * @type {number}
     */
    set numCascades(arg: number);
    /**
     * Gets the number of shadow cascades.
     *
     * @type {number}
     */
    get numCascades(): number;
    /**
     * Sets the blend factor for cascaded shadow maps, defining the fraction of each cascade level
     * used for blending between adjacent cascades. The value should be between 0 and 1, with
     * a default of 0, which disables blending between cascades.
     *
     * @type {number}
     */
    set cascadeBlend(value: number);
    /**
     * Gets the blend factor for cascaded shadow maps.
     *
     * @type {number}
     */
    get cascadeBlend(): number;
    /**
     * Sets the number of samples used to bake this light into the lightmap. Defaults to 1. Maximum
     * value is 255.
     *
     * @type {number}
     */
    set bakeNumSamples(arg: number);
    /**
     * Gets the number of samples used to bake this light into the lightmap.
     *
     * @type {number}
     */
    get bakeNumSamples(): number;
    /**
     * Sets the penumbra angle in degrees, allowing for a soft shadow boundary. Defaults to 0.
     * Requires `bake` to be set to true and the light type is {@link LIGHTTYPE_DIRECTIONAL}.
     *
     * @type {number}
     */
    set bakeArea(arg: number);
    /**
     * Gets the penumbra angle in degrees.
     *
     * @type {number}
     */
    get bakeArea(): number;
    /**
     * Sets the distribution of subdivision of the camera frustum for individual shadow cascades.
     * Only used if {@link LightComponent#numCascades} is larger than 1. Can be a value in range of
     * 0 and 1. Value of 0 represents a linear distribution, value of 1 represents a logarithmic
     * distribution. Defaults to 0.5. Larger value increases the resolution of the shadows in the
     * near distance.
     *
     * @type {number}
     */
    set cascadeDistribution(arg: number);
    /**
     * Gets the distribution of subdivision of the camera frustum for individual shadow cascades.
     *
     * @type {number}
     */
    get cascadeDistribution(): number;
    /**
     * Sets the normal offset depth bias. Valid range is 0 to 1. Defaults to 0.
     *
     * @type {number}
     */
    set normalOffsetBias(arg: number);
    /**
     * Gets the normal offset depth bias.
     *
     * @type {number}
     */
    get normalOffsetBias(): number;
    /**
     * Sets the range of the light. Affects omni and spot lights only. Defaults to 10.
     *
     * @type {number}
     */
    set range(arg: number);
    /**
     * Gets the range of the light.
     *
     * @type {number}
     */
    get range(): number;
    /**
     * Sets the angle at which the spotlight cone starts to fade off. The angle is specified in
     * degrees. Affects spot lights only. Defaults to 40.
     *
     * @type {number}
     */
    set innerConeAngle(arg: number);
    /**
     * Gets the angle at which the spotlight cone starts to fade off.
     *
     * @type {number}
     */
    get innerConeAngle(): number;
    /**
     * Sets the angle at which the spotlight cone has faded to nothing. The angle is specified in
     * degrees. Affects spot lights only. Defaults to 45.
     *
     * @type {number}
     */
    set outerConeAngle(arg: number);
    /**
     * Gets the angle at which the spotlight cone has faded to nothing.
     *
     * @type {number}
     */
    get outerConeAngle(): number;
    /**
     * Sets the fall off mode for the light. This controls the rate at which a light attenuates
     * from its position. Can be:
     *
     * - {@link LIGHTFALLOFF_LINEAR}: Linear.
     * - {@link LIGHTFALLOFF_INVERSESQUARED}: Inverse squared.
     *
     * Affects omni and spot lights only. Defaults to {@link LIGHTFALLOFF_LINEAR}.
     *
     * @type {number}
     */
    set falloffMode(arg: number);
    /**
     * Gets the fall off mode for the light.
     *
     * @type {number}
     */
    get falloffMode(): number;
    /**
     * Sets the type of shadows being rendered by this light. Can be:
     *
     * - {@link SHADOW_PCF1_32F}
     * - {@link SHADOW_PCF3_32F}
     * - {@link SHADOW_PCF5_32F}
     * - {@link SHADOW_PCF1_16F}
     * - {@link SHADOW_PCF3_16F}
     * - {@link SHADOW_PCF5_16F}
     * - {@link SHADOW_VSM_16F}
     * - {@link SHADOW_VSM_32F}
     * - {@link SHADOW_PCSS_32F}
     *
     * @type {number}
     */
    set shadowType(arg: number);
    /**
     * Gets the type of shadows being rendered by this light.
     *
     * @type {number}
     */
    get shadowType(): number;
    /**
     * Sets the number of samples used for blurring a variance shadow map. Only uneven numbers
     * work, even are incremented. Minimum value is 1, maximum is 25. Defaults to 11.
     *
     * @type {number}
     */
    set vsmBlurSize(arg: number);
    /**
     * Gets the number of samples used for blurring a variance shadow map.
     *
     * @type {number}
     */
    get vsmBlurSize(): number;
    /**
     * Sets the blurring mode for variance shadow maps. Can be:
     *
     * - {@link BLUR_BOX}: Box filter.
     * - {@link BLUR_GAUSSIAN}: Gaussian filter. May look smoother than box, but requires more samples.
     *
     * @type {number}
     */
    set vsmBlurMode(arg: number);
    /**
     * Gets the blurring mode for variance shadow maps.
     *
     * @type {number}
     */
    get vsmBlurMode(): number;
    /**
     * Sets the VSM bias value.
     *
     * @type {number}
     */
    set vsmBias(arg: number);
    /**
     * Gets the VSM bias value.
     *
     * @type {number}
     */
    get vsmBias(): number;
    /**
     * Sets the texture asset to be used as the cookie for this light. Only spot and omni lights can
     * have cookies. Defaults to null.
     *
     * @type {number|null}
     */
    set cookieAsset(arg: number | null);
    /**
     * Gets the texture asset to be used as the cookie for this light.
     *
     * @type {number|null}
     */
    get cookieAsset(): number | null;
    /**
     * Sets the texture to be used as the cookie for this light. Only spot and omni lights can have
     * cookies. Defaults to null.
     *
     * @type {Texture|null}
     */
    set cookie(arg: Texture | null);
    /**
     * Gets the texture to be used as the cookie for this light.
     *
     * @type {Texture|null}
     */
    get cookie(): Texture | null;
    /**
     * Sets the cookie texture intensity. Defaults to 1.
     *
     * @type {number}
     */
    set cookieIntensity(arg: number);
    /**
     * Gets the cookie texture intensity.
     *
     * @type {number}
     */
    get cookieIntensity(): number;
    /**
     * Sets whether normal spotlight falloff is active when a cookie texture is set. When set to
     * false, a spotlight will work like a pure texture projector (only fading with distance).
     * Default is false.
     *
     * @type {boolean}
     */
    set cookieFalloff(arg: boolean);
    /**
     * Gets whether normal spotlight falloff is active when a cookie texture is set.
     *
     * @type {boolean}
     */
    get cookieFalloff(): boolean;
    /**
     * Sets the color channels of the cookie texture to use. Can be "r", "g", "b", "a", "rgb".
     *
     * @type {string}
     */
    set cookieChannel(arg: string);
    /**
     * Gets the color channels of the cookie texture to use.
     *
     * @type {string}
     */
    get cookieChannel(): string;
    /**
     * Sets the angle for spotlight cookie rotation (in degrees).
     *
     * @type {number}
     */
    set cookieAngle(arg: number);
    /**
     * Gets the angle for spotlight cookie rotation (in degrees).
     *
     * @type {number}
     */
    get cookieAngle(): number;
    /**
     * Sets the spotlight cookie scale.
     *
     * @type {Vec2|null}
     */
    set cookieScale(arg: Vec2 | null);
    /**
     * Gets the spotlight cookie scale.
     *
     * @type {Vec2|null}
     */
    get cookieScale(): Vec2 | null;
    /**
     * Sets the spotlight cookie position offset.
     *
     * @type {Vec2|null}
     */
    set cookieOffset(arg: Vec2 | null);
    /**
     * Gets the spotlight cookie position offset.
     *
     * @type {Vec2|null}
     */
    get cookieOffset(): Vec2 | null;
    /**
     * Sets the shadow update model. This tells the renderer how often shadows must be updated for
     * this light. Can be:
     *
     * - {@link SHADOWUPDATE_NONE}: Don't render shadows.
     * - {@link SHADOWUPDATE_THISFRAME}: Render shadows only once (then automatically switches
     * to {@link SHADOWUPDATE_NONE}).
     * - {@link SHADOWUPDATE_REALTIME}: Render shadows every frame (default).
     *
     * @type {number}
     */
    set shadowUpdateMode(arg: number);
    /**
     * Gets the shadow update model.
     *
     * @type {number}
     */
    get shadowUpdateMode(): number;
    /**
     * Sets the mask to determine which {@link MeshInstance}s are lit by this light. Defaults to 1.
     *
     * @type {number}
     */
    set mask(arg: number);
    /**
     * Gets the mask to determine which {@link MeshInstance}s are lit by this light.
     *
     * @type {number}
     */
    get mask(): number;
    /**
     * Sets whether the light will affect non-lightmapped objects.
     *
     * @type {boolean}
     */
    set affectDynamic(arg: boolean);
    /**
     * Gets whether the light will affect non-lightmapped objects.
     *
     * @type {boolean}
     */
    get affectDynamic(): boolean;
    /**
     * Sets whether the light will affect lightmapped objects.
     *
     * @type {boolean}
     */
    set affectLightmapped(arg: boolean);
    /**
     * Gets whether the light will affect lightmapped objects.
     *
     * @type {boolean}
     */
    get affectLightmapped(): boolean;
    /**
     * Sets whether the light will be rendered into lightmaps.
     *
     * @type {boolean}
     */
    set bake(arg: boolean);
    /**
     * Gets whether the light will be rendered into lightmaps.
     *
     * @type {boolean}
     */
    get bake(): boolean;
    /**
     * Sets whether the light's direction will contribute to directional lightmaps. The light must
     * be enabled and `bake` set to true. Be aware that a directional lightmap is an approximation
     * and can only hold a single direction per pixel. Intersecting multiple lights with bakeDir=true
     * may lead to an incorrect look of specular/bump-mapping in the area of intersection. The error
     * is not always visible though, and is highly scene-dependent.
     *
     * @type {boolean}
     */
    set bakeDir(arg: boolean);
    /**
     * Gets whether the light's direction will contribute to directional lightmaps.
     *
     * @type {boolean}
     */
    get bakeDir(): boolean;
    /**
     * Sets whether the light ever moves. This is an optimization hint.
     *
     * @type {boolean}
     */
    set isStatic(arg: boolean);
    /**
     * Gets whether the light ever moves.
     *
     * @type {boolean}
     */
    get isStatic(): boolean;
    /**
     * Sets the array of layer IDs ({@link Layer#id}) to which this light should belong. Don't
     * push/pop/splice or modify this array. If you want to change it, set a new one instead.
     *
     * @type {number[]}
     */
    set layers(arg: number[]);
    /**
     * Gets the array of layer IDs ({@link Layer#id}) to which this light should belong.
     *
     * @type {number[]}
     */
    get layers(): number[];
    /**
     * Sets an array of SHADOWUPDATE_ settings per shadow cascade. Set to undefined if not used.
     *
     * @type {number[] | null}
     */
    set shadowUpdateOverrides(values: number[] | null);
    /**
     * Gets an array of SHADOWUPDATE_ settings per shadow cascade.
     *
     * @type {number[] | null}
     */
    get shadowUpdateOverrides(): number[] | null;
    /**
     * Sets the number of shadow samples used for soft shadows when the shadow type is
     * {@link SHADOW_PCSS_32F}. This value must be a positive whole number starting at 1. Higher
     * values result in smoother shadows but can significantly decrease performance. Defaults to 16.
     *
     * @type {number}
     */
    set shadowSamples(value: number);
    /**
     * Gets the number of shadow samples used for soft shadows.
     *
     * @type {number}
     */
    get shadowSamples(): number;
    /**
     * Sets the number of blocker samples used for soft shadows when the shadow type is
     * {@link SHADOW_PCSS_32F}. These samples are used to estimate the distance between the shadow
     * caster and the shadow receiver, which is then used for the estimation of contact hardening in
     * the shadow. This value must be a positive whole number starting at 0. Higher values improve
     * shadow quality by considering more occlusion points, but can decrease performance. When set
     * to 0, contact hardening is disabled and the shadow has constant softness. Defaults to 16. Note
     * that this value can be lower than shadowSamples to optimize performance, often without large
     * impact on quality.
     *
     * @type {number}
     */
    set shadowBlockerSamples(value: number);
    /**
     * Gets the number of blocker samples used for contact hardening shadows.
     *
     * @type {number}
     */
    get shadowBlockerSamples(): number;
    /**
     * Sets the size of penumbra for contact hardening shadows. For area lights, acts as a
     * multiplier with the dimensions of the area light. For punctual and directional lights it's
     * the area size of the light. Defaults to 1.
     *
     * @type {number}
     */
    set penumbraSize(value: number);
    /**
     * Gets the size of penumbra for contact hardening shadows.
     *
     * @type {number}
     */
    get penumbraSize(): number;
    /**
     * Sets the falloff rate for shadow penumbra for contact hardening shadows. This is a value larger
     * than or equal to 1. This parameter determines how quickly the shadow softens with distance.
     * Higher values result in a faster softening of the shadow, while lower values produce a more
     * gradual transition. Defaults to 1.
     *
     * @type {number}
     */
    set penumbraFalloff(value: number);
    /**
     * Gets the falloff rate for shadow penumbra for contact hardening shadows.
     *
     * @type {number}
     */
    get penumbraFalloff(): number;
    /** @ignore */
    _setValue(name: any, value: any, setFunc: any, skipEqualsCheck: any): void;
    addLightToLayers(): void;
    removeLightFromLayers(): void;
    onLayersChanged(oldComp: any, newComp: any): void;
    onLayerAdded(layer: any): void;
    onLayerRemoved(layer: any): void;
    refreshProperties(): void;
    onCookieAssetSet(): void;
    onCookieAssetAdd(asset: any): void;
    onCookieAssetLoad(): void;
    onCookieAssetRemove(): void;
    onRemove(): void;
}

/**
 * A Layer represents a renderable subset of the scene. It can contain a list of mesh instances,
 * lights and cameras, their render settings and also defines custom callbacks before, after or
 * during rendering. Layers are organized inside {@link LayerComposition} in a desired order.
 *
 * @category Graphics
 */
declare class Layer {
    /**
     * Create a new Layer instance.
     *
     * @param {object} options - Object for passing optional arguments. These arguments are the
     * same as properties of the Layer.
     */
    constructor(options?: object);
    /**
     * Mesh instances assigned to this layer.
     *
     * @type {MeshInstance[]}
     * @ignore
     */
    meshInstances: MeshInstance[];
    /**
     * Mesh instances assigned to this layer, stored in a set.
     *
     * @type {Set<MeshInstance>}
     * @ignore
     */
    meshInstancesSet: Set<MeshInstance>;
    /**
     * Shadow casting instances assigned to this layer.
     *
     * @type {MeshInstance[]}
     * @ignore
     */
    shadowCasters: MeshInstance[];
    /**
     * Shadow casting instances assigned to this layer, stored in a set.
     *
     * @type {Set<MeshInstance>}
     * @ignore
     */
    shadowCastersSet: Set<MeshInstance>;
    /**
     * Visible (culled) mesh instances assigned to this layer. Looked up by the Camera.
     *
     * @type {WeakMap<Camera, CulledInstances>}
     * @private
     */
    private _visibleInstances;
    /**
     * All lights assigned to a layer.
     *
     * @type {Light[]}
     * @private
     */
    private _lights;
    /**
     * All lights assigned to a layer stored in a set.
     *
     * @type {Set<Light>}
     * @private
     */
    private _lightsSet;
    /**
     * Set of light used by clustered lighting (omni and spot, but no directional).
     *
     * @type {Set<Light>}
     * @private
     */
    private _clusteredLightsSet;
    /**
     * Lights separated by light type. Lights in the individual arrays are sorted by the key,
     * to match their order in _lightIdHash, so that their order matches the order expected by the
     * generated shader code.
     *
     * @type {Light[][]}
     * @private
     */
    private _splitLights;
    /**
     * True if _splitLights needs to be updated, which means if lights were added or removed from
     * the layer, or their key changed.
     *
     * @type {boolean}
     * @private
     */
    private _splitLightsDirty;
    /**
     * True if the objects rendered on the layer require light cube (emitters with lighting do).
     *
     * @type {boolean}
     * @ignore
     */
    requiresLightCube: boolean;
    /**
     * @type {CameraComponent[]}
     * @ignore
     */
    cameras: CameraComponent[];
    /**
     * @type {Set<Camera>}
     * @ignore
     */
    camerasSet: Set<Camera>;
    /**
     * @type {GSplatPlacement[]}
     * @ignore
     */
    gsplatPlacements: GSplatPlacement[];
    /**
     * @type {Set<GSplatPlacement>}
     * @ignore
     */
    gsplatPlacementsSet: Set<GSplatPlacement>;
    /**
     * @type {GSplatPlacement[]}
     * @ignore
     */
    gsplatShadowCasters: GSplatPlacement[];
    /**
     * @type {Set<GSplatPlacement>}
     * @ignore
     */
    gsplatShadowCastersSet: Set<GSplatPlacement>;
    /**
     * True if the gsplatPlacements array was modified.
     *
     * @type {boolean}
     * @ignore
     */
    gsplatPlacementsDirty: boolean;
    /**
     * True if the composition is invalidated.
     *
     * @ignore
     */
    _dirtyComposition: boolean;
    /**
     * A unique ID of the layer. Layer IDs are stored inside {@link ModelComponent#layers},
     * {@link RenderComponent#layers}, {@link CameraComponent#layers},
     * {@link LightComponent#layers} and {@link ElementComponent#layers} instead of names.
     * Can be used in {@link LayerComposition#getLayerById}.
     *
     * @type {number}
     */
    id: number;
    /**
     * Name of the layer. Can be used in {@link LayerComposition#getLayerByName}.
     *
     * @type {string}
     */
    name: string;
    /**
     * @type {boolean}
     * @private
     */
    private _enabled;
    /**
     * @type {number}
     * @private
     */
    private _refCounter;
    /**
     * Defines the method used for sorting opaque (that is, not semi-transparent) mesh
     * instances before rendering. Can be:
     *
     * - {@link SORTMODE_NONE}
     * - {@link SORTMODE_MANUAL}
     * - {@link SORTMODE_MATERIALMESH}
     * - {@link SORTMODE_BACK2FRONT}
     * - {@link SORTMODE_FRONT2BACK}
     *
     * Defaults to {@link SORTMODE_MATERIALMESH}.
     *
     * @type {number}
     */
    opaqueSortMode: number;
    /**
     * Defines the method used for sorting semi-transparent mesh instances before rendering. Can be:
     *
     * - {@link SORTMODE_NONE}
     * - {@link SORTMODE_MANUAL}
     * - {@link SORTMODE_MATERIALMESH}
     * - {@link SORTMODE_BACK2FRONT}
     * - {@link SORTMODE_FRONT2BACK}
     *
     * Defaults to {@link SORTMODE_BACK2FRONT}.
     *
     * @type {number}
     */
    transparentSortMode: number;
    /**
     * Render target this layer renders into. NOTE(review): untyped in the generated
     * declarations - presumably a RenderTarget or null; confirm against the source.
     *
     * @ignore
     */
    renderTarget: any;
    /**
     * @type {boolean}
     * @private
     */
    private _clearColorBuffer;
    /**
     * @type {boolean}
     * @private
     */
    private _clearDepthBuffer;
    /**
     * @type {boolean}
     * @private
     */
    private _clearStencilBuffer;
    /**
     * Custom function that is called after the layer has been enabled. This happens when:
     *
     * - The layer is created with {@link Layer#enabled} set to true (which is the default value).
     * - {@link Layer#enabled} was changed from false to true
     *
     * @type {Function}
     */
    onEnable: Function;
    /**
     * Custom function that is called after the layer has been disabled. This happens when:
     *
     * - {@link Layer#enabled} was changed from true to false
     * - {@link Layer#decrementCounter} was called and set the counter to zero.
     *
     * @type {Function}
     */
    onDisable: Function;
    /**
     * @type {Function|null}
     * @ignore
     */
    customSortCallback: Function | null;
    /**
     * @type {Function|null}
     * @ignore
     */
    customCalculateSortValues: Function | null;
    /**
     * Cached hash of the lights on this layer. NOTE(review): presumably backs
     * {@link Layer#getLightHash} - confirm.
     *
     * @type {number}
     * @ignore
     */
    _lightHash: number;
    /**
     * True if _lightHash needs to be re-evaluated (see {@link Layer#markLightsDirty}).
     *
     * @type {boolean}
     * @ignore
     */
    _lightHashDirty: boolean;
    /**
     * Cached id-based hash of the lights on this layer. NOTE(review): presumably backs
     * {@link Layer#getLightIdHash} - confirm.
     *
     * @type {number}
     * @ignore
     */
    _lightIdHash: number;
    /**
     * True if _lightIdHash needs to be re-evaluated (see {@link Layer#markLightsDirty}).
     *
     * @type {boolean}
     * @ignore
     */
    _lightIdHashDirty: boolean;
    /**
     * NOTE(review): semantics not visible in this file - appears to control skipping rendering
     * together with _skipRenderCounter; confirm against the implementation.
     *
     * @type {number}
     * @ignore
     */
    skipRenderAfter: number;
    /**
     * Counter used together with skipRenderAfter. NOTE(review): confirm semantics.
     *
     * @type {number}
     * @ignore
     */
    _skipRenderCounter: number;
    /**
     * Profiling statistic: time spent rendering this layer. NOTE(review): inferred from the
     * name - confirm.
     *
     * @type {number}
     * @ignore
     */
    _renderTime: number;
    /**
     * Profiling statistic: number of forward-pass draw calls. NOTE(review): inferred from the
     * name - confirm.
     *
     * @type {number}
     * @ignore
     */
    _forwardDrawCalls: number;
    /**
     * Profiling statistic: number of shadow-pass draw calls. NOTE(review): inferred from the
     * name - confirm.
     *
     * @type {number}
     * @ignore
     */
    _shadowDrawCalls: number;
    /**
     * Shader version counter. NOTE(review): semantics not visible in this file; confirm.
     *
     * @type {number}
     * @ignore
     */
    _shaderVersion: number;
    /**
     * Sets the enabled state of the layer. Disabled layers are skipped. Defaults to true.
     *
     * @type {boolean}
     */
    set enabled(val: boolean);
    /**
     * Gets the enabled state of the layer.
     *
     * @type {boolean}
     */
    get enabled(): boolean;
    /**
     * Sets whether the camera will clear the color buffer when it renders this layer.
     *
     * @type {boolean}
     */
    set clearColorBuffer(val: boolean);
    /**
     * Gets whether the camera will clear the color buffer when it renders this layer.
     *
     * @type {boolean}
     */
    get clearColorBuffer(): boolean;
    /**
     * Sets whether the camera will clear the depth buffer when it renders this layer.
     *
     * @type {boolean}
     */
    set clearDepthBuffer(val: boolean);
    /**
     * Gets whether the camera will clear the depth buffer when it renders this layer.
     *
     * @type {boolean}
     */
    get clearDepthBuffer(): boolean;
    /**
     * Sets whether the camera will clear the stencil buffer when it renders this layer.
     *
     * @type {boolean}
     */
    set clearStencilBuffer(val: boolean);
    /**
     * Gets whether the camera will clear the stencil buffer when it renders this layer.
     *
     * @type {boolean}
     */
    get clearStencilBuffer(): boolean;
    /**
     * Gets whether the layer contains omni or spot lights.
     *
     * @type {boolean}
     * @ignore
     */
    get hasClusteredLights(): boolean;
    /**
     * Gets the lights used by clustered lighting in a set.
     *
     * @type {Set<Light>}
     * @ignore
     */
    get clusteredLightsSet(): Set<Light>;
    /**
     * Increments the usage counter of this layer. By default, layers are created with counter set
     * to 1 (if {@link Layer.enabled} is true) or 0 (if it was false). Incrementing the counter
     * from 0 to 1 will enable the layer and call {@link Layer.onEnable}. Use this function to
     * "subscribe" multiple effects to the same layer. For example, if the layer is used to render
     * a reflection texture which is used by 2 mirrors, then each mirror can call this function
     * when visible and {@link Layer.decrementCounter} if invisible. In such case the reflection
     * texture won't be updated, when there is nothing to use it, saving performance.
     *
     * @ignore
     */
    incrementCounter(): void;
    /**
     * Decrements the usage counter of this layer. Decrementing the counter from 1 to 0 will
     * disable the layer and call {@link Layer.onDisable}.
     *
     * @ignore
     */
    decrementCounter(): void;
    /**
     * Adds a gsplat placement to this layer.
     *
     * @param {GSplatPlacement} placement - A placement of a gsplat.
     * @ignore
     */
    addGSplatPlacement(placement: GSplatPlacement): void;
    /**
     * Removes a gsplat placement from this layer.
     *
     * @param {GSplatPlacement} placement - A placement of a gsplat.
     * @ignore
     */
    removeGSplatPlacement(placement: GSplatPlacement): void;
    /**
     * Adds a gsplat placement to this layer as a shadow caster.
     *
     * @param {GSplatPlacement} placement - A placement of a gsplat.
     * @ignore
     */
    addGSplatShadowCaster(placement: GSplatPlacement): void;
    /**
     * Removes a gsplat placement from the shadow casters of this layer.
     *
     * @param {GSplatPlacement} placement - A placement of a gsplat.
     * @ignore
     */
    removeGSplatShadowCaster(placement: GSplatPlacement): void;
    /**
     * Adds an array of mesh instances to this layer.
     *
     * @param {MeshInstance[]} meshInstances - Array of {@link MeshInstance}.
     * @param {boolean} [skipShadowCasters] - Set it to true if you don't want these mesh instances
     * to cast shadows in this layer. Defaults to false.
     */
    addMeshInstances(meshInstances: MeshInstance[], skipShadowCasters?: boolean): void;
    /**
     * Removes multiple mesh instances from this layer.
     *
     * @param {MeshInstance[]} meshInstances - Array of {@link MeshInstance}. If they were added to
     * this layer, they will be removed.
     * @param {boolean} [skipShadowCasters] - Set it to true if you want to still cast shadows from
     * removed mesh instances or if they never did cast shadows before. Defaults to false.
     */
    removeMeshInstances(meshInstances: MeshInstance[], skipShadowCasters?: boolean): void;
    /**
     * Adds an array of mesh instances to this layer, but only as shadow casters (they will not be
     * rendered anywhere, but only cast shadows on other objects).
     *
     * @param {MeshInstance[]} meshInstances - Array of {@link MeshInstance}.
     */
    addShadowCasters(meshInstances: MeshInstance[]): void;
    /**
     * Removes multiple mesh instances from the shadow casters list of this layer, meaning they
     * will stop casting shadows.
     *
     * @param {MeshInstance[]} meshInstances - Array of {@link MeshInstance}. If they were added to
     * this layer, they will be removed.
     */
    removeShadowCasters(meshInstances: MeshInstance[]): void;
    /**
     * Removes all mesh instances from this layer.
     *
     * @param {boolean} [skipShadowCasters] - Set it to true if you want the existing mesh
     * instances to continue casting shadows. Defaults to false, which removes shadow casters as
     * well.
     */
    clearMeshInstances(skipShadowCasters?: boolean): void;
    /**
     * Marks cached light state (split lights and light hashes) as dirty so it is rebuilt when
     * next needed.
     *
     * @ignore
     */
    markLightsDirty(): void;
    /**
     * Returns true if the specified light is assigned to this layer.
     *
     * @param {*} light - The light to check for.
     * @returns {boolean} True if the light is on this layer.
     * @ignore
     */
    hasLight(light: any): boolean;
    /**
     * Adds a light to this layer.
     *
     * @param {LightComponent} light - A {@link LightComponent}.
     */
    addLight(light: LightComponent): void;
    /**
     * Removes a light from this layer.
     *
     * @param {LightComponent} light - A {@link LightComponent}.
     */
    removeLight(light: LightComponent): void;
    /**
     * Removes all lights from this layer.
     */
    clearLights(): void;
    /**
     * Gets the lights of this layer separated by light type, sorted by key.
     *
     * @type {Light[][]}
     * @ignore
     */
    get splitLights(): Light[][];
    /**
     * Computes a hash for a set of lights. NOTE(review): exact hashing scheme is not visible in
     * this file.
     *
     * @ignore
     */
    evaluateLightHash(localLights: any, directionalLights: any, useIds: any): number;
    /**
     * Gets the hash of the lights on this layer.
     *
     * @ignore
     */
    getLightHash(isClustered: any): number;
    /**
     * Gets the id-based hash of the lights on this layer.
     *
     * @ignore
     */
    getLightIdHash(): number;
    /**
     * Adds a camera to this layer.
     *
     * @param {CameraComponent} camera - A {@link CameraComponent}.
     */
    addCamera(camera: CameraComponent): void;
    /**
     * Removes a camera from this layer.
     *
     * @param {CameraComponent} camera - A {@link CameraComponent}.
     */
    removeCamera(camera: CameraComponent): void;
    /**
     * Removes all cameras from this layer.
     */
    clearCameras(): void;
    /**
     * @param {MeshInstance[]} drawCalls - Array of mesh instances.
     * @param {Vec3} camPos - Camera position.
     * @param {Vec3} camFwd - Camera forward vector.
     * @private
     */
    private _calculateSortDistances;
    /**
     * Get access to culled mesh instances for the provided camera.
     *
     * @param {Camera} camera - The camera.
     * @returns {CulledInstances} The culled mesh instances.
     * @ignore
     */
    getCulledInstances(camera: Camera): CulledInstances;
    /**
     * @param {Camera} camera - The camera to sort the visible mesh instances for.
     * @param {boolean} transparent - True if transparent sorting should be used.
     * @ignore
     */
    sortVisible(camera: Camera, transparent: boolean): void;
}
/**
 * Holds the mesh instances of a layer that passed visibility culling for a single camera,
 * split into opaque and transparent lists. Returned by {@link Layer#getCulledInstances}.
 */
declare class CulledInstances {
    /**
     * Visible opaque mesh instances.
     *
     * @type {MeshInstance[]}
     */
    opaque: MeshInstance[];
    /**
     * Visible transparent mesh instances.
     *
     * @type {MeshInstance[]}
     */
    transparent: MeshInstance[];
}

/**
 * @import { GraphNode } from '../graph-node.js'
 * @import { GraphicsDevice } from '../../platform/graphics/graphics-device.js'
 * @import { Scene } from '../scene.js'
 * @import { Texture } from '../../platform/graphics/texture.js'
 */
/**
 * A visual representation of the sky.
 *
 * @ignore
 */
declare class SkyMesh {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {Scene} scene - The scene owning the sky.
     * @param {GraphNode} node - The graph node of the sky mesh instance.
     * @param {Texture} texture - The texture of the sky.
     * @param {string} type - The type of the sky. One of the SKYTYPE_* constants.
     */
    constructor(device: GraphicsDevice, scene: Scene, node: GraphNode, texture: Texture, type: string);
    /**
     * Mesh instance representing the visuals of the sky.
     *
     * @type {MeshInstance|null}
     */
    meshInstance: MeshInstance | null;
    /**
     * True if the sky mesh writes depth. NOTE(review): presumably the backing store for the
     * depthWrite accessor below - confirm.
     *
     * @type {boolean}
     */
    _depthWrite: boolean;
    /**
     * Layer used to render the sky mesh. NOTE(review): confirm against the implementation.
     *
     * @type {Layer}
     */
    skyLayer: Layer;
    /**
     * Destroys the sky mesh and releases its resources.
     */
    destroy(): void;
    /**
     * Sets whether the sky mesh writes to the depth buffer. See {@link Sky#depthWrite}.
     *
     * @type {boolean}
     */
    set depthWrite(value: boolean);
    /**
     * Gets whether the sky mesh writes to the depth buffer.
     *
     * @type {boolean}
     */
    get depthWrite(): boolean;
}

/**
 * @import { Scene } from '../scene.js'
 */
/**
 * Implementation of the sky.
 *
 * @category Graphics
 */
declare class Sky {
    /**
     * Constructs a new sky.
     *
     * @param {Scene} scene - The scene owning the sky.
     * @ignore
     */
    constructor(scene: Scene);
    /**
     * The type of the sky. One of the SKYTYPE_* constants.
     *
     * @type {string}
     * @private
     */
    private _type;
    /**
     * The center of the sky.
     *
     * @type {Vec3}
     * @private
     */
    private _center;
    /**
     * The sky mesh of the scene.
     *
     * @type {SkyMesh|null}
     * @ignore
     */
    skyMesh: SkyMesh | null;
    /**
     * @type {boolean}
     * @private
     */
    private _depthWrite;
    /**
     * @type {number}
     * @private
     */
    private _fisheye;
    /**
     * Lazily created on first non-zero fisheye set.
     *
     * @type {FisheyeProjection|null}
     * @private
     */
    private _fisheyeProj;
    /**
     * A graph node with a transform used to render the sky mesh. Adjust the position, rotation and
     * scale of this node to orient the sky mesh. Ignored for {@link SKYTYPE_INFINITE}.
     *
     * @type {GraphNode}
     * @readonly
     */
    readonly node: GraphNode;
    /**
     * The graphics device used to render the sky.
     *
     * @type {GraphicsDevice}
     * @ignore
     */
    device: GraphicsDevice;
    /**
     * The scene owning the sky.
     *
     * @type {Scene}
     * @ignore
     */
    scene: Scene;
    /**
     * The center of the sky. Ignored for {@link SKYTYPE_INFINITE}. Typically only the y-coordinate
     * is used, representing the tripod height. Defaults to (0, 1, 0).
     *
     * @type {Vec3}
     */
    set center(value: Vec3);
    /**
     * Gets the center of the sky.
     *
     * @type {Vec3}
     */
    get center(): Vec3;
    /**
     * Scratch array holding the sky center. NOTE(review): presumably uploaded as a shader
     * uniform via projectedSkydomeCenterId - confirm.
     *
     * @ignore
     */
    centerArray: Float32Array<ArrayBuffer>;
    /**
     * Scope id for the projected skydome center shader uniform. NOTE(review): confirm semantics.
     *
     * @ignore
     */
    projectedSkydomeCenterId: ScopeId;
    /**
     * Handle of the pre-render event subscription (see the private _onPreRender callback).
     *
     * @ignore
     */
    _preRenderEvt: EventHandle;
    /**
     * Destroys the sky and releases its resources.
     */
    destroy(): void;
    /**
     * Applies sky settings from a render settings object. NOTE(review): the expected shape of
     * `render` is not visible in this file - confirm against callers.
     *
     * @ignore
     */
    applySettings(render: any): void;
    /**
     * The type of the sky. One of the SKYTYPE_* constants. Defaults to {@link SKYTYPE_INFINITE}.
     * Can be:
     *
     * - {@link SKYTYPE_INFINITE}
     * - {@link SKYTYPE_BOX}
     * - {@link SKYTYPE_DOME}
     *
     * @type {string}
     */
    set type(value: string);
    /**
     * Gets the type of the sky.
     *
     * @type {string}
     */
    get type(): string;
    /**
     * Whether depth writing is enabled for the sky. Defaults to false.
     *
     * Writing a depth value for the skydome is supported when its type is not
     * {@link SKYTYPE_INFINITE}. When enabled, the depth is written during a prepass render pass and
     * can be utilized by subsequent passes to apply depth-based effects, such as Depth of Field.
     *
     * Note: For the skydome to be rendered during the prepass, the Sky Layer must be ordered before
     * the Depth layer, which is the final layer used in the prepass.
     *
     * @type {boolean}
     */
    set depthWrite(value: boolean);
    /**
     * Returns whether depth writing is enabled for the sky.
     *
     * @type {boolean}
     */
    get depthWrite(): boolean;
    /**
     * Controls the fisheye projection strength for the sky. The value is in the range [0, 1]:
     *
     * - 0: Standard rectilinear (perspective) projection.
     * - (0, 1]: Increasing barrel distortion, producing a wider field of view.
     *
     * Only supported with {@link SKYTYPE_INFINITE}. Has no effect on dome or box sky types,
     * and has no effect with orthographic cameras.
     *
     * Defaults to 0.
     *
     * @type {number}
     */
    set fisheye(value: number);
    /**
     * Gets the fisheye projection strength.
     *
     * @type {number}
     */
    get fisheye(): number;
    /**
     * Updates the sky mesh (see {@link Sky#skyMesh}) to match the current settings.
     *
     * @ignore
     */
    updateSkyMesh(): void;
    /**
     * Resets (removes) the current sky mesh.
     *
     * @ignore
     */
    resetSkyMesh(): void;
    /**
     * Updates the sky state. NOTE(review): exact per-frame behavior not visible in this file.
     *
     * @ignore
     */
    update(): void;
    /**
     * @param {boolean} enabled - Whether to enable the SKY_FISHEYE define.
     * @private
     */
    private _setFisheyeDefine;
    /**
     * Per-camera prerender callback that updates fisheye uniforms for the active camera.
     *
     * @param {import('../../framework/components/camera/component.js').CameraComponent} cameraComponent - The camera about to render.
     * @private
     */
    private _onPreRender;
}

/**
 * Immediate-mode debug rendering helper. Provides methods to draw meshes, wireframe
 * axis-aligned boxes and wireframe spheres into layers, batching them per layer.
 * NOTE(review): internal engine helper - member semantics below are partly inferred from names
 * and signatures; confirm against the implementation.
 */
declare class Immediate {
    constructor(device: any);
    /** Cache of shader descriptors, keyed by id (see getShaderDesc). */
    shaderDescs: Map<any, any>;
    /** The graphics device. */
    device: any;
    /** Quad mesh returned by getQuadMesh. */
    quadMesh: Mesh;
    /** Shader used to display a texture (see getTextureShaderDesc). */
    textureShader: any;
    /** Shader used to display a depth texture (see getDepthTextureShaderDesc). */
    depthTextureShader: any;
    /** Scratch storage for wire-box corner positions in local space. */
    cubeLocalPos: any;
    /** Scratch storage for wire-box corner positions in world space. */
    cubeWorldPos: any;
    /** Batches looked up by layer (see getBatch). */
    batchesMap: Map<any, any>;
    /** Set of all batches. */
    allBatches: Set<any>;
    /** Layers updated during the current frame. */
    updatedLayers: Set<any>;
    /** Material with depth testing enabled (see materialDepth). */
    _materialDepth: ShaderMaterial;
    /** Material with depth testing disabled (see materialNoDepth). */
    _materialNoDepth: ShaderMaterial;
    /** Mesh instances to render, grouped by layer. */
    layerMeshInstances: Map<any, any>;
    /** Creates a ShaderMaterial configured with the given depth-test setting. */
    createMaterial(depthTest: any): ShaderMaterial;
    /** Gets the depth-tested material (see _materialDepth). */
    get materialDepth(): ShaderMaterial;
    /** Gets the non-depth-tested material (see _materialNoDepth). */
    get materialNoDepth(): ShaderMaterial;
    /** Gets the batch for the given layer and depth-test setting. */
    getBatch(layer: any, depthTest: any): any;
    /** Gets a shader descriptor for the given id and fragment sources (GLSL and WGSL). */
    getShaderDesc(id: any, fragmentGLSL: any, fragmentWGSL: any): any;
    /** Gets a shader descriptor for displaying a texture with the given encoding. */
    getTextureShaderDesc(encoding: any): any;
    /** Gets a shader descriptor for displaying a texture that cannot be filtered. */
    getUnfilterableTextureShaderDesc(): any;
    /** Gets a shader descriptor for displaying a depth texture. */
    getDepthTextureShaderDesc(): any;
    /** Gets the quad mesh (see quadMesh). */
    getQuadMesh(): Mesh;
    /** Draws a mesh with the given material and world matrix on the specified layer. */
    drawMesh(material: any, matrix: any, mesh: any, meshInstance: any, layer: any): void;
    /** Draws a wireframe axis-aligned box defined by its min/max corners. */
    drawWireAlignedBox(min: any, max: any, color: any, depthTest: any, layer: any, mat: any): void;
    /** Draws a wireframe sphere with the given center, radius and segment count. */
    drawWireSphere(center: any, radius: any, color: any, numSegments: any, depthTest: any, layer: any): void;
    /** Gets a GraphNode for the given world matrix. */
    getGraphNode(matrix: any): GraphNode;
    /** Called before a layer is rendered; presumably contributes batched instances to the visible list. */
    onPreRenderLayer(layer: any, visibleList: any, transparent: any): void;
    /** Called after rendering; presumably clears per-frame batching state. */
    onPostRender(): void;
}

/**
 * @import { BindGroup } from '../../platform/graphics/bind-group.js'
 * @import { Layer } from '../layer.js'
 * @import { RenderTarget } from '../../platform/graphics/render-target.js'
 */
/**
 * Class representing an entry in the final order of rendering of cameras and layers in the engine
 * this is populated at runtime based on LayerComposition
 *
 * @ignore
 */
declare class RenderAction {
    /**
     * Camera used by this render action. NOTE(review): untyped in the generated declarations -
     * presumably a CameraComponent; confirm.
     */
    camera: any;
    /** @type {Layer|null} */
    layer: Layer | null;
    /**
     * True if this render action renders the transparent sub-layer of the layer, false for the
     * opaque sub-layer. NOTE(review): inferred from the sub-layer convention documented on
     * LayerComposition - confirm.
     */
    transparent: boolean;
    /**
     * Render target this render action renders to.
     *
     * @type {RenderTarget|null}
     */
    renderTarget: RenderTarget | null;
    /**
     * Light clusters used by this render action. NOTE(review): untyped in the generated
     * declarations - confirm type.
     */
    lightClusters: any;
    /** True if the color buffer should be cleared before rendering (see setupClears). */
    clearColor: boolean;
    /** True if the depth buffer should be cleared before rendering (see setupClears). */
    clearDepth: boolean;
    /** True if the stencil buffer should be cleared before rendering (see setupClears). */
    clearStencil: boolean;
    /** True if postprocessing should be triggered after this render action. */
    triggerPostprocess: boolean;
    /** True if this is the first render action that uses this camera. */
    firstCameraUse: boolean;
    /** True if this is the last render action that uses this camera. */
    lastCameraUse: boolean;
    /** @type {BindGroup[]} */
    viewBindGroups: BindGroup[];
    /** True if camera passes are used for rendering. NOTE(review): confirm semantics. */
    useCameraPasses: boolean;
    /** Destroys the render action and releases its resources. */
    destroy(): void;
    /** Sets up the clear flags of this render action from the camera and layer settings. */
    setupClears(camera: any, layer: any): void;
}

/**
 * @import { CameraComponent } from '../../framework/components/camera/component.js'
 * @import { Layer } from '../layer.js'
 * @import { Camera } from '../camera.js'
 */
/**
 * Layer Composition is a collection of {@link Layer} that is fed to {@link Scene#layers} to define
 * rendering order.
 *
 * @category Graphics
 */
declare class LayerComposition extends EventHandler {
    /**
     * Create a new layer composition.
     *
     * @param {string} [name] - Optional non-unique name of the layer composition. Defaults to
     * "Untitled" if not specified.
     */
    constructor(name?: string);
    /**
     * A read-only array of {@link Layer} sorted in the order they will be rendered.
     *
     * @type {Layer[]}
     */
    layerList: Layer[];
    /**
     * A mapping of {@link Layer#id} to {@link Layer}.
     *
     * @type {Map<number, Layer>}
     * @ignore
     */
    layerIdMap: Map<number, Layer>;
    /**
     * A mapping of {@link Layer#name} to {@link Layer}.
     *
     * @type {Map<string, Layer>}
     * @ignore
     */
    layerNameMap: Map<string, Layer>;
    /**
     * A mapping of {@link Layer} to its opaque index in {@link LayerComposition#layerList}.
     *
     * @type {Map<Layer, number>}
     * @ignore
     */
    layerOpaqueIndexMap: Map<Layer, number>;
    /**
     * A mapping of {@link Layer} to its transparent index in {@link LayerComposition#layerList}.
     *
     * @type {Map<Layer, number>}
     * @ignore
     */
    layerTransparentIndexMap: Map<Layer, number>;
    /**
     * A read-only array of boolean values, matching {@link LayerComposition#layerList}. True means only
     * semi-transparent objects are rendered, and false means opaque.
     *
     * @type {boolean[]}
     * @ignore
     */
    subLayerList: boolean[];
    /**
     * A read-only array of boolean values, matching {@link LayerComposition#layerList}. True means the
     * layer is rendered, false means it's skipped.
     *
     * @type {boolean[]}
     */
    subLayerEnabled: boolean[];
    /**
     * An array of {@link CameraComponent}s.
     *
     * @type {CameraComponent[]}
     * @ignore
     */
    cameras: CameraComponent[];
    /**
     * A set of {@link Camera}s.
     *
     * @type {Set<Camera>}
     * @ignore
     */
    camerasSet: Set<Camera>;
    /**
     * The actual rendering sequence, generated based on layers and cameras.
     *
     * @type {RenderAction[]}
     * @ignore
     */
    _renderActions: RenderAction[];
    /**
     * True if the composition needs to be updated before rendering.
     *
     * @ignore
     */
    _dirty: boolean;
    /**
     * The name of the layer composition, as passed to the constructor (defaults to "Untitled").
     *
     * @type {string}
     */
    name: string;
    /**
     * Opaque sort-order lookup, rebuilt by {@link LayerComposition#_updateOpaqueOrder}.
     * NOTE(review): exact key/value semantics are not visible in this declaration — confirm.
     *
     * @ignore
     */
    _opaqueOrder: {};
    /**
     * Transparent sort-order lookup, rebuilt by {@link LayerComposition#_updateTransparentOrder}.
     * NOTE(review): exact key/value semantics are not visible in this declaration — confirm.
     *
     * @ignore
     */
    _transparentOrder: {};
    /** Destroys the composition, including its render actions. */
    destroy(): void;
    /** Destroys all entries in {@link LayerComposition#_renderActions}. @ignore */
    destroyRenderActions(): void;
    /** Flags the composition as requiring an update before rendering (see {@link LayerComposition#_dirty}). @ignore */
    markDirty(): void;
    /** Regenerates the rendering sequence ({@link LayerComposition#_renderActions}) from layers and cameras. @ignore */
    _update(): void;
    /** Returns the render action for the given index in the sequence. NOTE(review): allocation behavior inferred from the name — confirm. @ignore */
    getNextRenderAction(renderActionIndex: any): RenderAction;
    /** Adds a placeholder render action for the given camera at the given index. @ignore */
    addDummyRenderAction(renderActionIndex: any, camera: any): void;
    /** Creates a render action for the given camera / layer / sublayer combination. @ignore */
    addRenderAction(renderActionIndex: any, layer: any, isTransparent: any, camera: any, cameraFirstRenderAction: any, postProcessMarked: any): RenderAction;
    /** Propagates a camera's render target to render actions starting at startIndex. NOTE(review): inferred from the name — confirm. @ignore */
    propagateRenderTarget(startIndex: any, fromCamera: any): void;
    /** Debug-logs the generated render actions (see the RenderAction trace channel). @ignore */
    _logRenderActions(): void;
    /** Returns true if the layer is already part of this composition. @ignore */
    _isLayerAdded(layer: any): boolean;
    /** Returns true if the given sublayer (opaque or transparent part) is already added. @ignore */
    _isSublayerAdded(layer: any, transparent: any): boolean;
    /**
     * Adds a layer (both opaque and semi-transparent parts) to the end of the {@link LayerComposition#layerList}.
     *
     * @param {Layer} layer - A {@link Layer} to add.
     */
    push(layer: Layer): void;
    /**
     * Inserts a layer (both opaque and semi-transparent parts) at the chosen index in the
     * {@link LayerComposition#layerList}.
     *
     * @param {Layer} layer - A {@link Layer} to add.
     * @param {number} index - Insertion position.
     */
    insert(layer: Layer, index: number): void;
    /**
     * Removes a layer (both opaque and semi-transparent parts) from {@link LayerComposition#layerList}.
     *
     * @param {Layer} layer - A {@link Layer} to remove.
     */
    remove(layer: Layer): void;
    /**
     * Adds part of the layer with opaque (non semi-transparent) objects to the end of the
     * {@link LayerComposition#layerList}.
     *
     * @param {Layer} layer - A {@link Layer} to add.
     */
    pushOpaque(layer: Layer): void;
    /**
     * Inserts an opaque part of the layer (non semi-transparent mesh instances) at the chosen
     * index in the {@link LayerComposition#layerList}.
     *
     * @param {Layer} layer - A {@link Layer} to add.
     * @param {number} index - Insertion position.
     */
    insertOpaque(layer: Layer, index: number): void;
    /**
     * Removes an opaque part of the layer (non semi-transparent mesh instances) from
     * {@link LayerComposition#layerList}.
     *
     * @param {Layer} layer - A {@link Layer} to remove.
     */
    removeOpaque(layer: Layer): void;
    /**
     * Adds part of the layer with semi-transparent objects to the end of the {@link LayerComposition#layerList}.
     *
     * @param {Layer} layer - A {@link Layer} to add.
     */
    pushTransparent(layer: Layer): void;
    /**
     * Inserts a semi-transparent part of the layer at the chosen index in the {@link LayerComposition#layerList}.
     *
     * @param {Layer} layer - A {@link Layer} to add.
     * @param {number} index - Insertion position.
     */
    insertTransparent(layer: Layer, index: number): void;
    /**
     * Removes a transparent part of the layer from {@link LayerComposition#layerList}.
     *
     * @param {Layer} layer - A {@link Layer} to remove.
     */
    removeTransparent(layer: Layer): void;
    /**
     * Gets index of the opaque part of the supplied layer in the {@link LayerComposition#layerList}.
     *
     * @param {Layer} layer - A {@link Layer} to find index of.
     * @returns {number} The index of the opaque part of the specified layer, or -1 if it is not
     * part of the composition.
     */
    getOpaqueIndex(layer: Layer): number;
    /**
     * Gets index of the semi-transparent part of the supplied layer in the {@link LayerComposition#layerList}.
     *
     * @param {Layer} layer - A {@link Layer} to find index of.
     * @returns {number} The index of the semi-transparent part of the specified layer, or -1 if it
     * is not part of the composition.
     */
    getTransparentIndex(layer: Layer): number;
    /** Returns true if the given sublayer of the layer is enabled (see {@link LayerComposition#subLayerEnabled}). @ignore */
    isEnabled(layer: any, transparent: any): boolean;
    /**
     * Update maps of layer IDs and names to match the layer list.
     *
     * @private
     */
    private _updateLayerMaps;
    /**
     * Finds a layer inside this composition by its ID. Null is returned, if nothing is found.
     *
     * @param {number} id - An ID of the layer to find.
     * @returns {Layer|null} The layer corresponding to the specified ID. Returns null if layer is
     * not found.
     */
    getLayerById(id: number): Layer | null;
    /**
     * Finds a layer inside this composition by its name. Null is returned, if nothing is found.
     *
     * @param {string} name - The name of the layer to find.
     * @returns {Layer|null} The layer corresponding to the specified name. Returns null if layer
     * is not found.
     */
    getLayerByName(name: string): Layer | null;
    /** Rebuilds {@link LayerComposition#_opaqueOrder} for layers in the given index range. @ignore */
    _updateOpaqueOrder(startIndex: any, endIndex: any): void;
    /** Rebuilds {@link LayerComposition#_transparentOrder} for layers in the given index range. @ignore */
    _updateTransparentOrder(startIndex: any, endIndex: any): void;
    /** Shared comparator used by sortTransparentLayers / sortOpaqueLayers with the given order lookup. @ignore */
    _sortLayersDescending(layersA: any, layersB: any, order: any): number;
    /**
     * Used to determine which array of layers has any transparent sublayer that is on top of all
     * the transparent sublayers in the other array.
     *
     * @param {number[]} layersA - IDs of layers.
     * @param {number[]} layersB - IDs of layers.
     * @returns {number} Returns a negative number if any of the transparent sublayers in layersA
     * is on top of all the transparent sublayers in layersB, or a positive number if any of the
     * transparent sublayers in layersB is on top of all the transparent sublayers in layersA, or 0
     * otherwise.
     * @private
     */
    private sortTransparentLayers;
    /**
     * Used to determine which array of layers has any opaque sublayer that is on top of all the
     * opaque sublayers in the other array.
     *
     * @param {number[]} layersA - IDs of layers.
     * @param {number[]} layersB - IDs of layers.
     * @returns {number} Returns a negative number if any of the opaque sublayers in layersA is on
     * top of all the opaque sublayers in layersB, or a positive number if any of the opaque
     * sublayers in layersB is on top of all the opaque sublayers in layersA, or 0 otherwise.
     * @private
     */
    private sortOpaqueLayers;
}

/**
 * @import { Entity } from '../framework/entity.js'
 * @import { GraphicsDevice } from '../platform/graphics/graphics-device.js'
 * @import { LayerComposition } from './composition/layer-composition.js'
 * @import { Layer } from './layer.js'
 * @import { Texture } from '../platform/graphics/texture.js'
 */
/**
 * A scene is graphical representation of an environment. It manages the scene hierarchy, all
 * graphical objects, lights, and scene-wide properties.
 *
 * @category Graphics
 */
declare class Scene extends EventHandler {
    /**
     * Fired when the layer composition is set. Use this event to add callbacks or advanced
     * properties to your layers. The handler is passed the old and the new
     * {@link LayerComposition}.
     *
     * @event
     * @example
     * app.scene.on('set:layers', (oldComp, newComp) => {
     *     const list = newComp.layerList;
     *     for (let i = 0; i < list.length; i++) {
     *         const layer = list[i];
     *         switch (layer.name) {
     *             case 'MyLayer':
     *                 layer.onEnable = myOnEnableFunction;
     *                 layer.onDisable = myOnDisableFunction;
     *                 break;
     *             case 'MyOtherLayer':
     *                 layer.clearColorBuffer = true;
     *                 break;
     *         }
     *     }
     * });
     */
    static EVENT_SETLAYERS: string;
    /**
     * Fired when the skybox is set. The handler is passed the {@link Texture} that is the
     * previously used skybox cubemap texture. The new skybox cubemap texture is in the
     * {@link Scene#skybox} property.
     *
     * @event
     * @example
     * app.scene.on('set:skybox', (oldSkybox) => {
     *     console.log(`Skybox changed from ${oldSkybox.name} to ${app.scene.skybox.name}`);
     * });
     */
    static EVENT_SETSKYBOX: string;
    /**
     * Fired before the camera renders the scene. The handler is passed the {@link CameraComponent}
     * that will render the scene.
     *
     * @event
     * @example
     * app.scene.on('prerender', (camera) => {
     *    console.log(`Camera ${camera.entity.name} will render the scene`);
     * });
     */
    static EVENT_PRERENDER: string;
    /**
     * Fired when the camera renders the scene. The handler is passed the {@link CameraComponent}
     * that rendered the scene.
     *
     * @event
     * @example
     * app.scene.on('postrender', (camera) => {
     *    console.log(`Camera ${camera.entity.name} rendered the scene`);
     * });
     */
    static EVENT_POSTRENDER: string;
    /**
     * Fired before the camera renders a layer. The handler is passed the {@link CameraComponent},
     * the {@link Layer} that will be rendered, and a boolean parameter set to true if the layer is
     * transparent. This is called during rendering to a render target or a default framebuffer, and
     * additional rendering can be performed here, for example using {@link QuadRender#render}.
     *
     * @event
     * @example
     * app.scene.on('prerender:layer', (camera, layer, transparent) => {
     *    console.log(`Camera ${camera.entity.name} will render the layer ${layer.name} (transparent: ${transparent})`);
     * });
     */
    static EVENT_PRERENDER_LAYER: string;
    /**
     * Fired when the camera renders a layer. The handler is passed the {@link CameraComponent},
     * the {@link Layer} that will be rendered, and a boolean parameter set to true if the layer is
     * transparent. This is called during rendering to a render target or a default framebuffer, and
     * additional rendering can be performed here, for example using {@link QuadRender#render}.
     *
     * @event
     * @example
     * app.scene.on('postrender:layer', (camera, layer, transparent) => {
     *    console.log(`Camera ${camera.entity.name} rendered the layer ${layer.name} (transparent: ${transparent})`);
     * });
     */
    static EVENT_POSTRENDER_LAYER: string;
    /**
     * Fired before visibility culling is performed for the camera.
     *
     * @event
     * @example
     * app.scene.on('precull', (camera) => {
     *    console.log(`Visibility culling will be performed for camera ${camera.entity.name}`);
     * });
     */
    static EVENT_PRECULL: string;
    /**
     * Fired after visibility culling is performed for the camera.
     *
     * @event
     * @example
     * app.scene.on('postcull', (camera) => {
     *    console.log(`Visibility culling was performed for camera ${camera.entity.name}`);
     * });
     */
    static EVENT_POSTCULL: string;
    /**
     * Create a new Scene instance.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device used to manage this scene.
     * @ignore
     */
    constructor(graphicsDevice: GraphicsDevice);
    /**
     * If enabled, the ambient lighting will be baked into lightmaps. This will be either the
     * {@link Scene#skybox} if set up, otherwise {@link Scene#ambientLight}. Defaults to false.
     *
     * @type {boolean}
     */
    ambientBake: boolean;
    /**
     * If {@link Scene#ambientBake} is true, this specifies the brightness of ambient occlusion.
     * Typical range is -1 to 1. Defaults to 0, representing no change to brightness.
     *
     * @type {number}
     */
    ambientBakeOcclusionBrightness: number;
    /**
     * If {@link Scene#ambientBake} is true, this specifies the contrast of ambient occlusion.
     * Typical range is -1 to 1. Defaults to 0, representing no change to contrast.
     *
     * @type {number}
     */
    ambientBakeOcclusionContrast: number;
    /**
     * The color of the scene's ambient light, specified in sRGB color space. Defaults to black
     * (0, 0, 0).
     *
     * @type {Color}
     */
    ambientLight: Color;
    /**
     * The luminosity of the scene's ambient light in lux (lm/m^2). Used if physicalUnits is true. Defaults to 0.
     *
     * @type {number}
     */
    ambientLuminance: number;
    /**
     * The exposure value tweaks the overall brightness of the scene. Ignored if physicalUnits is true. Defaults to 1.
     *
     * @type {number}
     */
    exposure: number;
    /**
     * The lightmap resolution multiplier. Defaults to 1.
     *
     * @type {number}
     */
    lightmapSizeMultiplier: number;
    /**
     * The maximum lightmap resolution. Defaults to 2048.
     *
     * @type {number}
     */
    lightmapMaxResolution: number;
    /**
     * The lightmap baking mode. Can be:
     *
     * - {@link BAKE_COLOR}: single color lightmap
     * - {@link BAKE_COLORDIR}: single color lightmap + dominant light direction (used for bump or
     * specular). Only lights with bakeDir=true will be used for generating the dominant light
     * direction.
     *
     * Defaults to {@link BAKE_COLORDIR}.
     *
     * @type {number}
     */
    lightmapMode: number;
    /**
     * Enables bilateral filter on runtime baked color lightmaps, which removes the noise and
     * banding while preserving the edges. Defaults to false. Note that the filtering takes place
     * in the image space of the lightmap, and it does not filter across lightmap UV space seams,
     * often making the seams more visible. It's important to balance the strength of the filter
     * with number of samples used for lightmap baking to limit the visible artifacts.
     *
     * @type {boolean}
     */
    lightmapFilterEnabled: boolean;
    /**
     * Enables HDR lightmaps. This can result in smoother lightmaps especially when many samples
     * are used. Defaults to false.
     *
     * @type {boolean}
     */
    lightmapHDR: boolean;
    /**
     * The root entity of the scene, which is usually the only child to the {@link Application}
     * root entity.
     *
     * @type {Entity}
     */
    root: Entity;
    /**
     * Use physically based units for cameras and lights. When used, the exposure value is ignored.
     *
     * @type {boolean}
     */
    physicalUnits: boolean;
    /**
     * Environment lighting atlas.
     *
     * @type {Texture|null}
     * @private
     */
    private _envAtlas;
    /**
     * The skybox cubemap as set by user (gets used when skyboxMip === 0).
     *
     * @type {Texture|null}
     * @private
     */
    private _skyboxCubeMap;
    /**
     * The fog parameters.
     *
     * @private
     */
    private _fogParams;
    /**
     * Internal flag to indicate that the specular (and sheen) maps of standard materials should be
     * assumed to be in a linear space, instead of sRGB. This is used by the editor using engine v2
     * internally to render in a style of engine v1, where spec those textures were specified as
     * linear, while engine 2 assumes they are in sRGB space. This should be removed when the editor
     * no longer supports engine v1 projects.
     *
     * @ignore
     */
    forcePassThroughSpecular: boolean;
    /**
     * The graphics device used to manage this scene (as passed to the constructor).
     *
     * @type {GraphicsDevice}
     * @ignore
     */
    device: GraphicsDevice;
    /**
     * Scene gravity vector. NOTE(review): presumably consumed by the physics system — confirm
     * against the framework code.
     *
     * @ignore
     */
    _gravity: Vec3;
    /**
     * @type {LayerComposition}
     * @private
     */
    private _layers;
    /**
     * Array of 6 prefiltered lighting data cubemaps.
     *
     * @type {Texture[]}
     * @private
     */
    private _prefilteredCubemaps;
    /**
     * Internally generated environment atlas. NOTE(review): type and lifetime are not visible in
     * this declaration — confirm against the implementation.
     *
     * @ignore
     */
    _internalEnvAtlas: any;
    /** Backing field for {@link Scene#skyboxIntensity}. @ignore */
    _skyboxIntensity: number;
    /** Backing field for {@link Scene#skyboxLuminance}. @ignore */
    _skyboxLuminance: number;
    /** Backing field for {@link Scene#skyboxMip}. @ignore */
    _skyboxMip: number;
    /** Backing field for {@link Scene#skyboxHighlightMultiplier}. @ignore */
    _skyboxHighlightMultiplier: number;
    /** NOTE(review): presumably controls whether skybox rotation code is included in the shader — confirm. @ignore */
    _skyboxRotationShaderInclude: boolean;
    /** Backing field for {@link Scene#skyboxRotation}. @ignore */
    _skyboxRotation: Quat;
    /** 3x3 matrix form of {@link Scene#skyboxRotation}. @ignore */
    _skyboxRotationMat3: Mat3;
    /** 4x4 matrix form of {@link Scene#skyboxRotation}. @ignore */
    _skyboxRotationMat4: Mat4;
    /** Backing field for {@link Scene#ambientBakeNumSamples}. @ignore */
    _ambientBakeNumSamples: number;
    /** Backing field for {@link Scene#ambientBakeSpherePart}. @ignore */
    _ambientBakeSpherePart: number;
    /** Backing field for {@link Scene#lightmapFilterRange}. @ignore */
    _lightmapFilterRange: number;
    /** Backing field for {@link Scene#lightmapFilterSmoothness}. @ignore */
    _lightmapFilterSmoothness: number;
    /** Backing field for {@link Scene#clusteredLightingEnabled}. @ignore */
    _clusteredLightingEnabled: boolean;
    /** Backing field for {@link Scene#lighting}. @ignore */
    _lightingParams: LightingParams;
    /** When true, shaders need updating. NOTE(review): exact trigger/consumer not visible here — confirm. @ignore */
    updateShaders: boolean;
    /** Backing field for {@link Scene#gsplat}. @ignore */
    _gsplatParams: GSplatParams;
    /** Backing field for {@link Scene#sky}. @ignore */
    _sky: Sky;
    /**
     * Scene statistics counters.
     *
     * @ignore
     */
    _stats: {
        meshInstances: number;
        lights: number;
        dynamicLights: number;
        bakedLights: number;
        updateShadersTime: number;
    };
    /** Internal shader version counter. @ignore */
    _shaderVersion: number;
    /** Helper backing the immediate drawing functions such as {@link Scene#drawLine}. @ignore */
    immediate: Immediate;
    /**
     * Gets the default layer used by the immediate drawing functions.
     *
     * @type {Layer}
     * @ignore
     */
    get defaultDrawLayer(): Layer;
    /**
     * Sets the number of samples used to bake the ambient light into the lightmap. Note that
     * {@link Scene#ambientBake} must be true for this to have an effect. Defaults to 1. Maximum
     * value is 255.
     *
     * @type {number}
     */
    set ambientBakeNumSamples(value: number);
    /**
     * Gets the number of samples used to bake the ambient light into the lightmap.
     *
     * @type {number}
     */
    get ambientBakeNumSamples(): number;
    /**
     * Sets the part of the sphere which represents the source of ambient light. Note that
     * {@link Scene#ambientBake} must be true for this to have an effect. The valid range is 0..1,
     * representing a part of the sphere from top to the bottom. A value of 0.5 represents the
     * upper hemisphere. A value of 1 represents a full sphere. Defaults to 0.4, which is a smaller
     * upper hemisphere as this requires fewer samples to bake.
     *
     * @type {number}
     */
    set ambientBakeSpherePart(value: number);
    /**
     * Gets the part of the sphere which represents the source of ambient light.
     *
     * @type {number}
     */
    get ambientBakeSpherePart(): number;
    /**
     * Sets whether clustered lighting is enabled. Set to false before the first frame is rendered
     * to use non-clustered lighting. Defaults to true.
     *
     * @type {boolean}
     */
    set clusteredLightingEnabled(value: boolean);
    /**
     * Gets whether clustered lighting is enabled.
     *
     * @type {boolean}
     */
    get clusteredLightingEnabled(): boolean;
    /**
     * Sets the environment lighting atlas.
     *
     * @type {Texture|null}
     */
    set envAtlas(value: Texture | null);
    /**
     * Gets the environment lighting atlas.
     *
     * @type {Texture|null}
     */
    get envAtlas(): Texture | null;
    /**
     * Sets the {@link LayerComposition} that defines rendering order of this scene.
     *
     * @type {LayerComposition}
     */
    set layers(layers: LayerComposition);
    /**
     * Gets the {@link LayerComposition} that defines rendering order of this scene.
     *
     * @type {LayerComposition}
     */
    get layers(): LayerComposition;
    /**
     * Gets the {@link Sky} that defines sky properties.
     *
     * @type {Sky}
     */
    get sky(): Sky;
    /**
     * Gets the {@link LightingParams} that define lighting parameters.
     *
     * @type {LightingParams}
     */
    get lighting(): LightingParams;
    /**
     * Gets the GSplat parameters.
     *
     * @type {GSplatParams}
     */
    get gsplat(): GSplatParams;
    /**
     * Gets the {@link FogParams} that define fog parameters.
     *
     * @type {FogParams}
     */
    get fog(): FogParams;
    /**
     * Sets the range parameter of the bilateral filter. It's used when {@link Scene#lightmapFilterEnabled}
     * is enabled. Larger value applies more widespread blur. This needs to be a positive non-zero
     * value. Defaults to 10.
     *
     * @type {number}
     */
    set lightmapFilterRange(value: number);
    /**
     * Gets the range parameter of the bilateral filter.
     *
     * @type {number}
     */
    get lightmapFilterRange(): number;
    /**
     * Sets the spatial parameter of the bilateral filter. It's used when {@link Scene#lightmapFilterEnabled}
     * is enabled. Larger value blurs less similar colors. This needs to be a positive non-zero
     * value. Defaults to 0.2.
     *
     * @type {number}
     */
    set lightmapFilterSmoothness(value: number);
    /**
     * Gets the spatial parameter of the bilateral filter.
     *
     * @type {number}
     */
    get lightmapFilterSmoothness(): number;
    /**
     * Sets the 6 prefiltered cubemaps acting as the source of image-based lighting.
     *
     * @type {Texture[]}
     */
    set prefilteredCubemaps(value: Texture[]);
    /**
     * Gets the 6 prefiltered cubemaps acting as the source of image-based lighting.
     *
     * @type {Texture[]}
     */
    get prefilteredCubemaps(): Texture[];
    /**
     * Sets the base cubemap texture used as the scene's skybox when skyboxMip is 0. Defaults to null.
     *
     * @type {Texture|null}
     */
    set skybox(value: Texture | null);
    /**
     * Gets the base cubemap texture used as the scene's skybox when skyboxMip is 0.
     *
     * @type {Texture|null}
     */
    get skybox(): Texture | null;
    /**
     * Sets the multiplier for skybox intensity. Defaults to 1. Unused if physical units are used.
     *
     * @type {number}
     */
    set skyboxIntensity(value: number);
    /**
     * Gets the multiplier for skybox intensity.
     *
     * @type {number}
     */
    get skyboxIntensity(): number;
    /**
     * Sets the luminance (in lm/m^2) of the skybox. Defaults to 0. Only used if physical units are used.
     *
     * @type {number}
     */
    set skyboxLuminance(value: number);
    /**
     * Gets the luminance (in lm/m^2) of the skybox.
     *
     * @type {number}
     */
    get skyboxLuminance(): number;
    /**
     * Sets the mip level of the skybox to be displayed. Only valid for prefiltered cubemap skyboxes.
     * Defaults to 0 (base level).
     *
     * @type {number}
     */
    set skyboxMip(value: number);
    /**
     * Gets the mip level of the skybox to be displayed.
     *
     * @type {number}
     */
    get skyboxMip(): number;
    /**
     * Sets the highlight multiplier for the skybox. The HDR skybox can represent brightness levels
     * up to a maximum of 64, with any values beyond this being clipped. This limitation prevents
     * the accurate representation of extremely bright sources, such as the Sun, which can affect
     * HDR bloom rendering by not producing enough bloom. The multiplier adjusts the brightness
     * after clipping, enhancing the bloom effect for bright sources. Defaults to 1.
     *
     * @type {number}
     */
    set skyboxHighlightMultiplier(value: number);
    /**
     * Gets the highlight multiplier for the skybox.
     *
     * @type {number}
     */
    get skyboxHighlightMultiplier(): number;
    /**
     * Sets the rotation of the skybox to be displayed. Defaults to {@link Quat.IDENTITY}.
     *
     * @type {Quat}
     */
    set skyboxRotation(value: Quat);
    /**
     * Gets the rotation of the skybox to be displayed.
     *
     * @type {Quat}
     */
    get skyboxRotation(): Quat;
    /** Destroys the scene, releasing resources it owns. */
    destroy(): void;
    /**
     * Immediate-mode helper: draws a line from start to end with the given color, optionally
     * depth-tested, into the given layer (defaults to {@link Scene#defaultDrawLayer} — confirm).
     */
    drawLine(start: any, end: any, color?: Color, depthTest?: boolean, layer?: Layer): void;
    /**
     * Immediate-mode helper: draws lines from pairs of positions with per-vertex colors.
     * NOTE(review): exact layout of positions/colors not declared here — confirm.
     */
    drawLines(positions: any, colors: any, depthTest?: boolean, layer?: Layer): void;
    /**
     * Immediate-mode helper: draws lines from flat arrays of position/color components.
     * NOTE(review): exact layout of positions/colors not declared here — confirm.
     */
    drawLineArrays(positions: any, colors: any, depthTest?: boolean, layer?: Layer): void;
    /**
     * Applies a settings object to the scene. NOTE(review): the shape of `settings` is not
     * declared here — confirm against the scene settings format.
     *
     * @ignore
     */
    applySettings(settings: any): void;
    /** Returns the skybox texture to render. NOTE(review): selection logic not visible here. @ignore */
    _getSkyboxTex(): Texture;
    /** Updates the internal sky mesh/state. @ignore */
    _updateSkyMesh(): void;
    /** Resets the internal sky mesh/state. @ignore */
    _resetSkyMesh(): void;
    /**
     * Sets the cubemap for the scene skybox.
     *
     * @param {Texture[]} [cubemaps] - An array of cubemaps corresponding to the skybox at
     * different mip levels. If undefined, scene will remove skybox. Cubemap array should be of
     * size 7, with the first element (index 0) corresponding to the base cubemap (mip level 0)
     * with original resolution. Each remaining element (index 1-6) corresponds to a fixed
     * prefiltered resolution (128x128, 64x64, 32x32, 16x16, 8x8, 4x4).
     */
    setSkybox(cubemaps?: Texture[]): void;
    /**
     * Gets the lightmap pixel format.
     *
     * @type {number}
     */
    get lightmapPixelFormat(): number;
}

/**
 * The description of the parameters used by the Material#getShaderVariant function.
 */
type ShaderVariantParams = {
    /**
     * - The graphics device.
     */
    device: GraphicsDevice;
    /**
     * - The scene.
     */
    scene: Scene;
    /**
     * - The object definitions.
     */
    objDefs: number;
    /**
     * - The camera shader parameters.
     */
    cameraShaderParams: CameraShaderParams;
    /**
     * - The shader pass.
     */
    pass: number;
    /**
     * - The sorted lights.
     */
    sortedLights: Light[][];
    /**
     * - The view uniform format.
     */
    viewUniformFormat: UniformBufferFormat | undefined;
    /**
     * - The view bind group format.
     */
    viewBindGroupFormat: BindGroupFormat | undefined;
    /**
     * - The vertex format.
     */
    vertexFormat: VertexFormat;
};
/**
 * @typedef {object} ShaderVariantParams - The description of the parameters used by the
 * Material#getShaderVariant function.
 * @property {GraphicsDevice} device - The graphics device.
 * @property {Scene} scene - The scene.
 * @property {number} objDefs - The object definitions.
 * @property {CameraShaderParams} cameraShaderParams - The camera shader parameters.
 * @property {number} pass - The shader pass.
 * @property {Light[][]} sortedLights - The sorted lights.
 * @property {UniformBufferFormat|undefined} viewUniformFormat - The view uniform format.
 * @property {BindGroupFormat|undefined} viewBindGroupFormat - The view bind group format.
 * @property {VertexFormat} vertexFormat - The vertex format.
 * @ignore
 */
/**
 * A material determines how a particular {@link MeshInstance} is rendered, and specifies
 * render state including uniforms, textures, defines, and other properties.
 *
 * This is a base class and cannot be instantiated and used directly. Only subclasses such
 * as {@link ShaderMaterial} and {@link StandardMaterial} can be used to define materials
 * for rendering.
 *
 * @category Graphics
 */
declare class Material {
    /**
     * The mesh instances referencing this material
     *
     * @type {Set<MeshInstance>}
     * @private
     */
    private meshInstances;
    /**
     * The name of the material.
     *
     * @type {string}
     */
    name: string;
    /**
     * A unique id the user can assign to the material. The engine internally does not use this for
     * anything, and the user can assign a value to this id for any purpose they like. Defaults to
     * an empty string.
     *
     * @type {string}
     */
    userId: string;
    id: number;
    /**
     * The cache of shader variants generated for this material. The key represents the unique
     * variant, the value is the shader.
     *
     * @type {Map<number, Shader>}
     * @ignore
     */
    variants: Map<number, Shader>;
    /**
     * The set of defines used to generate the shader variants.
     *
     * @type {Map<string, string>}
     * @ignore
     */
    defines: Map<string, string>;
    _definesDirty: boolean;
    parameters: {};
    /**
     * The alpha test reference value to control which fragments are written to the currently
     * active render target based on alpha value. All fragments with an alpha value of less than
     * the alphaTest reference value will be discarded. alphaTest defaults to 0 (all fragments
     * pass).
     *
     * @type {number}
     */
    alphaTest: number;
    /**
     * Enables or disables alpha to coverage (WebGL2 only). When enabled, and if hardware
     * anti-aliasing is on, limited order-independent transparency can be achieved. Quality depends
     * on the number of MSAA samples of the current render target. It can nicely soften edges of
     * otherwise sharp alpha cutouts, but isn't recommended for large area semi-transparent
     * surfaces. Note, that you don't need to enable blending to make alpha to coverage work. It
     * will work without it, just like alphaTest.
     *
     * @type {boolean}
     */
    alphaToCoverage: boolean;
    /** @ignore */
    _blendState: BlendState;
    /** @ignore */
    _depthState: DepthState;
    /**
     * Controls how triangles are culled based on their face direction with respect to the
     * viewpoint. Can be:
     *
     * - {@link CULLFACE_NONE}: Do not cull triangles based on face direction.
     * - {@link CULLFACE_BACK}: Cull the back faces of triangles (do not render triangles facing
     * away from the view point).
     * - {@link CULLFACE_FRONT}: Cull the front faces of triangles (do not render triangles facing
     * towards the view point).
     *
     * Defaults to {@link CULLFACE_BACK}.
     *
     * @type {number}
     */
    cull: number;
    /**
     * Controls whether polygons are front- or back-facing by setting a winding
     * orientation. Can be:
     *
     * - {@link FRONTFACE_CW}: The clock-wise winding.
     * - {@link FRONTFACE_CCW}: The counterclockwise winding.
     *
     * Defaults to {@link FRONTFACE_CCW}.
     *
     * @type {number}
     */
    frontFace: number;
    /**
     * Stencil parameters for front faces (default is null).
     *
     * @type {StencilParameters|null}
     */
    stencilFront: StencilParameters | null;
    /**
     * Stencil parameters for back faces (default is null).
     *
     * @type {StencilParameters|null}
     */
    stencilBack: StencilParameters | null;
    /**
     * @type {ShaderChunks|null}
     * @private
     */
    private _shaderChunks;
    _oldChunks: {};
    _dirtyShader: boolean;
    /**
     * Returns true if the material has custom shader chunks.
     *
     * @type {boolean}
     * @ignore
     */
    get hasShaderChunks(): boolean;
    /**
     * Returns the shader chunks for the material. Those get allocated if they are not already.
     *
     * @type {ShaderChunks}
     * @ignore
     */
    get shaderChunks(): ShaderChunks;
    /**
     * Returns an object containing shader chunks for a specific shader language for the material.
     * These chunks define custom GLSL or WGSL code used to construct the final shader for the
     * material. The chunks can also be included in shaders using the `#include "ChunkName"`
     * directive.
     *
     * On the WebGL platform:
     *  - If GLSL chunks are provided, they are used directly.
     *
     * On the WebGPU platform:
     * - If WGSL chunks are provided, they are used directly.
     * - If only GLSL chunks are provided, a GLSL shader is generated and then transpiled to WGSL,
     * which is less efficient.
     *
     * To ensure faster shader compilation, it is recommended to provide shader chunks for all
     * supported platforms.
     *
     * A simple example on how to override a shader chunk providing emissive color for both GLSL and
     * WGSL to simply return a red color:
     *
     * ```javascript
     * material.getShaderChunks(pc.SHADERLANGUAGE_GLSL).set('emissivePS', `
     *     void getEmission() {
     *         dEmission = vec3(1.0, 0.0, 1.0);
     *     }
     * `);
     *
     * material.getShaderChunks(pc.SHADERLANGUAGE_WGSL).set('emissivePS', `
     *     fn getEmission() {
     *         dEmission = vec3f(1.0, 0.0, 1.0);
     *     }
     * `);
     *
     * // call update to apply the changes
     * material.update();
     * ```
     *
     * @param {string} [shaderLanguage] - Specifies the shader language of shaders. Defaults to
     * {@link SHADERLANGUAGE_GLSL}.
     * @returns {ShaderChunkMap} - The shader chunks for the specified shader language.
     */
    getShaderChunks(shaderLanguage?: string): ShaderChunkMap;
    /**
     * Sets the version of the shader chunks.
     *
     * This should be a string containing the current engine major and minor version (e.g., '2.8'
     * for engine v2.8.1) and ensures compatibility with the current engine version. When providing
     * custom shader chunks, set this to the latest supported version. If a future engine release no
     * longer supports the specified version, a warning will be issued. In that case, update your
     * shader chunks to match the new format and set this to the latest version accordingly.
     *
     * @type {string}
     */
    set shaderChunksVersion(value: string);
    /**
     * Returns the version of the shader chunks.
     *
     * @type {string}
     */
    get shaderChunksVersion(): string;
    set chunks(value: {});
    get chunks(): {};
    /**
     * Sets the offset for the output depth buffer value. Useful for decals to prevent z-fighting.
     * Typically a small negative value (-0.1) is used to render the mesh slightly closer to the
     * camera.
     *
     * @type {number}
     */
    set depthBias(value: number);
    /**
     * Gets the offset for the output depth buffer value.
     *
     * @type {number}
     */
    get depthBias(): number;
    /**
     * Sets the offset for the output depth buffer value based on the slope of the triangle
     * relative to the camera.
     *
     * @type {number}
     */
    set slopeDepthBias(value: number);
    /**
     * Gets the offset for the output depth buffer value based on the slope of the triangle
     * relative to the camera.
     *
     * @type {number}
     */
    get slopeDepthBias(): number;
    _shaderVersion: number;
    _scene: any;
    dirty: boolean;
    /**
     * Sets whether the red channel is written to the color buffer. If true, the red component of
     * fragments generated by the shader of this material is written to the color buffer of the
     * currently active render target. If false, the red component will not be written. Defaults to
     * true.
     *
     * @type {boolean}
     */
    set redWrite(value: boolean);
    /**
     * Gets whether the red channel is written to the color buffer.
     *
     * @type {boolean}
     */
    get redWrite(): boolean;
    /**
     * Sets whether the green channel is written to the color buffer. If true, the green component of
     * fragments generated by the shader of this material is written to the color buffer of the
     * currently active render target. If false, the green component will not be written. Defaults
     * to true.
     *
     * @type {boolean}
     */
    set greenWrite(value: boolean);
    /**
     * Gets whether the green channel is written to the color buffer.
     *
     * @type {boolean}
     */
    get greenWrite(): boolean;
    /**
     * Sets whether the blue channel is written to the color buffer. If true, the blue component of
     * fragments generated by the shader of this material is written to the color buffer of the
     * currently active render target. If false, the blue component will not be written. Defaults
     * to true.
     *
     * @type {boolean}
     */
    set blueWrite(value: boolean);
    /**
     * Gets whether the blue channel is written to the color buffer.
     *
     * @type {boolean}
     */
    get blueWrite(): boolean;
    /**
     * Sets whether the alpha channel is written to the color buffer. If true, the alpha component of
     * fragments generated by the shader of this material is written to the color buffer of the
     * currently active render target. If false, the alpha component will not be written. Defaults
     * to true.
     *
     * @type {boolean}
     */
    set alphaWrite(value: boolean);
    /**
     * Gets whether the alpha channel is written to the color buffer.
     *
     * @type {boolean}
     */
    get alphaWrite(): boolean;
    get transparent(): boolean;
    _updateTransparency(): void;
    /**
     * Sets the blend state for this material. Controls how fragment shader outputs are blended
     * when being written to the currently active render target. This overwrites blending type set
     * using {@link Material#blendType}, and offers more control over blending.
     *
     * @type {BlendState}
     */
    set blendState(value: BlendState);
    /**
     * Gets the blend state for this material.
     *
     * @type {BlendState}
     */
    get blendState(): BlendState;
    /**
     * Sets the blend mode for this material. Controls how fragment shader outputs are blended when
     * being written to the currently active render target. Can be:
     *
     * - {@link BLEND_SUBTRACTIVE}: Subtract the color of the source fragment from the destination
     * fragment and write the result to the frame buffer.
     * - {@link BLEND_ADDITIVE}: Add the color of the source fragment to the destination fragment
     * and write the result to the frame buffer.
     * - {@link BLEND_NORMAL}: Enable simple translucency for materials such as glass. This is
     * equivalent to enabling a source blend mode of {@link BLENDMODE_SRC_ALPHA} and a destination
     * blend mode of {@link BLENDMODE_ONE_MINUS_SRC_ALPHA}.
     * - {@link BLEND_NONE}: Disable blending.
     * - {@link BLEND_PREMULTIPLIED}: Similar to {@link BLEND_NORMAL} except the source fragment is
     * assumed to have already been multiplied by the source alpha value.
     * - {@link BLEND_MULTIPLICATIVE}: Multiply the color of the source fragment by the color of the
     * destination fragment and write the result to the frame buffer.
     * - {@link BLEND_ADDITIVEALPHA}: Same as {@link BLEND_ADDITIVE} except the source RGB is
     * multiplied by the source alpha.
     * - {@link BLEND_MULTIPLICATIVE2X}: Multiplies colors and doubles the result.
     * - {@link BLEND_SCREEN}: Softer version of additive.
     * - {@link BLEND_MIN}: Minimum color.
     * - {@link BLEND_MAX}: Maximum color.
     *
     * Defaults to {@link BLEND_NONE}.
     *
     * @type {number}
     */
    set blendType(type: number);
    /**
     * Gets the blend mode for this material.
     *
     * @type {number}
     */
    get blendType(): number;
    /**
     * Sets the depth state. Note that this can also be done by using {@link Material#depthTest},
     * {@link Material#depthFunc} and {@link Material#depthWrite}.
     *
     * @type {DepthState}
     */
    set depthState(value: DepthState);
    /**
     * Gets the depth state.
     *
     * @type {DepthState}
     */
    get depthState(): DepthState;
    /**
     * Sets whether depth testing is enabled. If true, fragments generated by the shader of this
     * material are only written to the current render target if they pass the depth test. If
     * false, fragments generated by the shader of this material are written to the current render
     * target regardless of what is in the depth buffer. Defaults to true.
     *
     * @type {boolean}
     */
    set depthTest(value: boolean);
    /**
     * Gets whether depth testing is enabled.
     *
     * @type {boolean}
     */
    get depthTest(): boolean;
    /**
     * Sets the depth test function. Controls how the depth of new fragments is compared against
     * the current depth contained in the depth buffer. Can be:
     *
     * - {@link FUNC_NEVER}: don't draw
     * - {@link FUNC_LESS}: draw if new depth < depth buffer
     * - {@link FUNC_EQUAL}: draw if new depth == depth buffer
     * - {@link FUNC_LESSEQUAL}: draw if new depth <= depth buffer
     * - {@link FUNC_GREATER}: draw if new depth > depth buffer
     * - {@link FUNC_NOTEQUAL}: draw if new depth != depth buffer
     * - {@link FUNC_GREATEREQUAL}: draw if new depth >= depth buffer
     * - {@link FUNC_ALWAYS}: always draw
     *
     * Defaults to {@link FUNC_LESSEQUAL}.
     *
     * @type {number}
     */
    set depthFunc(value: number);
    /**
     * Gets the depth test function.
     *
     * @type {number}
     */
    get depthFunc(): number;
    /**
     * Sets whether depth writing is enabled. If true, fragments generated by the shader of this
     * material write a depth value to the depth buffer of the currently active render target. If
     * false, no depth value is written. Defaults to true.
     *
     * @type {boolean}
     */
    set depthWrite(value: boolean);
    /**
     * Gets whether depth writing is enabled.
     *
     * @type {boolean}
     */
    get depthWrite(): boolean;
    /**
     * Copy a material.
     *
     * @param {Material} source - The material to copy.
     * @returns {Material} The destination material.
     */
    copy(source: Material): Material;
    /**
     * Clone a material.
     *
     * @returns {this} A newly cloned material.
     */
    clone(): this;
    _updateMeshInstanceKeys(): void;
    updateUniforms(device: any, scene: any): void;
    /**
     * @param {ShaderVariantParams} params - The parameters used to generate the shader variant.
     * @ignore
     */
    getShaderVariant(params: ShaderVariantParams): void;
    /**
     * Applies any changes made to the material's properties. This method should be called after
     * modifying material properties to ensure the changes take effect.
     *
     * The method will clear cached shader variants and trigger recompilation if:
     * - Modified material properties require a different shader variant (e.g., enabling/disabling
     *   textures or other properties that affect shader generation)
     * - Material-specific shader chunks (from {@link Material#getShaderChunks}) have been modified
     * - Global shader chunks (from {@link ShaderChunks.get}) have been modified
     * - Material defines have been changed
     *
     * Note: Shaders are not compiled immediately. Instead, existing shader variants are cleared
     * and new variants will be compiled on-demand as they are needed for different render passes
     * (e.g., {@link SHADER_FORWARD}, {@link SHADER_SHADOW}).
     *
     * When global shader chunks are modified, `update()` must be called on each material that
     * should reflect those changes.
     */
    update(): void;
    clearParameters(): void;
    getParameters(): {};
    clearVariants(): void;
    /**
     * Retrieves the specified shader parameter from a material.
     *
     * @param {string} name - The name of the parameter to query.
     * @returns {object} The named parameter.
     */
    getParameter(name: string): object;
    _setParameterSimple(name: any, data: any): void;
    /**
     * Sets a shader parameter on a material.
     *
     * @param {string} name - The name of the parameter to set.
     * @param {number|number[]|ArrayBufferView|Texture|StorageBuffer} data - The value for the specified parameter.
     */
    setParameter(name: string, data: number | number[] | ArrayBufferView | Texture | StorageBuffer): void;
    /**
     * Deletes a shader parameter on a material.
     *
     * @param {string} name - The name of the parameter to delete.
     */
    deleteParameter(name: string): void;
    setParameters(device: any, names: any): void;
    /**
     * Adds or removes a define on the material. Defines can be used to enable or disable various
     * parts of the shader code.
     *
     * @param {string} name - The name of the define to set.
     * @param {string|undefined|boolean} value - The value of the define. If undefined or false, the
     * define is removed.
     *
     * A simple example on how to set a custom shader define value used by the shader processor.
     *
     * ```javascript
     * material.setDefine('MY_DEFINE', true);
     *
     * // call update to apply the changes, which will recompile the shader using the new define
     * material.update();
     * ```
     */
    setDefine(name: string, value: string | undefined | boolean): void;
    /**
     * Returns true if a define is enabled on the material, otherwise false.
     *
     * @param {string} name - The name of the define to check.
     * @returns {boolean} The value of the define.
     */
    getDefine(name: string): boolean;
    /**
     * Removes this material from the scene and possibly frees up memory from its shaders (if there
     * are no other materials using it).
     */
    destroy(): void;
    /**
     * Registers mesh instance as referencing the material.
     *
     * @param {MeshInstance} meshInstance - The mesh instance to register.
     * @ignore
     */
    addMeshInstanceRef(meshInstance: MeshInstance): void;
    /**
     * De-registers mesh instance as referencing the material.
     *
     * @param {MeshInstance} meshInstance - The mesh instance to de-register.
     * @ignore
     */
    removeMeshInstanceRef(meshInstance: MeshInstance): void;
}

/**
 * @import { GraphNode } from './graph-node.js'
 */
/**
 * A model is a graphical object that can be added to or removed from a scene. It contains a
 * hierarchy and any number of mesh instances.
 *
 * @category Graphics
 */
declare class Model {
    /**
     * The root node of the model's graph node hierarchy.
     *
     * @type {GraphNode|null}
     */
    graph: GraphNode | null;
    /**
     * An array of MeshInstances contained in this model.
     *
     * @type {MeshInstance[]}
     */
    meshInstances: MeshInstance[];
    /**
     * An array of SkinInstances contained in this model.
     *
     * @type {SkinInstance[]}
     */
    skinInstances: SkinInstance[];
    /**
     * An array of MorphInstances contained in this model.
     *
     * @type {MorphInstance[]}
     */
    morphInstances: MorphInstance[];
    // Cameras and lights associated with this model (untyped in this declaration).
    cameras: any[];
    lights: any[];
    // Internal bookkeeping; not part of the public API.
    _shadersVersion: number;
    _immutable: boolean;
    // Accessors for the graph root, cameras and lights listed above.
    getGraph(): GraphNode;
    setGraph(graph: any): void;
    getCameras(): any[];
    setCameras(cameras: any): void;
    getLights(): any[];
    setLights(lights: any): void;
    // Returns the materials referenced by this model.
    getMaterials(): Material[];
    /**
     * Clones a model. The returned model has a newly created hierarchy and mesh instances, but
     * meshes are shared between the clone and the specified model.
     *
     * @returns {Model} A clone of the specified model.
     * @example
     * const clonedModel = model.clone();
     */
    clone(): Model;
    /**
     * Destroys skinning texture and possibly deletes vertex/index buffers of a model. Mesh is
     * reference-counted, so buffers are only deleted if all models with referencing mesh instances
     * were deleted. That means all in-scene models + the "base" one (asset.resource) which is
     * created when the model is parsed. It is recommended to use asset.unload() instead, which
     * will also remove the model from the scene.
     */
    destroy(): void;
    /**
     * Generates the necessary internal data for a model to be renderable as wireframe. Once this
     * function has been called, any mesh instance in the model can have its renderStyle property
     * set to {@link RENDERSTYLE_WIREFRAME}.
     *
     * @example
     * model.generateWireframe();
     * for (let i = 0; i < model.meshInstances.length; i++) {
     *     model.meshInstances[i].renderStyle = pc.RENDERSTYLE_WIREFRAME;
     * }
     */
    generateWireframe(): void;
}

/**
 * Represents a skeleton used to play animations.
 *
 * @category Animation
 */
declare class Skeleton {
    /**
     * Create a new Skeleton instance.
     *
     * @param {GraphNode} graph - The root {@link GraphNode} of the skeleton.
     */
    constructor(graph: GraphNode);
    /**
     * Determines whether skeleton is looping its animation.
     *
     * @type {boolean}
     */
    looping: boolean;
    /**
     * @type {Animation}
     * @private
     */
    private _animation;
    // Current playback position of the animation, in seconds.
    _time: number;
    // Per-node keys interpolated at the current time (internal).
    _interpolatedKeys: any[];
    // Lookup of interpolated keys — presumably keyed by node name; confirm in implementation.
    _interpolatedKeyDict: {};
    // Cached current key indices used to speed up key searches (internal).
    _currKeyIndices: {};
    // The root node of the hierarchy driven by this skeleton.
    graph: GraphNode;
    /**
     * Sets the animation on the skeleton.
     *
     * @type {Animation}
     */
    set animation(value: Animation);
    /**
     * Gets the animation on the skeleton.
     *
     * @type {Animation}
     */
    get animation(): Animation;
    /**
     * Sets the current time of the currently active animation in seconds. This value is between
     * zero and the duration of the animation.
     *
     * @type {number}
     */
    set currentTime(value: number);
    /**
     * Gets the current time of the currently active animation in seconds.
     *
     * @type {number}
     */
    get currentTime(): number;
    /**
     * Gets the number of nodes in the skeleton.
     *
     * @type {number}
     */
    get numNodes(): number;
    /**
     * Progresses the animation assigned to the specified skeleton by the supplied time delta. If
     * the delta takes the animation past its end point, then if the skeleton is set to loop, the
     * animation will continue from the beginning. Otherwise, the animation's current time will
     * remain at its duration (i.e. the end).
     *
     * @param {number} delta - The time in seconds to progress the skeleton's animation.
     */
    addTime(delta: number): void;
    /**
     * Blends two skeletons together.
     *
     * @param {Skeleton} skel1 - Skeleton holding the first pose to be blended.
     * @param {Skeleton} skel2 - Skeleton holding the second pose to be blended.
     * @param {number} alpha - The value controlling the interpolation in relation to the two input
     * skeletons. The value is in the range 0 to 1, 0 generating skel1, 1 generating skel2 and
     * anything in between generating a spherical interpolation between the two.
     */
    blend(skel1: Skeleton, skel2: Skeleton, alpha: number): void;
    /**
     * Links a skeleton to a node hierarchy. The nodes of the animated skeleton are then used to
     * drive the local transformation matrices of the node hierarchy.
     *
     * @param {GraphNode} graph - The root node of the graph that the skeleton is to drive.
     */
    setGraph(graph: GraphNode): void;
    /**
     * Synchronizes the currently linked node hierarchy with the current state of the skeleton.
     * Internally, this function converts the interpolated keyframe at each node in the skeleton
     * into the local transformation matrix at each corresponding node in the linked node
     * hierarchy.
     */
    updateGraph(): void;
}

/**
 * @import { Animation } from '../../../scene/animation/animation.js'
 * @import { Model } from '../../../scene/model.js'
 */
/**
 * The Animation Component allows an Entity to playback animations on models.
 *
 * @hideconstructor
 * @category Animation
 */
declare class AnimationComponent extends Component {
    /**
     * @type {Object<string, Animation>}
     * @private
     */
    private _animations;
    /**
     * @type {Array.<number|Asset>}
     * @private
     */
    private _assets;
    /** @private */
    private _loop;
    /**
     * @type {AnimEvaluator|null}
     * @ignore
     */
    animEvaluator: AnimEvaluator | null;
    /**
     * @type {Model|null}
     * @ignore
     */
    model: Model | null;
    /**
     * Get the skeleton for the current model. If the model is loaded from glTF/glb, then the
     * skeleton is null.
     *
     * @type {Skeleton|null}
     */
    skeleton: Skeleton | null;
    /**
     * @type {Skeleton|null}
     * @ignore
     */
    fromSkel: Skeleton | null;
    /**
     * @type {Skeleton|null}
     * @ignore
     */
    toSkel: Skeleton | null;
    /**
     * @type {Object<string, string>}
     * @ignore
     */
    animationsIndex: {
        [x: string]: string;
    };
    /**
     * @type {string|null}
     * @private
     */
    private prevAnim;
    /**
     * @type {string|null}
     * @private
     */
    private currAnim;
    /** @private */
    private blend;
    /** @private */
    private blending;
    /** @private */
    private blendSpeed;
    /**
     * If true, the first animation asset will begin playing when the scene is loaded.
     *
     * @type {boolean}
     */
    activate: boolean;
    /**
     * Speed multiplier for animation play back. 1 is playback at normal speed and 0 pauses the
     * animation.
     *
     * @type {number}
     */
    speed: number;
    /**
     * Sets the dictionary of animations by name.
     *
     * @type {Object<string, Animation>}
     */
    set animations(value: {
        [x: string]: Animation;
    });
    /**
     * Gets the dictionary of animations by name.
     *
     * @type {Object<string, Animation>}
     */
    get animations(): {
        [x: string]: Animation;
    };
    /**
     * Sets the array of animation assets or asset ids.
     *
     * @type {Array.<number|Asset>}
     */
    set assets(value: Array<number | Asset>);
    /**
     * Gets the array of animation assets or asset ids.
     *
     * @type {Array.<number|Asset>}
     */
    get assets(): Array<number | Asset>;
    /**
     * Sets the current time position (in seconds) of the animation.
     *
     * @type {number}
     */
    set currentTime(currentTime: number);
    /**
     * Gets the current time position (in seconds) of the animation.
     *
     * @type {number}
     */
    get currentTime(): number;
    /**
     * Gets the duration in seconds of the current animation. Returns 0 if no animation is playing.
     *
     * @type {number}
     */
    get duration(): number;
    /**
     * Sets whether the animation will restart from the beginning when it reaches the end.
     *
     * @type {boolean}
     */
    set loop(value: boolean);
    /**
     * Gets whether the animation will restart from the beginning when it reaches the end.
     *
     * @type {boolean}
     */
    get loop(): boolean;
    /**
     * Start playing an animation.
     *
     * @param {string} name - The name of the animation asset to begin playing.
     * @param {number} [blendTime] - The time in seconds to blend from the current
     * animation state to the start of the animation being set. Defaults to 0.
     */
    play(name: string, blendTime?: number): void;
    playing: boolean;
    /**
     * Return an animation.
     *
     * @param {string} name - The name of the animation asset.
     * @returns {Animation} An Animation.
     */
    getAnimation(name: string): Animation;
    /**
     * Set the model driven by this animation component.
     *
     * @param {Model} model - The model to set.
     * @ignore
     */
    setModel(model: Model): void;
    onSetAnimations(): void;
    /** @private */
    private _resetAnimationController;
    /** @private */
    private _createAnimationController;
    /**
     * @param {number[]} ids - Array of animation asset ids.
     * @private
     */
    private loadAnimationAssets;
    /**
     * Handle asset change events.
     *
     * @param {Asset} asset - The asset that changed.
     * @param {string} attribute - The name of the asset attribute that changed. Can be 'data',
     * 'file', 'resource' or 'resources'.
     * @param {*} newValue - The new value of the specified asset property.
     * @param {*} oldValue - The old value of the specified asset property.
     * @private
     */
    private onAssetChanged;
    /**
     * @param {Asset} asset - The asset that was removed.
     * @private
     */
    private onAssetRemoved;
    /** @private */
    private _stopCurrentAnimation;
    onBeforeRemove(): void;
    /**
     * Update the state of the component.
     *
     * @param {number} dt - The time delta.
     * @ignore
     */
    update(dt: number): void;
}

/**
 * The AudioListenerComponent enables an {@link Entity} to represent the point from where
 * positional {@link SoundComponent}s are heard. This is typically the main camera Entity in your
 * scene. And typically, you will only have one AudioListenerComponent in your scene.
 *
 * You should never need to use the AudioListenerComponent constructor directly. To add a
 * AudioListenerComponent to an {@link Entity}, use {@link Entity#addComponent}:
 *
 * ```javascript
 * const entity = new pc.Entity();
 * entity.addComponent('audiolistener');
 * ```
 *
 * Relevant Engine API examples:
 *
 * - [Positional Sound](https://playcanvas.github.io/#/sound/positional)
 *
 * @hideconstructor
 * @category Sound
 */
declare class AudioListenerComponent extends Component {
    // Makes this component the active audio listener — presumably invoked by the
    // component system rather than user code; confirm before calling directly.
    setCurrentListener(): void;
}

/**
 * @import { Asset } from '../../../framework/asset/asset.js'
 * @import { Entity } from '../../../framework/entity.js'
 */
/**
 * Stores the serializable state backing a {@link ButtonComponent} (used as the
 * DataType of {@link ButtonComponentSystem}).
 */
declare class ButtonComponentData {
    enabled: boolean;
    active: boolean;
    /** @type {Entity} */
    imageEntity: Entity;
    // Extra padding around the image entity used for hit testing — units not visible here; confirm.
    hitPadding: Vec4;
    // Selects how the button transitions between visual states — presumably tint vs.
    // sprite swap; confirm against the TRANSITIONMODE_* constants.
    transitionMode: number;
    // Tints applied in the hover / pressed / inactive states respectively.
    hoverTint: Color;
    pressedTint: Color;
    inactiveTint: Color;
    // Duration of state-transition fades — time unit not visible in this declaration; confirm.
    fadeDuration: number;
    /** @type {Asset} */
    hoverSpriteAsset: Asset;
    hoverSpriteFrame: number;
    /** @type {Asset} */
    pressedSpriteAsset: Asset;
    pressedSpriteFrame: number;
    /** @type {Asset} */
    inactiveSpriteAsset: Asset;
    inactiveSpriteFrame: number;
}

/**
 * Manages creation of {@link ButtonComponent}s.
 *
 * @category User Interface
 */
declare class ButtonComponentSystem extends ComponentSystem {
    /** The string id of this component system ('button'). */
    id: string;
    /** The component class managed by this system. */
    ComponentType: typeof ButtonComponent;
    /** The data class used to store component properties. */
    DataType: typeof ButtonComponentData;
    /** Property schema used to initialize component data. */
    schema: (string | {
        name: string;
        type: string;
    })[];
    /** Copies the given data onto a newly created component. @ignore */
    initializeComponentData(component: any, data: any, properties: any): void;
    /** Per-frame update of all button components. @ignore */
    onUpdate(dt: any): void;
    /** @private */
    _onRemoveComponent(entity: any, component: any): void;
}

/**
 * A ButtonComponent enables a group of entities to behave like a button, with different visual
 * states for hover and press interactions.
 *
 * @hideconstructor
 * @category User Interface
 */
declare class ButtonComponent extends Component {
    /**
     * Fired when the mouse is pressed while the cursor is on the component. The handler is passed
     * an {@link ElementMouseEvent}.
     *
     * @event
     * @example
     * entity.button.on('mousedown', (event) => {
     *     console.log(`Mouse down on entity ${entity.name}`);
     * });
     */
    static EVENT_MOUSEDOWN: string;
    /**
     * Fired when the mouse is released while the cursor is on the component. The handler is passed
     * an {@link ElementMouseEvent}.
     *
     * @event
     * @example
     * entity.button.on('mouseup', (event) => {
     *     console.log(`Mouse up on entity ${entity.name}`);
     * });
     */
    static EVENT_MOUSEUP: string;
    /**
     * Fired when the mouse cursor enters the component. The handler is passed an
     * {@link ElementMouseEvent}.
     *
     * @event
     * @example
     * entity.button.on('mouseenter', (event) => {
     *     console.log(`Mouse entered entity ${entity.name}`);
     * });
     */
    static EVENT_MOUSEENTER: string;
    /**
     * Fired when the mouse cursor leaves the component. The handler is passed an
     * {@link ElementMouseEvent}.
     *
     * @event
     * @example
     * entity.button.on('mouseleave', (event) => {
     *     console.log(`Mouse left entity ${entity.name}`);
     * });
     */
    static EVENT_MOUSELEAVE: string;
    /**
     * Fired when the mouse is pressed and released on the component or when a touch starts and ends on
     * the component. The handler is passed an {@link ElementMouseEvent} or {@link ElementTouchEvent}.
     *
     * @event
     * @example
     * entity.button.on('click', (event) => {
     *     console.log(`Clicked entity ${entity.name}`);
     * });
     */
    static EVENT_CLICK: string;
    /**
     * Fired when a touch starts on the component. The handler is passed an {@link ElementTouchEvent}.
     *
     * @event
     * @example
     * entity.button.on('touchstart', (event) => {
     *     console.log(`Touch started on entity ${entity.name}`);
     * });
     */
    static EVENT_TOUCHSTART: string;
    /**
     * Fired when a touch ends on the component. The handler is passed an {@link ElementTouchEvent}.
     *
     * @event
     * @example
     * entity.button.on('touchend', (event) => {
     *     console.log(`Touch ended on entity ${entity.name}`);
     * });
     */
    static EVENT_TOUCHEND: string;
    /**
     * Fired when a touch is canceled on the component. The handler is passed an
     * {@link ElementTouchEvent}.
     *
     * @event
     * @example
     * entity.button.on('touchcancel', (event) => {
     *     console.log(`Touch canceled on entity ${entity.name}`);
     * });
     */
    static EVENT_TOUCHCANCEL: string;
    /**
     * Fired when a touch leaves the component. The handler is passed an {@link ElementTouchEvent}.
     *
     * @event
     * @example
     * entity.button.on('touchleave', (event) => {
     *     console.log(`Touch left entity ${entity.name}`);
     * });
     */
    static EVENT_TOUCHLEAVE: string;
    /**
     * Fired when an XR select starts on the component. The handler is passed an
     * {@link ElementSelectEvent}.
     *
     * @event
     * @example
     * entity.button.on('selectstart', (event) => {
     *     console.log(`Select started on entity ${entity.name}`);
     * });
     */
    static EVENT_SELECTSTART: string;
    /**
     * Fired when an XR select ends on the component. The handler is passed an
     * {@link ElementSelectEvent}.
     *
     * @event
     * @example
     * entity.button.on('selectend', (event) => {
     *     console.log(`Select ended on entity ${entity.name}`);
     * });
     */
    static EVENT_SELECTEND: string;
    /**
     * Fired when an XR select starts hovering over the component. The handler is passed an
     * {@link ElementSelectEvent}.
     *
     * @event
     * @example
     * entity.button.on('selectenter', (event) => {
     *     console.log(`Select entered entity ${entity.name}`);
     * });
     */
    static EVENT_SELECTENTER: string;
    /**
     * Fired when an XR select stops hovering over the component. The handler is passed an
     * {@link ElementSelectEvent}.
     *
     * @event
     * @example
     * entity.button.on('selectleave', (event) => {
     *     console.log(`Select left entity ${entity.name}`);
     * });
     */
    static EVENT_SELECTLEAVE: string;
    /**
     * Fired when the button changes state to be hovered.
     *
     * @event
     * @example
     * entity.button.on('hoverstart', () => {
     *     console.log(`Entity ${entity.name} hovered`);
     * });
     */
    static EVENT_HOVERSTART: string;
    /**
     * Fired when the button changes state to be not hovered.
     *
     * @event
     * @example
     * entity.button.on('hoverend', () => {
     *     console.log(`Entity ${entity.name} unhovered`);
     * });
     */
    static EVENT_HOVEREND: string;
    /**
     * Fired when the button changes state to be pressed.
     *
     * @event
     * @example
     * entity.button.on('pressedstart', () => {
     *     console.log(`Entity ${entity.name} pressed`);
     * });
     */
    static EVENT_PRESSEDSTART: string;
    /**
     * Fired when the button changes state to be not pressed.
     *
     * @event
     * @example
     * entity.button.on('pressedend', () => {
     *     console.log(`Entity ${entity.name} unpressed`);
     * });
     */
    static EVENT_PRESSEDEND: string;
    /**
     * Create a new ButtonComponent instance.
     *
     * @param {ButtonComponentSystem} system - The ComponentSystem that created this component.
     * @param {Entity} entity - The entity that this component is attached to.
     */
    constructor(system: ButtonComponentSystem, entity: Entity);
    /** @private */
    private _visualState;
    /** @private */
    private _isHovering;
    /** @private */
    private _hoveringCounter;
    /** @private */
    private _isPressed;
    /** @private */
    private _defaultTint;
    /** @private */
    private _defaultSpriteAsset;
    /** @private */
    private _defaultSpriteFrame;
    /**
     * @type {Entity|null}
     * @private
     */
    private _imageEntity;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtElementAdd;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtImageEntityElementAdd;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtImageEntityElementRemove;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtImageEntityElementColor;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtImageEntityElementOpacity;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtImageEntityElementSpriteAsset;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtImageEntityElementSpriteFrame;
    /**
     * @type {ButtonComponentData}
     * @ignore
     */
    get data(): ButtonComponentData;
    /**
     * Sets the button's active state. If set to false, the button will be visible but will not
     * respond to hover or touch interactions. Defaults to true.
     *
     * @type {boolean}
     */
    set active(arg: boolean);
    /**
     * Gets the button's active state.
     *
     * @type {boolean}
     */
    get active(): boolean;
    /**
     * Sets the entity to be used as the button background. The entity must have an
     * {@link ElementComponent} configured as an image element. Can also be set to the guid string
     * of such an entity.
     *
     * @type {Entity|string|null}
     */
    set imageEntity(arg: Entity | string | null);
    /**
     * Gets the entity to be used as the button background.
     *
     * @type {Entity|null}
     */
    get imageEntity(): Entity | null;
    /**
     * Sets the padding to be used in hit-test calculations. Can be used to expand the bounding box
     * so that the button is easier to tap. Defaults to `[0, 0, 0, 0]`.
     *
     * @type {Vec4}
     */
    set hitPadding(arg: Vec4);
    /**
     * Gets the padding to be used in hit-test calculations.
     *
     * @type {Vec4}
     */
    get hitPadding(): Vec4;
    /**
     * Sets the button transition mode. This controls how the button responds when the user hovers
     * over it/presses it. Can be:
     *
     * - {@link BUTTON_TRANSITION_MODE_TINT}
     * - {@link BUTTON_TRANSITION_MODE_SPRITE_CHANGE}
     *
     * Defaults to {@link BUTTON_TRANSITION_MODE_TINT}.
     *
     * @type {number}
     */
    set transitionMode(arg: number);
    /**
     * Gets the button transition mode.
     *
     * @type {number}
     */
    get transitionMode(): number;
    /**
     * Sets the tint color to be used on the button image when the user hovers over it. Defaults to
     * `[0.75, 0.75, 0.75]`.
     *
     * @type {Color}
     */
    set hoverTint(arg: Color);
    /**
     * Gets the tint color to be used on the button image when the user hovers over it.
     *
     * @type {Color}
     */
    get hoverTint(): Color;
    /**
     * Sets the tint color to be used on the button image when the user presses it. Defaults to
     * `[0.5, 0.5, 0.5]`.
     *
     * @type {Color}
     */
    set pressedTint(arg: Color);
    /**
     * Gets the tint color to be used on the button image when the user presses it.
     *
     * @type {Color}
     */
    get pressedTint(): Color;
    /**
     * Sets the tint color to be used on the button image when the button is not interactive.
     * Defaults to `[0.25, 0.25, 0.25]`.
     *
     * @type {Color}
     */
    set inactiveTint(arg: Color);
    /**
     * Gets the tint color to be used on the button image when the button is not interactive.
     *
     * @type {Color}
     */
    get inactiveTint(): Color;
    /**
     * Sets the duration to be used when fading between tints, in milliseconds. Defaults to 0.
     *
     * @type {number}
     */
    set fadeDuration(arg: number);
    /**
     * Gets the duration to be used when fading between tints, in milliseconds.
     *
     * @type {number}
     */
    get fadeDuration(): number;
    /**
     * Sets the sprite to be used as the button image when the user hovers over it.
     *
     * @type {Asset}
     */
    set hoverSpriteAsset(arg: Asset);
    /**
     * Gets the sprite to be used as the button image when the user hovers over it.
     *
     * @type {Asset}
     */
    get hoverSpriteAsset(): Asset;
    /**
     * Sets the frame to be used from the hover sprite.
     *
     * @type {number}
     */
    set hoverSpriteFrame(arg: number);
    /**
     * Gets the frame to be used from the hover sprite.
     *
     * @type {number}
     */
    get hoverSpriteFrame(): number;
    /**
     * Sets the sprite to be used as the button image when the user presses it.
     *
     * @type {Asset}
     */
    set pressedSpriteAsset(arg: Asset);
    /**
     * Gets the sprite to be used as the button image when the user presses it.
     *
     * @type {Asset}
     */
    get pressedSpriteAsset(): Asset;
    /**
     * Sets the frame to be used from the pressed sprite.
     *
     * @type {number}
     */
    set pressedSpriteFrame(arg: number);
    /**
     * Gets the frame to be used from the pressed sprite.
     *
     * @type {number}
     */
    get pressedSpriteFrame(): number;
    /**
     * Sets the sprite to be used as the button image when the button is not interactive.
     *
     * @type {Asset}
     */
    set inactiveSpriteAsset(arg: Asset);
    /**
     * Gets the sprite to be used as the button image when the button is not interactive.
     *
     * @type {Asset}
     */
    get inactiveSpriteAsset(): Asset;
    /**
     * Sets the frame to be used from the inactive sprite.
     *
     * @type {number}
     */
    set inactiveSpriteFrame(arg: number);
    /**
     * Gets the frame to be used from the inactive sprite.
     *
     * @type {number}
     */
    get inactiveSpriteFrame(): number;
    /** @ignore */
    _setValue(name: any, value: any): void;
    /** Attaches ('on') or detaches ('off') lifecycle event listeners. @private */
    _toggleLifecycleListeners(onOrOff: any, system: any): void;
    /** Change handler for the 'active' property. @private */
    _onSetActive(name: any, oldValue: any, newValue: any): void;
    /** Change handler for the 'transitionMode' property. @private */
    _onSetTransitionMode(name: any, oldValue: any, newValue: any): void;
    /** Change handler for transition-related properties. @private */
    _onSetTransitionValue(name: any, oldValue: any, newValue: any): void;
    /** Subscribes to events on the image entity. @private */
    _imageEntitySubscribe(): void;
    /** Unsubscribes from events on the image entity. @private */
    _imageEntityUnsubscribe(): void;
    /** Subscribes to events on the image entity's element component. @private */
    _imageEntityElementSubscribe(): void;
    /** Unsubscribes from events on the image entity's element component. @private */
    _imageEntityElementUnsubscribe(): void;
    /** @private */
    _onElementComponentRemove(): void;
    /** @private */
    _onElementComponentAdd(): void;
    /** @private */
    _onImageElementLose(): void;
    /** @private */
    _onImageElementGain(): void;
    /** Attaches ('on') or detaches ('off') input listeners on the hit element. @private */
    _toggleHitElementListeners(onOrOff: any): void;
    /** True while hit element listeners are attached. @private */
    _hasHitElementListeners: boolean;
    /** Stores the image element's default visual state so it can be restored later. @private */
    _storeDefaultVisualState(): void;
    /** @private */
    _storeDefaultColor(color: any): void;
    /** @private */
    _storeDefaultOpacity(opacity: any): void;
    /** @private */
    _storeDefaultSpriteAsset(spriteAsset: any): void;
    /** @private */
    _storeDefaultSpriteFrame(spriteFrame: any): void;
    /** @private */
    _onSetColor(color: any): void;
    /** @private */
    _onSetOpacity(opacity: any): void;
    /** @private */
    _onSetSpriteAsset(spriteAsset: any): void;
    /** @private */
    _onSetSpriteFrame(spriteFrame: any): void;
    /** @private */
    _onMouseEnter(event: any): void;
    /** @private */
    _onMouseLeave(event: any): void;
    /** @private */
    _onMouseDown(event: any): void;
    /** @private */
    _onMouseUp(event: any): void;
    /** @private */
    _onTouchStart(event: any): void;
    /** @private */
    _onTouchEnd(event: any): void;
    /** @private */
    _onTouchLeave(event: any): void;
    /** @private */
    _onTouchCancel(event: any): void;
    /** @private */
    _onSelectStart(event: any): void;
    /** @private */
    _onSelectEnd(event: any): void;
    /** @private */
    _onSelectEnter(event: any): void;
    /** @private */
    _onSelectLeave(event: any): void;
    /** @private */
    _onClick(event: any): void;
    /** Fires the named event only if the button is active. @private */
    _fireIfActive(name: any, event: any): void;
    /** Recomputes and applies the current visual state. @private */
    _updateVisualState(force: any): void;
    /** Forces the current visual state to be re-applied. @private */
    _forceReapplyVisualState(): void;
    /** Restores the default visual state for the given transition mode. @private */
    _resetToDefaultVisualState(transitionMode: any): void;
    /** Returns the name of the visual state the button should currently be in. @private */
    _determineVisualState(): string;
    /** Applies a sprite asset/frame to the image element. @private */
    _applySprite(spriteAsset: any, spriteFrame: any): void;
    /** Re-entrancy guard used while applying a sprite. @private */
    _isApplyingSprite: boolean;
    /** @private */
    _applyTint(tintColor: any): void;
    /** @private */
    _applyTintImmediately(tintColor: any): void;
    /** Re-entrancy guard used while applying a tint. @private */
    _isApplyingTint: boolean;
    /** @private */
    _applyTintWithTween(tintColor: any): void;
    /** State of the in-flight tint tween, if any. @private */
    _tweenInfo: {
        startTime: any;
        from: Color;
        to: any;
        lerpColor: Color;
    };
    /** Advances the in-flight tint tween. @private */
    _updateTintTween(): void;
    /** Cancels any in-flight tint tween. @private */
    _cancelTween(): void;
    /** @ignore */
    onUpdate(): void;
    /** @ignore */
    onRemove(): void;
    /**
     * Remaps entity reference properties after an entity hierarchy is duplicated.
     *
     * @param {ButtonComponent} oldButton - The button component being duplicated.
     * @param {object} duplicatedIdsMap - Map of original ids to their duplicated counterparts.
     * @ignore
     */
    resolveDuplicatedEntityReferenceProperties(oldButton: any, duplicatedIdsMap: any): void;
}

/**
 * @import { Asset } from '../../../framework/asset/asset.js'
 * @import { Model } from '../../../scene/model.js'
 */
/**
 * Data storage for {@link CollisionComponent} properties.
 *
 * @ignore
 */
declare class CollisionComponentData {
    /** Whether the component is enabled. */
    enabled: boolean;
    /** Type of the collision volume ('box', 'sphere', 'capsule', 'cylinder', 'cone', 'compound' or 'mesh'). */
    type: string;
    /** Half-extents of the box-shaped volume in the x, y and z axes. */
    halfExtents: Vec3;
    /** Positional offset of the shape from the entity, in local space. */
    linearOffset: Vec3;
    /** Rotational offset of the shape from the entity, in local space. */
    angularOffset: Quat;
    /** Radius of the sphere, capsule, cylinder or cone-shaped volumes. */
    radius: number;
    /** Local axis the capsule/cylinder/cone length is aligned with (0 = X, 1 = Y, 2 = Z). */
    axis: number;
    /** Total height of the capsule, cylinder or cone-shaped volume from tip to tip. */
    height: number;
    /** Whether a mesh collision volume is treated as a convex hull. */
    convexHull: boolean;
    /** @type {Asset | number} */
    asset: Asset | number;
    /** @type {Asset | number} */
    renderAsset: Asset | number;
    /** Whether duplicate vertices are checked when creating collision meshes. */
    checkVertexDuplicates: boolean;
    /** Physics (Ammo) shape backing the volume. */
    shape: any;
    /** @type {Model | null} */
    model: Model | null;
    /** Render resource used by the mesh collision volume. */
    render: any;
    /** Whether the component has been initialized. */
    initialized: boolean;
}

/**
 * Manages creation of {@link CollisionComponent}s.
 *
 * @category Physics
 */
declare class CollisionComponentSystem extends ComponentSystem {
    /** The string id of this component system ('collision'). */
    id: string;
    /** The component class managed by this system. */
    ComponentType: typeof CollisionComponent;
    /** The data class used to store component properties. */
    DataType: typeof CollisionComponentData;
    /** Property schema used to initialize component data. */
    schema: string[];
    /** Per-collision-type implementation objects (see _createImplementation). @private */
    implementations: {};
    /** Cache of generated triangle-mesh collision shapes. @private */
    _triMeshCache: {};
    /** Copies the given data onto a newly created component. @ignore */
    initializeComponentData(component: any, _data: any, properties: any): void;
    /** Creates the implementation object for the given collision type. @private */
    _createImplementation(type: any): any;
    /** Returns the implementation object for the entity's collision type. @private */
    _getImplementation(entity: any): any;
    /** @ignore */
    cloneComponent(entity: any, clone: any): any;
    /** @ignore */
    onBeforeRemove(entity: any, component: any): void;
    /** @ignore */
    onRemove(entity: any, data: any): void;
    /** Updates the transform of a child shape within a compound parent. @ignore */
    updateCompoundChildTransform(entity: any, forceUpdate: any): void;
    /** @private */
    _removeCompoundChild(collision: any, shape: any): void;
    /** Called when a collision entity's transform changes. @ignore */
    onTransformChanged(component: any, position: any, rotation: any, scale: any): void;
    /** Switches a component between collision volume types. @ignore */
    changeType(component: any, previousType: any, newType: any): void;
    /** Rebuilds the physical shapes for the component. @ignore */
    recreatePhysicalShapes(component: any): void;
    /** @private */
    _calculateNodeRelativeTransform(node: any, relative: any): void;
    /** @private */
    _getNodeScaling(node: any): any;
    /** @private */
    _getNodeTransform(node: any, relative: any): any;
}

/**
 * The CollisionComponent enables an {@link Entity} to act as a collision volume. Use it on its own
 * to define a trigger volume. Or use it in conjunction with a {@link RigidBodyComponent} to make a
 * collision volume that can be simulated using the physics engine.
 *
 * When an entity is configured as a trigger volume, if an entity with a dynamic or kinematic body
 * enters or leaves that trigger volume, both entities will receive trigger events.
 *
 * You should never need to use the CollisionComponent constructor directly. To add a
 * CollisionComponent to an {@link Entity}, use {@link Entity#addComponent}:
 *
 * ```javascript
 * const entity = new pc.Entity();
 * entity.addComponent('collision'); // This defaults to 1x1x1 box-shaped trigger volume
 * ```
 *
 * To create a 0.5 radius dynamic rigid body sphere:
 *
 * ```javascript
 * const entity = new pc.Entity();
 * entity.addComponent('collision', {
 *     type: 'sphere'
 * });
 * entity.addComponent('rigidbody', {
 *     type: 'dynamic'
 * });
 * ```
 *
 * Once the CollisionComponent is added to the entity, you can access it via the
 * {@link Entity#collision} property:
 *
 * ```javascript
 * entity.collision.type = 'cylinder'; // Set the collision volume to a cylinder
 *
 * console.log(entity.collision.type); // Get the collision volume type and print it
 * ```
 *
 * Relevant Engine API examples:
 *
 * - [Compound Collision](https://playcanvas.github.io/#/physics/compound-collision)
 * - [Falling Shapes](https://playcanvas.github.io/#/physics/falling-shapes)
 * - [Offset Collision](https://playcanvas.github.io/#/physics/offset-collision)
 *
 * @hideconstructor
 * @category Physics
 */
declare class CollisionComponent extends Component {
    /**
     * Fired when a contact occurs between two rigid bodies. The handler is passed a
     * {@link ContactResult} object which contains details of the contact between the two rigid
     * bodies.
     *
     * @event
     * @example
     * entity.collision.on('contact', (result) => {
     *    console.log(`Contact between ${entity.name} and ${result.other.name}`);
     * });
     */
    static EVENT_CONTACT: string;
    /**
     * Fired when two rigid bodies start touching. The handler is passed the {@link ContactResult}
     * object which contains details of the contact between the two rigid bodies.
     *
     * @event
     * @example
     * entity.collision.on('collisionstart', (result) => {
     *    console.log(`${entity.name} started touching ${result.other.name}`);
     * });
     */
    static EVENT_COLLISIONSTART: string;
    /**
     * Fired when two rigid bodies stop touching. The handler is passed an {@link Entity} that
     * represents the other rigid body involved in the collision.
     *
     * @event
     * @example
     * entity.collision.on('collisionend', (other) => {
     *     console.log(`${entity.name} stopped touching ${other.name}`);
     * });
     */
    static EVENT_COLLISIONEND: string;
    /**
     * Fired when a rigid body enters a trigger volume. The handler is passed an {@link Entity}
     * representing the rigid body that entered this collision volume.
     *
     * @event
     * @example
     * entity.collision.on('triggerenter', (other) => {
     *     console.log(`${other.name} entered trigger volume ${entity.name}`);
     * });
     */
    static EVENT_TRIGGERENTER: string;
    /**
     * Fired when a rigid body exits a trigger volume. The handler is passed an {@link Entity}
     * representing the rigid body that exited this collision volume.
     *
     * @event
     * @example
     * entity.collision.on('triggerleave', (other) => {
     *     console.log(`${other.name} exited trigger volume ${entity.name}`);
     * });
     */
    static EVENT_TRIGGERLEAVE: string;
    /**
     * Create a new CollisionComponent.
     *
     * @param {CollisionComponentSystem} system - The ComponentSystem that created this Component.
     * @param {Entity} entity - The Entity that this Component is attached to.
     */
    constructor(system: CollisionComponentSystem, entity: Entity);
    /** @private */
    private _compoundParent;
    /** @private */
    private _hasOffset;
    /**
     * @type {CollisionComponentData}
     * @ignore
     */
    get data(): CollisionComponentData;
    /**
     * Sets the type of the collision volume. Can be:
     *
     * - "box": A box-shaped collision volume.
     * - "capsule": A capsule-shaped collision volume.
     * - "compound": A compound shape. Any descendant entities with a collision component of type
     * box, capsule, cone, cylinder or sphere will be combined into a single, rigid shape.
     * - "cone": A cone-shaped collision volume.
     * - "cylinder": A cylinder-shaped collision volume.
     * - "mesh": A collision volume that uses a model asset as its shape.
     * - "sphere": A sphere-shaped collision volume.
     *
     * Defaults to "box".
     *
     * @type {string}
     */
    set type(arg: string);
    /**
     * Gets the type of the collision volume.
     *
     * @type {string}
     */
    get type(): string;
    /**
     * Sets the half-extents of the box-shaped collision volume in the x, y and z axes. Defaults to
     * `[0.5, 0.5, 0.5]`.
     *
     * @type {Vec3}
     */
    set halfExtents(arg: Vec3);
    /**
     * Gets the half-extents of the box-shaped collision volume in the x, y and z axes.
     *
     * @type {Vec3}
     */
    get halfExtents(): Vec3;
    /**
     * Sets the positional offset of the collision shape from the Entity position along the local
     * axes. Defaults to `[0, 0, 0]`.
     *
     * @type {Vec3}
     */
    set linearOffset(arg: Vec3);
    /**
     * Gets the positional offset of the collision shape from the Entity position along the local
     * axes.
     *
     * @type {Vec3}
     */
    get linearOffset(): Vec3;
    /**
     * Sets the rotational offset of the collision shape from the Entity rotation in local space.
     * Defaults to identity.
     *
     * @type {Quat}
     */
    set angularOffset(arg: Quat);
    /**
     * Gets the rotational offset of the collision shape from the Entity rotation in local space.
     *
     * @type {Quat}
     */
    get angularOffset(): Quat;
    /**
     * Sets the radius of the sphere, capsule, cylinder or cone-shaped collision volumes.
     * Defaults to 0.5.
     *
     * @type {number}
     */
    set radius(arg: number);
    /**
     * Gets the radius of the sphere, capsule, cylinder or cone-shaped collision volumes.
     *
     * @type {number}
     */
    get radius(): number;
    /**
     * Sets the local space axis with which the capsule, cylinder or cone-shaped collision volume's
     * length is aligned. 0 for X, 1 for Y and 2 for Z. Defaults to 1 (Y-axis).
     *
     * @type {number}
     */
    set axis(arg: number);
    /**
     * Gets the local space axis with which the capsule, cylinder or cone-shaped collision volume's
     * length is aligned.
     *
     * @type {number}
     */
    get axis(): number;
    /**
     * Sets the total height of the capsule, cylinder or cone-shaped collision volume from tip to
     * tip. Defaults to 2.
     *
     * @type {number}
     */
    set height(arg: number);
    /**
     * Gets the total height of the capsule, cylinder or cone-shaped collision volume from tip to
     * tip.
     *
     * @type {number}
     */
    get height(): number;
    /**
     * Sets the asset or asset id for the model of the mesh collision volume. Defaults to null.
     *
     * @type {Asset|number|null}
     */
    set asset(arg: Asset | number | null);
    /**
     * Gets the asset or asset id for the model of the mesh collision volume.
     *
     * @type {Asset|number|null}
     */
    get asset(): Asset | number | null;
    /**
     * Sets the render asset or asset id of the mesh collision volume. Defaults to null.
     * If not set then the asset property will be checked instead.
     *
     * @type {Asset|number|null}
     */
    set renderAsset(arg: Asset | number | null);
    /**
     * Gets the render asset or asset id of the mesh collision volume.
     *
     * @type {Asset|number|null}
     */
    get renderAsset(): Asset | number | null;
    /**
     * Sets whether the collision mesh should be treated as a convex hull. When false, the mesh can
     * only be used with a static body. When true, the mesh can be used with a static, dynamic or
     * kinematic body. Defaults to `false`.
     *
     * @type {boolean}
     */
    set convexHull(arg: boolean);
    /**
     * Gets whether the collision mesh should be treated as a convex hull.
     *
     * @type {boolean}
     */
    get convexHull(): boolean;
    /** Sets the physics (Ammo) shape backing the volume. @ignore */
    set shape(arg: any);
    /** Gets the physics (Ammo) shape backing the volume. @ignore */
    get shape(): any;
    /**
     * Sets the model that is added to the scene graph for the mesh collision volume.
     *
     * @type {Model | null}
     */
    set model(arg: Model | null);
    /**
     * Gets the model that is added to the scene graph for the mesh collision volume.
     *
     * @type {Model | null}
     */
    get model(): Model | null;
    /** @ignore */
    set render(arg: any);
    /** @ignore */
    get render(): any;
    /**
     * Sets whether checking for duplicate vertices should be enabled when creating collision meshes.
     *
     * @type {boolean}
     */
    set checkVertexDuplicates(arg: boolean);
    /**
     * Gets whether checking for duplicate vertices should be enabled when creating collision meshes.
     *
     * @type {boolean}
     */
    get checkVertexDuplicates(): boolean;
    /** @ignore */
    _setValue(name: any, value: any): void;
    /**
     * @param {string} name - Property name.
     * @param {*} oldValue - Previous value of the property.
     * @param {*} newValue - New value of the property.
     * @private
     */
    private onSetType;
    /**
     * @param {string} name - Property name.
     * @param {*} oldValue - Previous value of the property.
     * @param {*} newValue - New value of the property.
     * @private
     */
    private onSetHalfExtents;
    /**
     * @param {string} name - Property name.
     * @param {*} oldValue - Previous value of the property.
     * @param {*} newValue - New value of the property.
     * @private
     */
    private onSetOffset;
    /**
     * @param {string} name - Property name.
     * @param {*} oldValue - Previous value of the property.
     * @param {*} newValue - New value of the property.
     * @private
     */
    private onSetRadius;
    /**
     * @param {string} name - Property name.
     * @param {*} oldValue - Previous value of the property.
     * @param {*} newValue - New value of the property.
     * @private
     */
    private onSetHeight;
    /**
     * @param {string} name - Property name.
     * @param {*} oldValue - Previous value of the property.
     * @param {*} newValue - New value of the property.
     * @private
     */
    private onSetAxis;
    /**
     * @param {string} name - Property name.
     * @param {*} oldValue - Previous value of the property.
     * @param {*} newValue - New value of the property.
     * @private
     */
    private onSetAsset;
    /**
     * @param {string} name - Property name.
     * @param {*} oldValue - Previous value of the property.
     * @param {*} newValue - New value of the property.
     * @private
     */
    private onSetRenderAsset;
    /**
     * @param {string} name - Property name.
     * @param {*} oldValue - Previous value of the property.
     * @param {*} newValue - New value of the property.
     * @private
     */
    private onSetModel;
    /**
     * @param {string} name - Property name.
     * @param {*} oldValue - Previous value of the property.
     * @param {*} newValue - New value of the property.
     * @private
     */
    private onSetRender;
    /**
     * @param {Asset} asset - Asset that was removed.
     * @private
     */
    private onAssetRemoved;
    /**
     * @param {Asset} asset - Asset that was removed.
     * @private
     */
    private onRenderAssetRemoved;
    /**
     * @param {*} shape - Ammo shape.
     * @returns {number|null} The shape's index in the child array of the compound shape.
     * @private
     */
    private getCompoundChildShapeIndex;
    /**
     * @param {GraphNode} parent - The parent node.
     * @private
     */
    private _onInsert;
    /** @private */
    private _updateCompound;
    /**
     * Returns the world position for the collision shape, taking into account any offsets.
     *
     * @returns {Vec3} The world position for the collision shape.
     */
    getShapePosition(): Vec3;
    /**
     * Returns the world rotation for the collision shape, taking into account any offsets.
     *
     * @returns {Quat} The world rotation for the collision.
     */
    getShapeRotation(): Quat;
    /** @private */
    private onBeforeRemove;
}

/**
 * @import { Texture } from '../platform/graphics/texture.js'
 * @import { Vec2 } from '../core/math/vec2.js'
 * @import { Vec4 } from '../core/math/vec4.js'
 */
/**
 * A TextureAtlas contains a number of frames from a texture. Each frame defines a region in a
 * texture. The TextureAtlas is referenced by {@link Sprite}s.
 *
 * @category Graphics
 */
declare class TextureAtlas extends EventHandler {
    /**
     * The texture the frames are defined on.
     *
     * @type {Texture}
     * @private
     */
    private _texture;
    /**
     * Map of frame key to frame data (see {@link TextureAtlas#setFrame}).
     *
     * @type {object}
     * @private
     */
    private _frames;
    /**
     * Sets the texture used by the atlas.
     *
     * @type {Texture}
     */
    set texture(value: Texture);
    /**
     * Gets the texture used by the atlas.
     *
     * @type {Texture}
     */
    get texture(): Texture;
    /**
     * Sets the frames which define portions of the texture atlas.
     *
     * @type {object}
     */
    set frames(value: object);
    /**
     * Gets the frames which define portions of the texture atlas.
     *
     * @type {object}
     */
    get frames(): object;
    /**
     * Set a new frame in the texture atlas.
     *
     * @param {string} key - The key of the frame.
     * @param {object} data - The properties of the frame.
     * @param {Vec4} data.rect - The u, v, width, height properties of the frame in pixels.
     * @param {Vec2} data.pivot - The pivot of the frame - values are between 0-1.
     * @param {Vec4} data.border - The border of the frame for 9-slicing. Values are ordered as
     * follows: left, bottom, right, top border in pixels.
     * @example
     * atlas.setFrame('1', {
     *     rect: new pc.Vec4(0, 0, 128, 128),
     *     pivot: new pc.Vec2(0.5, 0.5),
     *     border: new pc.Vec4(5, 5, 5, 5)
     * });
     */
    setFrame(key: string, data: {
        rect: Vec4;
        pivot: Vec2;
        border: Vec4;
    }): void;
    /**
     * Removes a frame from the texture atlas.
     *
     * @param {string} key - The key of the frame.
     * @example
     * atlas.removeFrame('1');
     */
    removeFrame(key: string): void;
    /**
     * Free up the underlying texture owned by the atlas.
     */
    destroy(): void;
}

/**
 * A Sprite contains references to one or more frames of a {@link TextureAtlas}. It can be used by
 * the {@link SpriteComponent} or the {@link ElementComponent} to render a single frame or a sprite
 * animation.
 *
 * @category Graphics
 */
declare class Sprite extends EventHandler {
    /**
     * Create a new Sprite instance.
     *
     * @param {GraphicsDevice} device - The graphics device of the application.
     * @param {object} [options] - Options for creating the Sprite.
     * @param {number} [options.pixelsPerUnit] - The number of pixels that map to one PlayCanvas
     * unit. Defaults to 1.
     * @param {number} [options.renderMode] - The rendering mode of the sprite. Can be:
     *
     * - {@link SPRITE_RENDERMODE_SIMPLE}
     * - {@link SPRITE_RENDERMODE_SLICED}
     * - {@link SPRITE_RENDERMODE_TILED}
     *
     * Defaults to {@link SPRITE_RENDERMODE_SIMPLE}.
     * @param {TextureAtlas} [options.atlas] - The texture atlas. Defaults to null.
     * @param {string[]} [options.frameKeys] - The keys of the frames in the sprite atlas that this
     * sprite is using. Defaults to null.
     */
    constructor(device: GraphicsDevice, options?: {
        pixelsPerUnit?: number;
        renderMode?: number;
        atlas?: TextureAtlas;
        frameKeys?: string[];
    });
    /** The graphics device the sprite was created with. @ignore */
    _device: GraphicsDevice;
    /** Backing value for {@link Sprite#pixelsPerUnit}. @ignore */
    _pixelsPerUnit: number;
    /** Backing value for {@link Sprite#renderMode}. @ignore */
    _renderMode: number;
    /** Backing value for {@link Sprite#atlas}. @ignore */
    _atlas: TextureAtlas;
    /** Backing value for {@link Sprite#frameKeys}. @ignore */
    _frameKeys: string[];
    /** Cached meshes, one per frame (see {@link Sprite#meshes}). @ignore */
    _meshes: any[];
    /** Presumably true while a startUpdate()/endUpdate() batch is active — confirm in implementation. @ignore */
    _updatingProperties: boolean;
    /** Presumably marks the cached meshes as stale — confirm in implementation. @ignore */
    _meshesDirty: boolean;
    /**
     * Sets the keys of the frames in the sprite atlas that this sprite is using.
     *
     * @type {string[]}
     */
    set frameKeys(value: string[]);
    /**
     * Gets the keys of the frames in the sprite atlas that this sprite is using.
     *
     * @type {string[]}
     */
    get frameKeys(): string[];
    /**
     * Sets the texture atlas.
     *
     * @type {TextureAtlas}
     */
    set atlas(value: TextureAtlas);
    /**
     * Gets the texture atlas.
     *
     * @type {TextureAtlas}
     */
    get atlas(): TextureAtlas;
    /**
     * Sets the number of pixels that map to one PlayCanvas unit.
     *
     * @type {number}
     */
    set pixelsPerUnit(value: number);
    /**
     * Gets the number of pixels that map to one PlayCanvas unit.
     *
     * @type {number}
     */
    get pixelsPerUnit(): number;
    /**
     * Sets the rendering mode of the sprite. Can be:
     *
     * - {@link SPRITE_RENDERMODE_SIMPLE}
     * - {@link SPRITE_RENDERMODE_SLICED}
     * - {@link SPRITE_RENDERMODE_TILED}
     *
     * @type {number}
     */
    set renderMode(value: number);
    /**
     * Gets the rendering mode of the sprite. Can be:
     *
     * - {@link SPRITE_RENDERMODE_SIMPLE}
     * - {@link SPRITE_RENDERMODE_SLICED}
     * - {@link SPRITE_RENDERMODE_TILED}
     *
     * @type {number}
     */
    get renderMode(): number;
    /**
     * An array that contains a mesh for each frame.
     *
     * @type {Mesh[]}
     */
    get meshes(): Mesh[];
    /** Rebuilds the cached per-frame meshes. @ignore */
    _createMeshes(): void;
    /** Creates the mesh for a single frame — presumably used for simple render mode; confirm in implementation. @ignore */
    _createSimpleMesh(frame: any): Mesh;
    /** Creates the shared 9-slice mesh — presumably used for sliced/tiled render modes; confirm in implementation. @ignore */
    _create9SliceMesh(): Mesh;
    /** Handler for the atlas frames being replaced. @ignore */
    _onSetFrames(frames: any): void;
    /** Handler for a single atlas frame changing. @ignore */
    _onFrameChanged(frameKey: any, frame: any): void;
    /** Handler for a single atlas frame being removed. @ignore */
    _onFrameRemoved(frameKey: any): void;
    /** Begins a batched property update (paired with endUpdate). @ignore */
    startUpdate(): void;
    /** Ends a batched property update started by startUpdate. @ignore */
    endUpdate(): void;
    /**
     * Free up the meshes created by the sprite.
     */
    destroy(): void;
}

/**
 * Internal renderer state for the image variant of an element. Owns the texture, sprite and
 * material assets plus the mesh/renderable used to draw the image. The `element` constructor
 * argument is presumably the owning ElementComponent — verify against callers.
 *
 * @ignore
 */
declare class ImageElement {
    constructor(element: any);
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtSetMeshes;
    _element: any;
    _entity: any;
    _system: any;
    // Asset references (numeric values are presumably asset ids — confirm against Asset registry usage).
    /** @type {number} */
    _textureAsset: number;
    /** @type {Texture} */
    _texture: Texture;
    /** @type {number} */
    _materialAsset: number;
    /** @type {Material} */
    _material: Material;
    /** @type {number} */
    _spriteAsset: number;
    /** @type {Sprite} */
    _sprite: Sprite;
    _spriteFrame: number;
    /** @type {number} */
    _pixelsPerUnit: number;
    _targetAspectRatio: number;
    _rect: Vec4;
    _mask: boolean;
    _maskRef: number;
    // Uniform scratch buffers paired with their vector/color counterparts above them.
    _outerScale: Vec2;
    _outerScaleUniform: Float32Array<ArrayBuffer>;
    _innerOffset: Vec4;
    _innerOffsetUniform: Float32Array<ArrayBuffer>;
    _atlasRect: Vec4;
    _atlasRectUniform: Float32Array<ArrayBuffer>;
    _defaultMesh: Mesh;
    _renderable: ImageRenderable;
    _color: Color;
    _colorUniform: Float32Array<ArrayBuffer>;
    _updateAabbFunc: any;
    destroy(): void;
    set textureAsset(value: number);
    get textureAsset(): number;
    set spriteAsset(value: number);
    get spriteAsset(): number;
    set materialAsset(value: number);
    get materialAsset(): number;
    // Layout / screen event handlers.
    _onResolutionChange(res: any): void;
    _onParentResizeOrPivotChange(): void;
    _onScreenSpaceChange(value: any): void;
    _onScreenChange(screen: any, previous: any): void;
    _onDrawOrderChange(order: any): void;
    _hasUserMaterial(): boolean;
    _use9Slicing(): boolean;
    _updateMaterial(screenSpace: any): void;
    _createMesh(): Mesh;
    _updateMesh(mesh: any): void;
    _meshDirty: boolean;
    _updateSprite(): void;
    set mesh(value: any);
    get mesh(): any;
    refreshMesh(): void;
    _updateAabb(aabb: any): any;
    _toggleMask(): void;
    // Material asset lifecycle handlers.
    _onMaterialLoad(asset: any): void;
    set material(value: Material);
    get material(): Material;
    _onMaterialAdded(asset: any): void;
    _bindMaterialAsset(asset: any): void;
    _unbindMaterialAsset(asset: any): void;
    _onMaterialChange(): void;
    _onMaterialRemove(): void;
    // Texture asset lifecycle handlers.
    _onTextureAdded(asset: any): void;
    _bindTextureAsset(asset: any): void;
    _unbindTextureAsset(asset: any): void;
    _onTextureLoad(asset: any): void;
    set texture(value: Texture);
    get texture(): Texture;
    _onTextureChange(asset: any): void;
    _onTextureRemove(asset: any): void;
    // Sprite asset / sprite instance lifecycle handlers.
    _onSpriteAssetAdded(asset: any): void;
    _bindSpriteAsset(asset: any): void;
    _unbindSpriteAsset(asset: any): void;
    _onSpriteAssetLoad(asset: any): void;
    set sprite(value: Sprite);
    get sprite(): Sprite;
    _onSpriteAssetChange(asset: any): void;
    _onSpriteAssetRemove(asset: any): void;
    _bindSprite(sprite: any): void;
    _unbindSprite(sprite: any): void;
    _onSpriteMeshesChange(): void;
    _onSpritePpuChange(): void;
    _onAtlasTextureChange(): void;
    _onTextureAtlasLoad(atlasAsset: any): void;
    onEnable(): void;
    onDisable(): void;
    _setStencil(stencilParams: any): void;
    _updateRenderableEmissive(): void;
    set color(value: Color);
    get color(): Color;
    set opacity(value: number);
    get opacity(): number;
    set rect(value: Vec4);
    get rect(): Vec4;
    _removeMaterialAssetEvents(): void;
    set spriteFrame(value: number);
    get spriteFrame(): number;
    set mask(value: boolean);
    get mask(): boolean;
    set pixelsPerUnit(value: number);
    get pixelsPerUnit(): number;
    /**
     * @type {BoundingBox | null}
     */
    get aabb(): BoundingBox | null;
}

/**
 * Render helper owned by an {@link ImageElement} (see ImageElement#_renderable). Wraps the
 * model, graph node and mesh instance used to draw the image, and forwards mesh, material,
 * mask, draw-order, layer and AABB state to them.
 *
 * @ignore
 */
declare class ImageRenderable {
    constructor(entity: any, mesh: any, material: any);
    _entity: any;
    _element: any;
    model: Model;
    node: GraphNode;
    mesh: any;
    meshInstance: MeshInstance;
    _meshDirty: boolean;
    /** Extra mesh instance used when un-masking — presumably; confirm in implementation. */
    unmaskMeshInstance: MeshInstance;
    destroy(): void;
    setMesh(mesh: any): void;
    setMask(mask: any): void;
    setMaterial(material: any): void;
    setParameter(name: any, value: any): void;
    deleteParameter(name: any): void;
    setUnmaskDrawOrder(): void;
    setDrawOrder(drawOrder: any): void;
    setCull(cull: any): void;
    setScreenSpace(screenSpace: any): void;
    setLayer(layer: any): void;
    forceUpdateAabb(mask: any): void;
    setAabbFunc(fn: any): void;
}

/**
 * Tracks a default asset together with a locale-specific replacement for it, switching
 * between them as the application locale changes (see _onSetLocale). Presumably fires
 * change events via EventHandler — verify against callers.
 *
 * @ignore
 */
declare class LocalizedAsset extends EventHandler {
    constructor(app: any);
    _app: any;
    _autoLoad: boolean;
    _disableLocalization: boolean;
    /** @type {number} */
    _defaultAsset: number;
    /** @type {number} */
    _localizedAsset: number;
    /**
     * @param {Asset | number} value - The asset or id.
     */
    set defaultAsset(value: Asset | number);
    get defaultAsset(): Asset | number;
    /**
     * @param {Asset | number} value - The asset or id.
     */
    set localizedAsset(value: Asset | number);
    get localizedAsset(): Asset | number;
    /** Whether to load the tracked asset automatically — presumably; confirm in implementation. */
    set autoLoad(value: boolean);
    get autoLoad(): boolean;
    /** When true, localization is bypassed and the default asset is used — presumably; confirm in implementation. */
    set disableLocalization(value: boolean);
    get disableLocalization(): boolean;
    _bindDefaultAsset(): void;
    _unbindDefaultAsset(): void;
    _onDefaultAssetAdd(asset: any): void;
    _onDefaultAssetRemove(asset: any): void;
    _bindLocalizedAsset(): void;
    _unbindLocalizedAsset(): void;
    _onLocalizedAssetAdd(asset: any): void;
    _onLocalizedAssetLoad(asset: any): void;
    _onLocalizedAssetChange(asset: any, name: any, newValue: any, oldValue: any): void;
    _onLocalizedAssetRemove(asset: any): void;
    _onLocaleAdd(locale: any, assetId: any): void;
    _onLocaleRemove(locale: any, assetId: any): void;
    _onSetLocale(locale: any): void;
    destroy(): void;
}

/**
 * @import { Texture } from '../../platform/graphics/texture.js'
 */
/**
 * Represents the resource of a font asset.
 *
 * @category User Interface
 */
declare class Font {
    /**
     * Create a new Font instance.
     *
     * @param {Texture[]} textures - The font textures.
     * @param {object} data - The font data.
     */
    constructor(textures: Texture[], data: object);
    /** The font type, taken from the supplied font data — exact values not visible here. */
    type: any;
    /** The em size from the font data — presumably in pixels; confirm in implementation. */
    em: number;
    /**
     * The font textures.
     *
     * @type {Texture[]}
     */
    textures: Texture[];
    /**
     * The font intensity.
     *
     * @type {number}
     */
    intensity: number;
    /** Backing storage for {@link Font#data}. @ignore */
    _data: any;
    /** Sets the raw font data. */
    set data(value: any);
    /** Gets the raw font data. */
    get data(): any;
}

/**
 * Represents the resource of a canvas font asset.
 *
 * @ignore
 */
declare class CanvasFont extends EventHandler {
    /**
     * Create a new CanvasFont instance.
     *
     * @param {AppBase} app - The application.
     * @param {object} options - The font options.
     * @param {string} [options.fontName] - The name of the font. CSS font names are supported.
     * Defaults to 'Arial'.
     * @param {string} [options.fontWeight] - The weight of the font, e.g. 'normal', 'bold'.
     * Defaults to 'normal'.
     * @param {number} [options.fontSize] - The font size in pixels. Defaults to 32.
     * @param {Color} [options.color] - The font color. Defaults to white.
     * @param {number} [options.width] - The width of each texture atlas. Defaults to 512.
     * @param {number} [options.height] - The height of each texture atlas. Defaults to 512.
     * @param {number} [options.padding] - Amount of glyph padding in pixels that is added to each
     * glyph in the atlas. Defaults to 0.
     */
    constructor(app: AppBase, options?: {
        fontName?: string;
        fontWeight?: string;
        fontSize?: number;
        color?: Color;
        width?: number;
        height?: number;
        padding?: number;
    });
    type: string;
    app: AppBase;
    intensity: number;
    fontWeight: string;
    fontSize: number;
    glyphSize: number;
    fontName: string;
    color: Color;
    padding: number;
    width: number;
    height: number;
    /** The texture atlases generated so far. */
    atlases: any[];
    /** The characters rendered into the atlases so far. */
    chars: string;
    /** The generated font data (see _createJson). */
    data: {};
    /**
     * Render the necessary textures for all characters in a string to be used for the canvas font.
     *
     * @param {string} text - The list of characters to render into the texture atlas.
     */
    createTextures(text: string): void;
    /**
     * Update the list of characters to include in the atlas to include those provided and
     * re-render the texture atlas to include all the characters that have been supplied so far.
     *
     * @param {string} text - The list of characters to add to the texture atlas.
     */
    updateTextures(text: string): void;
    /**
     * Destroys the font. This also destroys the textures owned by the font.
     */
    destroy(): void;
    /**
     * @param {Color} color - The color to convert.
     * @param {boolean} alpha - Whether to include the alpha channel.
     * @returns {string} The hex string for the color.
     * @private
     */
    private _colorToRgbString;
    /**
     * @param {CanvasRenderingContext2D} context - The canvas 2D context.
     * @param {string} char - The character to render.
     * @param {number} x - The x position to render the character at.
     * @param {number} y - The y position to render the character at.
     * @param {string} color - The color to render the character in.
     * @ignore
     */
    renderCharacter(context: CanvasRenderingContext2D, char: string, x: number, y: number, color: string): void;
    /**
     * Return the atlas at the specified index.
     *
     * @param {number} index - The atlas index.
     * @private
     */
    private _getAtlas;
    /**
     * Renders an array of characters into one or more textures atlases.
     *
     * @param {string[]} charsArray - The list of characters to render.
     * @private
     */
    private _renderAtlas;
    /**
     * @param {string[]} chars - A list of characters.
     * @param {string} fontName - The font name.
     * @param {number} width - The width of the texture atlas.
     * @param {number} height - The height of the texture atlas.
     * @returns {object} The font JSON object.
     * @private
     */
    private _createJson;
    /**
     * @param {object} json - Font data.
     * @param {string} char - The character to add.
     * @param {number} charCode - The code point number of the character to add.
     * @param {number} x - The x position of the character.
     * @param {number} y - The y position of the character.
     * @param {number} w - The width of the character.
     * @param {number} h - The height of the character.
     * @param {number} xoffset - The x offset of the character.
     * @param {number} yoffset - The y offset of the character.
     * @param {number} xadvance - The x advance of the character.
     * @param {number} mapNum - The map number of the character.
     * @param {number} mapW - The width of the map.
     * @param {number} mapH - The height of the map.
     * @private
     */
    private _addChar;
    /**
     * Take a unicode string and produce the set of characters used to create that string.
     * e.g. "abcabcabc" -> ['a', 'b', 'c']
     *
     * @param {string} text - The unicode string to process.
     * @returns {string[]} The set of characters used to create the string.
     * @private
     */
    private _normalizeCharsSet;
    /**
     * Calculate some metrics that aren't available via the browser API, notably character height
     * and descent size.
     *
     * @param {string} text - The text to measure.
     * @returns {{ascent: number, descent: number, height: number}} The metrics of the text.
     * @private
     */
    private _getTextMetrics;
    /** The textures of the generated atlases — presumably mirrors `atlases`; confirm in implementation. */
    get textures(): any[];
}

/**
 * Internal text renderer for an element: manages the font, generated meshes, layout
 * (wrapping, alignment, auto-fit), markup and styling (color, outline, shadow) used to draw
 * the text. The `element` constructor argument is presumably the owning ElementComponent —
 * verify against callers.
 *
 * @ignore
 */
declare class TextElement {
    constructor(element: any);
    _element: any;
    _system: any;
    _entity: any;
    // Text content and per-symbol styling state (see the `symbols`/`symbolColors` getters).
    _text: string;
    _symbols: any[];
    _colorPalette: any[];
    _outlinePalette: any[];
    _shadowPalette: any[];
    _symbolColors: any[];
    _symbolOutlineParams: any[];
    _symbolShadowParams: any[];
    /** @type {string} */
    _i18nKey: string;
    _fontAsset: LocalizedAsset;
    /** @type {Font | CanvasFont} */
    _font: Font | CanvasFont;
    _color: Color;
    _colorUniform: Float32Array<ArrayBuffer>;
    // Font sizing and layout state.
    _spacing: number;
    _fontSize: number;
    _fontMinY: number;
    _fontMaxY: number;
    _originalFontSize: number;
    _maxFontSize: number;
    _minFontSize: number;
    _autoFitWidth: boolean;
    _autoFitHeight: boolean;
    _maxLines: number;
    _lineHeight: number;
    _scaledLineHeight: number;
    _wrapLines: boolean;
    _drawOrder: number;
    _alignment: Vec2;
    _autoWidth: boolean;
    _autoHeight: boolean;
    width: number;
    height: number;
    // Rendering state.
    _node: GraphNode;
    _model: Model;
    _meshInfo: any[];
    _material: any;
    _aabbDirty: boolean;
    _aabb: BoundingBox;
    _noResize: boolean;
    _currentMaterialType: any;
    _maskedMaterialSrc: any;
    // Bidirectional / unicode handling.
    _rtlReorder: boolean;
    _unicodeConverter: boolean;
    _rtl: boolean;
    // Outline and shadow styling plus their uniform scratch buffers.
    _outlineColor: Color;
    _outlineColorUniform: Float32Array<ArrayBuffer>;
    _outlineThicknessScale: number;
    _outlineThickness: number;
    _shadowColor: Color;
    _shadowColorUniform: Float32Array<ArrayBuffer>;
    _shadowOffsetScale: number;
    _shadowOffset: Vec2;
    _shadowOffsetUniform: Float32Array<ArrayBuffer>;
    _enableMarkup: boolean;
    // Render range limiting (see rangeStart/rangeEnd).
    _rangeStart: number;
    _rangeEnd: number;
    destroy(): void;
    set font(value: CanvasFont | Font);
    get font(): CanvasFont | Font;
    // Layout / screen / locale event handlers.
    _onParentResize(width: any, height: any): void;
    _onScreenChange(screen: any): void;
    _onScreenSpaceChange(value: any): void;
    _onDrawOrderChange(order: any): void;
    _onPivotChange(pivot: any): void;
    _onLocaleSet(locale: any): void;
    _onLocalizationData(locale: any, messages: any): void;
    _resetLocalizedText(): void;
    // Text and material update internals.
    _setText(text: any): void;
    _updateText(text: any): void;
    _removeMeshInstance(meshInstance: any): void;
    _setMaterial(material: any): void;
    _updateMaterial(screenSpace: any): void;
    _updateMaterialEmissive(): void;
    _updateMaterialOutline(): void;
    _updateMaterialShadow(): void;
    // Word-wrapping helpers (CJK = Chinese/Japanese/Korean line-break rules).
    _isWordBoundary(char: any): boolean;
    _isValidNextChar(nextchar: any): boolean;
    _isNextCJKBoundary(char: any, nextchar: any): boolean;
    _isNextCJKWholeWord(nextchar: any): boolean;
    _updateMeshes(): void;
    _lineWidths: any[];
    _lineContents: any[];
    set autoWidth(value: boolean);
    get autoWidth(): boolean;
    set autoHeight(value: boolean);
    get autoHeight(): boolean;
    // Font asset lifecycle handlers.
    _onFontRender(): void;
    _onFontLoad(asset: any): void;
    _onFontChange(asset: any, name: any, _new: any, _old: any): void;
    _onFontRemove(asset: any): void;
    _setTextureParams(mi: any, texture: any): void;
    _getPxRange(font: any): number;
    _getUv(char: any): any;
    onEnable(): void;
    onDisable(): void;
    _setStencil(stencilParams: any): void;
    _shouldAutoFitWidth(): boolean;
    _shouldAutoFitHeight(): boolean;
    _shouldAutoFit(): boolean;
    _calculateCharsPerTexture(symbolIndex: any): {};
    _updateRenderRange(): void;
    set text(value: string);
    get text(): string;
    set key(value: string);
    get key(): string;
    set color(value: Color);
    get color(): Color;
    set opacity(value: number);
    get opacity(): number;
    set lineHeight(value: number);
    get lineHeight(): number;
    set wrapLines(value: boolean);
    get wrapLines(): boolean;
    get lines(): any[];
    set spacing(value: number);
    get spacing(): number;
    set fontSize(value: number);
    get fontSize(): number;
    set fontAsset(value: number | Asset);
    get fontAsset(): number | Asset;
    set alignment(value: Vec2);
    get alignment(): Vec2;
    set rtlReorder(value: boolean);
    get rtlReorder(): boolean;
    set unicodeConverter(value: boolean);
    get unicodeConverter(): boolean;
    /**
     * @type {BoundingBox}
     */
    get aabb(): BoundingBox;
    set outlineColor(value: Color);
    get outlineColor(): Color;
    set outlineThickness(value: number);
    get outlineThickness(): number;
    set shadowColor(value: Color);
    get shadowColor(): Color;
    set shadowOffset(value: Vec2);
    get shadowOffset(): Vec2;
    set minFontSize(value: number);
    get minFontSize(): number;
    set maxFontSize(value: number);
    get maxFontSize(): number;
    set autoFitWidth(value: boolean);
    get autoFitWidth(): boolean;
    set autoFitHeight(value: boolean);
    get autoFitHeight(): boolean;
    set maxLines(value: number);
    get maxLines(): number;
    set enableMarkup(value: boolean);
    get enableMarkup(): boolean;
    get symbols(): any[];
    get symbolColors(): any[];
    get symbolOutlineParams(): any[];
    get symbolShadowParams(): any[];
    get rtl(): boolean;
    set rangeStart(rangeStart: number);
    get rangeStart(): number;
    set rangeEnd(rangeEnd: number);
    get rangeEnd(): number;
}

/**
 * Plain data container holding the serialized state of an element component.
 *
 * @ignore
 */
declare class ElementComponentData {
    /** Whether the component is enabled. */
    enabled: boolean;
}

/**
 * The standard material options define a set of options used to control the shader frontend shader
 * generation, such as textures, tints and multipliers.
 *
 * @category Graphics
 */
declare class StandardMaterialOptions {
    /**
     * The set of defines used to generate the shader.
     *
     * @type {Map<string, string>}
     */
    defines: Map<string, string>;
    /**
     * If UV1 (second set of texture coordinates) is required in the shader. Will be declared as
     * "vUv1" and passed to the fragment shader.
     *
     * @type {boolean}
     */
    forceUv1: boolean;
    /**
     * Defines if {@link StandardMaterial#specular} constant should affect specular color.
     *
     * @type {boolean}
     */
    specularTint: boolean;
    /**
     * Defines if {@link StandardMaterial#metalness} constant should affect metalness value.
     *
     * @type {boolean}
     */
    metalnessTint: boolean;
    /**
     * Defines if {@link StandardMaterial#gloss} constant should affect glossiness value.
     *
     * @type {boolean}
     */
    glossTint: boolean;
    /** Encoding of the emissive map — presumably a color-space identifier (e.g. 'srgb'); confirm in implementation. */
    emissiveEncoding: string;
    /** Encoding of the light map — presumably a color-space identifier (e.g. 'srgb'); confirm in implementation. */
    lightMapEncoding: string;
    /** If vertex colors require gamma conversion — presumably; confirm in implementation. */
    vertexColorGamma: boolean;
    /**
     * If normal map contains X in RGB, Y in Alpha, and Z must be reconstructed.
     *
     * @type {boolean}
     */
    packedNormal: boolean;
    /**
     * If normal detail map contains X in RGB, Y in Alpha, and Z must be reconstructed.
     *
     * @type {boolean}
     */
    normalDetailPackedNormal: boolean;
    /**
     * If normal clear coat map contains X in RGB, Y in Alpha, and Z must be reconstructed.
     *
     * @type {boolean}
     */
    clearCoatPackedNormal: boolean;
    /**
     * Invert the gloss channel.
     *
     * @type {boolean}
     */
    glossInvert: boolean;
    /**
     * Invert the sheen gloss channel.
     *
     * @type {boolean}
     */
    sheenGlossInvert: boolean;
    /**
     * Invert the clearcoat gloss channel.
     *
     * @type {boolean}
     */
    clearCoatGlossInvert: boolean;
    /**
     * True to include AO variables even if AO is not used, which allows SSAO to be used in the lit shader.
     *
     * @type {boolean}
     */
    useAO: boolean;
    /**
     * Storage for the options for the lit shader and material.
     *
     * @type {LitShaderOptions}
     */
    litOptions: LitShaderOptions;
    /**
     * The render pass for which the options were generated — presumably mirrors
     * litOptions.pass; verify in the implementation.
     *
     * @type {number}
     */
    get pass(): number;
}

/**
 * Populates a {@link StandardMaterialOptions} object from the current material, scene,
 * camera and pass state prior to shader generation. `updateMinRef` presumably builds the
 * minimal option set while `updateRef` builds the full set — confirm in the implementation.
 *
 * @ignore
 */
declare class StandardMaterialOptionsBuilder {
    /** Scratch state used when resolving map transform ids (see _getMapTransformID) — presumably; confirm. */
    _mapXForms: any[];
    updateMinRef(options: any, scene: any, stdMat: any, objDefs: any, pass: any, sortedLights: any): void;
    updateRef(options: any, scene: any, cameraShaderParams: any, stdMat: any, objDefs: any, pass: any, sortedLights: any): void;
    _updateSharedOptions(options: any, scene: any, stdMat: any, objDefs: any, pass: any): void;
    _updateUVOptions(options: any, stdMat: any, objDefs: any, minimalOptions: any, cameraShaderParams: any): void;
    _updateTexOptions(options: any, stdMat: any, p: any, hasUv0: any, hasUv1: any, hasVcolor: any, minimalOptions: any, uniqueTextureMap: any): void;
    _updateMinOptions(options: any, stdMat: any, pass: any): void;
    _updateMaterialOptions(options: any, stdMat: any, scene: any): void;
    _updateEnvOptions(options: any, stdMat: any, scene: any, cameraShaderParams: any): void;
    _updateLightOptions(options: any, scene: any, stdMat: any, objDefs: any, sortedLights: any): void;
    _getMapTransformID(xform: any, uv: any): any;
}

/**
 * Callback used by {@link StandardMaterial#onUpdateShader}.
 *
 * @param {StandardMaterialOptions} options - An object with shader generator settings (based on current
 * material and scene properties), that you can change and then return. Properties of the object passed
 * into this function are documented in {@link StandardMaterial}. Also contains a member named litOptions
 * which holds some of the options only used by the lit shader backend {@link LitShaderOptions}.
 * @returns {StandardMaterialOptions} Returned settings will be used by the shader.
 */
type UpdateShaderCallback = (options: StandardMaterialOptions) => StandardMaterialOptions;
/**
 * A standard material is the main, general purpose material that is most often used for rendering.
 * It can approximate a wide variety of surface types and can simulate dynamic reflected light.
 * Most maps can use 3 types of input values in any combination: constant ({@link Color} or number),
 * mesh vertex colors and a {@link Texture}. All enabled inputs are multiplied together.
 *
 * @property {Color} ambient The ambient color of the material. This color value is 3-component
 * (RGB), where each component is between 0 and 1.
 * @property {Color} diffuse The diffuse color of the material. This color value is 3-component
 * (RGB), where each component is between 0 and 1. Defines basic surface color (aka albedo).
 * @property {Texture|null} diffuseMap The main (primary) diffuse map of the material (default is
 * null).
 * @property {number} diffuseMapUv Main (primary) diffuse map UV channel.
 * @property {Vec2} diffuseMapTiling Controls the 2D tiling of the main (primary) diffuse map.
 * @property {Vec2} diffuseMapOffset Controls the 2D offset of the main (primary) diffuse map. Each
 * component is between 0 and 1.
 * @property {number} diffuseMapRotation Controls the 2D rotation (in degrees) of the main
 * (primary) diffuse map.
 * @property {string} diffuseMapChannel Color channels of the main (primary) diffuse map to use.
 * Can be "r", "g", "b", "a", "rgb" or any swizzled combination.
 * @property {boolean} diffuseVertexColor Multiply diffuse by the mesh vertex colors.
 * @property {string} diffuseVertexColorChannel Vertex color channels to use for diffuse. Can be
 * "r", "g", "b", "a", "rgb" or any swizzled combination.
 * @property {Texture|null} diffuseDetailMap The detail (secondary) diffuse map of the material
 * (default is null). Will only be used if main (primary) diffuse map is non-null.
 * @property {number} diffuseDetailMapUv Detail (secondary) diffuse map UV channel.
 * @property {Vec2} diffuseDetailMapTiling Controls the 2D tiling of the detail (secondary) diffuse
 * map.
 * @property {Vec2} diffuseDetailMapOffset Controls the 2D offset of the detail (secondary) diffuse
 * map. Each component is between 0 and 1.
 * @property {number} diffuseDetailMapRotation Controls the 2D rotation (in degrees) of the detail
 * (secondary) diffuse map.
 * @property {string} diffuseDetailMapChannel Color channels of the detail (secondary) diffuse map
 * to use. Can be "r", "g", "b", "a", "rgb" or any swizzled combination.
 * @property {string} diffuseDetailMode Determines how the main (primary) and detail (secondary)
 * diffuse maps are blended together. Can be:
 *
 * - {@link DETAILMODE_MUL}: Multiply together the primary and secondary colors.
 * - {@link DETAILMODE_ADD}: Add together the primary and secondary colors.
 * - {@link DETAILMODE_SCREEN}: Softer version of {@link DETAILMODE_ADD}.
 * - {@link DETAILMODE_OVERLAY}: Multiplies or screens the colors, depending on the primary color.
 * - {@link DETAILMODE_MIN}: Select whichever of the primary and secondary colors is darker,
 * component-wise.
 * - {@link DETAILMODE_MAX}: Select whichever of the primary and secondary colors is lighter,
 * component-wise.
 *
 * Defaults to {@link DETAILMODE_MUL}.
 * @property {Color} specular The specular color of the material. This color value is 3-component
 * (RGB), where each component is between 0 and 1. Defines surface reflection/specular color.
 * Affects specular intensity and tint.
 * @property {boolean} specularTint Multiply specular map and/or specular vertex color by the
 * constant specular value.
 * @property {Texture|null} specularMap The specular map of the material (default is null).
 * @property {number} specularMapUv Specular map UV channel.
 * @property {Vec2} specularMapTiling Controls the 2D tiling of the specular map.
 * @property {Vec2} specularMapOffset Controls the 2D offset of the specular map. Each component is
 * between 0 and 1.
 * @property {number} specularMapRotation Controls the 2D rotation (in degrees) of the specular map.
 * @property {string} specularMapChannel Color channels of the specular map to use. Can be "r", "g",
 * "b", "a", "rgb" or any swizzled combination.
 * @property {boolean} specularVertexColor Use mesh vertex colors for specular. If specularMap or
 * specularTint are set, they'll be multiplied by vertex colors.
 * @property {string} specularVertexColorChannel Vertex color channels to use for specular. Can be
 * "r", "g", "b", "a", "rgb" or any swizzled combination.
 * @property {boolean} specularityFactorTint Multiply specularity factor map and/or specular vertex color by the
 * constant specular value.
 * @property {number} specularityFactor The factor of specular intensity, used to weight the fresnel and specularity. Default is 1.0.
 * @property {Texture|null} specularityFactorMap The factor of specularity as a texture (default is
 * null).
 * @property {number} specularityFactorMapUv Specularity factor map UV channel.
 * @property {Vec2} specularityFactorMapTiling Controls the 2D tiling of the specularity factor map.
 * @property {Vec2} specularityFactorMapOffset Controls the 2D offset of the specularity factor map. Each component is
 * between 0 and 1.
 * @property {number} specularityFactorMapRotation Controls the 2D rotation (in degrees) of the specularity factor map.
 * @property {string} specularityFactorMapChannel The channel used by the specularity factor texture to sample from (default is 'a').
 * @property {boolean} specularityFactorVertexColor Use mesh vertex colors for specularity factor. If specularityFactorMap or
 * specularityFactorTint are set, they'll be multiplied by vertex colors.
 * @property {string} specularityFactorVertexColorChannel Vertex color channels to use for specularity factor. Can be
 * "r", "g", "b", "a", "rgb" or any swizzled combination.
 * @property {boolean} enableGGXSpecular Enables GGX specular. Also enables
 * {@link StandardMaterial#anisotropyIntensity} parameter to set material anisotropy.
 * @property {number} anisotropyIntensity Defines amount of anisotropy. Requires
 * {@link StandardMaterial#enableGGXSpecular} is set to true.
 * - When anisotropyIntensity == 0, specular is isotropic.
 * - Specular anisotropy increases as anisotropyIntensity value increases to maximum of 1.
 * @property {number} anisotropyRotation Defines the rotation (in degrees) of anisotropy.
 * @property {Texture|null} anisotropyMap The anisotropy map of the material (default is null).
 * @property {number} anisotropyMapUv Anisotropy map UV channel.
 * @property {Vec2} anisotropyMapTiling Controls the 2D tiling of the anisotropy map.
 * @property {Vec2} anisotropyMapOffset Controls the 2D offset of the anisotropy map. Each
 * component is between 0 and 1.
 * @property {number} anisotropyMapRotation Controls the 2D rotation (in degrees) of the anisotropy map.
 * @property {number} clearCoat Defines intensity of clearcoat layer from 0 to 1. Clearcoat layer
 * is disabled when clearCoat == 0. Default value is 0 (disabled).
 * @property {Texture|null} clearCoatMap Monochrome clearcoat intensity map (default is null). If
 * specified, will be multiplied by normalized 'clearCoat' value and/or vertex colors.
 * @property {number} clearCoatMapUv Clearcoat intensity map UV channel.
 * @property {Vec2} clearCoatMapTiling Controls the 2D tiling of the clearcoat intensity map.
 * @property {Vec2} clearCoatMapOffset Controls the 2D offset of the clearcoat intensity map. Each
 * component is between 0 and 1.
 * @property {number} clearCoatMapRotation Controls the 2D rotation (in degrees) of the clearcoat
 * intensity map.
 * @property {string} clearCoatMapChannel Color channel of the clearcoat intensity map to use. Can
 * be "r", "g", "b" or "a".
 * @property {boolean} clearCoatVertexColor Use mesh vertex colors for clearcoat intensity. If
 * clearCoatMap is set, it'll be multiplied by vertex colors.
 * @property {string} clearCoatVertexColorChannel Vertex color channel to use for clearcoat
 * intensity. Can be "r", "g", "b" or "a".
 * @property {number} clearCoatGloss Defines the clearcoat glossiness of the clearcoat layer
 * from 0 (rough) to 1 (mirror).
 * @property {boolean} clearCoatGlossInvert Invert the clearcoat gloss component (default is false).
 * Enabling this flag results in material treating the clear coat gloss members as roughness.
 * @property {Texture|null} clearCoatGlossMap Monochrome clearcoat glossiness map (default is
 * null). If specified, will be multiplied by normalized 'clearCoatGloss' value and/or vertex
 * colors.
 * @property {number} clearCoatGlossMapUv Clearcoat gloss map UV channel.
 * @property {Vec2} clearCoatGlossMapTiling Controls the 2D tiling of the clearcoat gloss map.
 * @property {Vec2} clearCoatGlossMapOffset Controls the 2D offset of the clearcoat gloss map.
 * Each component is between 0 and 1.
 * @property {number} clearCoatGlossMapRotation Controls the 2D rotation (in degrees) of the clear
 * coat gloss map.
 * @property {string} clearCoatGlossMapChannel Color channel of the clearcoat gloss map to use.
 * Can be "r", "g", "b" or "a".
 * @property {boolean} clearCoatGlossVertexColor Use mesh vertex colors for clearcoat glossiness.
 * If clearCoatGlossMap is set, it'll be multiplied by vertex colors.
 * @property {string} clearCoatGlossVertexColorChannel Vertex color channel to use for clearcoat
 * glossiness. Can be "r", "g", "b" or "a".
 * @property {Texture|null} clearCoatNormalMap The clearcoat normal map of the material (default is
 * null). The texture must contain normalized, tangent space normals.
 * @property {number} clearCoatNormalMapUv Clearcoat normal map UV channel.
 * @property {Vec2} clearCoatNormalMapTiling Controls the 2D tiling of the main clearcoat normal
 * map.
 * @property {Vec2} clearCoatNormalMapOffset Controls the 2D offset of the main clearcoat normal
 * map. Each component is between 0 and 1.
 * @property {number} clearCoatNormalMapRotation Controls the 2D rotation (in degrees) of the main
 * clearcoat map.
 * @property {number} clearCoatBumpiness The bumpiness of the clearcoat layer. This value scales
 * the assigned main clearcoat normal map. It should be normally between 0 (no bump mapping) and 1
 * (full bump mapping), but can be set to e.g. 2 to give even more pronounced bump effect.
 * @property {boolean} useIridescence Enable thin-film iridescence.
 * @property {Texture|null} iridescenceMap The per-pixel iridescence intensity. Only used when
 * useIridescence is enabled.
 * @property {number} iridescenceMapUv Iridescence map UV channel.
 * @property {Vec2} iridescenceMapTiling Controls the 2D tiling of the iridescence map.
 * @property {Vec2} iridescenceMapOffset Controls the 2D offset of the iridescence map. Each component is
 * between 0 and 1.
 * @property {number} iridescenceMapRotation Controls the 2D rotation (in degrees) of the iridescence
 * map.
 * @property {string} iridescenceMapChannel Color channels of the iridescence map to use. Can be "r",
 * "g", "b" or "a".
 * @property {Texture|null} iridescenceThicknessMap The per-pixel iridescence thickness. Defines a
 * gradient weight between iridescenceThicknessMin and iridescenceThicknessMax. Only used when
 * useIridescence is enabled.
 * @property {number} iridescenceThicknessMapUv Iridescence thickness map UV channel.
 * @property {Vec2} iridescenceThicknessMapTiling Controls the 2D tiling of the iridescence
 * thickness map.
 * @property {Vec2} iridescenceThicknessMapOffset Controls the 2D offset of the iridescence
 * thickness map. Each component is between 0 and 1.
 * @property {number} iridescenceThicknessMapRotation Controls the 2D rotation (in degrees)
 * of the iridescence thickness map.
 * @property {string} iridescenceThicknessMapChannel Color channels of the iridescence thickness
 * map to use. Can be "r", "g", "b" or "a".
 * @property {number} iridescenceThicknessMin The minimum thickness for the iridescence layer.
 * Only used when an iridescence thickness map is used. The unit is in nm.
 * @property {number} iridescenceThicknessMax The maximum thickness for the iridescence layer.
 * Used as the 'base' thickness when no iridescence thickness map is defined. The unit is in nm.
 * @property {number} iridescenceRefractionIndex The index of refraction of the iridescent
 * thin-film. Affects the color phase shift as described here:
 * https://github.com/KhronosGroup/glTF/tree/main/extensions/2.0/Khronos/KHR_materials_iridescence
 * @property {boolean} useMetalness Use metalness properties instead of specular. When enabled,
 * diffuse colors also affect specular instead of the dedicated specular map. This can be used as
 * alternative to specular color to save space. With metalness == 0, the pixel is assumed to be
 * dielectric, and diffuse color is used as normal. With metalness == 1, the pixel is fully
 * metallic, and diffuse color is used as specular color instead.
 * @property {boolean} useMetalnessSpecularColor When metalness is enabled, use the
 * specular map to apply a color tint to specular reflections at direct angles.
 * @property {number} metalness Defines how much the surface is metallic. From 0 (dielectric) to 1
 * (metal).
 * @property {Texture|null} metalnessMap Monochrome metalness map (default is null).
 * @property {number} metalnessMapUv Metalness map UV channel.
 * @property {Vec2} metalnessMapTiling Controls the 2D tiling of the metalness map.
 * @property {Vec2} metalnessMapOffset Controls the 2D offset of the metalness map. Each component
 * is between 0 and 1.
 * @property {number} metalnessMapRotation Controls the 2D rotation (in degrees) of the metalness
 * map.
 * @property {string} metalnessMapChannel Color channel of the metalness map to use. Can be "r",
 * "g", "b" or "a".
 * @property {boolean} metalnessVertexColor Use mesh vertex colors for metalness. If metalnessMap
 * is set, it'll be multiplied by vertex colors.
 * @property {string} metalnessVertexColorChannel Vertex color channel to use for metalness. Can be
 * "r", "g", "b" or "a".
 * @property {number} gloss Defines the glossiness of the material from 0 (rough) to 1 (shiny).
 * @property {Texture|null} glossMap Gloss map (default is null). If specified, will be multiplied
 * by normalized gloss value and/or vertex colors.
 * @property {boolean} glossInvert Invert the gloss component (default is false). Enabling this
 * flag results in material treating the gloss members as roughness.
 * @property {number} glossMapUv Gloss map UV channel.
 * @property {string} glossMapChannel Color channel of the gloss map to use. Can be "r", "g", "b"
 * or "a".
 * @property {Vec2} glossMapTiling Controls the 2D tiling of the gloss map.
 * @property {Vec2} glossMapOffset Controls the 2D offset of the gloss map. Each component is
 * between 0 and 1.
 * @property {number} glossMapRotation Controls the 2D rotation (in degrees) of the gloss map.
 * @property {boolean} glossVertexColor Use mesh vertex colors for glossiness. If glossMap is set,
 * it'll be multiplied by vertex colors.
 * @property {string} glossVertexColorChannel Vertex color channel to use for glossiness. Can be
 * "r", "g", "b" or "a".
 * @property {number} refraction Defines the visibility of refraction. Material can refract the
 * same cube map as used for reflections.
 * @property {Texture|null} refractionMap The map of the refraction visibility.
 * @property {number} refractionMapUv Refraction map UV channel.
 * @property {Vec2} refractionMapTiling Controls the 2D tiling of the refraction map.
 * @property {Vec2} refractionMapOffset Controls the 2D offset of the refraction map. Each component
 * is between 0 and 1.
 * @property {number} refractionMapRotation Controls the 2D rotation (in degrees) of the
 * refraction map.
 * @property {string} refractionMapChannel Color channels of the refraction map to use. Can be "r",
 * "g", "b", "a", "rgb" or any swizzled combination.
 * @property {boolean} refractionVertexColor Use mesh vertex colors for refraction. If
 * refraction map is set, it will be multiplied by vertex colors.
 * @property {string} refractionVertexColorChannel Vertex color channel to use for refraction.
 * Can be "r", "g", "b" or "a".
 * @property {number} refractionIndex Defines the index of refraction, i.e. The amount of
 * distortion. The value is calculated as (outerIor / surfaceIor), where inputs are measured
 * indices of refraction, the one around the object and the one of its own surface. In most
 * situations outer medium is air, so outerIor will be approximately 1. Then you only need to do
 * (1.0 / surfaceIor).
 * @property {number} dispersion The strength of the angular separation of colors (chromatic
 * aberration) transmitting through a volume. Defaults to 0, which is equivalent to no dispersion.
 * @property {boolean} useDynamicRefraction Enables higher quality refractions using the grab pass
 * instead of pre-computed cube maps for refractions.
 * @property {number} thickness The thickness of the medium, only used when useDynamicRefraction
 * is enabled. The unit is in base units, and scales with the size of the object.
 * @property {Texture|null} thicknessMap The per-pixel thickness of the medium, only used when
 * useDynamicRefraction is enabled.
 * @property {number} thicknessMapUv Thickness map UV channel.
 * @property {Vec2} thicknessMapTiling Controls the 2D tiling of the thickness map.
 * @property {Vec2} thicknessMapOffset Controls the 2D offset of the thickness map. Each component is
 * between 0 and 1.
 * @property {number} thicknessMapRotation Controls the 2D rotation (in degrees) of the thickness
 * map.
 * @property {string} thicknessMapChannel Color channels of the thickness map to use. Can be "r",
 * "g", "b" or "a".
 * @property {boolean} thicknessVertexColor Use mesh vertex colors for thickness. If
 * thickness map is set, it will be multiplied by vertex colors.
 * @property {Color} attenuation The attenuation color for refractive materials, only used when
 * useDynamicRefraction is enabled.
 * @property {number} attenuationDistance The distance defining the absorption rate of light
 * within the medium. Only used when useDynamicRefraction is enabled.
 * @property {Color} emissive The emissive color of the material. This color value is 3-component
 * (RGB), where each component is between 0 and 1.
 * @property {Texture|null} emissiveMap The emissive map of the material (default is null). Can be
 * HDR. When the emissive map is applied, the emissive color is multiplied by the texel color in the
 * map. Since the emissive color is black by default, the emissive map won't be visible unless the
 * emissive color is changed.
 * @property {number} emissiveIntensity Emissive color multiplier.
 * @property {number} emissiveMapUv Emissive map UV channel.
 * @property {Vec2} emissiveMapTiling Controls the 2D tiling of the emissive map.
 * @property {Vec2} emissiveMapOffset Controls the 2D offset of the emissive map. Each component is
 * between 0 and 1.
 * @property {number} emissiveMapRotation Controls the 2D rotation (in degrees) of the emissive
 * map.
 * @property {string} emissiveMapChannel Color channels of the emissive map to use. Can be "r",
 * "g", "b", "a", "rgb" or any swizzled combination.
 * @property {boolean} emissiveVertexColor Use mesh vertex colors for emission. If emissiveMap or
 * emissive are set, they'll be multiplied by vertex colors.
 * @property {string} emissiveVertexColorChannel Vertex color channels to use for emission. Can be
 * "r", "g", "b", "a", "rgb" or any swizzled combination.
 * @property {boolean} useSheen Toggle sheen specular effect on/off.
 * @property {Color} sheen The specular color of the sheen (fabric) microfiber structure.
 * This color value is 3-component (RGB), where each component is between 0 and 1.
 * @property {Texture|null} sheenMap The sheen microstructure color map of the material (default is
 * null).
 * @property {number} sheenMapUv Sheen map UV channel.
 * @property {Vec2} sheenMapTiling Controls the 2D tiling of the sheen map.
 * @property {Vec2} sheenMapOffset Controls the 2D offset of the sheen map. Each component is
 * between 0 and 1.
 * @property {number} sheenMapRotation Controls the 2D rotation (in degrees) of the sheen
 * map.
 * @property {string} sheenMapChannel Color channels of the sheen map to use. Can be "r",
 * "g", "b", "a", "rgb" or any swizzled combination.
 * @property {boolean} sheenVertexColor Use mesh vertex colors for sheen. If sheen map or
 * sheen tint are set, they'll be multiplied by vertex colors.
 * @property {number} sheenGloss The glossiness of the sheen (fabric) microfiber structure.
 * This color value is a single value between 0 and 1.
 * @property {boolean} sheenGlossInvert Invert the sheen gloss component (default is false).
 * Enabling this flag results in material treating the sheen gloss members as roughness.
 * @property {Texture|null} sheenGlossMap The sheen glossiness microstructure color map of the
 * material (default is null).
 * @property {number} sheenGlossMapUv Sheen gloss map UV channel.
 * @property {Vec2} sheenGlossMapTiling Controls the 2D tiling of the sheen glossiness map.
 * @property {Vec2} sheenGlossMapOffset Controls the 2D offset of the sheen glossiness map.
 * Each component is between 0 and 1.
 * @property {number} sheenGlossMapRotation Controls the 2D rotation (in degrees) of the sheen
 * glossiness map.
 * @property {string} sheenGlossMapChannel Color channels of the sheen glossiness map to use.
 * Can be "r", "g", "b", "a", "rgb" or any swizzled combination.
 * @property {boolean} sheenGlossVertexColor Use mesh vertex colors for sheen glossiness.
 * If sheen glossiness map or sheen glossiness tint are set, they'll be multiplied by vertex colors.
 * @property {string} sheenGlossVertexColorChannel Vertex color channels to use for sheen glossiness.
 * Can be "r", "g", "b" or "a".
 * @property {number} opacity The opacity of the material. This value can be between 0 and 1, where
 * 0 is fully transparent and 1 is fully opaque. If you want the material to be semi-transparent
 * you also need to set the {@link Material#blendType} to {@link BLEND_NORMAL},
 * {@link BLEND_ADDITIVE} or any other mode. Also note that for most semi-transparent objects you
 * want {@link Material#depthWrite} to be false, otherwise they can fully occlude objects behind
 * them.
 * @property {Texture|null} opacityMap The opacity map of the material (default is null).
 * @property {number} opacityMapUv Opacity map UV channel.
 * @property {string} opacityMapChannel Color channel of the opacity map to use. Can be "r", "g",
 * "b" or "a".
 * @property {Vec2} opacityMapTiling Controls the 2D tiling of the opacity map.
 * @property {Vec2} opacityMapOffset Controls the 2D offset of the opacity map. Each component is
 * between 0 and 1.
 * @property {number} opacityMapRotation Controls the 2D rotation (in degrees) of the opacity map.
 * @property {boolean} opacityVertexColor Use mesh vertex colors for opacity. If opacityMap is set,
 * it'll be multiplied by vertex colors.
 * @property {string} opacityVertexColorChannel Vertex color channels to use for opacity. Can be
 * "r", "g", "b" or "a".
 * @property {boolean} opacityFadesSpecular Used to specify whether specular and reflections are
 * faded out using {@link StandardMaterial#opacity}. Default is true. When set to false use
 * {@link Material#alphaFade} to fade out materials.
 * @property {string} opacityDither Used to specify whether opacity is dithered, which allows
 * transparency without alpha blending. Can be:
 *
 * - {@link DITHER_NONE}: Opacity dithering is disabled.
 * - {@link DITHER_BAYER8}: Opacity is dithered using a Bayer 8 matrix.
 * - {@link DITHER_BLUENOISE}: Opacity is dithered using a blue noise.
 * - {@link DITHER_IGNNOISE}: Opacity is dithered using an interleaved gradient noise.
 *
 * Defaults to {@link DITHER_NONE}.
 * @property {boolean} opacityShadowDither Used to specify whether shadow opacity is dithered, which
 * allows shadow transparency without alpha blending.  Can be:
 *
 * - {@link DITHER_NONE}: Opacity dithering is disabled.
 * - {@link DITHER_BAYER8}: Opacity is dithered using a Bayer 8 matrix.
 * - {@link DITHER_BLUENOISE}: Opacity is dithered using a blue noise.
 * - {@link DITHER_IGNNOISE}: Opacity is dithered using an interleaved gradient noise.
 *
 * Defaults to {@link DITHER_NONE}.
 * @property {number} alphaFade Used to fade out materials when
 * {@link StandardMaterial#opacityFadesSpecular} is set to false.
 * @property {Texture|null} normalMap The main (primary) normal map of the material (default is
 * null). The texture must contain normalized, tangent space normals.
 * @property {number} normalMapUv Main (primary) normal map UV channel.
 * @property {Vec2} normalMapTiling Controls the 2D tiling of the main (primary) normal map.
 * @property {Vec2} normalMapOffset Controls the 2D offset of the main (primary) normal map. Each
 * component is between 0 and 1.
 * @property {number} normalMapRotation Controls the 2D rotation (in degrees) of the main (primary)
 * normal map.
 * @property {number} bumpiness The bumpiness of the material. This value scales the assigned main
 * (primary) normal map. It should be normally between 0 (no bump mapping) and 1 (full bump
 * mapping), but can be set to e.g. 2 to give even more pronounced bump effect.
 * @property {Texture|null} normalDetailMap The detail (secondary) normal map of the material
 * (default is null). Will only be used if main (primary) normal map is non-null.
 * @property {number} normalDetailMapUv Detail (secondary) normal map UV channel.
 * @property {Vec2} normalDetailMapTiling Controls the 2D tiling of the detail (secondary) normal
 * map.
 * @property {Vec2} normalDetailMapOffset Controls the 2D offset of the detail (secondary) normal
 * map. Each component is between 0 and 1.
 * @property {number} normalDetailMapRotation Controls the 2D rotation (in degrees) of the detail
 * (secondary) normal map.
 * @property {number} normalDetailMapBumpiness The bumpiness of the material. This value scales the
 * assigned detail (secondary) normal map. It should be normally between 0 (no bump mapping) and 1
 * (full bump mapping), but can be set to e.g. 2 to give even more pronounced bump effect.
 * @property {Texture|null} heightMap The height map of the material (default is null). Used for a
 * view-dependent parallax effect. The texture must represent the height of the surface where
 * darker pixels are lower and lighter pixels are higher. It is recommended to use it together with
 * a normal map.
 * @property {number} heightMapUv Height map UV channel.
 * @property {string} heightMapChannel Color channel of the height map to use. Can be "r", "g", "b"
 * or "a".
 * @property {Vec2} heightMapTiling Controls the 2D tiling of the height map.
 * @property {Vec2} heightMapOffset Controls the 2D offset of the height map. Each component is
 * between 0 and 1.
 * @property {number} heightMapRotation Controls the 2D rotation (in degrees) of the height map.
 * @property {number} heightMapFactor Height map multiplier. Affects the strength of the parallax
 * effect.
 * @property {Texture|null} envAtlas The prefiltered environment lighting atlas (default is null).
 * This setting overrides cubeMap and sphereMap and will replace the scene lighting environment.
 * @property {Texture|null} cubeMap The cubic environment map of the material (default is null).
 * This setting overrides sphereMap and will replace the scene lighting environment.
 * @property {Texture|null} sphereMap The spherical environment map of the material (default is
 * null). This will replace the scene lighting environment.
 * @property {number} cubeMapProjection The type of projection applied to the cubeMap property:
 * - {@link CUBEPROJ_NONE}: The cube map is treated as if it is infinitely far away.
 * - {@link CUBEPROJ_BOX}: Box-projection based on a world space axis-aligned bounding box.
 * Defaults to {@link CUBEPROJ_NONE}.
 * @property {BoundingBox} cubeMapProjectionBox The world space axis-aligned bounding box
 * defining the box-projection used for the cubeMap property. Only used when cubeMapProjection is
 * set to {@link CUBEPROJ_BOX}.
 * @property {number} reflectivity Environment map intensity.
 * @property {Texture|null} lightMap A custom lightmap of the material (default is null). Lightmaps
 * are textures that contain pre-rendered lighting. Can be HDR.
 * @property {number} lightMapUv Lightmap UV channel
 * @property {string} lightMapChannel Color channels of the lightmap to use. Can be "r", "g", "b",
 * "a", "rgb" or any swizzled combination.
 * @property {Vec2} lightMapTiling Controls the 2D tiling of the lightmap.
 * @property {Vec2} lightMapOffset Controls the 2D offset of the lightmap. Each component is
 * between 0 and 1.
 * @property {number} lightMapRotation Controls the 2D rotation (in degrees) of the lightmap.
 * @property {boolean} lightVertexColor Use baked vertex lighting. If lightMap is set, it'll be
 * multiplied by vertex colors.
 * @property {string} lightVertexColorChannel Vertex color channels to use for baked lighting. Can
 * be "r", "g", "b", "a", "rgb" or any swizzled combination.
 * @property {number} aoIntensity Ambient occlusion intensity. Defaults to 1.
 * @property {Texture|null} aoMap The main (primary) baked ambient occlusion (AO) map (default is
 * null). Modulates ambient color.
 * @property {number} aoMapUv Main (primary) AO map UV channel
 * @property {string} aoMapChannel Color channel of the main (primary) AO map to use. Can be "r", "g", "b" or "a".
 * @property {Vec2} aoMapTiling Controls the 2D tiling of the main (primary) AO map.
 * @property {Vec2} aoMapOffset Controls the 2D offset of the main (primary) AO map. Each component is between 0
 * and 1.
 * @property {number} aoMapRotation Controls the 2D rotation (in degrees) of the main (primary) AO map.
 * @property {boolean} aoVertexColor Use mesh vertex colors for AO. If aoMap is set, it'll be
 * multiplied by vertex colors.
 * @property {string} aoVertexColorChannel Vertex color channels to use for AO. Can be "r", "g",
 * "b" or "a".
 * @property {Texture|null} aoDetailMap The detail (secondary) baked ambient occlusion (AO) map of
 * the material (default is null). Will only be used if main (primary) ao map is non-null.
 * @property {number} aoDetailMapUv Detail (secondary) AO map UV channel.
 * @property {Vec2} aoDetailMapTiling Controls the 2D tiling of the detail (secondary) AO map.
 * @property {Vec2} aoDetailMapOffset Controls the 2D offset of the detail (secondary) AO map. Each
 * component is between 0 and 1.
 * @property {number} aoDetailMapRotation Controls the 2D rotation (in degrees) of the detail
 * (secondary) AO map.
 * @property {string} aoDetailMapChannel Color channels of the detail (secondary) AO map to use.
 * Can be "r", "g", "b" or "a" (default is "g").
 * @property {string} aoDetailMode Determines how the main (primary) and detail (secondary)
 * AO maps are blended together. Can be:
 *
 * - {@link DETAILMODE_MUL}: Multiply together the primary and secondary colors.
 * - {@link DETAILMODE_ADD}: Add together the primary and secondary colors.
 * - {@link DETAILMODE_SCREEN}: Softer version of {@link DETAILMODE_ADD}.
 * - {@link DETAILMODE_OVERLAY}: Multiplies or screens the colors, depending on the primary color.
 * - {@link DETAILMODE_MIN}: Select whichever of the primary and secondary colors is darker,
 * component-wise.
 * - {@link DETAILMODE_MAX}: Select whichever of the primary and secondary colors is lighter,
 * component-wise.
 *
 * Defaults to {@link DETAILMODE_MUL}.
 * @property {number} occludeSpecular Uses ambient occlusion to darken specular/reflection. It's a
 * hack, because real specular occlusion is view-dependent. However, it can be better than nothing.
 *
 * - {@link SPECOCC_NONE}: No specular occlusion
 * - {@link SPECOCC_AO}: Use AO directly to occlude specular.
 * - {@link SPECOCC_GLOSSDEPENDENT}: Modify AO based on material glossiness/view angle to occlude
 * specular.
 *
 * @property {number} occludeSpecularIntensity Controls visibility of specular occlusion.
 * @property {boolean} occludeDirect Tells if AO should darken directional lighting. Defaults to
 * false.
 * @property {number} fresnelModel Defines the formula used for Fresnel effect.
 * As a side-effect, enabling any Fresnel model changes the way diffuse and reflection components
 * are combined. When Fresnel is off, legacy non energy-conserving combining is used. When it is
 * on, combining behavior is energy-conserving.
 *
 * - {@link FRESNEL_NONE}: No Fresnel.
 * - {@link FRESNEL_SCHLICK}: Schlick's approximation of Fresnel (recommended). Parameterized by
 * specular color.
 *
 * @property {boolean} useFog Apply fogging (as configured in scene settings)
 * @property {boolean} useLighting Apply lighting
 * @property {boolean} useSkybox Apply scene skybox as prefiltered environment map
 * @property {boolean} useTonemap Apply tonemapping (as configured in {@link Scene#rendering} or
 * {@link CameraComponent.rendering}). Defaults to true.
 * @property {boolean} pixelSnap Align vertices to pixel coordinates when rendering. Useful for
 * pixel perfect 2D graphics.
 * @property {boolean} twoSidedLighting Calculate proper normals (and therefore lighting) on
 * backfaces.
 * @property {boolean} shadowCatcher When enabled, the material will output accumulated directional
 * shadow value in linear space as the color.
 * @property {boolean} vertexColorGamma When set to true, the vertex shader converts vertex colors
 * from gamma to linear space to ensure correct interpolation in the fragment shader. This flag is
 * provided for backwards compatibility, allowing users to mark their materials to handle vertex
 * colors in gamma space. Defaults to false, which indicates that vertex colors are stored in
 * linear space.
 *
 * @category Graphics
 */
declare class StandardMaterial extends Material {
    static TEXTURE_PARAMETERS: any[];
    static CUBEMAP_PARAMETERS: any[];
    userAttributes: Map<any, any>;
    /**
     * A custom function that will be called after all shader generator properties are collected
     * and before shader code is generated. This function will receive an object with shader
     * generator settings (based on current material and scene properties), that you can change and
     * then return. Returned value will be used instead. This is mostly useful when rendering the
     * same set of objects, but with different shader variations based on the same material. For
     * example, you may wish to render a depth or normal pass using textures assigned to the
     * material, a reflection pass with simpler shaders and so on. These properties are split into
     * two sections, generic standard material options and lit options. Properties of the standard
     * material options are {@link StandardMaterialOptions} and the options for the lit options are
     * {@link LitShaderOptions}.
     *
     * @type {UpdateShaderCallback|undefined}
     */
    onUpdateShader: UpdateShaderCallback | undefined;
    _assetReferences: {};
    _activeParams: Set<any>;
    _activeLightingParams: Set<any>;
    shaderOptBuilder: StandardMaterialOptionsBuilder;
    reset(): void;
                	
	/** Used to fade out materials when {@link StandardMaterial#opacityFadesSpecular} is set to false. NOTE(review): exact semantics are not visible in this declaration file — confirm against the implementation. */
	set alphaFade(arg: boolean);
	get alphaFade(): boolean;

	/** The ambient color of the material. This color value is 3-component (RGB), where each component is between 0 and 1. */
	set ambient(arg: Color);
	get ambient(): Color;

	/** Defines amount of anisotropy. Requires {@link StandardMaterial#enableGGXSpecular} is set to true. - When anisotropyIntensity == 0, specular is isotropic. - Specular anisotropy increases as anisotropyIntensity value increases to maximum of 1. */
	set anisotropy(arg: number);
	get anisotropy(): number;

	/** Defines amount of anisotropy. Requires {@link StandardMaterial#enableGGXSpecular} is set to true. - When anisotropyIntensity == 0, specular is isotropic. - Specular anisotropy increases as anisotropyIntensity value increases to maximum of 1. */
	set anisotropyIntensity(arg: number);
	get anisotropyIntensity(): number;

	/** Defines the rotation (in degrees) of anisotropy. */
	set anisotropyRotation(arg: number);
	get anisotropyRotation(): number;

	/** The anisotropy map of the material (default is null). */
	set anisotropyMap(arg: Texture|null);
	get anisotropyMap(): Texture|null;

	/** Controls the 2D offset of the anisotropy map. Each component is between 0 and 1. */
	set anisotropyMapOffset(arg: Vec2);
	get anisotropyMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the anisotropy map. */
	set anisotropyMapRotation(arg: number);
	get anisotropyMapRotation(): number;

	/** Controls the 2D tiling of the anisotropy map. */
	set anisotropyMapTiling(arg: Vec2);
	get anisotropyMapTiling(): Vec2;

	/** Anisotropy map UV channel. */
	set anisotropyMapUv(arg: number);
	get anisotropyMapUv(): number;

	/** Ambient occlusion intensity. Defaults to 1. */
	set aoIntensity(arg: number);
	get aoIntensity(): number;

	/** The main (primary) baked ambient occlusion (AO) map of the material (default is null). Modulates ambient color. */
	set aoMap(arg: Texture|null);
	get aoMap(): Texture|null;

	/** Color channel of the main (primary) AO map to use. Can be "r", "g", "b" or "a". */
	set aoMapChannel(arg: string);
	get aoMapChannel(): string;

	/** Controls the 2D offset of the main (primary) AO map. Each component is between 0 and 1. */
	set aoMapOffset(arg: Vec2);
	get aoMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the main (primary) AO map. */
	set aoMapRotation(arg: number);
	get aoMapRotation(): number;

	/** Controls the 2D tiling of the main (primary) AO map. */
	set aoMapTiling(arg: Vec2);
	get aoMapTiling(): Vec2;

	/** Main (primary) AO map UV channel */
	set aoMapUv(arg: number);
	get aoMapUv(): number;

	/** The detail (secondary) baked ambient occlusion (AO) map of the material (default is null). Will only be used if the main (primary) AO map is non-null. */
	set aoDetailMap(arg: Texture|null);
	get aoDetailMap(): Texture|null;

	/** Color channels of the detail (secondary) AO map to use. Can be "r", "g", "b" or "a" (default is "g"). */
	set aoDetailMapChannel(arg: string);
	get aoDetailMapChannel(): string;

	/** Controls the 2D offset of the detail (secondary) AO map. Each component is between 0 and 1. */
	set aoDetailMapOffset(arg: Vec2);
	get aoDetailMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the detail (secondary) AO map. */
	set aoDetailMapRotation(arg: number);
	get aoDetailMapRotation(): number;

	/** Controls the 2D tiling of the detail (secondary) AO map. */
	set aoDetailMapTiling(arg: Vec2);
	get aoDetailMapTiling(): Vec2;

	/** Detail (secondary) AO map UV channel. */
	set aoDetailMapUv(arg: number);
	get aoDetailMapUv(): number;

	/** Determines how the main (primary) and detail (secondary) AO maps are blended together. Can be: - {@link DETAILMODE_MUL}: Multiply together the primary and secondary colors. - {@link DETAILMODE_ADD}: Add together the primary and secondary colors. - {@link DETAILMODE_SCREEN}: Softer version of {@link DETAILMODE_ADD}. - {@link DETAILMODE_OVERLAY}: Multiplies or screens the colors, depending on the primary color. - {@link DETAILMODE_MIN}: Select whichever of the primary and secondary colors is darker, component-wise. - {@link DETAILMODE_MAX}: Select whichever of the primary and secondary colors is lighter, component-wise. Defaults to {@link DETAILMODE_MUL}. */
	set aoDetailMode(arg: string);
	get aoDetailMode(): string;

	/** Use mesh vertex colors for AO. If aoMap is set, it'll be multiplied by vertex colors. */
	set aoVertexColor(arg: boolean);
	get aoVertexColor(): boolean;

	/** Vertex color channels to use for AO. Can be "r", "g", "b" or "a". */
	set aoVertexColorChannel(arg: string);
	get aoVertexColorChannel(): string;

	/** The bumpiness of the material. This value scales the assigned main (primary) normal map. It should be normally between 0 (no bump mapping) and 1 (full bump mapping), but can be set to e.g. 2 to give even more pronounced bump effect. */
	set bumpiness(arg: number);
	get bumpiness(): number;

	/** Defines intensity of clearcoat layer from 0 to 1. Clearcoat layer is disabled when clearCoat == 0. Default value is 0 (disabled). */
	set clearCoat(arg: number);
	get clearCoat(): number;

	/** The bumpiness of the clearcoat layer. This value scales the assigned main clearcoat normal map. It should be normally between 0 (no bump mapping) and 1 (full bump mapping), but can be set to e.g. 2 to give even more pronounced bump effect. */
	set clearCoatBumpiness(arg: number);
	get clearCoatBumpiness(): number;

	/** Invert the clearcoat gloss component (default is false). Enabling this flag results in material treating the clear coat gloss members as roughness. */
	set clearCoatGlossInvert(arg: boolean);
	get clearCoatGlossInvert(): boolean;

	/** The clearcoat gloss map of the material (default is null). */
	set clearCoatGlossMap(arg: Texture|null);
	get clearCoatGlossMap(): Texture|null;

	/** Color channel of the clearcoat gloss map to use. Can be "r", "g", "b" or "a". */
	set clearCoatGlossMapChannel(arg: string);
	get clearCoatGlossMapChannel(): string;

	/** Controls the 2D offset of the clearcoat gloss map. Each component is between 0 and 1. */
	set clearCoatGlossMapOffset(arg: Vec2);
	get clearCoatGlossMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the clear coat gloss map. */
	set clearCoatGlossMapRotation(arg: number);
	get clearCoatGlossMapRotation(): number;

	/** Controls the 2D tiling of the clearcoat gloss map. */
	set clearCoatGlossMapTiling(arg: Vec2);
	get clearCoatGlossMapTiling(): Vec2;

	/** Clearcoat gloss map UV channel. */
	set clearCoatGlossMapUv(arg: number);
	get clearCoatGlossMapUv(): number;

	/** Use mesh vertex colors for clearcoat glossiness. If clearCoatGlossMap is set, it'll be multiplied by vertex colors. */
	set clearCoatGlossVertexColor(arg: boolean);
	get clearCoatGlossVertexColor(): boolean;

	/** Vertex color channel to use for clearcoat glossiness. Can be "r", "g", "b" or "a". */
	set clearCoatGlossVertexColorChannel(arg: string);
	get clearCoatGlossVertexColorChannel(): string;

	/** Defines the clearcoat glossiness of the clearcoat layer from 0 (rough) to 1 (mirror). */
	set clearCoatGloss(arg: number);
	get clearCoatGloss(): number;

	/** The clearcoat intensity map of the material (default is null). */
	set clearCoatMap(arg: Texture|null);
	get clearCoatMap(): Texture|null;

	/** Color channel of the clearcoat intensity map to use. Can be "r", "g", "b" or "a". */
	set clearCoatMapChannel(arg: string);
	get clearCoatMapChannel(): string;

	/** Controls the 2D offset of the clearcoat intensity map. Each component is between 0 and 1. */
	set clearCoatMapOffset(arg: Vec2);
	get clearCoatMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the clearcoat intensity map. */
	set clearCoatMapRotation(arg: number);
	get clearCoatMapRotation(): number;

	/** Controls the 2D tiling of the clearcoat intensity map. */
	set clearCoatMapTiling(arg: Vec2);
	get clearCoatMapTiling(): Vec2;

	/** Clearcoat intensity map UV channel. */
	set clearCoatMapUv(arg: number);
	get clearCoatMapUv(): number;

	/** The clearcoat normal map of the material (default is null). */
	set clearCoatNormalMap(arg: Texture|null);
	get clearCoatNormalMap(): Texture|null;

	/** Controls the 2D offset of the main clearcoat normal map. Each component is between 0 and 1. */
	set clearCoatNormalMapOffset(arg: Vec2);
	get clearCoatNormalMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the main clearcoat map. */
	set clearCoatNormalMapRotation(arg: number);
	get clearCoatNormalMapRotation(): number;

	/** Controls the 2D tiling of the main clearcoat normal map. */
	set clearCoatNormalMapTiling(arg: Vec2);
	get clearCoatNormalMapTiling(): Vec2;

	/** Clearcoat normal map UV channel. */
	set clearCoatNormalMapUv(arg: number);
	get clearCoatNormalMapUv(): number;

	/** Use mesh vertex colors for clearcoat intensity. If clearCoatMap is set, it'll be multiplied by vertex colors. */
	set clearCoatVertexColor(arg: boolean);
	get clearCoatVertexColor(): boolean;

	/** Vertex color channel to use for clearcoat intensity. Can be "r", "g", "b" or "a". */
	set clearCoatVertexColorChannel(arg: string);
	get clearCoatVertexColorChannel(): string;

	/** The cube map of the material (default is null). */
	set cubeMap(arg: Texture|null);
	get cubeMap(): Texture|null;

	/** The type of projection applied to the cubeMap property: - {@link CUBEPROJ_NONE}: The cube map is treated as if it is infinitely far away. - {@link CUBEPROJ_BOX}: Box-projection based on a world space axis-aligned bounding box. Defaults to {@link CUBEPROJ_NONE}. */
	set cubeMapProjection(arg: number);
	get cubeMapProjection(): number;

	/** The world space axis-aligned bounding box defining the box-projection used for the cubeMap property. Only used when cubeMapProjection is set to {@link CUBEPROJ_BOX}. */
	set cubeMapProjectionBox(arg: BoundingBox);
	get cubeMapProjectionBox(): BoundingBox;

	/** The diffuse color of the material. This color value is 3-component (RGB), where each component is between 0 and 1. Defines basic surface color (aka albedo). */
	set diffuse(arg: Color);
	get diffuse(): Color;

	/** The detail (secondary) diffuse map of the material (default is null). Will only be used if the main (primary) diffuse map is non-null. */
	set diffuseDetailMap(arg: Texture|null);
	get diffuseDetailMap(): Texture|null;

	/** Color channels of the detail (secondary) diffuse map to use. Can be "r", "g", "b", "a", "rgb" or any swizzled combination. */
	set diffuseDetailMapChannel(arg: string);
	get diffuseDetailMapChannel(): string;

	/** Controls the 2D offset of the detail (secondary) diffuse map. Each component is between 0 and 1. */
	set diffuseDetailMapOffset(arg: Vec2);
	get diffuseDetailMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the detail (secondary) diffuse map. */
	set diffuseDetailMapRotation(arg: number);
	get diffuseDetailMapRotation(): number;

	/** Controls the 2D tiling of the detail (secondary) diffuse map. */
	set diffuseDetailMapTiling(arg: Vec2);
	get diffuseDetailMapTiling(): Vec2;

	/** Detail (secondary) diffuse map UV channel. */
	set diffuseDetailMapUv(arg: number);
	get diffuseDetailMapUv(): number;

	/** Determines how the main (primary) and detail (secondary) diffuse maps are blended together. Can be: - {@link DETAILMODE_MUL}: Multiply together the primary and secondary colors. - {@link DETAILMODE_ADD}: Add together the primary and secondary colors. - {@link DETAILMODE_SCREEN}: Softer version of {@link DETAILMODE_ADD}. - {@link DETAILMODE_OVERLAY}: Multiplies or screens the colors, depending on the primary color. - {@link DETAILMODE_MIN}: Select whichever of the primary and secondary colors is darker, component-wise. - {@link DETAILMODE_MAX}: Select whichever of the primary and secondary colors is lighter, component-wise. Defaults to {@link DETAILMODE_MUL}. */
	set diffuseDetailMode(arg: string);
	get diffuseDetailMode(): string;

	/** The main (primary) diffuse map of the material (default is null). */
	set diffuseMap(arg: Texture|null);
	get diffuseMap(): Texture|null;

	/** Color channels of the main (primary) diffuse map to use. Can be "r", "g", "b", "a", "rgb" or any swizzled combination. */
	set diffuseMapChannel(arg: string);
	get diffuseMapChannel(): string;

	/** Controls the 2D offset of the main (primary) diffuse map. Each component is between 0 and 1. */
	set diffuseMapOffset(arg: Vec2);
	get diffuseMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the main (primary) diffuse map. */
	set diffuseMapRotation(arg: number);
	get diffuseMapRotation(): number;

	/** Controls the 2D tiling of the main (primary) diffuse map. */
	set diffuseMapTiling(arg: Vec2);
	get diffuseMapTiling(): Vec2;

	/** Main (primary) diffuse map UV channel. */
	set diffuseMapUv(arg: number);
	get diffuseMapUv(): number;

	/** Multiply diffuse by the mesh vertex colors. */
	set diffuseVertexColor(arg: boolean);
	get diffuseVertexColor(): boolean;

	/** Vertex color channels to use for diffuse. Can be "r", "g", "b", "a", "rgb" or any swizzled combination. */
	set diffuseVertexColorChannel(arg: string);
	get diffuseVertexColorChannel(): string;

	/** The emissive color of the material. This color value is 3-component (RGB), where each component is between 0 and 1. */
	set emissive(arg: Color);
	get emissive(): Color;

	/** Emissive color multiplier. */
	set emissiveIntensity(arg: number);
	get emissiveIntensity(): number;

	/** The emissive map of the material (default is null). */
	set emissiveMap(arg: Texture|null);
	get emissiveMap(): Texture|null;

	/** Color channels of the emissive map to use. Can be "r", "g", "b", "a", "rgb" or any swizzled combination. */
	set emissiveMapChannel(arg: string);
	get emissiveMapChannel(): string;

	/** Controls the 2D offset of the emissive map. Each component is between 0 and 1. */
	set emissiveMapOffset(arg: Vec2);
	get emissiveMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the emissive map. */
	set emissiveMapRotation(arg: number);
	get emissiveMapRotation(): number;

	/** Controls the 2D tiling of the emissive map. */
	set emissiveMapTiling(arg: Vec2);
	get emissiveMapTiling(): Vec2;

	/** Emissive map UV channel. */
	set emissiveMapUv(arg: number);
	get emissiveMapUv(): number;

	/** Use mesh vertex colors for emission. If emissiveMap or emissive are set, they'll be multiplied by vertex colors. */
	set emissiveVertexColor(arg: boolean);
	get emissiveVertexColor(): boolean;

	/** Vertex color channels to use for emission. Can be "r", "g", "b", "a", "rgb" or any swizzled combination. */
	set emissiveVertexColorChannel(arg: string);
	get emissiveVertexColorChannel(): string;

	/** Enables GGX specular. Also enables {@link StandardMaterial#anisotropyIntensity} parameter to set material anisotropy. */
	set enableGGXSpecular(arg: boolean);
	get enableGGXSpecular(): boolean;

	/** The prefiltered environment lighting atlas of the material (default is null). NOTE(review): description reconstructed from the property name — confirm against the implementation. */
	set envAtlas(arg: Texture|null);
	get envAtlas(): Texture|null;

	/** Defines the formula used for Fresnel effect. As a side-effect, enabling any Fresnel model changes the way diffuse and reflection components are combined. When Fresnel is off, legacy non energy-conserving combining is used. When it is on, combining behavior is energy-conserving. - {@link FRESNEL_NONE}: No Fresnel. - {@link FRESNEL_SCHLICK}: Schlick's approximation of Fresnel (recommended). Parameterized by specular color.  */
	set fresnelModel(arg: number);
	get fresnelModel(): number;

	/** Defines the glossiness of the material from 0 (rough) to 1 (shiny). */
	set gloss(arg: number);
	get gloss(): number;

	/** Invert the gloss component (default is false). Enabling this flag results in material treating the gloss members as roughness. */
	set glossInvert(arg: boolean);
	get glossInvert(): boolean;

	/** The gloss map of the material (default is null). */
	set glossMap(arg: Texture|null);
	get glossMap(): Texture|null;

	/** Color channel of the gloss map to use. Can be "r", "g", "b" or "a". */
	set glossMapChannel(arg: string);
	get glossMapChannel(): string;

	/** Controls the 2D offset of the gloss map. Each component is between 0 and 1. */
	set glossMapOffset(arg: Vec2);
	get glossMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the gloss map. */
	set glossMapRotation(arg: number);
	get glossMapRotation(): number;

	/** Controls the 2D tiling of the gloss map. */
	set glossMapTiling(arg: Vec2);
	get glossMapTiling(): Vec2;

	/** Gloss map UV channel. */
	set glossMapUv(arg: number);
	get glossMapUv(): number;

	/** Use mesh vertex colors for glossiness. If glossMap is set, it'll be multiplied by vertex colors. */
	set glossVertexColor(arg: boolean);
	get glossVertexColor(): boolean;

	/** Vertex color channel to use for glossiness. Can be "r", "g", "b" or "a". */
	set glossVertexColorChannel(arg: string);
	get glossVertexColorChannel(): string;

	/** The height map of the material (default is null). */
	set heightMap(arg: Texture|null);
	get heightMap(): Texture|null;

	/** Color channel of the height map to use. Can be "r", "g", "b" or "a". */
	set heightMapChannel(arg: string);
	get heightMapChannel(): string;

	/** Height map multiplier. Affects the strength of the parallax effect. */
	set heightMapFactor(arg: number);
	get heightMapFactor(): number;

	/** Controls the 2D offset of the height map. Each component is between 0 and 1. */
	set heightMapOffset(arg: Vec2);
	get heightMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the height map. */
	set heightMapRotation(arg: number);
	get heightMapRotation(): number;

	/** Controls the 2D tiling of the height map. */
	set heightMapTiling(arg: Vec2);
	get heightMapTiling(): Vec2;

	/** Height map UV channel. */
	set heightMapUv(arg: number);
	get heightMapUv(): number;

	/** The light map of the material (default is null). */
	set lightMap(arg: Texture|null);
	get lightMap(): Texture|null;

	/** Color channels of the lightmap to use. Can be "r", "g", "b", "a", "rgb" or any swizzled combination. */
	set lightMapChannel(arg: string);
	get lightMapChannel(): string;

	/** Controls the 2D offset of the lightmap. Each component is between 0 and 1. */
	set lightMapOffset(arg: Vec2);
	get lightMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the lightmap. */
	set lightMapRotation(arg: number);
	get lightMapRotation(): number;

	/** Controls the 2D tiling of the lightmap. */
	set lightMapTiling(arg: Vec2);
	get lightMapTiling(): Vec2;

	/** Lightmap UV channel */
	set lightMapUv(arg: number);
	get lightMapUv(): number;

	/** Use baked vertex lighting. If lightMap is set, it'll be multiplied by vertex colors. */
	set lightVertexColor(arg: boolean);
	get lightVertexColor(): boolean;

	/** Vertex color channels to use for baked lighting. Can be "r", "g", "b", "a", "rgb" or any swizzled combination. */
	set lightVertexColorChannel(arg: string);
	get lightVertexColorChannel(): string;

	/** Defines how much the surface is metallic. From 0 (dielectric) to 1 (metal). */
	set metalness(arg: number);
	get metalness(): number;

	/** The metalness map of the material (default is null). */
	set metalnessMap(arg: Texture|null);
	get metalnessMap(): Texture|null;

	/** Color channel of the metalness map to use. Can be "r", "g", "b" or "a". */
	set metalnessMapChannel(arg: string);
	get metalnessMapChannel(): string;

	/** Controls the 2D offset of the metalness map. Each component is between 0 and 1. */
	set metalnessMapOffset(arg: Vec2);
	get metalnessMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the metalness map. */
	set metalnessMapRotation(arg: number);
	get metalnessMapRotation(): number;

	/** Controls the 2D tiling of the metalness map. */
	set metalnessMapTiling(arg: Vec2);
	get metalnessMapTiling(): Vec2;

	/** Metalness map UV channel. */
	set metalnessMapUv(arg: number);
	get metalnessMapUv(): number;

	/** Use mesh vertex colors for metalness. If metalnessMap is set, it'll be multiplied by vertex colors. */
	set metalnessVertexColor(arg: boolean);
	get metalnessVertexColor(): boolean;

	/** Vertex color channel to use for metalness. Can be "r", "g", "b" or "a". */
	set metalnessVertexColorChannel(arg: string);
	get metalnessVertexColorChannel(): string;

	/** The detail (secondary) normal map of the material (default is null). Will only be used if the main (primary) normal map is non-null. */
	set normalDetailMap(arg: Texture|null);
	get normalDetailMap(): Texture|null;

	/** The bumpiness of the material. This value scales the assigned detail (secondary) normal map. It should be normally between 0 (no bump mapping) and 1 (full bump mapping), but can be set to e.g. 2 to give even more pronounced bump effect. */
	set normalDetailMapBumpiness(arg: number);
	get normalDetailMapBumpiness(): number;

	/** Controls the 2D offset of the detail (secondary) normal map. Each component is between 0 and 1. */
	set normalDetailMapOffset(arg: Vec2);
	get normalDetailMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the detail (secondary) normal map. */
	set normalDetailMapRotation(arg: number);
	get normalDetailMapRotation(): number;

	/** Controls the 2D tiling of the detail (secondary) normal map. */
	set normalDetailMapTiling(arg: Vec2);
	get normalDetailMapTiling(): Vec2;

	/** Detail (secondary) normal map UV channel. */
	set normalDetailMapUv(arg: number);
	get normalDetailMapUv(): number;

	/** The main (primary) normal map of the material (default is null). */
	set normalMap(arg: Texture|null);
	get normalMap(): Texture|null;

	/** Controls the 2D offset of the main (primary) normal map. Each component is between 0 and 1. */
	set normalMapOffset(arg: Vec2);
	get normalMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the main (primary) normal map. */
	set normalMapRotation(arg: number);
	get normalMapRotation(): number;

	/** Controls the 2D tiling of the main (primary) normal map. */
	set normalMapTiling(arg: Vec2);
	get normalMapTiling(): Vec2;

	/** Main (primary) normal map UV channel. */
	set normalMapUv(arg: number);
	get normalMapUv(): number;

	
	/** Tells if AO should darken directional lighting. Defaults to false. NOTE(review): declared here as number, but the class-level JSDoc documents it as boolean — confirm which is correct. */
	set occludeDirect(arg: number);
	get occludeDirect(): number;

	/** Uses ambient occlusion to darken specular/reflection. It's a hack, because real specular occlusion is view-dependent. However, it can be better than nothing. - {@link SPECOCC_NONE}: No specular occlusion - {@link SPECOCC_AO}: Use AO directly to occlude specular. - {@link SPECOCC_GLOSSDEPENDENT}: Modify AO based on material glossiness/view angle to occlude specular.  */
	set occludeSpecular(arg: number);
	get occludeSpecular(): number;

	/** Controls visibility of specular occlusion. */
	set occludeSpecularIntensity(arg: number);
	get occludeSpecularIntensity(): number;

	/** The opacity of the material. This value can be between 0 and 1, where 0 is fully transparent and 1 is fully opaque. If you want the material to be semi-transparent you also need to set the {@link Material#blendType} to {@link BLEND_NORMAL}, {@link BLEND_ADDITIVE} or any other mode. Also note that for most semi-transparent objects you want {@link Material#depthWrite} to be false, otherwise they can fully occlude objects behind them. */
	set opacity(arg: number);
	get opacity(): number;

	/** Used to specify whether opacity is dithered, which allows transparency without alpha blending. Can be: - {@link DITHER_NONE}: Opacity dithering is disabled. - {@link DITHER_BAYER8}: Opacity is dithered using a Bayer 8 matrix. - {@link DITHER_BLUENOISE}: Opacity is dithered using a blue noise. - {@link DITHER_IGNNOISE}: Opacity is dithered using an interleaved gradient noise. Defaults to {@link DITHER_NONE}. */
	set opacityDither(arg: string);
	get opacityDither(): string;

	
	/** Used to specify whether opacity is dithered in the shadow pass. Presumably accepts the same values as {@link StandardMaterial#opacityDither} — confirm against the implementation. */
	set opacityShadowDither(arg: string);
	get opacityShadowDither(): string;

	/** Used to specify whether specular and reflections are faded out using {@link StandardMaterial#opacity}. Default is true. When set to false use {@link Material#alphaFade} to fade out materials. */
	set opacityFadesSpecular(arg: boolean);
	get opacityFadesSpecular(): boolean;

	/** The opacity map of the material (default is null). */
	set opacityMap(arg: Texture|null);
	get opacityMap(): Texture|null;

	/** Color channel of the opacity map to use. Can be "r", "g", "b" or "a". */
	set opacityMapChannel(arg: string);
	get opacityMapChannel(): string;

	/** Controls the 2D offset of the opacity map. Each component is between 0 and 1. */
	set opacityMapOffset(arg: Vec2);
	get opacityMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the opacity map. */
	set opacityMapRotation(arg: number);
	get opacityMapRotation(): number;

	/** Controls the 2D tiling of the opacity map. */
	set opacityMapTiling(arg: Vec2);
	get opacityMapTiling(): Vec2;

	/** Opacity map UV channel. */
	set opacityMapUv(arg: number);
	get opacityMapUv(): number;

	/** Use mesh vertex colors for opacity. If opacityMap is set, it'll be multiplied by vertex colors. */
	set opacityVertexColor(arg: boolean);
	get opacityVertexColor(): boolean;

	/** Vertex color channels to use for opacity. Can be "r", "g", "b" or "a". */
	set opacityVertexColorChannel(arg: string);
	get opacityVertexColorChannel(): string;

	/** Align vertices to pixel coordinates when rendering. Useful for pixel perfect 2D graphics. */
	set pixelSnap(arg: boolean);
	get pixelSnap(): boolean;

	/** Environment map intensity. */
	set reflectivity(arg: number);
	get reflectivity(): number;

	/** Defines the visibility of refraction. Material can refract the same cube map as used for reflections. */
	set refraction(arg: number);
	get refraction(): number;

	/** Defines the index of refraction, i.e. The amount of distortion. The value is calculated as (outerIor / surfaceIor), where inputs are measured indices of refraction, the one around the object and the one of its own surface. In most situations outer medium is air, so outerIor will be approximately 1. Then you only need to do (1.0 / surfaceIor). */
	set refractionIndex(arg: number);
	get refractionIndex(): number;

	/** The strength of the angular separation of colors (chromatic aberration) transmitting through a volume. Defaults to 0, which is equivalent to no dispersion. */
	set dispersion(arg: number);
	get dispersion(): number;

	/** When enabled, the material will output accumulated directional shadow value in linear space as the color. */
	set shadowCatcher(arg: boolean);
	get shadowCatcher(): boolean;

	/** The specular color of the material. This color value is 3-component (RGB), where each component is between 0 and 1. Defines surface reflection/specular color. Affects specular intensity and tint. */
	set specular(arg: Color);
	get specular(): Color;

	/** The specular map of the material (default is null). */
	set specularMap(arg: Texture|null);
	get specularMap(): Texture|null;

	/** Color channels of the specular map to use. Can be "r", "g", "b", "a", "rgb" or any swizzled combination. */
	set specularMapChannel(arg: string);
	get specularMapChannel(): string;

	/** Controls the 2D offset of the specular map. Each component is between 0 and 1. */
	set specularMapOffset(arg: Vec2);
	get specularMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the specular map. */
	set specularMapRotation(arg: number);
	get specularMapRotation(): number;

	/** Controls the 2D tiling of the specular map. */
	set specularMapTiling(arg: Vec2);
	get specularMapTiling(): Vec2;

	/** Specular map UV channel. */
	set specularMapUv(arg: number);
	get specularMapUv(): number;

	/** Multiply specular map and/or specular vertex color by the constant specular value. */
	set specularTint(arg: boolean);
	get specularTint(): boolean;

	/** Use mesh vertex colors for specular. If specularMap or specularTint are set, they'll be multiplied by vertex colors. */
	set specularVertexColor(arg: boolean);
	get specularVertexColor(): boolean;

	/** Vertex color channels to use for specular. Can be "r", "g", "b", "a", "rgb" or any swizzled combination. */
	set specularVertexColorChannel(arg: string);
	get specularVertexColorChannel(): string;

	/** The factor of specular intensity, used to weight the fresnel and specularity. Default is 1.0. */
	set specularityFactor(arg: number);
	get specularityFactor(): number;

	/** The specularity factor map of the material (default is null). */
	set specularityFactorMap(arg: Texture|null);
	get specularityFactorMap(): Texture|null;

	/** The channel used by the specularity factor texture to sample from (default is 'a'). */
	set specularityFactorMapChannel(arg: string);
	get specularityFactorMapChannel(): string;

	/** Controls the 2D offset of the specularity factor map. Each component is between 0 and 1. */
	set specularityFactorMapOffset(arg: Vec2);
	get specularityFactorMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the specularity factor map. */
	set specularityFactorMapRotation(arg: number);
	get specularityFactorMapRotation(): number;

	/** Controls the 2D tiling of the specularity factor map. */
	set specularityFactorMapTiling(arg: Vec2);
	get specularityFactorMapTiling(): Vec2;

	/** Specularity factor map UV channel. */
	set specularityFactorMapUv(arg: number);
	get specularityFactorMapUv(): number;

	/** Toggle sheen specular effect on/off. */
	set useSheen(arg: boolean);
	get useSheen(): boolean;

	/** The specular color of the sheen (fabric) microfiber structure. This color value is 3-component (RGB), where each component is between 0 and 1. */
	set sheen(arg: Color);
	get sheen(): Color;

	/** The sheen color map of the material (default is null). */
	set sheenMap(arg: Texture|null);
	get sheenMap(): Texture|null;

	/** Color channels of the sheen map to use. Can be "r", "g", "b", "a", "rgb" or any swizzled combination. */
	set sheenMapChannel(arg: string);
	get sheenMapChannel(): string;

	/** Controls the 2D offset of the sheen map. Each component is between 0 and 1. */
	set sheenMapOffset(arg: Vec2);
	get sheenMapOffset(): Vec2;

	/** Controls the 2D rotation (in degrees) of the sheen map. */
	set sheenMapRotation(arg: number);
	get sheenMapRotation(): number;

	/** Controls the 2D tiling of the sheen map. */
	set sheenMapTiling(arg: Vec2);
	get sheenMapTiling(): Vec2;

	/** Sheen map UV channel. */
	set sheenMapUv(arg: number);
	get sheenMapUv(): number;

	/** Use mesh vertex colors for sheen. If sheen map or sheen tint are set, they'll be multiplied by vertex colors. */
	set sheenVertexColor(arg: boolean);
	get sheenVertexColor(): boolean;

	
	set sheenVertexColorChannel(arg: string);
	get sheenVertexColorChannel(): string;

	/** The spherical environment map of the material (default is null). */
	set sphereMap(arg: Texture|null);
	get sphereMap(): Texture|null;

	/** Calculate proper normals (and therefore lighting) on backfaces. */
	set twoSidedLighting(arg: boolean);
	get twoSidedLighting(): boolean;

	/** Apply fogging (as configured in scene settings) */
	set useFog(arg: boolean);
	get useFog(): boolean;

	/** Apply tonemapping (as configured in {@link Scene#rendering} or {@link CameraComponent.rendering}). Defaults to true. */
	set useTonemap(arg: boolean);
	get useTonemap(): boolean;

	/** Apply lighting */
	set useLighting(arg: boolean);
	get useLighting(): boolean;

	/** Use metalness properties instead of specular. When enabled, diffuse colors also affect specular instead of the dedicated specular map. This can be used as alternative to specular color to save space. With metalness == 0, the pixel is assumed to be dielectric, and diffuse color is used as normal. With metalness == 1, the pixel is fully metallic, and diffuse color is used as specular color instead. */
	set useMetalness(arg: boolean);
	get useMetalness(): boolean;

	/** When metalness is enabled, use the specular map to apply color tint to specular reflections at direct angles. */
	set useMetalnessSpecularColor(arg: boolean);
	get useMetalnessSpecularColor(): boolean;

	/** Apply scene skybox as prefiltered environment map */
	set useSkybox(arg: boolean);
	get useSkybox(): boolean;


    // Internal per-material uniform cache — presumably keyed by uniform name
    // (see _allocUniform / getUniform below); verify against engine source.
    _uniformCache: {};
    /**
     * Copy a `StandardMaterial`.
     *
     * @param {StandardMaterial} source - The material to copy from.
     * @returns {StandardMaterial} The destination material.
     */
    copy(source: StandardMaterial): StandardMaterial;
    /**
     * Sets a vertex shader attribute on a material.
     *
     * @param {string} name - The name of the parameter to set.
     * @param {string} semantic - Semantic to map the vertex data. Must match with the semantic set
     * on vertex stream of the mesh.
     * @example
     * mesh.setVertexStream(pc.SEMANTIC_ATTR15, offset, 3);
     * material.setAttribute('offset', pc.SEMANTIC_ATTR15);
     */
    setAttribute(name: string, semantic: string): void;
    // Engine-internal members. Their implementations are not part of this
    // declaration file; the signatures below are intentionally untyped (`any`).
    _setParameter(name: any, value: any): void;
    _setParameters(parameters: any): void;
    _processParameters(paramsName: any): void;
    _updateMap(p: any): void;
    // Allocates (or returns a cached) uniform value via allocFunc.
    _allocUniform(name: any, allocFunc: any): any;
    getUniform(name: any, device: any, scene: any): any;
    updateEnvUniforms(device: any, scene: any): void;
    getShaderVariant(params: any): Shader;
}

/**
 * Manages creation of {@link ElementComponent}s.
 *
 * @category User Interface
 */
declare class ElementComponentSystem extends ComponentSystem {
    id: string;
    ComponentType: typeof ElementComponent;
    DataType: typeof ElementComponentData;
    schema: string[];
    // Pluggable text-processing hooks, registered via registerUnicodeConverter /
    // registerRtlReorder below.
    _unicodeConverter: any;
    _rtlReorder: any;
    _defaultTexture: Texture;
    // Default materials covering the image-element configurations:
    // plain / 9-sliced / 9-tiled, each with masked and screen-space variants.
    defaultImageMaterial: StandardMaterial;
    defaultImage9SlicedMaterial: StandardMaterial;
    defaultImage9TiledMaterial: StandardMaterial;
    defaultImageMaskMaterial: StandardMaterial;
    defaultImage9SlicedMaskMaterial: StandardMaterial;
    defaultImage9TiledMaskMaterial: StandardMaterial;
    defaultScreenSpaceImageMaterial: StandardMaterial;
    defaultScreenSpaceImage9SlicedMaterial: StandardMaterial;
    defaultScreenSpaceImage9TiledMaterial: StandardMaterial;
    defaultScreenSpaceImageMask9SlicedMaterial: StandardMaterial;
    defaultScreenSpaceImageMask9TiledMaterial: StandardMaterial;
    defaultScreenSpaceImageMaskMaterial: StandardMaterial;
    // Cache of default text materials — key scheme not visible here; see engine source.
    _defaultTextMaterials: {};
    defaultImageMaterials: any[];
    initializeComponentData(component: any, data: any, properties: any): void;
    onAddComponent(entity: any, component: any): void;
    onRemoveComponent(entity: any, component: any): void;
    cloneComponent(entity: any, clone: any): Component;
    // NOTE: parameter renamed from the misspelled `textAttibutes`; parameter
    // names in declaration files are documentation-only, so callers are unaffected.
    getTextElementMaterial(screenSpace: any, msdf: any, textAttributes: any): any;
    _createBaseImageMaterial(): StandardMaterial;
    getImageElementMaterial(screenSpace: any, mask: any, nineSliced: any, nineSliceTiled: any): StandardMaterial;
    registerUnicodeConverter(func: any): void;
    registerRtlReorder(func: any): void;
    getUnicodeConverter(): any;
    getRtlReorder(): any;
}

/**
 * ElementComponents are used to construct user interfaces. The {@link ElementComponent#type}
 * property can be configured in 3 main ways: as a text element, as an image element or as a group
 * element. If the ElementComponent has a {@link ScreenComponent} ancestor in the hierarchy, it
 * will be transformed with respect to the coordinate system of the screen. If there is no
 * {@link ScreenComponent} ancestor, the ElementComponent will be transformed like any other
 * entity.
 *
 * You should never need to use the ElementComponent constructor directly. To add an
 * ElementComponent to an {@link Entity}, use {@link Entity#addComponent}:
 *
 * ```javascript
 * const entity = new pc.Entity();
 * entity.addComponent('element'); // This defaults to a 'group' element
 * ```
 *
 * To create a simple text-based element:
 *
 * ```javascript
 * entity.addComponent('element', {
 *     anchor: new pc.Vec4(0.5, 0.5, 0.5, 0.5), // centered anchor
 *     fontAsset: fontAsset,
 *     fontSize: 128,
 *     pivot: new pc.Vec2(0.5, 0.5),            // centered pivot
 *     text: 'Hello World!',
 *     type: pc.ELEMENTTYPE_TEXT
 * });
 * ```
 *
 * Once the ElementComponent is added to the entity, you can access it via the
 * {@link Entity#element} property:
 *
 * ```javascript
 * entity.element.color = pc.Color.RED; // Set the element's color to red
 *
 * console.log(entity.element.color);   // Get the element's color and print it
 * ```
 *
 * Relevant Engine API examples:
 *
 * - [Basic text rendering](https://playcanvas.github.io/#/user-interface/text)
 * - [Auto font sizing](https://playcanvas.github.io/#/user-interface/text-auto-font-size)
 * - [Emojis](https://playcanvas.github.io/#/user-interface/text-emojis)
 * - [Text localization](https://playcanvas.github.io/#/user-interface/text-localization)
 * - [Typewriter text](https://playcanvas.github.io/#/user-interface/text-typewriter)
 *
 * @hideconstructor
 * @category User Interface
 */
declare class ElementComponent extends Component {
    /**
     * Fired when the mouse is pressed while the cursor is on the component. Only fired when
     * useInput is true. The handler is passed an {@link ElementMouseEvent}.
     *
     * @event
     * @example
     * entity.element.on('mousedown', (event) => {
     *     console.log(`Mouse down event on entity ${entity.name}`);
     * });
     */
    static EVENT_MOUSEDOWN: string;
    /**
     * Fired when the mouse is released while the cursor is on the component. Only fired when
     * useInput is true. The handler is passed an {@link ElementMouseEvent}.
     *
     * @event
     * @example
     * entity.element.on('mouseup', (event) => {
     *     console.log(`Mouse up event on entity ${entity.name}`);
     * });
     */
    static EVENT_MOUSEUP: string;
    /**
     * Fired when the mouse cursor enters the component. Only fired when useInput is true. The
     * handler is passed an {@link ElementMouseEvent}.
     *
     * @event
     * @example
     * entity.element.on('mouseenter', (event) => {
     *     console.log(`Mouse enter event on entity ${entity.name}`);
     * });
     */
    static EVENT_MOUSEENTER: string;
    /**
     * Fired when the mouse cursor leaves the component. Only fired when useInput is true. The
     * handler is passed an {@link ElementMouseEvent}.
     *
     * @event
     * @example
     * entity.element.on('mouseleave', (event) => {
     *     console.log(`Mouse leave event on entity ${entity.name}`);
     * });
     */
    static EVENT_MOUSELEAVE: string;
    /**
     * Fired when the mouse cursor is moved on the component. Only fired when useInput is true. The
     * handler is passed an {@link ElementMouseEvent}.
     *
     * @event
     * @example
     * entity.element.on('mousemove', (event) => {
     *     console.log(`Mouse move event on entity ${entity.name}`);
     * });
     */
    static EVENT_MOUSEMOVE: string;
    /**
     * Fired when the mouse wheel is scrolled on the component. Only fired when useInput is true.
     * The handler is passed an {@link ElementMouseEvent}.
     *
     * @event
     * @example
     * entity.element.on('mousewheel', (event) => {
     *     console.log(`Mouse wheel event on entity ${entity.name}`);
     * });
     */
    static EVENT_MOUSEWHEEL: string;
    /**
     * Fired when the mouse is pressed and released on the component or when a touch starts and
     * ends on the component. Only fired when useInput is true. The handler is passed an
     * {@link ElementMouseEvent} or {@link ElementTouchEvent}.
     *
     * @event
     * @example
     * entity.element.on('click', (event) => {
     *     console.log(`Click event on entity ${entity.name}`);
     * });
     */
    static EVENT_CLICK: string;
    /**
     * Fired when a touch starts on the component. Only fired when useInput is true. The handler is
     * passed an {@link ElementTouchEvent}.
     *
     * @event
     * @example
     * entity.element.on('touchstart', (event) => {
     *     console.log(`Touch start event on entity ${entity.name}`);
     * });
     */
    static EVENT_TOUCHSTART: string;
    /**
     * Fired when a touch ends on the component. Only fired when useInput is true. The handler is
     * passed an {@link ElementTouchEvent}.
     *
     * @event
     * @example
     * entity.element.on('touchend', (event) => {
     *     console.log(`Touch end event on entity ${entity.name}`);
     * });
     */
    static EVENT_TOUCHEND: string;
    /**
     * Fired when a touch moves after it started touching the component. Only fired when useInput
     * is true. The handler is passed an {@link ElementTouchEvent}.
     *
     * @event
     * @example
     * entity.element.on('touchmove', (event) => {
     *     console.log(`Touch move event on entity ${entity.name}`);
     * });
     */
    static EVENT_TOUCHMOVE: string;
    /**
     * Fired when a touch is canceled on the component. Only fired when useInput is true. The
     * handler is passed an {@link ElementTouchEvent}.
     *
     * @event
     * @example
     * entity.element.on('touchcancel', (event) => {
     *     console.log(`Touch cancel event on entity ${entity.name}`);
     * });
     */
    static EVENT_TOUCHCANCEL: string;
    /**
     * Create a new ElementComponent instance.
     *
     * @param {ElementComponentSystem} system - The ComponentSystem that created this Component.
     * @param {Entity} entity - The Entity that this Component is attached to.
     */
    constructor(system: ElementComponentSystem, entity: Entity);
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayersChanged;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayerAdded;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayerRemoved;
    _beingInitialized: boolean;
    _anchor: Vec4;
    _localAnchor: Vec4;
    _pivot: Vec2;
    _width: number;
    _calculatedWidth: number;
    _height: number;
    _calculatedHeight: number;
    _margin: Vec4;
    _modelTransform: Mat4;
    _screenToWorld: Mat4;
    _anchorTransform: Mat4;
    _anchorDirty: boolean;
    _parentWorldTransform: Mat4;
    _screenTransform: Mat4;
    _screenCorners: Vec3[];
    _canvasCorners: Vec2[];
    _worldCorners: Vec3[];
    _cornersDirty: boolean;
    _canvasCornersDirty: boolean;
    _worldCornersDirty: boolean;
    /**
     * The Entity with a {@link ScreenComponent} that this component belongs to. This is
     * automatically set when the component is a child of a ScreenComponent.
     *
     * @type {Entity|null}
     */
    screen: Entity | null;
    _type: string;
    _image: ImageElement;
    _text: TextElement;
    _group: any;
    _drawOrder: number;
    _fitMode: string;
    _useInput: boolean;
    _layers: number[];
    _addedModels: any[];
    _batchGroupId: number;
    _batchGroup: any;
    _offsetReadAt: number;
    _maskOffset: number;
    _maskedBy: any;
    /**
     * @type {ElementComponentData}
     * @ignore
     */
    get data(): ElementComponentData;
    /**
     * @type {number}
     * @private
     */
    private get _absLeft();
    /**
     * @type {number}
     * @private
     */
    private get _absRight();
    /**
     * @type {number}
     * @private
     */
    private get _absTop();
    /**
     * @type {number}
     * @private
     */
    private get _absBottom();
    /**
     * @type {boolean}
     * @private
     */
    private get _hasSplitAnchorsX();
    /**
     * @type {boolean}
     * @private
     */
    private get _hasSplitAnchorsY();
    /**
     * Gets the world space axis-aligned bounding box for this element component.
     *
     * @type {BoundingBox | null}
     */
    get aabb(): BoundingBox | null;
    /**
     * Sets the anchor for this element component. Specifies where the left, bottom, right and top
     * edges of the component are anchored relative to its parent. Each value ranges from 0 to 1.
     * e.g. a value of `[0, 0, 0, 0]` means that the element will be anchored to the bottom left of
     * its parent. A value of `[1, 1, 1, 1]` means it will be anchored to the top right. A split
     * anchor is when the left-right or top-bottom pairs of the anchor are not equal. In that case,
     * the component will be resized to cover that entire area. For example, a value of `[0, 0, 1, 1]`
     * will make the component resize exactly as its parent.
     *
     * @example
     * this.entity.element.anchor = new pc.Vec4(Math.random() * 0.1, 0, 1, 0);
     * @example
     * this.entity.element.anchor = [Math.random() * 0.1, 0, 1, 0];
     *
     * @type {Vec4 | number[]}
     */
    set anchor(value: Vec4 | number[]);
    /**
     * Gets the anchor for this element component.
     *
     * @type {Vec4 | number[]}
     */
    get anchor(): Vec4 | number[];
    /**
     * Sets the batch group (see {@link BatchGroup}) for this element. Default is -1 (no group).
     *
     * @type {number}
     */
    set batchGroupId(value: number);
    /**
     * Gets the batch group (see {@link BatchGroup}) for this element.
     *
     * @type {number}
     */
    get batchGroupId(): number;
    /**
     * Sets the distance from the bottom edge of the anchor. Can be used in combination with a
     * split anchor to make the component's top edge always be 'top' units away from the top.
     *
     * @type {number}
     */
    set bottom(value: number);
    /**
     * Gets the distance from the bottom edge of the anchor.
     *
     * @type {number}
     */
    get bottom(): number;
    /**
     * Sets the width at which the element will be rendered. In most cases this will be the same as
     * {@link width}. However, in some cases the engine may calculate a different width for the
     * element, such as when the element is under the control of a {@link LayoutGroupComponent}. In
     * these scenarios, `calculatedWidth` may be smaller or larger than the width that was set in
     * the editor.
     *
     * @type {number}
     */
    set calculatedWidth(value: number);
    /**
     * Gets the width at which the element will be rendered.
     *
     * @type {number}
     */
    get calculatedWidth(): number;
    /**
     * Sets the height at which the element will be rendered. In most cases this will be the same
     * as {@link height}. However, in some cases the engine may calculate a different height for
     * the element, such as when the element is under the control of a {@link LayoutGroupComponent}.
     * In these scenarios, `calculatedHeight` may be smaller or larger than the height that was set
     * in the editor.
     *
     * @type {number}
     */
    set calculatedHeight(value: number);
    /**
     * Gets the height at which the element will be rendered.
     *
     * @type {number}
     */
    get calculatedHeight(): number;
    /**
     * Gets the array of 4 {@link Vec2}s that represent the bottom left, bottom right, top right
     * and top left corners of the component in canvas pixels. Only works for screen space element
     * components.
     *
     * @type {Vec2[]}
     */
    get canvasCorners(): Vec2[];
    /**
     * Sets the draw order of the component. A higher value means that the component will be
     * rendered on top of other components.
     *
     * @type {number}
     */
    set drawOrder(value: number);
    /**
     * Gets the draw order of the component.
     *
     * @type {number}
     */
    get drawOrder(): number;
    /**
     * Sets the height of the element as set in the editor. Note that in some cases this may not
     * reflect the true height at which the element is rendered, such as when the element is under
     * the control of a {@link LayoutGroupComponent}. See {@link calculatedHeight} in order to
     * ensure you are reading the true height at which the element will be rendered.
     *
     * @type {number}
     */
    set height(value: number);
    /**
     * Gets the height of the element.
     *
     * @type {number}
     */
    get height(): number;
    /**
     * Sets the array of layer IDs ({@link Layer#id}) to which this element should belong. Don't
     * push, pop, splice or modify this array. If you want to change it, set a new one instead.
     *
     * @type {number[]}
     */
    set layers(value: number[]);
    /**
     * Gets the array of layer IDs ({@link Layer#id}) to which this element belongs.
     *
     * @type {number[]}
     */
    get layers(): number[];
    /**
     * Sets the distance from the left edge of the anchor. Can be used in combination with a split
     * anchor to make the component's left edge always be 'left' units away from the left.
     *
     * @type {number}
     */
    set left(value: number);
    /**
     * Gets the distance from the left edge of the anchor.
     *
     * @type {number}
     */
    get left(): number;
    /**
     * Sets the distance from the left, bottom, right and top edges of the anchor. For example, if
     * we are using a split anchor like `[0, 0, 1, 1]` and the margin is `[0, 0, 0, 0]` then the
     * component will be the same width and height as its parent.
     *
     * @type {Vec4}
     */
    set margin(value: Vec4);
    /**
     * Gets the distance from the left, bottom, right and top edges of the anchor.
     *
     * @type {Vec4}
     */
    get margin(): Vec4;
    /**
     * Gets the entity that is currently masking this element.
     *
     * @type {Entity}
     * @private
     */
    private get maskedBy();
    /**
     * Sets the position of the pivot of the component relative to its anchor. Each value ranges
     * from 0 to 1 where `[0, 0]` is the bottom left and `[1, 1]` is the top right.
     *
     * @example
     * this.entity.element.pivot = [Math.random() * 0.1, Math.random() * 0.1];
     * @example
     * this.entity.element.pivot = new pc.Vec2(Math.random() * 0.1, Math.random() * 0.1);
     *
     * @type {Vec2 | number[]}
     */
    set pivot(value: Vec2 | number[]);
    /**
     * Gets the position of the pivot of the component relative to its anchor.
     *
     * @type {Vec2 | number[]}
     */
    get pivot(): Vec2 | number[];
    /**
     * Sets the distance from the right edge of the anchor. Can be used in combination with a split
     * anchor to make the component's right edge always be 'right' units away from the right.
     *
     * @type {number}
     */
    set right(value: number);
    /**
     * Gets the distance from the right edge of the anchor.
     *
     * @type {number}
     */
    get right(): number;
    /**
     * Gets the array of 4 {@link Vec3}s that represent the bottom left, bottom right, top right
     * and top left corners of the component relative to its parent {@link ScreenComponent}.
     *
     * @type {Vec3[]}
     */
    get screenCorners(): Vec3[];
    /**
     * Gets the width of the text rendered by the component. Only works for
     * {@link ELEMENTTYPE_TEXT} types.
     *
     * @type {number}
     */
    get textWidth(): number;
    /**
     * Gets the height of the text rendered by the component. Only works for
     * {@link ELEMENTTYPE_TEXT} types.
     *
     * @type {number}
     */
    get textHeight(): number;
    /**
     * Sets the distance from the top edge of the anchor. Can be used in combination with a split
     * anchor to make the component's bottom edge always be 'bottom' units away from the bottom.
     *
     * @type {number}
     */
    set top(value: number);
    /**
     * Gets the distance from the top edge of the anchor.
     *
     * @type {number}
     */
    get top(): number;
    /**
     * Sets the type of the ElementComponent. Can be:
     *
     * - {@link ELEMENTTYPE_GROUP}: The component can be used as a layout mechanism to create
     * groups of ElementComponents e.g. panels.
     * - {@link ELEMENTTYPE_IMAGE}: The component will render an image
     * - {@link ELEMENTTYPE_TEXT}: The component will render text
     *
     * @type {string}
     */
    set type(value: string);
    /**
     * Gets the type of the ElementComponent.
     *
     * @type {string}
     */
    get type(): string;
    /**
     * Sets whether the component will receive mouse and touch input events.
     *
     * @type {boolean}
     */
    set useInput(value: boolean);
    /**
     * Gets whether the component will receive mouse and touch input events.
     *
     * @type {boolean}
     */
    get useInput(): boolean;
    /**
     * Sets the fit mode of the element. Controls how the content should be fitted and preserve the
     * aspect ratio of the source texture or sprite. Only works for {@link ELEMENTTYPE_IMAGE}
     * types. Can be:
     *
     * - {@link FITMODE_STRETCH}: Fit the content exactly to Element's bounding box.
     * - {@link FITMODE_CONTAIN}: Fit the content within the Element's bounding box while
     * preserving its Aspect Ratio.
     * - {@link FITMODE_COVER}: Fit the content to cover the entire Element's bounding box while
     * preserving its Aspect Ratio.
     *
     * @type {string}
     */
    set fitMode(value: string);
    /**
     * Gets the fit mode of the element.
     *
     * @type {string}
     */
    get fitMode(): string;
    /**
     * Sets the width of the element as set in the editor. Note that in some cases this may not
     * reflect the true width at which the element is rendered, such as when the element is under
     * the control of a {@link LayoutGroupComponent}. See {@link calculatedWidth} in order to
     * ensure you are reading the true width at which the element will be rendered.
     *
     * @type {number}
     */
    set width(value: number);
    /**
     * Gets the width of the element.
     *
     * @type {number}
     */
    get width(): number;
    /**
     * Gets the array of 4 {@link Vec3}s that represent the bottom left, bottom right, top right
     * and top left corners of the component in world space. Only works for 3D element components.
     *
     * @type {Vec3[]}
     */
    get worldCorners(): Vec3[];
    /**
     * Sets the size of the font. Only works for {@link ELEMENTTYPE_TEXT} types.
     *
     * @type {number}
     */
    set fontSize(arg: number);
    /**
     * Gets the size of the font.
     *
     * @type {number}
     */
    get fontSize(): number;
    /**
     * Sets the minimum size that the font can scale to when {@link autoFitWidth} or
     * {@link autoFitHeight} are true.
     *
     * @type {number}
     */
    set minFontSize(arg: number);
    /**
     * Gets the minimum size that the font can scale to when {@link autoFitWidth} or
     * {@link autoFitHeight} are true.
     *
     * @type {number}
     */
    get minFontSize(): number;
    /**
     * Sets the maximum size that the font can scale to when {@link autoFitWidth} or
     * {@link autoFitHeight} are true.
     *
     * @type {number}
     */
    set maxFontSize(arg: number);
    /**
     * Gets the maximum size that the font can scale to when {@link autoFitWidth} or
     * {@link autoFitHeight} are true.
     *
     * @type {number}
     */
    get maxFontSize(): number;
    /**
     * Sets the maximum number of lines that the Element can wrap to. Any leftover text will be
     * appended to the last line. Set this to null to allow unlimited lines.
     *
     * @type {number|null}
     */
    set maxLines(arg: number | null);
    /**
     * Gets the maximum number of lines that the Element can wrap to. Returns null for unlimited
     * lines.
     *
     * @type {number|null}
     */
    get maxLines(): number | null;
    /**
     * Sets whether the font size and line height will scale so that the text fits inside the width
     * of the Element. The font size will be scaled between {@link minFontSize} and
     * {@link maxFontSize}. The value of {@link autoFitWidth} will be ignored if {@link autoWidth}
     * is true.
     *
     * @type {boolean}
     */
    set autoFitWidth(arg: boolean);
    /**
     * Gets whether the font size and line height will scale so that the text fits inside the width
     * of the Element.
     *
     * @type {boolean}
     */
    get autoFitWidth(): boolean;
    /**
     * Sets whether the font size and line height will scale so that the text fits inside the
     * height of the Element. The font size will be scaled between {@link minFontSize} and
     * {@link maxFontSize}. The value of {@link autoFitHeight} will be ignored if
     * {@link autoHeight} is true.
     *
     * @type {boolean}
     */
    set autoFitHeight(arg: boolean);
    /**
     * Gets whether the font size and line height will scale so that the text fits inside the
     * height of the Element.
     *
     * @type {boolean}
     */
    get autoFitHeight(): boolean;
    /**
     * Sets the color of the image for {@link ELEMENTTYPE_IMAGE} types or the color of the text for
     * {@link ELEMENTTYPE_TEXT} types.
     *
     * @type {Color}
     */
    set color(arg: Color);
    /**
     * Gets the color of the element.
     *
     * @type {Color}
     */
    get color(): Color;
    /**
     * Sets the font used for rendering the text. Only works for {@link ELEMENTTYPE_TEXT} types.
     *
     * @type {Font|CanvasFont}
     */
    set font(arg: Font | CanvasFont);
    /**
     * Gets the font used for rendering the text.
     *
     * @type {Font|CanvasFont}
     */
    get font(): Font | CanvasFont;
    /**
     * Sets the id of the font asset used for rendering the text. Only works for {@link ELEMENTTYPE_TEXT}
     * types.
     *
     * @type {number}
     */
    set fontAsset(arg: number);
    /**
     * Gets the id of the font asset used for rendering the text.
     *
     * @type {number}
     */
    get fontAsset(): number;
    /**
     * Sets the spacing between the letters of the text. Only works for {@link ELEMENTTYPE_TEXT} types.
     *
     * @type {number}
     */
    set spacing(arg: number);
    /**
     * Gets the spacing between the letters of the text.
     *
     * @type {number}
     */
    get spacing(): number;
    /**
     * Sets the height of each line of text. Only works for {@link ELEMENTTYPE_TEXT} types.
     *
     * @type {number}
     */
    set lineHeight(arg: number);
    /**
     * Gets the height of each line of text.
     *
     * @type {number}
     */
    get lineHeight(): number;
    /**
     * Sets whether to automatically wrap lines based on the element width. Only works for
     * {@link ELEMENTTYPE_TEXT} types, and when {@link autoWidth} is set to false.
     *
     * @type {boolean}
     */
    set wrapLines(arg: boolean);
    /**
     * Gets whether to automatically wrap lines based on the element width.
     *
     * @type {boolean}
     */
    get wrapLines(): boolean;
    set lines(arg: any[]);
    get lines(): any[];
    /**
     * Sets the horizontal and vertical alignment of the text. Values range from 0 to 1 where
     * `[0, 0]` is the bottom left and `[1, 1]` is the top right.  Only works for
     * {@link ELEMENTTYPE_TEXT} types.
     *
     * @type {Vec2}
     */
    set alignment(arg: Vec2);
    /**
     * Gets the horizontal and vertical alignment of the text.
     *
     * @type {Vec2}
     */
    get alignment(): Vec2;
    /**
     * Sets whether to automatically set the width of the component to be the same as the
     * {@link textWidth}. Only works for {@link ELEMENTTYPE_TEXT} types.
     *
     * @type {boolean}
     */
    set autoWidth(arg: boolean);
    /**
     * Gets whether to automatically set the width of the component to be the same as the
     * {@link textWidth}.
     *
     * @type {boolean}
     */
    get autoWidth(): boolean;
    /**
     * Sets whether to automatically set the height of the component to be the same as the
     * {@link textHeight}. Only works for {@link ELEMENTTYPE_TEXT} types.
     *
     * @type {boolean}
     */
    set autoHeight(arg: boolean);
    /**
     * Gets whether to automatically set the height of the component to be the same as the
     * {@link textHeight}.
     *
     * @type {boolean}
     */
    get autoHeight(): boolean;
    /**
     * Sets whether to reorder the text for RTL languages. The reordering uses a function
     * registered by `app.systems.element.registerRtlReorder`.
     *
     * @type {boolean}
     */
    set rtlReorder(arg: boolean);
    /**
     * Gets whether to reorder the text for RTL languages.
     *
     * @type {boolean}
     */
    get rtlReorder(): boolean;
    /**
     * Sets whether to convert unicode characters. This uses a function registered by
     * `app.systems.element.registerUnicodeConverter`.
     *
     * @type {boolean}
     */
    set unicodeConverter(arg: boolean);
    /**
     * Gets whether to convert unicode characters.
     *
     * @type {boolean}
     */
    get unicodeConverter(): boolean;
    /**
     * Sets the text to render. Only works for {@link ELEMENTTYPE_TEXT} types. To override certain
     * text styling properties on a per-character basis, the text can optionally include markup
     * tags contained within square brackets. Supported tags are:
     *
     * 1. `color` - override the element's {@link color} property. Examples:
     *     - `[color="#ff0000"]red text[/color]`
     *     - `[color="#00ff00"]green text[/color]`
     *     - `[color="#0000ff"]blue text[/color]`
     * 2. `outline` - override the element's {@link outlineColor} and {@link outlineThickness}
     * properties. Example:
     *     - `[outline color="#ffffff" thickness="0.5"]text[/outline]`
     * 3. `shadow` - override the element's {@link shadowColor} and {@link shadowOffset}
     * properties. Examples:
     *     - `[shadow color="#ffffff" offset="0.5"]text[/shadow]`
     *     - `[shadow color="#000000" offsetX="0.1" offsetY="0.2"]text[/shadow]`
     *
     * Note that markup tags are only processed if the text element's {@link enableMarkup} property
     * is set to true.
     *
     * @type {string}
     */
    set text(arg: string);
    /**
     * Gets the text to render.
     *
     * @type {string}
     */
    get text(): string;
    /**
     * Sets the localization key to use to get the localized text from {@link Application#i18n}.
     * Only works for {@link ELEMENTTYPE_TEXT} types.
     *
     * @type {string}
     */
    set key(arg: string);
    /**
     * Gets the localization key to use to get the localized text from {@link Application#i18n}.
     *
     * @type {string}
     */
    get key(): string;
    /**
     * Sets the texture to render. Only works for {@link ELEMENTTYPE_IMAGE} types.
     *
     * @type {Texture}
     */
    set texture(arg: Texture);
    /**
     * Gets the texture to render.
     *
     * @type {Texture}
     */
    get texture(): Texture;
    /**
     * Sets the id of the texture asset to render. Only works for {@link ELEMENTTYPE_IMAGE} types.
     *
     * @type {number}
     */
    set textureAsset(arg: number);
    /**
     * Gets the id of the texture asset to render.
     *
     * @type {number}
     */
    get textureAsset(): number;
    /**
     * Sets the material to use when rendering an image. Only works for
     * {@link ELEMENTTYPE_IMAGE} types.
     *
     * @type {Material}
     */
    set material(arg: Material);
    /**
     * Gets the material to use when rendering an image.
     *
     * @type {Material}
     */
    get material(): Material;
    /**
     * Sets the id of the material asset to use when rendering an image. Only works for
     * {@link ELEMENTTYPE_IMAGE} types.
     *
     * @type {number}
     */
    set materialAsset(arg: number);
    /**
     * Gets the id of the material asset to use when rendering an image.
     *
     * @type {number}
     */
    get materialAsset(): number;
    /**
     * Sets the sprite to render. Only works for {@link ELEMENTTYPE_IMAGE} types which can render
     * either a texture or a sprite.
     *
     * @type {Sprite}
     */
    set sprite(arg: Sprite);
    /**
     * Gets the sprite to render.
     *
     * @type {Sprite}
     */
    get sprite(): Sprite;
    /**
     * Sets the id of the sprite asset to render. Only works for {@link ELEMENTTYPE_IMAGE} types which
     * can render either a texture or a sprite.
     *
     * @type {number}
     */
    set spriteAsset(arg: number);
    /**
     * Gets the id of the sprite asset to render.
     *
     * @type {number}
     */
    get spriteAsset(): number;
    /**
     * Sets the frame of the sprite to render. Only works for {@link ELEMENTTYPE_IMAGE} types that have a
     * sprite assigned.
     *
     * @type {number}
     */
    set spriteFrame(arg: number);
    /**
     * Gets the frame of the sprite to render.
     *
     * @type {number}
     */
    get spriteFrame(): number;
    /**
     * Sets the number of pixels that map to one PlayCanvas unit. Only works for
     * {@link ELEMENTTYPE_IMAGE} types that have a sliced sprite assigned.
     *
     * @type {number}
     */
    set pixelsPerUnit(arg: number);
    /**
     * Gets the number of pixels that map to one PlayCanvas unit.
     *
     * @type {number}
     */
    get pixelsPerUnit(): number;
    /**
     * Sets the opacity of the element. This works for both {@link ELEMENTTYPE_IMAGE} and
     * {@link ELEMENTTYPE_TEXT} element types.
     *
     * @type {number}
     */
    set opacity(arg: number);
    /**
     * Gets the opacity of the element.
     *
     * @type {number}
     */
    get opacity(): number;
    /**
     * Sets the region of the texture to use in order to render an image. Values range from 0 to 1
     * and indicate u, v, width, height. Only works for {@link ELEMENTTYPE_IMAGE} types.
     *
     * @type {Vec4}
     */
    set rect(arg: Vec4);
    /**
     * Gets the region of the texture to use in order to render an image.
     *
     * @type {Vec4}
     */
    get rect(): Vec4;
    /**
     * Sets whether the Image Element should be treated as a mask. Masks do not render into the
     * scene, but instead limit child elements to only be rendered where this element is rendered.
     *
     * @type {boolean}
     */
    set mask(arg: boolean);
    /**
     * Gets whether the Image Element should be treated as a mask.
     *
     * @type {boolean}
     */
    get mask(): boolean;
    /**
     * Sets the text outline effect color and opacity. Only works for {@link ELEMENTTYPE_TEXT} types.
     *
     * @type {Color}
     */
    set outlineColor(arg: Color);
    /**
     * Gets the text outline effect color and opacity.
     *
     * @type {Color}
     */
    get outlineColor(): Color;
    /**
     * Sets the width of the text outline effect. Only works for {@link ELEMENTTYPE_TEXT} types.
     *
     * @type {number}
     */
    set outlineThickness(arg: number);
    /**
     * Gets the width of the text outline effect.
     *
     * @type {number}
     */
    get outlineThickness(): number;
    /**
     * Sets the text shadow effect color and opacity. Only works for {@link ELEMENTTYPE_TEXT} types.
     *
     * @type {Color}
     */
    set shadowColor(arg: Color);
    /**
     * Gets the text shadow effect color and opacity.
     *
     * @type {Color}
     */
    get shadowColor(): Color;
    /**
     * Sets the text shadow effect shift amount from original text. Only works for
     * {@link ELEMENTTYPE_TEXT} types.
     *
     * NOTE(review): declared as `number` here, but the markup example for {@link text} uses
     * separate `offsetX`/`offsetY` values — confirm against the engine source whether this
     * property is actually a 2D offset.
     *
     * @type {number}
     */
    set shadowOffset(arg: number);
    /**
     * Gets the text shadow effect shift amount from original text.
     *
     * @type {number}
     */
    get shadowOffset(): number;
    /**
     * Sets whether markup processing is enabled for this element. Only works for
     * {@link ELEMENTTYPE_TEXT} types. Defaults to false.
     *
     * @type {boolean}
     */
    set enableMarkup(arg: boolean);
    /**
     * Gets whether markup processing is enabled for this element.
     *
     * @type {boolean}
     */
    get enableMarkup(): boolean;
    /**
     * Sets the index of the first character to render. Only works for {@link ELEMENTTYPE_TEXT} types.
     *
     * @type {number}
     */
    set rangeStart(arg: number);
    /**
     * Gets the index of the first character to render.
     *
     * @type {number}
     */
    get rangeStart(): number;
    /**
     * Sets the index of the last character to render. Only works for {@link ELEMENTTYPE_TEXT} types.
     *
     * @type {number}
     */
    set rangeEnd(arg: number);
    /**
     * Gets the index of the last character to render.
     *
     * @type {number}
     */
    get rangeEnd(): number;
    /** @ignore */
    _setValue(name: any, value: any): void;
    // -----------------------------------------------------------------------
    // Internal engine API. The underscore-prefixed and undocumented members
    // below are not part of the public ElementComponent interface. These are
    // declaration signatures only — behavior lives in the engine source.
    // -----------------------------------------------------------------------
    _patch(): void;
    _unpatch(): void;
    /**
     * Patched method for setting the position.
     *
     * @param {number|Vec3} x - The x coordinate or Vec3
     * @param {number} [y] - The y coordinate
     * @param {number} [z] - The z coordinate
     * @private
     */
    private _setPosition;
    /**
     * Patched method for setting the local position.
     *
     * @param {number|Vec3} x - The x coordinate or Vec3
     * @param {number} [y] - The y coordinate
     * @param {number} [z] - The z coordinate
     * @private
     */
    private _setLocalPosition;
    _sync(): void;
    _dirtyLocal: boolean;
    _dirtyWorld: boolean;
    _onInsert(parent: any): void;
    _dirtifyMask(): void;
    _onPrerender(): void;
    _bindScreen(screen: any): void;
    _unbindScreen(screen: any): void;
    _updateScreen(screen: any): void;
    syncMask(depth: any): void;
    _setMaskedBy(mask: any): void;
    _updateMask(currentMask: any, depth: any): void;
    _parseUpToScreen(): {
        screen: any;
        mask: any;
    };
    _onScreenResize(res: any): void;
    _onScreenSpaceChange(): void;
    _onScreenRemove(): void;
    _calculateLocalAnchors(): void;
    getOffsetPosition(x: any, y: any): Vec3;
    onLayersChanged(oldComp: any, newComp: any): void;
    onLayerAdded(layer: any): void;
    onLayerRemoved(layer: any): void;
    onRemove(): void;
    /**
     * Recalculates these properties:
     *   - `_localAnchor`
     *   - `width`
     *   - `height`
     *   - Local position is updated if anchors are split
     *
     * Assumes these properties are up to date:
     *   - `_margin`
     *
     * @param {boolean} propagateCalculatedWidth - If true, call `_setWidth` instead
     * of `_setCalculatedWidth`
     * @param {boolean} propagateCalculatedHeight - If true, call `_setHeight` instead
     * of `_setCalculatedHeight`
     * @private
     */
    private _calculateSize;
    _sizeDirty: boolean;
    /**
     * Internal set width without updating margin.
     *
     * @param {number} w - The new width.
     * @private
     */
    private _setWidth;
    /**
     * Internal set height without updating margin.
     *
     * @param {number} h - The new height.
     * @private
     */
    private _setHeight;
    /**
     * This method sets the calculated width value and optionally updates the margins.
     *
     * @param {number} value - The new calculated width.
     * @param {boolean} updateMargins - Update margins or not.
     * @private
     */
    private _setCalculatedWidth;
    /**
     * This method sets the calculated height value and optionally updates the margins.
     *
     * @param {number} value - The new calculated height.
     * @param {boolean} updateMargins - Update margins or not.
     * @private
     */
    private _setCalculatedHeight;
    _flagChildrenAsDirty(): void;
    addModelToLayers(model: any): void;
    removeModelFromLayers(model: any): void;
    getMaskOffset(): number;
    isVisibleForCamera(camera: any): boolean;
    _isScreenSpace(): boolean;
    _isScreenCulled(): boolean;
    _dirtyBatch(): void;
}

/**
 * The data class for {@link GSplatComponent} — used as the
 * {@link GSplatComponentSystem#DataType} by the component system.
 */
declare class GSplatComponentData {
    // Whether the component is enabled.
    enabled: boolean;
}

/**
 * Manages creation of {@link GSplatComponent}s, which allow an Entity to render a gsplat.
 *
 * @category Graphics
 */
declare class GSplatComponentSystem extends ComponentSystem {
    /**
     * Fired when a GSplat material is created for a camera and layer combination. In unified
     * mode, materials are created during the first frame update when the GSplat is rendered.
     * The handler is passed the {@link ShaderMaterial}, the {@link CameraComponent}, and
     * the {@link Layer}.
     *
     * This event is useful for setting up custom material chunks and parameters before the
     * first render.
     *
     * @event
     * @example
     * app.systems.gsplat.on('material:created', (material, camera, layer) => {
     *     console.log(`Material created for camera ${camera.entity.name} on layer ${layer.name}`);
     *     // Set custom material parameters before first render
     *     material.setParameter('myParam', value);
     * });
     */
    static EVENT_MATERIALCREATED: string;
    /**
     * Fired every frame for each camera and layer combination rendering GSplats in unified mode.
     * The handler is passed the {@link CameraComponent}, the {@link Layer}, a boolean indicating
     * if the current frame has up-to-date sorting, and a number indicating how many resources are
     * loading.
     *
     * The `ready` parameter indicates whether the current frame reflects all recent changes (camera
     * movement, splat transforms, lod updates, etc.) with the latest sorting applied. The `loadingCount`
     * parameter reports the total number of octree LOD resources currently loading or queued to load.
     *
     * This event is useful for video capture or other workflows that need to wait for frames
     * to be fully ready. Only capture frames and move camera to next position when both
     * `ready === true` and `loadingCount === 0`. Note that `loadingCount` can be used as a boolean
     * in conditionals (0 is falsy, non-zero is truthy) for backward compatibility.
     *
     * @event
     * @example
     * // Wait for frame to be ready before capturing
     * app.systems.gsplat.on('frame:ready', (camera, layer, ready, loadingCount) => {
     *     if (ready && !loadingCount) {
     *         console.log(`Frame ready to capture for camera ${camera.entity.name}`);
     *         // Capture frame here
     *     }
     * });
     * @example
     * // Track loading progress (0..1)
     * let maxLoadingCount = 0;
     * app.systems.gsplat.on('frame:ready', (camera, layer, ready, loadingCount) => {
     *     maxLoadingCount = Math.max(maxLoadingCount, loadingCount);
     *     const progress = maxLoadingCount > 0 ? (maxLoadingCount - loadingCount) / maxLoadingCount : 1;
     *     console.log(`Loading progress: ${(progress * 100).toFixed(1)}%`);
     * });
     */
    static EVENT_FRAMEREADY: string;
    id: string;
    ComponentType: typeof GSplatComponent;
    DataType: typeof GSplatComponentData;
    schema: string[];
    initializeComponentData(component: any, _data: any, properties: any): void;
    cloneComponent(entity: any, clone: any): Component;
    onRemove(entity: any, component: any): void;
    /**
     * Gets the GSplat material used by unified GSplat rendering for the given camera and layer.
     *
     * Returns null if the material hasn't been created yet. In unified mode, materials are created
     * during the first frame update when the GSplat is rendered. To be notified immediately when
     * materials are created, listen to the 'material:created' event on GSplatComponentSystem
     * (see the example below).
     *
     * @param {Camera} camera - The camera instance.
     * @param {Layer} layer - The layer instance.
     * @returns {ShaderMaterial|null} The material, or null if not created yet.
     * @example
     * app.systems.gsplat.on('material:created', (material, camera, layer) => {
     *     // Material is now available
     *     material.setParameter('myParam', value);
     * });
     */
    getMaterial(camera: Camera, layer: Layer): ShaderMaterial | null;
    /**
     * NOTE(review): undocumented in the original declarations — appears to be a legacy
     * variant of {@link getMaterial} with untyped parameters; confirm intended usage and
     * visibility against the engine source.
     */
    getGSplatMaterial(camera: any, layer: any): ShaderMaterial;
}

/**
 * @import { BoundingBox } from '../../../core/shape/bounding-box.js'
 * @import { Entity } from '../../entity.js'
 * @import { EventHandle } from '../../../core/event-handle.js'
 * @import { GSplatComponentSystem } from './system.js'
 * @import { GSplatResourceBase } from '../../../scene/gsplat/gsplat-resource-base.js'
 * @import { ScopeId } from '../../../platform/graphics/scope-id.js'
 * @import { ShaderMaterial } from '../../../scene/materials/shader-material.js'
 * @import { StorageBuffer } from '../../../platform/graphics/storage-buffer.js'
 * @import { Texture } from '../../../platform/graphics/texture.js'
 */
/**
 * The GSplatComponent enables an {@link Entity} to render 3D Gaussian Splats. Splats are always
 * loaded from {@link Asset}s rather than being created programmatically. The asset type is
 * `gsplat` which supports multiple file formats including `.ply`, `.sog`, `.meta.json` (SOG
 * format), and `.lod-meta.json` (streaming LOD format).
 *
 * You should never need to use the GSplatComponent constructor directly. To add an
 * GSplatComponent to an {@link Entity}, use {@link Entity#addComponent}:
 *
 * ```javascript
 * const entity = pc.Entity();
 * entity.addComponent('gsplat', {
 *     asset: asset
 * });
 * ```
 *
 * Once the GSplatComponent is added to the entity, you can access it via the {@link Entity#gsplat}
 * property:
 *
 * ```javascript
 * entity.gsplat.customAabb = new pc.BoundingBox(new pc.Vec3(), new pc.Vec3(10, 10, 10));
 *
 * console.log(entity.gsplat.customAabb);
 * ```
 *
 * ## Unified Rendering
 *
 * The {@link GSplatComponent#unified} property enables unified rendering mode, which provides
 * advanced features for Gaussian Splats:
 *
 * - **Global Sorting**: Multiple splat components are sorted together in a single unified sort,
 *   eliminating visibility artifacts and popping effects when splat components overlap.
 * - **LOD Streaming**: Dynamically loads and renders appropriate levels of detail based on camera
 *   distance, enabling efficient rendering of massive splat scenes.
 *
 * ```javascript
 * // Enable unified rendering for advanced features
 * entity.gsplat.unified = true;
 * ```
 *
 * Relevant Engine API examples:
 *
 * - [Simple Splat Loading](https://playcanvas.github.io/#/gaussian-splatting/simple)
 * - [Global Sorting](https://playcanvas.github.io/#/gaussian-splatting/global-sorting)
 * - [LOD](https://playcanvas.github.io/#/gaussian-splatting/lod)
 * - [LOD Instances](https://playcanvas.github.io/#/gaussian-splatting/lod-instances)
 * - [LOD Streaming](https://playcanvas.github.io/#/gaussian-splatting/lod-streaming)
 * - [LOD Streaming with Spherical Harmonics](https://playcanvas.github.io/#/gaussian-splatting/lod-streaming-sh)
 * - [Multi-Splat](https://playcanvas.github.io/#/gaussian-splatting/multi-splat)
 * - [Multi-View](https://playcanvas.github.io/#/gaussian-splatting/multi-view)
 * - [Picking](https://playcanvas.github.io/#/gaussian-splatting/picking)
 * - [Reveal Effect](https://playcanvas.github.io/#/gaussian-splatting/reveal)
 * - [Shader Effects](https://playcanvas.github.io/#/gaussian-splatting/shader-effects)
 * - [Spherical Harmonics](https://playcanvas.github.io/#/gaussian-splatting/spherical-harmonics)
 *
 * @hideconstructor
 * @category Graphics
 */
declare class GSplatComponent extends Component {
    /**
     * Create a new GSplatComponent.
     *
     * @param {GSplatComponentSystem} system - The ComponentSystem that created this Component.
     * @param {Entity} entity - The Entity that this Component is attached to.
     */
    constructor(system: GSplatComponentSystem, entity: Entity);
    /** @private */
    private _layers;
    /**
     * @type {GSplatInstance|null}
     * @private
     */
    private _instance;
    /**
     * @type {GSplatPlacement|null}
     * @private
     */
    private _placement;
    /**
     * Unique identifier for this component, used by the picking system.
     *
     * @type {number}
     * @private
     */
    private _id;
    /**
     * @type {ShaderMaterial|null}
     * @private
     */
    private _materialTmp;
    /** @private */
    private _highQualitySH;
    /**
     * Base distance for the first LOD transition (LOD 0 to LOD 1).
     *
     * @type {number}
     * @private
     */
    private _lodBaseDistance;
    /**
     * Geometric multiplier between successive LOD distance thresholds.
     *
     * @type {number}
     * @private
     */
    private _lodMultiplier;
    /**
     * @type {BoundingBox|null}
     * @private
     */
    private _customAabb;
    /**
     * @type {AssetReference}
     * @private
     */
    private _assetReference;
    /**
     * Direct resource reference (for container splats).
     *
     * @type {GSplatResourceBase|null}
     * @private
     */
    private _resource;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayersChanged;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayerAdded;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayerRemoved;
    /** @private */
    private _castShadows;
    /**
     * Whether to use the unified gsplat rendering.
     *
     * @type {boolean}
     * @private
     */
    private _unified;
    /**
     * Per-instance shader parameters. Stores objects with scopeId and data.
     *
     * @type {Map<string, {scopeId: ScopeId, data: *}>}
     * @private
     */
    private _parameters;
    /**
     * Render mode for work buffer updates.
     *
     * @type {number}
     * @private
     */
    private _workBufferUpdate;
    /**
     * Custom shader modify code for this component (object with code and pre-computed hash).
     *
     * @type {{ code: string, hash: number }|null}
     * @private
     */
    private _workBufferModifier;
    /**
     * Sets a custom object space bounding box for visibility culling of the attached gsplat.
     *
     * @type {BoundingBox|null}
     */
    set customAabb(value: BoundingBox | null);
    /**
     * Gets the custom object space bounding box for visibility culling of the attached gsplat.
     * Returns the custom AABB if set, otherwise falls back to the resource's AABB.
     *
     * @type {BoundingBox|null}
     */
    get customAabb(): BoundingBox | null;
    /**
     * Sets a {@link GSplatInstance} on the component. If not set or loaded, it returns null.
     *
     * @type {GSplatInstance|null}
     * @ignore
     */
    set instance(value: GSplatInstance | null);
    /**
     * Gets the {@link GSplatInstance} on the component.
     *
     * @type {GSplatInstance|null}
     * @ignore
     */
    get instance(): GSplatInstance | null;
    /**
     * Sets the material used to render the gsplat.
     *
     * **Note:** This setter is only supported when {@link unified} is `false`. When it's true, multiple
     * gsplat components share a single material per camera/layer combination. To access materials in
     * unified mode, use {@link GSplatComponentSystem#getMaterial}.
     *
     * @param {ShaderMaterial} value - The material instance.
     */
    set material(value: ShaderMaterial);
    /**
     * Gets the material used to render the gsplat.
     *
     * **Note:** This getter returns `null` when {@link unified} is `true`. In unified mode, materials are
     * organized per camera/layer combination rather than per component. To access materials in
     * unified mode, use {@link GSplatComponentSystem#getMaterial}.
     *
     * @type {ShaderMaterial|null}
     */
    get material(): ShaderMaterial | null;
    /**
     * Sets whether to use the high quality or the approximate (but fast) spherical-harmonic calculation when rendering SOG data.
     *
     * The low quality approximation evaluates the scene's spherical harmonic contributions
     * along the camera's Z-axis instead of using each gaussian's view vector. This results
     * in gaussians being accurate at the center of the screen and becoming less accurate
     * as they appear further from the center. This is a good trade-off for performance
     * when rendering large SOG datasets, especially on mobile devices.
     *
     * Defaults to false.
     *
     * @type {boolean}
     */
    set highQualitySH(value: boolean);
    /**
     * Gets whether the high quality (true) or the fast approximate (false) spherical-harmonic calculation is used when rendering SOG data.
     *
     * @type {boolean}
     */
    get highQualitySH(): boolean;
    /**
     * Sets whether gsplat will cast shadows for lights that have shadow casting enabled. Defaults
     * to false.
     *
     * @type {boolean}
     */
    set castShadows(value: boolean);
    /**
     * Gets whether gsplat will cast shadows for lights that have shadow casting enabled.
     *
     * @type {boolean}
     */
    get castShadows(): boolean;
    /**
     * Sets the base distance for the first LOD transition (LOD 0 to LOD 1). Objects closer
     * than this distance use the highest quality LOD. Each subsequent LOD level transitions
     * at a progressively larger distance, controlled by {@link lodMultiplier}. Clamped to a
     * minimum of 0.1. Defaults to 5.
     *
     * @type {number}
     */
    set lodBaseDistance(value: number);
    /**
     * Gets the base distance for the first LOD transition.
     *
     * @type {number}
     */
    get lodBaseDistance(): number;
    /**
     * Sets the multiplier between successive LOD distance thresholds. Each LOD level
     * transitions at this factor times the previous level's distance, creating a geometric
     * progression. Lower values keep higher quality at distance; higher values switch to
     * coarser LODs sooner. Clamped to a minimum of 1.2 to avoid degenerate logarithmic LOD
     * computation. LOD distances are automatically compensated for the camera's field of
     * view — a wider FOV makes objects appear smaller on screen, so LOD switches to coarser
     * levels sooner to match the reduced screen-space detail. Defaults to 3.
     *
     * @type {number}
     */
    set lodMultiplier(value: number);
    /**
     * Gets the geometric multiplier between successive LOD distance thresholds.
     *
     * @type {number}
     */
    get lodMultiplier(): number;
    /**
     * @deprecated Use {@link lodBaseDistance} and {@link lodMultiplier} instead.
     * @type {number[]|null}
     */
    set lodDistances(value: number[]);
    /**
     * @deprecated Use {@link lodBaseDistance} and {@link lodMultiplier} instead.
     * @type {number[]}
     */
    get lodDistances(): number[];
    /**
     * @deprecated Use app.scene.gsplat.splatBudget instead for global budget control.
     * @type {number}
     */
    set splatBudget(value: number);
    get splatBudget(): number;
    /**
     * Sets whether to use the unified gsplat rendering. Default is false.
     *
     * Note: Material handling differs between modes. When unified is false, use
     * {@link GSplatComponent#material}. When unified is true, materials are shared per
     * camera/layer - use {@link GSplatComponentSystem#getMaterial} instead.
     *
     * @type {boolean}
     */
    set unified(value: boolean);
    /**
     * Gets whether to use the unified gsplat rendering.
     *
     * @type {boolean}
     * @alpha
     */
    get unified(): boolean;
    /**
     * Gets the unique identifier for this component. This ID is used by the picking system
     * and is also written to the work buffer when `app.scene.gsplat.enableIds` is enabled, making
     * it available to custom shaders for effects like highlighting or animation.
     *
     * @type {number}
     */
    get id(): number;
    /**
     * Sets the work buffer update mode. Only applicable in unified rendering mode.
     *
     * In unified mode, splat data is rendered to a work buffer only when needed (e.g., when
     * transforms change). Can be:
     * - {@link WORKBUFFER_UPDATE_AUTO}: Update only when needed (default).
     * - {@link WORKBUFFER_UPDATE_ONCE}: Force update this frame, then switch to AUTO.
     * - {@link WORKBUFFER_UPDATE_ALWAYS}: Update every frame.
     *
     * This is typically useful when using custom shader code via {@link workBufferModifier} that
     * depends on external factors like time or animated uniforms.
     *
     * Note: {@link WORKBUFFER_UPDATE_ALWAYS} has a performance impact as it re-renders
     * all splat data to the work buffer every frame. Where possible, consider using shader
     * customization on the unified gsplat material (`app.scene.gsplat.material`) which is
     * applied during final rendering without re-rendering the work buffer.
     *
     * @type {number}
     */
    set workBufferUpdate(value: number);
    /**
     * Gets the work buffer update mode.
     *
     * @type {number}
     */
    get workBufferUpdate(): number;
    /**
     * Sets custom shader code for modifying splats when written to the work buffer. Only
     * applicable in unified rendering mode.
     *
     * Must provide all three functions:
     * - `modifySplatCenter`: Modify the splat center position
     * - `modifySplatRotationScale`: Modify the splat rotation and scale
     * - `modifySplatColor`: Modify the splat color
     *
     * Calling this method automatically triggers a work buffer re-render.
     *
     * @param {{ glsl?: string, wgsl?: string }|null} value - The modifier code for GLSL and/or WGSL.
     * @example
     * entity.gsplat.setWorkBufferModifier({
     *     glsl: `
     *         void modifySplatCenter(inout vec3 center) {}
     *         void modifySplatRotationScale(vec3 originalCenter, vec3 modifiedCenter, inout vec4 rotation, inout vec3 scale) {}
     *         void modifySplatColor(vec3 center, inout vec4 color) { color.rgb *= vec3(1.0, 0.0, 0.0); }
     *     `,
     *     wgsl: `
     *         fn modifySplatCenter(center: ptr<function, vec3f>) {}
     *         fn modifySplatRotationScale(originalCenter: vec3f, modifiedCenter: vec3f, rotation: ptr<function, vec4f>, scale: ptr<function, vec3f>) {}
     *         fn modifySplatColor(center: vec3f, color: ptr<function, vec4f>) { (*color).r = 1.0; (*color).g = 0.0; (*color).b = 0.0; }
     *     `
     * });
     */
    setWorkBufferModifier(value: {
        glsl?: string;
        wgsl?: string;
    } | null): void;
    /**
     * Sets an array of layer IDs ({@link Layer#id}) to which this gsplat should belong. Don't
     * push, pop, splice or modify this array. If you want to change it, set a new one instead.
     *
     * @type {number[]}
     */
    set layers(value: number[]);
    /**
     * Gets the array of layer IDs ({@link Layer#id}) to which this gsplat belongs.
     *
     * @type {number[]}
     */
    get layers(): number[];
    /**
     * Sets the gsplat asset for this gsplat component. Can also be an asset id.
     *
     * @type {Asset|number}
     */
    set asset(value: Asset | number);
    /**
     * Gets the gsplat asset id for this gsplat component.
     *
     * @type {Asset|number}
     */
    get asset(): Asset | number;
    /**
     * Sets a GSplat resource directly (for procedural/container splats).
     * When set, this takes precedence over the asset property.
     *
     * @type {GSplatResourceBase|null}
     */
    set resource(value: GSplatResourceBase | null);
    /**
     * Gets the GSplat resource. Returns the directly set resource if available,
     * otherwise returns the resource from the assigned asset.
     *
     * @type {GSplatResourceBase|null}
     */
    get resource(): GSplatResourceBase | null;
    /** @private */
    private destroyInstance;
    /** @private */
    private addToLayers;
    removeFromLayers(): void;
    /** @private */
    private onRemoveChild;
    /** @private */
    private onInsertChild;
    onRemove(): void;
    onLayersChanged(oldComp: any, newComp: any): void;
    onLayerAdded(layer: any): void;
    onLayerRemoved(layer: any): void;
    /**
     * Stop rendering this component without removing its mesh instance from the scene hierarchy.
     */
    hide(): void;
    /**
     * Enable rendering of the component if hidden using {@link GSplatComponent#hide}.
     */
    show(): void;
    /**
     * Sets a shader parameter for this gsplat instance. Parameters set here are applied
     * during unified rendering.
     *
     * @param {string} name - The name of the parameter (uniform name in shader).
     * @param {number|number[]|ArrayBufferView|Texture|StorageBuffer} data - The value for the parameter.
     */
    setParameter(name: string, data: number | number[] | ArrayBufferView | Texture | StorageBuffer): void;
    /**
     * Gets a shader parameter value previously set with {@link setParameter}.
     *
     * @param {string} name - The name of the parameter.
     * @returns {number|number[]|ArrayBufferView|undefined} The parameter value, or undefined if not set.
     */
    getParameter(name: string): number | number[] | ArrayBufferView | undefined;
    /**
     * Deletes a shader parameter previously set with {@link setParameter}.
     *
     * @param {string} name - The name of the parameter to delete.
     */
    deleteParameter(name: string): void;
    /**
     * Gets an instance texture by name. Instance textures are per-component textures defined
     * in the resource's format with `storage: GSPLAT_STREAM_INSTANCE`. Only available in unified mode.
     *
     * @param {string} name - The name of the texture.
     * @returns {Texture|null} The texture, or null if not found or not in unified mode.
     * @example
     * // Add an instance stream to the resource format
     * resource.format.addExtraStreams([
     *     { name: 'instanceTint', format: pc.PIXELFORMAT_RGBA8, storage: pc.GSPLAT_STREAM_INSTANCE }
     * ]);
     *
     * // Get the instance texture and fill it with data
     * const texture = entity.gsplat.getInstanceTexture('instanceTint');
     * if (texture) {
     *     const data = texture.lock();
     *     // Fill texture data...
     *     texture.unlock();
     * }
     */
    getInstanceTexture(name: string): Texture | null;
    /** Handler for the gsplat asset being added to the asset registry. */
    _onGSplatAssetAdded(): void;
    /** Handler for the gsplat asset's 'load' event. */
    _onGSplatAssetLoad(): void;
    /** Handler for the gsplat asset's 'unload' event. */
    _onGSplatAssetUnload(): void;
    /** Handler for the gsplat asset's 'remove' event. */
    _onGSplatAssetRemove(): void;
}

/**
 * A LayoutChildComponent enables the Entity to control the sizing applied to it by its parent
 * {@link LayoutGroupComponent}.
 *
 * @hideconstructor
 * @category User Interface
 */
declare class LayoutChildComponent extends Component {
    /**
     * @type {number}
     * @private
     */
    private _minWidth;
    /**
     * @type {number}
     * @private
     */
    private _minHeight;
    /**
     * @type {number|null}
     * @private
     */
    private _maxWidth;
    /**
     * @type {number|null}
     * @private
     */
    private _maxHeight;
    /**
     * @type {number}
     * @private
     */
    private _fitWidthProportion;
    /**
     * @type {number}
     * @private
     */
    private _fitHeightProportion;
    /**
     * @type {boolean}
     * @private
     */
    private _excludeFromLayout;
    /**
     * Sets the minimum width the element should be rendered at.
     *
     * @type {number}
     */
    set minWidth(value: number);
    /**
     * Gets the minimum width the element should be rendered at.
     *
     * @type {number}
     */
    get minWidth(): number;
    /**
     * Sets the minimum height the element should be rendered at.
     *
     * @type {number}
     */
    set minHeight(value: number);
    /**
     * Gets the minimum height the element should be rendered at.
     *
     * @type {number}
     */
    get minHeight(): number;
    /**
     * Sets the maximum width the element should be rendered at.
     *
     * @type {number|null}
     */
    set maxWidth(value: number | null);
    /**
     * Gets the maximum width the element should be rendered at.
     *
     * @type {number|null}
     */
    get maxWidth(): number | null;
    /**
     * Sets the maximum height the element should be rendered at.
     *
     * @type {number|null}
     */
    set maxHeight(value: number | null);
    /**
     * Gets the maximum height the element should be rendered at.
     *
     * @type {number|null}
     */
    get maxHeight(): number | null;
    /**
     * Sets the amount of additional horizontal space that the element should take up, if necessary to
     * satisfy a Stretch/Shrink fitting calculation. This is specified as a proportion, taking into
     * account the proportion values of other siblings.
     *
     * @type {number}
     */
    set fitWidthProportion(value: number);
    /**
     * Gets the amount of additional horizontal space that the element should take up, if necessary to
     * satisfy a Stretch/Shrink fitting calculation.
     *
     * @type {number}
     */
    get fitWidthProportion(): number;
    /**
     * Sets the amount of additional vertical space that the element should take up, if necessary to
     * satisfy a Stretch/Shrink fitting calculation. This is specified as a proportion, taking into
     * account the proportion values of other siblings.
     *
     * @type {number}
     */
    set fitHeightProportion(value: number);
    /**
     * Gets the amount of additional vertical space that the element should take up, if necessary to
     * satisfy a Stretch/Shrink fitting calculation.
     *
     * @type {number}
     */
    get fitHeightProportion(): number;
    /**
     * Sets whether the child will be excluded from all layout calculations.
     *
     * @type {boolean}
     */
    set excludeFromLayout(value: boolean);
    /**
     * Gets whether the child will be excluded from all layout calculations.
     *
     * @type {boolean}
     */
    get excludeFromLayout(): boolean;
}

/**
 * Holds the raw data for a {@link LayoutGroupComponent}. Used as the `DataType` of
 * {@link LayoutGroupComponentSystem}.
 */
declare class LayoutGroupComponentData {
    enabled: boolean;
}

/**
 * Manages creation of {@link LayoutGroupComponent}s.
 *
 * @category User Interface
 */
declare class LayoutGroupComponentSystem extends ComponentSystem {
    /** The string id of this component system. */
    id: string;
    /** The component class created by this system. */
    ComponentType: typeof LayoutGroupComponent;
    /** The data class used to store component properties. */
    DataType: typeof LayoutGroupComponentData;
    /** Names of the component properties that are serialized. */
    schema: string[];
    /** Components queued for a layout reflow (see {@link scheduleReflow}). */
    _reflowQueue: any[];
    initializeComponentData(component: any, data: any, properties: any): void;
    cloneComponent(entity: any, clone: any): Component;
    /**
     * Queues the given component for a layout reflow.
     *
     * @param {LayoutGroupComponent} component - The component to reflow.
     */
    scheduleReflow(component: any): void;
    _onPostUpdate(): void;
    _processReflowQueue(): void;
    _onRemoveComponent(entity: any, component: any): void;
}

/**
 * A LayoutGroupComponent enables the Entity to position and scale child {@link ElementComponent}s
 * according to configurable layout rules.
 *
 * @hideconstructor
 * @category User Interface
 */
declare class LayoutGroupComponent extends Component {
    /**
     * Create a new LayoutGroupComponent instance.
     *
     * @param {LayoutGroupComponentSystem} system - The ComponentSystem that created this Component.
     * @param {Entity} entity - The Entity that this Component is attached to.
     */
    constructor(system: LayoutGroupComponentSystem, entity: Entity);
    /**
     * @type {number}
     * @private
     */
    private _orientation;
    /**
     * @type {boolean}
     * @private
     */
    private _reverseX;
    /**
     * @type {boolean}
     * @private
     */
    private _reverseY;
    /**
     * @type {Vec2}
     * @private
     */
    private _alignment;
    /**
     * @type {Vec4}
     * @private
     */
    private _padding;
    /**
     * @type {Vec2}
     * @private
     */
    private _spacing;
    /**
     * @type {number}
     * @private
     */
    private _widthFitting;
    /**
     * @type {number}
     * @private
     */
    private _heightFitting;
    /**
     * @type {boolean}
     * @private
     */
    private _wrap;
    /** @private */
    private _layoutCalculator;
    /**
     * Sets whether the layout should run horizontally or vertically. Can be:
     *
     * - {@link ORIENTATION_HORIZONTAL}
     * - {@link ORIENTATION_VERTICAL}
     *
     * Defaults to {@link ORIENTATION_HORIZONTAL}.
     *
     * @type {number}
     */
    set orientation(value: number);
    /**
     * Gets whether the layout should run horizontally or vertically.
     *
     * @type {number}
     */
    get orientation(): number;
    /**
     * Sets whether to reverse the order of children along the x axis. Defaults to false.
     *
     * @type {boolean}
     */
    set reverseX(value: boolean);
    /**
     * Gets whether to reverse the order of children along the x axis.
     *
     * @type {boolean}
     */
    get reverseX(): boolean;
    /**
     * Sets whether to reverse the order of children along the y axis. Defaults to true.
     *
     * @type {boolean}
     */
    set reverseY(value: boolean);
    /**
     * Gets whether to reverse the order of children along the y axis.
     *
     * @type {boolean}
     */
    get reverseY(): boolean;
    /**
     * Sets the horizontal and vertical alignment of child elements. Values range from 0 to 1 where
     * `[0, 0]` is the bottom left and `[1, 1]` is the top right. Defaults to `[0, 1]`.
     *
     * @type {Vec2}
     */
    set alignment(value: Vec2);
    /**
     * Gets the horizontal and vertical alignment of child elements.
     *
     * @type {Vec2}
     */
    get alignment(): Vec2;
    /**
     * Sets the padding to be applied inside the container before positioning any children.
     * Specified as left, bottom, right and top values. Defaults to `[0, 0, 0, 0]` (no padding).
     *
     * @type {Vec4}
     */
    set padding(value: Vec4);
    /**
     * Gets the padding to be applied inside the container before positioning any children.
     *
     * @type {Vec4}
     */
    get padding(): Vec4;
    /**
     * Sets the spacing to be applied between each child element. Defaults to `[0, 0]` (no spacing).
     *
     * @type {Vec2}
     */
    set spacing(value: Vec2);
    /**
     * Gets the spacing to be applied between each child element.
     *
     * @type {Vec2}
     */
    get spacing(): Vec2;
    /**
     * Sets the width fitting mode to be applied when positioning and scaling child elements. Can be:
     *
     * - {@link FITTING_NONE}: Child elements will be rendered at their natural size.
     * - {@link FITTING_STRETCH}: When the natural size of all child elements does not fill the width
     * of the container, children will be stretched to fit. The rules for how each child will be
     * stretched are outlined below:
     *   1. Sum the {@link LayoutChildComponent#fitWidthProportion} values of each child and normalize
     * so that all values sum to 1.
     *   2. Apply the natural width of each child.
     *   3. If there is space remaining in the container, distribute it to each child based on the
     * normalized {@link LayoutChildComponent#fitWidthProportion} values, but do not exceed the
     * {@link LayoutChildComponent#maxWidth} of each child.
     * - {@link FITTING_SHRINK}: When the natural size of all child elements overflows the width of the
     * container, children will be shrunk to fit. The rules for how each child will be stretched are
     * outlined below:
     *   1. Sum the {@link LayoutChildComponent#fitWidthProportion} values of each child and normalize
     * so that all values sum to 1.
     *   2. Apply the natural width of each child.
     *   3. If the new total width of all children exceeds the available space of the container, reduce
     * each child's width proportionally based on the normalized {@link
     * LayoutChildComponent#fitWidthProportion} values, but do not exceed the {@link
     * LayoutChildComponent#minWidth} of each child.
     * - {@link FITTING_BOTH}: Applies both STRETCH and SHRINK logic as necessary.
     *
     * Defaults to {@link FITTING_NONE}.
     *
     * @type {number}
     */
    set widthFitting(value: number);
    /**
     * Gets the width fitting mode to be applied when positioning and scaling child elements.
     *
     * @type {number}
     */
    get widthFitting(): number;
    /**
     * Sets the height fitting mode to be applied when positioning and scaling child elements.
     * Identical to {@link LayoutGroupComponent#widthFitting} but for the Y axis. Defaults to
     * {@link FITTING_NONE}.
     *
     * @type {number}
     */
    set heightFitting(value: number);
    /**
     * Gets the height fitting mode to be applied when positioning and scaling child elements.
     *
     * @type {number}
     */
    get heightFitting(): number;
    /**
     * Sets whether or not to wrap children onto a new row/column when the size of the container is
     * exceeded. Defaults to false, which means that children will be rendered in a single row
     * (horizontal orientation) or column (vertical orientation). Note that setting wrap to true
     * makes it impossible for the {@link FITTING_BOTH} fitting mode to operate in any logical
     * manner. For this reason, when wrap is true, a {@link LayoutGroupComponent#widthFitting} or
     * {@link LayoutGroupComponent#heightFitting} mode of {@link FITTING_BOTH} will be coerced to
     * {@link FITTING_STRETCH}.
     *
     * @type {boolean}
     */
    set wrap(value: boolean);
    /**
     * Gets whether or not to wrap children onto a new row/column when the size of the container is
     * exceeded.
     *
     * @type {boolean}
     */
    get wrap(): boolean;
    /** @private */
    _isSelfOrChild(entity: any): boolean;
    /** @private */
    _listenForReflowEvents(target: any, onOff: any): void;
    /** @private */
    _onElementOrLayoutComponentAdd(entity: any): void;
    /** @private */
    _onElementOrLayoutComponentRemove(entity: any): void;
    /** @private */
    _onChildInsert(child: any): void;
    /** @private */
    _onChildRemove(child: any): void;
    /** @private */
    _scheduleReflow(): void;
    /**
     * Performs the layout of this group's children according to the configured layout rules.
     */
    reflow(): void;
    /** True while {@link reflow} is executing. */
    _isPerformingReflow: boolean;
    /** Called when the component is removed from its entity. */
    onRemove(): void;
}

/**
 * Holds the raw data for a {@link ModelComponent}. Used as the `DataType` of
 * {@link ModelComponentSystem}.
 */
declare class ModelComponentData {
    enabled: boolean;
}

/**
 * Allows an Entity to render a model or a primitive shape like a box, capsule, sphere, cylinder,
 * cone etc.
 *
 * @category Graphics
 */
declare class ModelComponentSystem extends ComponentSystem {
    /** The string id of this component system. */
    id: string;
    /** The component class created by this system. */
    ComponentType: typeof ModelComponent;
    /** The data class used to store component properties. */
    DataType: typeof ModelComponentData;
    /** Names of the component properties that are serialized. */
    schema: string[];
    /** The default {@link StandardMaterial} used by model components. */
    defaultMaterial: StandardMaterial;
    initializeComponentData(component: any, _data: any, properties: any): void;
    cloneComponent(entity: any, clone: any): Component;
    onRemove(entity: any, component: any): void;
}

/**
 * @import { BoundingBox } from '../../../core/shape/bounding-box.js'
 * @import { Entity } from '../../entity.js'
 * @import { EventHandle } from '../../../core/event-handle.js'
 * @import { LayerComposition } from '../../../scene/composition/layer-composition.js'
 * @import { Layer } from '../../../scene/layer.js'
 * @import { Material } from '../../../scene/materials/material.js'
 * @import { ModelComponentSystem } from './system.js'
 */
/**
 * The ModelComponent enables an {@link Entity} to render 3D models. The {@link type} property can
 * be set to one of several predefined shapes (such as `box`, `sphere`, `cone` and so on).
 * Alternatively, the component can be configured to manage an arbitrary {@link Model}. This can
 * either be created programmatically or loaded from an {@link Asset}.
 *
 * The {@link Model} managed by this component is positioned, rotated, and scaled in world space by
 * the world transformation matrix of the owner {@link Entity}. This world matrix is derived by
 * combining the entity's local transformation (position, rotation, and scale) with the world
 * transformation matrix of its parent entity in the scene hierarchy.
 *
 * You should never need to use the ModelComponent constructor directly. To add a ModelComponent
 * to an Entity, use {@link Entity#addComponent}:
 *
 * ```javascript
 * const entity = new pc.Entity();
 * entity.addComponent('model', {
 *     type: 'box'
 * });
 * ```
 *
 * Once the ModelComponent is added to the entity, you can access it via the {@link Entity#model}
 * property:
 *
 * ```javascript
 * entity.model.type = 'capsule';  // Set the model component's type
 *
 * console.log(entity.model.type); // Get the model component's type and print it
 * ```
 *
 * @category Graphics
 */
declare class ModelComponent extends Component {
    /**
     * Create a new ModelComponent instance.
     *
     * @param {ModelComponentSystem} system - The ComponentSystem that created this Component.
     * @param {Entity} entity - The Entity that this Component is attached to.
     */
    constructor(system: ModelComponentSystem, entity: Entity);
    /**
     * @type {'asset'|'box'|'capsule'|'cone'|'cylinder'|'plane'|'sphere'|'torus'}
     * @private
     */
    private _type;
    /**
     * @type {Asset|number|null}
     * @private
     */
    private _asset;
    /**
     * @type {Model|null}
     * @private
     */
    private _model;
    /**
     * @type {Object<string, number>}
     * @private
     */
    private _mapping;
    /**
     * @type {boolean}
     * @private
     */
    private _castShadows;
    /**
     * @type {boolean}
     * @private
     */
    private _receiveShadows;
    /**
     * @type {Asset|number|null}
     * @private
     */
    private _materialAsset;
    /**
     * @type {Material}
     * @private
     */
    private _material;
    /**
     * @type {boolean}
     * @private
     */
    private _castShadowsLightmap;
    /**
     * @type {boolean}
     * @private
     */
    private _lightmapped;
    /**
     * @type {number}
     * @private
     */
    private _lightmapSizeMultiplier;
    /**
     * Mark meshes as non-movable (optimization).
     *
     * @type {boolean}
     */
    isStatic: boolean;
    /**
     * @type {number[]}
     * @private
     */
    private _layers;
    /**
     * @type {number}
     * @private
     */
    private _batchGroupId;
    /**
     * @type {BoundingBox|null}
     * @private
     */
    private _customAabb;
    _area: any;
    _materialEvents: any;
    /**
     * @type {boolean}
     * @private
     */
    private _clonedModel;
    _batchGroup: any;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayersChanged;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayerAdded;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayerRemoved;
    /**
     * Sets the array of mesh instances contained in the component's model.
     *
     * @type {MeshInstance[]|null}
     */
    set meshInstances(value: MeshInstance[] | null);
    /**
     * Gets the array of mesh instances contained in the component's model.
     *
     * @type {MeshInstance[]|null}
     */
    get meshInstances(): MeshInstance[] | null;
    /**
     * Sets the custom object space bounding box that is used for visibility culling of attached
     * mesh instances. This is an optimization, allowing an oversized bounding box to be specified
     * for skinned characters in order to avoid per frame bounding box computations based on bone
     * positions.
     *
     * @type {BoundingBox|null}
     */
    set customAabb(value: BoundingBox | null);
    /**
     * Gets the custom object space bounding box that is used for visibility culling of attached
     * mesh instances.
     *
     * @type {BoundingBox|null}
     */
    get customAabb(): BoundingBox | null;
    /**
     * Sets the type of the component, determining the source of the geometry to be rendered.
     * The geometry, whether it's a primitive shape or originates from an asset, is rendered
     * using the owning entity's final world transform. This world transform is calculated by
     * concatenating (multiplying) the local transforms (position, rotation, scale) of the
     * entity and all its ancestors in the scene hierarchy. This process positions, orientates,
     * and scales the geometry in world space.
     *
     * Can be one of the following values:
     *
     * - **"asset"**: Renders geometry defined in an {@link Asset} of type `model`. This asset,
     *   assigned to the {@link asset} property, contains a {@link Model}. Alternatively,
     *   {@link model} can be set programmatically.
     * - **"box"**: A unit cube (sides of length 1) centered at the local space origin.
     * - **"capsule"**: A shape composed of a cylinder and two hemispherical caps that is aligned
     *   with the local Y-axis. It is centered at the local space origin and has an unscaled height
     *   of 2 and a radius of 0.5.
     * - **"cone"**: A cone aligned with the local Y-axis. It is centered at the local space
     *   origin, with its base in the local XZ plane at Y = -0.5 and its tip at Y = +0.5. It has
     *   an unscaled height of 1 and a base radius of 0.5.
     * - **"cylinder"**: A cylinder aligned with the local Y-axis. It is centered at the local
     *   space origin with an unscaled height of 1 and a radius of 0.5.
     * - **"plane"**: A flat plane in the local XZ plane at Y = 0 (normal along +Y). It is
     *   centered at the local space origin with unscaled dimensions of 1x1 units along local X and
     *   Z axes.
     * - **"sphere"**: A sphere with a radius of 0.5. It is centered at the local space origin and
     *   has poles at Y = -0.5 and Y = +0.5.
     * - **"torus"**: A doughnut shape lying in the local XZ plane at Y = 0. It is centered at
     *   the local space origin with a tube radius of 0.2 and a ring radius of 0.3.
     *
     * @type {'asset'|'box'|'capsule'|'cone'|'cylinder'|'plane'|'sphere'|'torus'}
     */
    set type(value: "asset" | "box" | "capsule" | "cone" | "cylinder" | "plane" | "sphere" | "torus");
    /**
     * Gets the type of the component.
     *
     * @type {'asset'|'box'|'capsule'|'cone'|'cylinder'|'plane'|'sphere'|'torus'}
     */
    get type(): "asset" | "box" | "capsule" | "cone" | "cylinder" | "plane" | "sphere" | "torus";
    /**
     * Sets the model owned by this component.
     *
     * @type {Model|null}
     */
    set model(value: Model | null);
    /**
     * Gets the model owned by this component. If a model is not set or loaded, this will
     * return null.
     *
     * @type {Model|null}
     */
    get model(): Model | null;
    /**
     * Sets the model asset (or asset id) for the component. This only applies to model components
     * with type 'asset'.
     *
     * @type {Asset|number|null}
     */
    set asset(value: Asset | number | null);
    /**
     * Gets the model asset id for the component.
     *
     * @type {Asset|number|null}
     */
    get asset(): Asset | number | null;
    /**
     * Sets whether the component is affected by the runtime lightmapper. If true, the meshes will
     * be lightmapped after using lightmapper.bake().
     *
     * @type {boolean}
     */
    set lightmapped(value: boolean);
    /**
     * Gets whether the component is affected by the runtime lightmapper.
     *
     * @type {boolean}
     */
    get lightmapped(): boolean;
    /**
     * Sets the dictionary that holds material overrides for each mesh instance. Only applies to
     * model components of type 'asset'. The mapping contains pairs of mesh instance index to
     * material asset id.
     *
     * @type {Object<string, number>}
     */
    set mapping(value: {
        [x: string]: number;
    });
    /**
     * Gets the dictionary that holds material overrides for each mesh instance.
     *
     * @type {Object<string, number>}
     */
    get mapping(): {
        [x: string]: number;
    };
    /**
     * Sets whether attached meshes will cast shadows for lights that have shadow casting enabled.
     *
     * @type {boolean}
     */
    set castShadows(value: boolean);
    /**
     * Gets whether attached meshes will cast shadows for lights that have shadow casting enabled.
     *
     * @type {boolean}
     */
    get castShadows(): boolean;
    /**
     * Sets whether shadows will be cast on attached meshes.
     *
     * @type {boolean}
     */
    set receiveShadows(value: boolean);
    /**
     * Gets whether shadows will be cast on attached meshes.
     *
     * @type {boolean}
     */
    get receiveShadows(): boolean;
    /**
     * Sets whether meshes instances will cast shadows when rendering lightmaps.
     *
     * @type {boolean}
     */
    set castShadowsLightmap(value: boolean);
    /**
     * Gets whether meshes instances will cast shadows when rendering lightmaps.
     *
     * @type {boolean}
     */
    get castShadowsLightmap(): boolean;
    /**
     * Sets the lightmap resolution multiplier.
     *
     * @type {number}
     */
    set lightmapSizeMultiplier(value: number);
    /**
     * Gets the lightmap resolution multiplier.
     *
     * @type {number}
     */
    get lightmapSizeMultiplier(): number;
    /**
     * Sets the array of layer IDs ({@link Layer#id}) to which the mesh instances belong. Don't
     * push, pop, splice or modify this array. If you want to change it, set a new one instead.
     *
     * @type {number[]}
     */
    set layers(value: number[]);
    /**
     * Gets the array of layer IDs ({@link Layer#id}) to which the mesh instances belong.
     *
     * @type {number[]}
     */
    get layers(): number[];
    /**
     * Sets the batch group for the mesh instances in this component (see {@link BatchGroup}).
     * Default is -1 (no group).
     *
     * @type {number}
     */
    set batchGroupId(value: number);
    /**
     * Gets the batch group for the mesh instances in this component (see {@link BatchGroup}).
     *
     * @type {number}
     */
    get batchGroupId(): number;
    /**
     * Sets the material {@link Asset} that will be used to render the component. The material is
     * ignored for models of type 'asset'.
     *
     * @type {Asset|number|null}
     */
    set materialAsset(value: Asset | number | null);
    /**
     * Gets the material {@link Asset} that will be used to render the component.
     *
     * @type {Asset|number|null}
     */
    get materialAsset(): Asset | number | null;
    /**
     * Sets the {@link Material} that will be used to render the model. The material is ignored for
     * models of type 'asset'.
     *
     * @type {Material}
     */
    set material(value: Material);
    /**
     * Gets the {@link Material} that will be used to render the model.
     *
     * @type {Material}
     */
    get material(): Material;
    /**
     * Adds the component's mesh instances to all layers referenced by {@link layers}.
     */
    addModelToLayers(): void;
    /**
     * Removes the component's mesh instances from the layers they were added to.
     */
    removeModelFromLayers(): void;
    /** Handler called when the entity is removed from its parent. */
    onRemoveChild(): void;
    /** Handler called when the entity is inserted under a new parent. */
    onInsertChild(): void;
    /** Called when the component is removed from its entity. */
    onRemove(): void;
    /**
     * @param {LayerComposition} oldComp - The old layer composition.
     * @param {LayerComposition} newComp - The new layer composition.
     * @private
     */
    private onLayersChanged;
    /**
     * @param {Layer} layer - The layer that was added.
     * @private
     */
    private onLayerAdded;
    /**
     * @param {Layer} layer - The layer that was removed.
     * @private
     */
    private onLayerRemoved;
    /**
     * @param {number} index - The index of the mesh instance.
     * @param {string} event - The event name.
     * @param {number} id - The asset id.
     * @param {*} handler - The handler function to be bound to the specified event.
     * @private
     */
    private _setMaterialEvent;
    /** @private */
    private _unsetMaterialEvents;
    /**
     * @param {string} idOrPath - The asset id or path.
     * @returns {Asset|null} The asset.
     * @private
     */
    private _getAssetByIdOrPath;
    /**
     * @param {string} path - The path of the model asset.
     * @returns {string|null} The model asset URL or null if the asset is not in the registry.
     * @private
     */
    private _getMaterialAssetUrl;
    /**
     * @param {Asset} materialAsset - The material asset to load.
     * @param {MeshInstance} meshInstance - The mesh instance to assign the material to.
     * @param {number} index - The index of the mesh instance.
     * @private
     */
    private _loadAndSetMeshInstanceMaterial;
    /**
     * Stop rendering model without removing it from the scene hierarchy. This method sets the
     * {@link MeshInstance#visible} property of every MeshInstance in the model to false. Note,
     * this does not remove the model or mesh instances from the scene hierarchy or draw call
     * list. So the model component still incurs some CPU overhead.
     *
     * @example
     * this.timer = 0;
     * this.visible = true;
     * // ...
     * // blink model every 0.1 seconds
     * this.timer += dt;
     * if (this.timer > 0.1) {
     *     if (!this.visible) {
     *         this.entity.model.show();
     *         this.visible = true;
     *     } else {
     *         this.entity.model.hide();
     *         this.visible = false;
     *     }
     *     this.timer = 0;
     * }
     */
    hide(): void;
    /**
     * Enable rendering of the model if hidden using {@link ModelComponent#hide}. This method sets
     * all the {@link MeshInstance#visible} property on all mesh instances to true.
     */
    show(): void;
    /**
     * @param {Asset} asset - The material asset to bind events to.
     * @private
     */
    private _bindMaterialAsset;
    /**
     * @param {Asset} asset - The material asset to unbind events from.
     * @private
     */
    private _unbindMaterialAsset;
    /**
     * @param {Asset} asset - The material asset on which an asset add event has been fired.
     * @private
     */
    private _onMaterialAssetAdd;
    /**
     * @param {Asset} asset - The material asset on which an asset load event has been fired.
     * @private
     */
    private _onMaterialAssetLoad;
    /**
     * @param {Asset} asset - The material asset on which an asset unload event has been fired.
     * @private
     */
    private _onMaterialAssetUnload;
    /**
     * @param {Asset} asset - The material asset on which an asset remove event has been fired.
     * @private
     */
    private _onMaterialAssetRemove;
    /**
     * @param {Asset} asset - The material asset on which an asset change event has been fired.
     * @private
     */
    private _onMaterialAssetChange;
    /**
     * @param {Asset} asset - The model asset to bind events to.
     * @private
     */
    private _bindModelAsset;
    /**
     * @param {Asset} asset - The model asset to unbind events from.
     * @private
     */
    private _unbindModelAsset;
    /**
     * @param {Asset} asset - The model asset on which an asset add event has been fired.
     * @private
     */
    private _onModelAssetAdded;
    /**
     * @param {Asset} asset - The model asset on which an asset load event has been fired.
     * @private
     */
    private _onModelAssetLoad;
    /**
     * @param {Asset} asset - The model asset on which an asset unload event has been fired.
     * @private
     */
    private _onModelAssetUnload;
    /**
     * @param {Asset} asset - The model asset on which an asset change event has been fired.
     * @param {string} attr - The attribute that was changed.
     * @param {*} _new - The new value of the attribute.
     * @param {*} _old - The old value of the attribute.
     * @private
     */
    private _onModelAssetChange;
    /**
     * @param {Asset} asset - The model asset on which an asset remove event has been fired.
     * @private
     */
    private _onModelAssetRemove;
    /**
     * @param {Material} material - The material to be set.
     * @private
     */
    private _setMaterial;
}

/**
 * A curve is a collection of keys (time/value pairs). The shape of the curve is defined by its
 * type that specifies an interpolation scheme for the keys.
 *
 * @category Math
 */
declare class Curve {
    /**
     * Creates a new Curve instance.
     *
     * @param {number[]} [data] - An array of keys (pairs of numbers with the time first and value
     * second).
     * @example
     * const curve = new pc.Curve([
     *     0, 0,        // At 0 time, value of 0
     *     0.33, 2,     // At 0.33 time, value of 2
     *     0.66, 2.6,   // At 0.66 time, value of 2.6
     *     1, 3         // At 1 time, value of 3
     * ]);
     */
    constructor(data?: number[]);
    /**
     * The keys that define the curve. Each key is an array of two numbers with the time first and
     * the value second.
     *
     * @type {number[][]}
     */
    keys: number[][];
    /**
     * The curve interpolation scheme. Can be:
     *
     * - {@link CURVE_LINEAR}
     * - {@link CURVE_SMOOTHSTEP}
     * - {@link CURVE_SPLINE}
     * - {@link CURVE_STEP}
     *
     * Defaults to {@link CURVE_SMOOTHSTEP}.
     *
     * @type {number}
     */
    type: number;
    /**
     * Controls how {@link CURVE_SPLINE} tangents are calculated. Valid range is between 0 and 1
     * where 0 results in a non-smooth curve (equivalent to linear interpolation) and 1 results in
     * a very smooth curve. Use 0.5 for a Catmull-Rom spline.
     *
     * @type {number}
     */
    tension: number;
    /**
     * Internal evaluator used to interpolate the curve's keys.
     *
     * @type {CurveEvaluator}
     * @private
     */
    private _eval;
    /**
     * Gets the number of keys in the curve.
     *
     * @type {number}
     */
    get length(): number;
    /**
     * Adds a new key to the curve.
     *
     * @param {number} time - Time to add new key.
     * @param {number} value - Value of new key.
     * @returns {number[]} The newly created `[time, value]` pair.
     * @example
     * const curve = new pc.Curve();
     * curve.add(0, 1);   // add key at time 0 with value 1
     * curve.add(1, 2);   // add key at time 1 with value 2
     */
    add(time: number, value: number): number[];
    /**
     * Gets the `[time, value]` pair at the specified index.
     *
     * @param {number} index - The index of key to return.
     * @returns {number[]} The `[time, value]` pair at the specified index.
     * @example
     * const curve = new pc.Curve([0, 1, 1, 2]);
     * const key = curve.get(0); // returns [0, 1]
     */
    get(index: number): number[];
    /**
     * Sorts keys by time.
     *
     * @example
     * const curve = new pc.Curve([1, 2, 0, 1]); // keys defined out of time order
     * curve.sort();                             // keys are now ordered by time
     */
    sort(): void;
    /**
     * Returns the interpolated value of the curve at specified time.
     *
     * @param {number} time - The time at which to calculate the value.
     * @returns {number} The interpolated value.
     * @example
     * const curve = new pc.Curve([0, 0, 1, 10]);
     * const value = curve.value(0.5); // returns interpolated value at time 0.5
     */
    value(time: number): number;
    /**
     * Returns the key closest to the specified time.
     *
     * @param {number} time - The time to find the closest key to.
     * @returns {number[]|null} The `[time, value]` pair closest to the specified time, or null if
     * no keys exist.
     * @example
     * const curve = new pc.Curve([0, 1, 0.5, 2, 1, 3]);
     * const key = curve.closest(0.6); // returns [0.5, 2]
     */
    closest(time: number): number[] | null;
    /**
     * Returns a clone of the specified curve object.
     *
     * @returns {this} A clone of the specified curve.
     * @example
     * const curve = new pc.Curve([0, 0, 1, 10]);
     * const clonedCurve = curve.clone();
     */
    clone(): this;
    /**
     * Sample the curve at regular intervals over the range [0..1].
     *
     * @param {number} precision - The number of samples to return.
     * @returns {Float32Array} The set of quantized values.
     * @ignore
     */
    quantize(precision: number): Float32Array;
    /**
     * Sample the curve at regular intervals over the range [0..1] and clamp the resulting samples
     * to [min..max].
     *
     * @param {number} precision - The number of samples to return.
     * @param {number} min - The minimum output value.
     * @param {number} max - The maximum output value.
     * @returns {Float32Array} The set of quantized values.
     * @ignore
     */
    quantizeClamped(precision: number, min: number, max: number): Float32Array;
}

/**
 * A curve set is a collection of curves.
 *
 * @category Math
 */
declare class CurveSet {
    /**
     * Creates a new CurveSet instance.
     *
     * @param {...*} args - Variable arguments with several possible formats:
     * - No arguments: Creates a CurveSet with a single default curve.
     * - Single number argument: Creates a CurveSet with the specified number of default curves.
     * - Single array argument: An array of arrays, where each sub-array contains keys (pairs of
     * numbers with the time first and value second).
     * - Multiple arguments: Each argument becomes a separate curve.
     * @example
     * // Create from an array of arrays of keys
     * const curveSet = new pc.CurveSet([
     *     [
     *         0, 0,        // At 0 time, value of 0
     *         0.33, 2,     // At 0.33 time, value of 2
     *         0.66, 2.6,   // At 0.66 time, value of 2.6
     *         1, 3         // At 1 time, value of 3
     *     ],
     *     [
     *         0, 34,
     *         0.33, 35,
     *         0.66, 36,
     *         1, 37
     *     ]
     * ]);
     */
    constructor(...args: any[]);
    /**
     * The array of curves in the set.
     *
     * @type {Curve[]}
     */
    curves: Curve[];
    /**
     * Backing field for the shared interpolation scheme exposed via {@link CurveSet#type}.
     *
     * @type {number}
     * @private
     */
    private _type;
    /**
     * Gets the number of curves in the curve set.
     *
     * @type {number}
     */
    get length(): number;
    /**
     * Sets the interpolation scheme applied to all curves in the curve set. Can be:
     *
     * - {@link CURVE_LINEAR}
     * - {@link CURVE_SMOOTHSTEP}
     * - {@link CURVE_SPLINE}
     * - {@link CURVE_STEP}
     *
     * Defaults to {@link CURVE_SMOOTHSTEP}.
     *
     * @type {number}
     */
    set type(value: number);
    /**
     * Gets the interpolation scheme applied to all curves in the curve set.
     *
     * @type {number}
     */
    get type(): number;
    /**
     * Return a specific curve in the curve set.
     *
     * @param {number} index - The index of the curve to return.
     * @returns {Curve} The curve at the specified index.
     * @example
     * const curveSet = new pc.CurveSet([[0, 0, 1, 1], [0, 0, 1, 0.5]]);
     * const curve = curveSet.get(0); // returns the first curve
     */
    get(index: number): Curve;
    /**
     * Returns the interpolated value of all curves in the curve set at the specified time.
     *
     * @param {number} time - The time at which to calculate the value.
     * @param {number[]} [result] - The interpolated curve values at the specified time. If this
     * parameter is not supplied, the function allocates a new array internally to return the
     * result.
     * @returns {number[]} The interpolated curve values at the specified time.
     * @example
     * const curveSet = new pc.CurveSet([[0, 0, 1, 1], [0, 0, 1, 0.5]]);
     * const values = curveSet.value(0.5); // returns interpolated values for all curves at time 0.5
     */
    value(time: number, result?: number[]): number[];
    /**
     * Returns a clone of the specified curve set object.
     *
     * @returns {this} A clone of the specified curve set.
     * @example
     * const curveSet = new pc.CurveSet([[0, 0, 1, 1]]);
     * const clonedCurveSet = curveSet.clone();
     */
    clone(): this;
    /**
     * Sample the curve set at regular intervals over the range [0..1].
     *
     * @param {number} precision - The number of samples to return.
     * @returns {Float32Array} The set of quantized values.
     * @ignore
     */
    quantize(precision: number): Float32Array;
    /**
     * Sample the curve set at regular intervals over the range [0..1] and clamp the result to min
     * and max.
     *
     * @param {number} precision - The number of samples to return.
     * @param {number} min - The minimum output value.
     * @param {number} max - The maximum output value.
     * @returns {Float32Array} The set of quantized values.
     * @ignore
     */
    quantizeClamped(precision: number, min: number, max: number): Float32Array;
}

/**
 * @import { Asset } from '../../../framework/asset/asset.js'
 * @import { CurveSet } from '../../../core/math/curve-set.js'
 * @import { Curve } from '../../../core/math/curve.js'
 * @import { Mesh } from '../../../scene/mesh.js'
 * @import { Texture } from '../../../platform/graphics/texture.js'
 */
/**
 * Plain data container holding the properties of a particle system component. Used as the
 * `DataType` of the {@link ParticleSystemComponentSystem}. See the accessor documentation on
 * {@link ParticleSystemComponent} for the meaning of individual properties.
 */
declare class ParticleSystemComponentData {
    // --- Emission timing and counts ---
    numParticles: number;
    rate: number;
    /** @type {number} */
    rate2: number;
    startAngle: number;
    /** @type {number} */
    startAngle2: number;
    lifetime: number;
    // --- Emitter shape (box extents or sphere radii, selected by emitterShape) ---
    emitterExtents: Vec3;
    emitterExtentsInner: Vec3;
    emitterRadius: number;
    emitterRadiusInner: number;
    emitterShape: number;
    initialVelocity: number;
    // --- Wrapping and simulation space ---
    wrap: boolean;
    wrapBounds: Vec3;
    localSpace: boolean;
    screenSpace: boolean;
    // --- Textures and their source assets ---
    /** @type {Texture} */
    colorMap: Texture;
    /** @type {Asset} */
    colorMapAsset: Asset;
    /** @type {Texture} */
    normalMap: Texture;
    /** @type {Asset} */
    normalMapAsset: Asset;
    // --- Playback, lighting and rendering options ---
    loop: boolean;
    preWarm: boolean;
    sort: number;
    mode: number;
    scene: any;
    lighting: boolean;
    halfLambert: boolean;
    intensity: number;
    stretch: number;
    alignToMotion: boolean;
    depthSoftening: number;
    // --- Particle mesh sources ---
    /** @type {Asset} */
    renderAsset: Asset;
    /** @type {Asset} */
    meshAsset: Asset;
    /** @type {Mesh} */
    mesh: Mesh;
    depthWrite: boolean;
    noFog: boolean;
    // --- Particle orientation ---
    orientation: number;
    particleNormal: Vec3;
    // --- Sprite-sheet (flipbook) animation ---
    animTilesX: number;
    animTilesY: number;
    animStartFrame: number;
    animNumFrames: number;
    animNumAnimations: number;
    animIndex: number;
    randomizeAnimIndex: boolean;
    animSpeed: number;
    animLoop: boolean;
    // --- Simulation curves; the "2" variants define the max of a min/max random range ---
    /** @type {Curve} */
    scaleGraph: Curve;
    /** @type {Curve} */
    scaleGraph2: Curve;
    /** @type {CurveSet} */
    colorGraph: CurveSet;
    /** @type {CurveSet} */
    colorGraph2: CurveSet;
    /** @type {Curve} */
    alphaGraph: Curve;
    /** @type {Curve} */
    alphaGraph2: Curve;
    /** @type {CurveSet} */
    localVelocityGraph: CurveSet;
    /** @type {CurveSet} */
    localVelocityGraph2: CurveSet;
    /** @type {CurveSet} */
    velocityGraph: CurveSet;
    /** @type {CurveSet} */
    velocityGraph2: CurveSet;
    /** @type {Curve} */
    rotationSpeedGraph: Curve;
    /** @type {Curve} */
    rotationSpeedGraph2: Curve;
    /** @type {Curve} */
    radialSpeedGraph: Curve;
    /** @type {Curve} */
    radialSpeedGraph2: Curve;
    // --- Blending and component state ---
    blendType: number;
    enabled: boolean;
    paused: boolean;
    autoPlay: boolean;
    layers: number[];
}

/**
 * @import { ParticleEmitter } from './particle-emitter.js'
 */
/**
 * A material for rendering particle geometry by the particle emitter.
 *
 * @category Graphics
 * @ignore
 */
declare class ParticleMaterial extends Material {
    constructor(emitter: any);
    /**
     * The particle emitter that owns and is rendered by this material.
     *
     * @type {ParticleEmitter}
     */
    emitter: ParticleEmitter;
    // Returns the shader variant used to render the emitter's particles.
    getShaderVariant(params: any): Shader;
}

/**
 * Performs GPU-side particle simulation updates for a {@link ParticleEmitter}
 * (see {@link ParticleEmitter#_gpuUpdater}).
 */
declare class ParticleGPUUpdater {
    constructor(emitter: any, gd: any);
    _emitter: any;
    // Uniform value arrays written each update and uploaded to the simulation shader.
    frameRandomUniform: Float32Array<ArrayBuffer>;
    emitterPosUniform: Float32Array<ArrayBuffer>;
    emitterScaleUniform: Float32Array<ArrayBuffer>;
    worldBoundsMulUniform: Float32Array<ArrayBuffer>;
    worldBoundsAddUniform: Float32Array<ArrayBuffer>;
    inBoundsSizeUniform: Float32Array<ArrayBuffer>;
    inBoundsCenterUniform: Float32Array<ArrayBuffer>;
    // Handles to the simulation shader's uniforms (presumably scope IDs resolved from the
    // graphics device passed to the constructor — confirm against the implementation).
    constantParticleTexIN: any;
    constantParticleTexOUT: any;
    constantEmitterPos: any;
    constantEmitterScale: any;
    constantSpawnBounds: any;
    constantSpawnPosInnerRatio: any;
    constantSpawnBoundsSphere: any;
    constantSpawnBoundsSphereInnerRatio: any;
    constantInitialVelocity: any;
    constantFrameRandom: any;
    constantDelta: any;
    constantRate: any;
    constantRateDiv: any;
    constantLifetime: any;
    constantGraphSampleSize: any;
    constantGraphNumSamples: any;
    constantInternalTex0: any;
    constantInternalTex1: any;
    constantInternalTex2: any;
    constantInternalTex3: any;
    constantEmitterMatrix: any;
    constantEmitterMatrixInv: any;
    constantNumParticles: any;
    constantNumParticlesPot: any;
    constantLocalVelocityDivMult: any;
    constantVelocityDivMult: any;
    constantRotSpeedDivMult: any;
    constantSeed: any;
    constantStartAngle: any;
    constantStartAngle2: any;
    constantOutBoundsMul: any;
    constantOutBoundsAdd: any;
    constantInBoundsSize: any;
    constantInBoundsCenter: any;
    constantMaxVel: any;
    constantFaceTangent: any;
    constantFaceBinorm: any;
    constantRadialSpeedDivMult: any;
    _setInputBounds(): void;
    randomize(): void;
    // Advances the GPU simulation by delta seconds.
    update(device: any, spawnMatrix: any, extentsInnerRatioUniform: any, delta: any, isOnStop: any): void;
}

/**
 * Performs CPU-side particle simulation updates for a {@link ParticleEmitter}
 * (see {@link ParticleEmitter#_cpuUpdater}).
 */
declare class ParticleCPUUpdater {
    constructor(emitter: any);
    _emitter: any;
    // Computes the spawn position of particle i into the particle state texture data.
    calcSpawnPosition(particleTex: any, spawnMatrix: any, extentsInnerRatioUniform: any, emitterPos: any, i: any): void;
    // Advances the CPU simulation by delta seconds.
    update(data: any, vbToSort: any, particleTex: any, spawnMatrix: any, extentsInnerRatioUniform: any, emitterPos: any, delta: any, isOnStop: any): void;
}

/**
 * Simulates particles (on either the CPU via {@link ParticleCPUUpdater} or the GPU via
 * {@link ParticleGPUUpdater}) and produces the renderable particle mesh. Backs the public
 * {@link ParticleSystemComponent}.
 */
declare class ParticleEmitter {
    constructor(graphicsDevice: any, options: any);
    /** @type {ParticleMaterial|null} */
    material: ParticleMaterial | null;
    /** @type {Texture|null} */
    internalTex0: Texture | null;
    /** @type {Texture|null} */
    internalTex1: Texture | null;
    /** @type {Texture|null} */
    internalTex2: Texture | null;
    /** @type {Texture|null} */
    colorParam: Texture | null;
    graphicsDevice: any;
    precision: number;
    _addTimeTime: number;
    numParticles: any;
    // Simulation backends; which one runs is selected by the useCpu flag below.
    _gpuUpdater: ParticleGPUUpdater;
    _cpuUpdater: ParticleCPUUpdater;
    // Uniform value arrays shared with the rendering/simulation shaders.
    emitterPosUniform: Float32Array<ArrayBuffer>;
    wrapBoundsUniform: Float32Array<ArrayBuffer>;
    emitterScaleUniform: Float32Array<ArrayBuffer>;
    animTilesParams: Float32Array<ArrayBuffer>;
    animParams: Float32Array<ArrayBuffer>;
    animIndexParams: Float32Array<ArrayBuffer>;
    // CPU-path sorting scratch data.
    vbToSort: any[];
    vbOld: Float32Array<ArrayBuffer>;
    particleDistance: Float32Array<any>;
    camera: any;
    swapTex: boolean;
    useMesh: boolean;
    useCpu: boolean;
    pack8: boolean;
    // Bounds tracking (local, world, and trailing world bounds over time).
    localBounds: BoundingBox;
    worldBoundsNoTrail: BoundingBox;
    worldBoundsTrail: BoundingBox[];
    worldBounds: BoundingBox;
    worldBoundsSize: Vec3;
    prevWorldBoundsSize: Vec3;
    prevWorldBoundsCenter: Vec3;
    prevEmitterExtents: any;
    prevEmitterRadius: any;
    worldBoundsMul: Vec3;
    worldBoundsAdd: Vec3;
    timeToSwitchBounds: number;
    // GPU simulation shader variants (respawn / no-respawn / on-stop).
    shaderParticleUpdateRespawn: Shader;
    shaderParticleUpdateNoRespawn: Shader;
    shaderParticleUpdateOnStop: Shader;
    numParticleVerts: number;
    numParticleIndices: number;
    meshInstance: MeshInstance;
    drawOrder: number;
    seed: number;
    // Fixed-step sub-stepping of the simulation time.
    fixedTimeStep: number;
    maxSubSteps: number;
    simTime: number;
    simTimeTotal: number;
    beenReset: boolean;
    _layer: any;
    get defaultParamTexture(): any;
    onChangeCamera(): void;
    calculateBoundsMad(): void;
    calculateWorldBounds(): void;
    resetWorldBounds(): void;
    calculateLocalBounds(): void;
    // Rebuilds simulation resources; the fields below are (re)assigned by it.
    rebuild(): void;
    colorMap: any;
    spawnBounds: any;
    numParticlesPot: number;
    // Particle state storage: raw CPU-side data plus ping-pong GPU textures/render targets.
    particleTex: Float32Array<ArrayBuffer>;
    particleTexStart: any;
    particleTexIN: Texture;
    particleTexOUT: Texture;
    rtParticleTexIN: RenderTarget;
    rtParticleTexOUT: RenderTarget;
    _isAnimated(): any;
    // Rebuilds curve-derived data; the q*-prefixed fields below presumably hold quantized
    // graph samples produced here (see Curve#quantize) — confirm against the implementation.
    rebuildGraphs(): void;
    qLocalVelocity: any;
    qVelocity: any;
    qColor: any;
    qRotSpeed: any;
    qScale: any;
    qAlpha: any;
    qRadialSpeed: any;
    qLocalVelocity2: any;
    qVelocity2: any;
    qColor2: any;
    qRotSpeed2: any;
    qScale2: any;
    qAlpha2: any;
    qRadialSpeed2: any;
    localVelocityUMax: Float32Array<ArrayBuffer>;
    velocityUMax: Float32Array<ArrayBuffer>;
    colorUMax: Float32Array<ArrayBuffer>;
    rotSpeedUMax: number[];
    scaleUMax: number[];
    alphaUMax: number[];
    radialSpeedUMax: number[];
    qLocalVelocityDiv: Float32Array<any>;
    qVelocityDiv: Float32Array<any>;
    qColorDiv: Float32Array<any>;
    qRotSpeedDiv: Float32Array<any>;
    qScaleDiv: Float32Array<any>;
    qAlphaDiv: Float32Array<any>;
    qRadialSpeedDiv: Float32Array<any>;
    maxVel: number;
    internalTex3: Texture;
    _setMaterialTextures(): void;
    _createMaterial(): ParticleMaterial;
    resetMaterial(): void;
    _compParticleFaceParams(): void;
    getVertexInfo(): {
        semantic: string;
        components: number;
        type: number;
    }[];
    // Allocates vertex/index buffers for numParticles; the fields below are assigned by it.
    _allocate(numParticles: any): void;
    vertexBuffer: VertexBuffer;
    indexBuffer: IndexBuffer;
    vbCPU: Float32Array<ArrayBuffer>;
    reset(): void;
    loop: any;
    prewarm(time: any): void;
    resetTime(): void;
    endTime: number;
    finishFrame(): void;
    // Advances the simulation by delta seconds.
    addTime(delta: any, isOnStop: any): void;
    _destroyResources(): boolean;
    destroy(): void;
}

/**
 * Allows an Entity to render a particle system.
 *
 * @category Graphics
 */
declare class ParticleSystemComponentSystem extends ComponentSystem {
    // Identifier of this component system.
    id: string;
    ComponentType: typeof ParticleSystemComponent;
    DataType: typeof ParticleSystemComponentData;
    schema: string[];
    // Maps component property names to their data type names — presumably used to convert
    // raw data (e.g. arrays) into Vec3/Curve/CurveSet instances during initialization;
    // confirm against the implementation.
    propertyTypes: {
        emitterExtents: string;
        emitterExtentsInner: string;
        particleNormal: string;
        wrapBounds: string;
        localVelocityGraph: string;
        localVelocityGraph2: string;
        velocityGraph: string;
        velocityGraph2: string;
        colorGraph: string;
        colorGraph2: string;
        alphaGraph: string;
        alphaGraph2: string;
        rotationSpeedGraph: string;
        rotationSpeedGraph2: string;
        scaleGraph: string;
        scaleGraph2: string;
    };
    initializeComponentData(component: any, _data: any, properties: any): void;
    // Copies an entity's particle system component onto its clone.
    cloneComponent(entity: any, clone: any): Component;
    // Per-frame update; dt is the elapsed time in seconds.
    onUpdate(dt: any): void;
    onBeforeRemove(entity: any, component: any): void;
}

/**
 * Used to simulate particles and produce renderable particle mesh on either CPU or GPU. GPU
 * simulation is generally much faster than its CPU counterpart, because it avoids slow CPU-GPU
 * synchronization and takes advantage of many GPU cores. However, it requires client to support
 * reasonable uniform count, reading from multiple textures in vertex shader and OES_texture_float
 * extension, including rendering into float textures. Most mobile devices fail to satisfy these
 * requirements, so it's not recommended to simulate thousands of particles on them. GPU version
 * also can't sort particles, so enabling sorting forces CPU mode too. Particle rotation is
 * specified by a single angle parameter: default billboard particles rotate around camera facing
 * axis, while mesh particles rotate around 2 different view-independent axes. Most of the
 * simulation parameters are specified with {@link Curve} or {@link CurveSet}. Curves are
 * interpolated based on each particle's lifetime, therefore parameters are able to change over
 * time. Most of the curve parameters can also be specified by 2 minimum/maximum curves, this way
 * each particle will pick a random value in-between.
 *
 * @hideconstructor
 * @category Graphics
 */
declare class ParticleSystemComponent extends Component {
    /**
     * Create a new ParticleSystemComponent.
     *
     * @param {ParticleSystemComponentSystem} system - The ComponentSystem that created this Component.
     * @param {Entity} entity - The Entity this Component is attached to.
     */
    constructor(system: ParticleSystemComponentSystem, entity: Entity);
    /** @private */
    private _requestedDepth;
    /** @private */
    private _drawOrder;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayersChanged;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayerAdded;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayerRemoved;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtSetMeshes;
    /**
     * @type {ParticleSystemComponentData}
     * @ignore
     */
    get data(): ParticleSystemComponentData;
    /**
     * Sets whether the particle system plays automatically on creation. If set to false, it is
     * necessary to call {@link ParticleSystemComponent#play} for the particle system to play.
     * Defaults to true.
     *
     * @type {boolean}
     */
    set autoPlay(arg: boolean);
    /**
     * Gets whether the particle system plays automatically on creation.
     *
     * @type {boolean}
     */
    get autoPlay(): boolean;
    /**
     * Sets the maximum number of simulated particles.
     *
     * @type {number}
     */
    set numParticles(arg: number);
    /**
     * Gets the maximum number of simulated particles.
     *
     * @type {number}
     */
    get numParticles(): number;
    /**
     * Sets the length of time in seconds between a particle's birth and its death.
     *
     * @type {number}
     */
    set lifetime(arg: number);
    /**
     * Gets the length of time in seconds between a particle's birth and its death.
     *
     * @type {number}
     */
    get lifetime(): number;
    /**
     * Sets the minimal interval in seconds between particle births.
     *
     * @type {number}
     */
    set rate(arg: number);
    /**
     * Gets the minimal interval in seconds between particle births.
     *
     * @type {number}
     */
    get rate(): number;
    /**
     * Sets the maximal interval in seconds between particle births.
     *
     * @type {number}
     */
    set rate2(arg: number);
    /**
     * Gets the maximal interval in seconds between particle births.
     *
     * @type {number}
     */
    get rate2(): number;
    /**
     * Sets the minimal initial Euler angle of a particle.
     *
     * @type {number}
     */
    set startAngle(arg: number);
    /**
     * Gets the minimal initial Euler angle of a particle.
     *
     * @type {number}
     */
    get startAngle(): number;
    /**
     * Sets the maximal initial Euler angle of a particle.
     *
     * @type {number}
     */
    set startAngle2(arg: number);
    /**
     * Gets the maximal initial Euler angle of a particle.
     *
     * @type {number}
     */
    get startAngle2(): number;
    /**
     * Sets whether the particle system loops.
     *
     * @type {boolean}
     */
    set loop(arg: boolean);
    /**
     * Gets whether the particle system loops.
     *
     * @type {boolean}
     */
    get loop(): boolean;
    /**
     * Sets whether the particle system will be initialized as though it has already completed a
     * full cycle. This only works with looping particle systems.
     *
     * @type {boolean}
     */
    set preWarm(arg: boolean);
    /**
     * Gets whether the particle system will be initialized as though it has already completed a
     * full cycle.
     *
     * @type {boolean}
     */
    get preWarm(): boolean;
    /**
     * Sets whether particles will be lit by ambient and directional lights.
     *
     * @type {boolean}
     */
    set lighting(arg: boolean);
    /**
     * Gets whether particles will be lit by ambient and directional lights.
     *
     * @type {boolean}
     */
    get lighting(): boolean;
    /**
     * Sets whether Half Lambert lighting is enabled. Enabling Half Lambert lighting avoids
     * particles looking too flat in shadowed areas. It is a completely non-physical lighting model
     * but can give more pleasing visual results.
     *
     * @type {boolean}
     */
    set halfLambert(arg: boolean);
    /**
     * Gets whether Half Lambert lighting is enabled.
     *
     * @type {boolean}
     */
    get halfLambert(): boolean;
    /**
     * Sets the color multiplier.
     *
     * @type {number}
     */
    set intensity(arg: number);
    /**
     * Gets the color multiplier.
     *
     * @type {number}
     */
    get intensity(): number;
    /**
     * Sets whether depth writes is enabled. If enabled, the particles will write to the depth
     * buffer. If disabled, the depth buffer is left unchanged and particles will be guaranteed to
     * overwrite one another in the order in which they are rendered.
     *
     * @type {boolean}
     */
    set depthWrite(arg: boolean);
    /**
     * Gets whether depth writes is enabled.
     *
     * @type {boolean}
     */
    get depthWrite(): boolean;
    /**
     * Sets whether fogging is ignored.
     *
     * @type {boolean}
     */
    set noFog(arg: boolean);
    /**
     * Gets whether fogging is ignored.
     *
     * @type {boolean}
     */
    get noFog(): boolean;
    /**
     * Sets whether depth softening is enabled. Controls fading of particles near their
     * intersections with scene geometry. This effect, when it's non-zero, requires scene depth map
     * to be rendered. Multiple depth-dependent effects can share the same map, but if you only use
     * it for particles, bear in mind that it can double engine draw calls.
     *
     * @type {number}
     */
    set depthSoftening(arg: number);
    /**
     * Gets whether depth softening is enabled.
     *
     * @type {number}
     */
    get depthSoftening(): number;
    /**
     * Sets the particle sorting mode. Forces CPU simulation, so be careful.
     *
     * - {@link PARTICLESORT_NONE}: No sorting, particles are drawn in arbitrary order. Can be
     * simulated on GPU.
     * - {@link PARTICLESORT_DISTANCE}: Sorting based on distance to the camera. CPU only.
     * - {@link PARTICLESORT_NEWER_FIRST}: Newer particles are drawn first. CPU only.
     * - {@link PARTICLESORT_OLDER_FIRST}: Older particles are drawn first. CPU only.
     *
     * @type {number}
     */
    set sort(arg: number);
    /**
     * Gets the particle sorting mode.
     *
     * @type {number}
     */
    get sort(): number;
    /**
     * Sets how particles are blended when being written to the currently active render target.
     * Can be:
     *
     * - {@link BLEND_SUBTRACTIVE}: Subtract the color of the source fragment from the destination
     * fragment and write the result to the frame buffer.
     * - {@link BLEND_ADDITIVE}: Add the color of the source fragment to the destination fragment and
     * write the result to the frame buffer.
     * - {@link BLEND_NORMAL}: Enable simple translucency for materials such as glass. This is
     * equivalent to enabling a source blend mode of {@link BLENDMODE_SRC_ALPHA} and
     * a destination
     * blend mode of {@link BLENDMODE_ONE_MINUS_SRC_ALPHA}.
     * - {@link BLEND_NONE}: Disable blending.
     * - {@link BLEND_PREMULTIPLIED}: Similar to {@link BLEND_NORMAL} except
     * the source fragment is
     * assumed to have already been multiplied by the source alpha value.
     * - {@link BLEND_MULTIPLICATIVE}: Multiply the color of the source fragment by the color of the
     * destination fragment and write the result to the frame buffer.
     * - {@link BLEND_ADDITIVEALPHA}: Same as {@link BLEND_ADDITIVE} except
     * the source RGB is
     * multiplied by the source alpha.
     *
     * @type {number}
     */
    set blendType(arg: number);
    /**
     * Gets how particles are blended when being written to the currently active render target.
     *
     * @type {number}
     */
    get blendType(): number;
    /**
     * Sets how much particles are stretched in their direction of motion. This is a value in world
     * units that controls the amount by which particles are stretched based on their velocity.
     * Particles are stretched from their center towards their previous position.
     *
     * @type {number}
     */
    set stretch(arg: number);
    /**
     * Gets how much particles are stretched in their direction of motion.
     *
     * @type {number}
     */
    get stretch(): number;
    /**
     * Sets whether particles are oriented in their direction of motion or not.
     *
     * @type {boolean}
     */
    set alignToMotion(arg: boolean);
    /**
     * Gets whether particles are oriented in their direction of motion or not.
     *
     * @type {boolean}
     */
    get alignToMotion(): boolean;
    /**
     * Sets the shape of the emitter. Defines the bounds inside which particles are spawned. Also
     * affects the direction of initial velocity.
     *
     * - {@link EMITTERSHAPE_BOX}: Box shape parameterized by emitterExtents. Initial velocity is
     * directed towards local Z axis.
     * - {@link EMITTERSHAPE_SPHERE}: Sphere shape parameterized by emitterRadius. Initial velocity is
     * directed outwards from the center.
     *
     * @type {number}
     */
    set emitterShape(arg: number);
    /**
     * Gets the shape of the emitter.
     *
     * @type {number}
     */
    get emitterShape(): number;
    /**
     * Sets the extents of a local space bounding box within which particles are spawned at random
     * positions. This only applies to particle system with the shape `EMITTERSHAPE_BOX`.
     *
     * @type {Vec3}
     */
    set emitterExtents(arg: Vec3);
    /**
     * Gets the extents of a local space bounding box within which particles are spawned at random
     * positions.
     *
     * @type {Vec3}
     */
    get emitterExtents(): Vec3;
    /**
     * Sets the exception of extents of a local space bounding box within which particles are not
     * spawned. It is aligned to the center of emitterExtents. This only applies to particle system
     * with the shape `EMITTERSHAPE_BOX`.
     *
     * @type {Vec3}
     */
    set emitterExtentsInner(arg: Vec3);
    /**
     * Gets the exception of extents of a local space bounding box within which particles are not
     * spawned.
     *
     * @type {Vec3}
     */
    get emitterExtentsInner(): Vec3;
    /**
     * Sets the radius within which particles are spawned at random positions. This only applies to
     * particle system with the shape `EMITTERSHAPE_SPHERE`.
     *
     * @type {number}
     */
    set emitterRadius(arg: number);
    /**
     * Gets the radius within which particles are spawned at random positions.
     *
     * @type {number}
     */
    get emitterRadius(): number;
    /**
     * Sets the inner radius within which particles are not spawned. This only applies to particle
     * system with the shape `EMITTERSHAPE_SPHERE`.
     *
     * @type {number}
     */
    set emitterRadiusInner(arg: number);
    /**
     * Gets the inner radius within which particles are not spawned.
     *
     * @type {number}
     */
    get emitterRadiusInner(): number;
    /**
     * Sets the magnitude of the initial emitter velocity. Direction is given by emitter shape.
     *
     * @type {number}
     */
    set initialVelocity(arg: number);
    /**
     * Gets the magnitude of the initial emitter velocity.
     *
     * @type {number}
     */
    get initialVelocity(): number;
    /**
     * Sets whether particles wrap based on the set wrap bounds.
     *
     * @type {boolean}
     */
    set wrap(arg: boolean);
    /**
     * Gets whether particles wrap based on the set wrap bounds.
     *
     * @type {boolean}
     */
    get wrap(): boolean;
    /**
     * Sets the wrap bounds of the particle system. This is half extents of a world space box
     * volume centered on the owner entity's position. If a particle crosses the boundary of one
     * side of the volume, it teleports to the opposite side.
     *
     * @type {Vec3}
     */
    set wrapBounds(arg: Vec3);
    /**
     * Gets the wrap bounds of the particle system.
     *
     * @type {Vec3}
     */
    get wrapBounds(): Vec3;
    /**
     * Sets whether particles move with respect to the emitter's transform rather than world space.
     *
     * @type {boolean}
     */
    set localSpace(arg: boolean);
    /**
     * Gets whether particles move with respect to the emitter's transform rather than world space.
     *
     * @type {boolean}
     */
    get localSpace(): boolean;
    /**
     * Sets whether particles are rendered in 2D screen space. This needs to be set when particle
     * system is part of hierarchy with {@link ScreenComponent} as its ancestor, and allows
     * particle system to integrate with the rendering of {@link ElementComponent}s. Note that an
     * entity with ParticleSystem component cannot be parented directly to {@link ScreenComponent},
     * but has to be a child of an {@link ElementComponent}, for example {@link LayoutGroupComponent}.
     *
     * @type {boolean}
     */
    set screenSpace(arg: boolean);
    /**
     * Gets whether particles are rendered in 2D screen space.
     *
     * @type {boolean}
     */
    get screenSpace(): boolean;
    /**
     * Sets the {@link Asset} used to set the colorMap.
     *
     * @type {Asset}
     */
    set colorMapAsset(arg: Asset);
    /**
     * Gets the {@link Asset} used to set the colorMap.
     *
     * @type {Asset}
     */
    get colorMapAsset(): Asset;
    /**
     * Sets the {@link Asset} used to set the normalMap.
     *
     * @type {Asset}
     */
    set normalMapAsset(arg: Asset);
    /**
     * Gets the {@link Asset} used to set the normalMap.
     *
     * @type {Asset}
     */
    get normalMapAsset(): Asset;
    /**
     * Sets the polygonal mesh to be used as a particle. Only first vertex/index buffer is used.
     * Vertex buffer must contain local position at first 3 floats of each vertex.
     *
     * @type {Mesh}
     */
    set mesh(arg: Mesh);
    /**
     * Gets the polygonal mesh to be used as a particle.
     *
     * @type {Mesh}
     */
    get mesh(): Mesh;
    /**
     * Sets the {@link Asset} used to set the mesh.
     *
     * @type {Asset}
     */
    set meshAsset(arg: Asset);
    /**
     * Gets the {@link Asset} used to set the mesh.
     *
     * @type {Asset}
     */
    get meshAsset(): Asset;
    /**
     * Sets the Render {@link Asset} used to set the mesh.
     *
     * @type {Asset}
     */
    set renderAsset(arg: Asset);
    /**
     * Gets the Render {@link Asset} used to set the mesh.
     *
     * @type {Asset}
     */
    get renderAsset(): Asset;
    /**
     * Sets the particle orientation mode. Can be:
     *
     * - {@link PARTICLEORIENTATION_SCREEN}: Particles are facing camera.
     * - {@link PARTICLEORIENTATION_WORLD}: User defined world space normal (particleNormal) to set
     * planes orientation.
     * - {@link PARTICLEORIENTATION_EMITTER}: Similar to previous, but the normal is affected by
     * emitter (entity) transformation.
     *
     * @type {number}
     */
    set orientation(arg: number);
    /**
     * Gets the particle orientation mode.
     *
     * @type {number}
     */
    get orientation(): number;
    /**
     * Sets the particle normal. This only applies to particle system with the orientation modes
     * `PARTICLEORIENTATION_WORLD` and `PARTICLEORIENTATION_EMITTER`.
     *
     * @type {Vec3}
     */
    set particleNormal(arg: Vec3);
    /**
     * Gets the particle normal.
     *
     * @type {Vec3}
     */
    get particleNormal(): Vec3;
    /**
     * Sets the local space velocity graph.
     *
     * @type {CurveSet}
     */
    set localVelocityGraph(arg: CurveSet);
    /**
     * Gets the local space velocity graph.
     *
     * @type {CurveSet}
     */
    get localVelocityGraph(): CurveSet;
    /**
     * Sets the second velocity graph. If not null, particles pick random values between
     * localVelocityGraph and localVelocityGraph2.
     *
     * @type {CurveSet}
     */
    set localVelocityGraph2(arg: CurveSet);
    /**
     * Gets the second velocity graph.
     *
     * @type {CurveSet}
     */
    get localVelocityGraph2(): CurveSet;
    /**
     * Sets the world space velocity graph.
     *
     * @type {CurveSet}
     */
    set velocityGraph(arg: CurveSet);
    /**
     * Gets the world space velocity graph.
     *
     * @type {CurveSet}
     */
    get velocityGraph(): CurveSet;
    /**
     * Sets the second world space velocity graph. If not null, particles pick random values
     * between velocityGraph and velocityGraph2.
     *
     * @type {CurveSet}
     */
    set velocityGraph2(arg: CurveSet);
    /**
     * Gets the second world space velocity graph.
     *
     * @type {CurveSet}
     */
    get velocityGraph2(): CurveSet;
    /**
     * Sets the rotation speed graph.
     *
     * @type {Curve}
     */
    set rotationSpeedGraph(arg: Curve);
    /**
     * Gets the rotation speed graph.
     *
     * @type {Curve}
     */
    get rotationSpeedGraph(): Curve;
    /**
     * Sets the second rotation speed graph. If not null, particles pick random values between
     * rotationSpeedGraph and rotationSpeedGraph2.
     *
     * @type {Curve}
     */
    set rotationSpeedGraph2(arg: Curve);
    /**
     * Gets the second rotation speed graph.
     *
     * @type {Curve}
     */
    get rotationSpeedGraph2(): Curve;
    /**
     * Sets the radial speed graph. Velocity vector points from emitter origin to particle position.
     *
     * @type {Curve}
     */
    set radialSpeedGraph(arg: Curve);
    /**
     * Gets the radial speed graph.
     *
     * @type {Curve}
     */
    get radialSpeedGraph(): Curve;
    /**
     * Sets the second radial speed graph. If not null, particles pick random values between
     * radialSpeedGraph and radialSpeedGraph2. Velocity vector points from emitter origin to
     * particle position.
     *
     * @type {Curve}
     */
    set radialSpeedGraph2(arg: Curve);
    /**
     * Gets the second radial speed graph.
     *
     * @type {Curve}
     */
    get radialSpeedGraph2(): Curve;
    /**
     * Sets the scale graph.
     *
     * @type {Curve}
     */
    set scaleGraph(arg: Curve);
    /**
     * Gets the scale graph.
     *
     * @type {Curve}
     */
    get scaleGraph(): Curve;
    /**
     * Sets the second scale graph. If not null, particles pick random values between `scaleGraph`
     * and `scaleGraph2`.
     *
     * @type {Curve}
     */
    set scaleGraph2(arg: Curve);
    /**
     * Gets the second scale graph.
     *
     * @type {Curve}
     */
    get scaleGraph2(): Curve;
    /**
     * Sets the color graph.
     *
     * @type {CurveSet}
     */
    set colorGraph(arg: CurveSet);
    /**
     * Gets the color graph.
     *
     * @type {CurveSet}
     */
    get colorGraph(): CurveSet;
    /**
     * Sets the second color graph. If not null, particles pick random values between `colorGraph`
     * and `colorGraph2`.
     *
     * @type {CurveSet}
     */
    set colorGraph2(arg: CurveSet);
    /**
     * Gets the second color graph.
     *
     * @type {CurveSet}
     */
    get colorGraph2(): CurveSet;
    /**
     * Sets the alpha graph.
     *
     * @type {Curve}
     */
    set alphaGraph(arg: Curve);
    /**
     * Gets the alpha graph.
     *
     * @type {Curve}
     */
    get alphaGraph(): Curve;
    /**
     * Sets the second alpha graph. If not null, particles pick random values between `alphaGraph`
     * and `alphaGraph2`.
     *
     * @type {Curve}
     */
    set alphaGraph2(arg: Curve);
    /**
     * Gets the second alpha graph.
     *
     * @type {Curve}
     */
    get alphaGraph2(): Curve;
    /**
     * Sets the color map texture to apply to all particles in the system. If no texture is
     * assigned, a default spot texture is used.
     *
     * @type {Texture}
     */
    set colorMap(arg: Texture);
    /**
     * Gets the color map texture to apply to all particles in the system.
     *
     * @type {Texture}
     */
    get colorMap(): Texture;
    /**
     * Sets the normal map texture to apply to all particles in the system. If no texture is
     * assigned, an approximate spherical normal is calculated for each vertex.
     *
     * @type {Texture}
     */
    set normalMap(arg: Texture);
    /**
     * Gets the normal map texture to apply to all particles in the system.
     *
     * @type {Texture}
     */
    get normalMap(): Texture;
    /**
     * Sets the number of horizontal tiles in the sprite sheet.
     *
     * @type {number}
     */
    set animTilesX(arg: number);
    /**
     * Gets the number of horizontal tiles in the sprite sheet.
     *
     * @type {number}
     */
    get animTilesX(): number;
    /**
     * Sets the number of vertical tiles in the sprite sheet.
     *
     * @type {number}
     */
    set animTilesY(arg: number);
    /**
     * Gets the number of vertical tiles in the sprite sheet.
     *
     * @type {number}
     */
    get animTilesY(): number;
    /**
     * Sets the sprite sheet frame that the animation should begin playing from. Indexed from the
     * start of the current animation.
     *
     * @type {number}
     */
    set animStartFrame(arg: number);
    /**
     * Gets the sprite sheet frame that the animation should begin playing from.
     *
     * @type {number}
     */
    get animStartFrame(): number;
    /**
     * Sets the number of sprite sheet frames in the current sprite sheet animation. The number of
     * animations multiplied by number of frames should be a value less than `animTilesX`
     * multiplied by `animTilesY`.
     *
     * @type {number}
     */
    set animNumFrames(arg: number);
    /**
     * Gets the number of sprite sheet frames in the current sprite sheet animation.
     *
     * @type {number}
     */
    get animNumFrames(): number;
    /**
     * Sets the number of sprite sheet animations contained within the current sprite sheet. The
     * number of animations multiplied by number of frames should be a value less than `animTilesX`
     * multiplied by `animTilesY`.
     *
     * @type {number}
     */
    set animNumAnimations(arg: number);
    /**
     * Gets the number of sprite sheet animations contained within the current sprite sheet.
     *
     * @type {number}
     */
    get animNumAnimations(): number;
    /**
     * Sets the index of the animation to play. When `animNumAnimations` is greater than 1, the
     * sprite sheet animation index determines which animation the particle system should play.
     *
     * @type {number}
     */
    set animIndex(arg: number);
    /**
     * Gets the index of the animation to play.
     *
     * @type {number}
     */
    get animIndex(): number;
    /**
     * Sets whether each particle emitted by the system will play a random animation from the
     * sprite sheet, up to `animNumAnimations`.
     *
     * @type {boolean}
     */
    set randomizeAnimIndex(arg: boolean);
    /**
     * Gets whether each particle emitted by the system will play a random animation from the
     * sprite sheet, up to `animNumAnimations`.
     *
     * @type {boolean}
     */
    get randomizeAnimIndex(): boolean;
    /**
     * Sets the sprite sheet animation speed. 1 = particle lifetime, 2 = double the particle
     * lifetime, etc.
     *
     * @type {number}
     */
    set animSpeed(arg: number);
    /**
     * Gets the sprite sheet animation speed.
     *
     * @type {number}
     */
    get animSpeed(): number;
    /**
     * Sets whether the sprite sheet animation plays once or loops continuously.
     *
     * @type {boolean}
     */
    set animLoop(arg: boolean);
    /**
     * Gets whether the sprite sheet animation plays once or loops continuously.
     *
     * @type {boolean}
     */
    get animLoop(): boolean;
    /**
     * Sets the array of layer IDs ({@link Layer#id}) to which this particle system should belong.
     * Don't push/pop/splice or modify this array. If you want to change it, set a new one instead.
     *
     * @type {number[]}
     */
    set layers(arg: number[]);
    /**
     * Gets the array of layer IDs ({@link Layer#id}) to which this particle system belongs.
     *
     * @type {number[]}
     */
    get layers(): number[];
    /**
     * Sets the draw order of the component. A higher value means that the component will be
     * rendered on top of other components in the same layer. This is not used unless the layer's
     * sort order is set to {@link SORTMODE_MANUAL}.
     *
     * @type {number}
     */
    set drawOrder(drawOrder: number);
    /**
     * Gets the draw order of the component.
     *
     * @type {number}
     */
    get drawOrder(): number;
    /** @ignore */
    _setValue(name: any, value: any): void;
    addMeshInstanceToLayers(): void;
    removeMeshInstanceFromLayers(): void;
    onSetLayers(name: any, oldValue: any, newValue: any): void;
    onLayersChanged(oldComp: any, newComp: any): void;
    onLayerAdded(layer: any): void;
    onLayerRemoved(layer: any): void;
    _bindColorMapAsset(asset: any): void;
    _unbindColorMapAsset(asset: any): void;
    _onColorMapAssetLoad(asset: any): void;
    _onColorMapAssetUnload(asset: any): void;
    _onColorMapAssetRemove(asset: any): void;
    _onColorMapAssetChange(asset: any): void;
    onSetColorMapAsset(name: any, oldValue: any, newValue: any): void;
    _bindNormalMapAsset(asset: any): void;
    _unbindNormalMapAsset(asset: any): void;
    _onNormalMapAssetLoad(asset: any): void;
    _onNormalMapAssetUnload(asset: any): void;
    _onNormalMapAssetRemove(asset: any): void;
    _onNormalMapAssetChange(asset: any): void;
    onSetNormalMapAsset(name: any, oldValue: any, newValue: any): void;
    _bindMeshAsset(asset: any): void;
    _unbindMeshAsset(asset: any): void;
    _onMeshAssetLoad(asset: any): void;
    _onMeshAssetUnload(asset: any): void;
    _onMeshAssetRemove(asset: any): void;
    _onMeshAssetChange(asset: any): void;
    onSetMeshAsset(name: any, oldValue: any, newValue: any): void;
    onSetMesh(name: any, oldValue: any, newValue: any): void;
    _onMeshChanged(mesh: any): void;
    onSetRenderAsset(name: any, oldValue: any, newValue: any): void;
    _bindRenderAsset(asset: any): void;
    _unbindRenderAsset(asset: any): void;
    _onRenderAssetLoad(asset: any): void;
    _onRenderAssetUnload(asset: any): void;
    _onRenderAssetRemove(asset: any): void;
    _onRenderChanged(render: any): void;
    _onRenderSetMeshes(meshes: any): void;
    onSetLoop(name: any, oldValue: any, newValue: any): void;
    onSetBlendType(name: any, oldValue: any, newValue: any): void;
    _requestDepth(): void;
    _releaseDepth(): void;
    onSetDepthSoftening(name: any, oldValue: any, newValue: any): void;
    onSetSimpleProperty(name: any, oldValue: any, newValue: any): void;
    onSetComplexProperty(name: any, oldValue: any, newValue: any): void;
    onSetGraphProperty(name: any, oldValue: any, newValue: any): void;
    emitter: ParticleEmitter;
    onBeforeRemove(): void;
    /**
     * Resets particle state, doesn't affect playing.
     */
    reset(): void;
    /**
     * Disables the emission of new particles, lets existing to finish their simulation.
     */
    stop(): void;
    /**
     * Freezes the simulation.
     */
    pause(): void;
    /**
     * Unfreezes the simulation.
     */
    unpause(): void;
    /**
     * Enables/unfreezes the simulation.
     */
    play(): void;
    /**
     * Checks if simulation is in progress.
     *
     * @returns {boolean} True if the particle system is currently playing and false otherwise.
     */
    isPlaying(): boolean;
    /**
     * Called by the Editor when the component is selected, to allow custom in Editor behavior.
     *
     * @private
     */
    private setInTools;
    /**
     * Rebuilds all data used by this particle system.
     *
     * @private
     */
    private rebuild;
}

/**
 * Holds the data for a {@link RenderComponent}. Used as the `DataType` of
 * {@link RenderComponentSystem}.
 *
 * @ignore
 */
declare class RenderComponentData {
    /** Whether the owning render component is enabled. */
    enabled: boolean;
}

/**
 * Allows an Entity to render a mesh or a primitive shape like a box, capsule, sphere, cylinder,
 * cone etc.
 *
 * @category Graphics
 */
declare class RenderComponentSystem extends ComponentSystem {
    /** Identifier of this component system. */
    id: string;
    /** The Component class managed by this system ({@link RenderComponent}). */
    ComponentType: typeof RenderComponent;
    /** The data class used to hold component state ({@link RenderComponentData}). */
    DataType: typeof RenderComponentData;
    /** Names of the serialized component properties. */
    schema: string[];
    /** Default {@link StandardMaterial} used by render components. */
    defaultMaterial: StandardMaterial;
    /** Initializes a component from the supplied data and property list. @ignore */
    initializeComponentData(component: any, _data: any, properties: any): void;
    /** Creates a copy of the entity's render component for a cloned entity. @ignore */
    cloneComponent(entity: any, clone: any): Component;
    /** Called when a render component is removed from an entity. @ignore */
    onRemove(entity: any, component: any): void;
}

/**
 * @import { BoundingBox } from '../../../core/shape/bounding-box.js'
 * @import { Entity } from '../../entity.js'
 * @import { EventHandle } from '../../../core/event-handle.js'
 * @import { Material } from '../../../scene/materials/material.js'
 * @import { RenderComponentSystem } from './system.js'
 */
/**
 * The RenderComponent enables an {@link Entity} to render 3D meshes. The {@link type} property can
 * be set to one of several predefined shapes (such as `box`, `sphere`, `cone` and so on).
 * Alternatively, the component can be configured to manage an arbitrary array of
 * {@link MeshInstance}s. These can either be created programmatically or loaded from an
 * {@link Asset}.
 *
 * The {@link MeshInstance}s managed by this component are positioned, rotated, and scaled in world
 * space by the world transformation matrix of the owner {@link Entity}. This world matrix is
 * derived by combining the entity's local transformation (position, rotation, and scale) with the
 * world transformation matrix of its parent entity in the scene hierarchy.
 *
 * You should never need to use the RenderComponent constructor directly. To add a RenderComponent
 * to an Entity, use {@link Entity#addComponent}:
 *
 * ```javascript
 * const entity = new pc.Entity();
 * entity.addComponent('render', {
 *     type: 'box'
 * });
 * ```
 *
 * Once the RenderComponent is added to the entity, you can access it via the {@link Entity#render}
 * property:
 *
 * ```javascript
 * entity.render.type = 'capsule';  // Set the render component's type
 *
 * console.log(entity.render.type); // Get the render component's type and print it
 * ```
 *
 * Relevant Engine API examples:
 *
 * - [Loading Render Assets](https://playcanvas.github.io/#/graphics/render-asset)
 * - [Primitive Shapes](https://playcanvas.github.io/#/graphics/shapes)
 * - [Spinning Cube](https://playcanvas.github.io/#/misc/hello-world)
 *
 * @category Graphics
 */
declare class RenderComponent extends Component {
    /**
     * Create a new RenderComponent.
     *
     * @param {RenderComponentSystem} system - The ComponentSystem that created this Component.
     * @param {Entity} entity - The Entity that this Component is attached to.
     */
    constructor(system: RenderComponentSystem, entity: Entity);
    /**
     * @type {'asset'|'box'|'capsule'|'cone'|'cylinder'|'plane'|'sphere'|'torus'}
     * @private
     */
    private _type;
    /** @private */
    private _castShadows;
    /** @private */
    private _receiveShadows;
    /** @private */
    private _castShadowsLightmap;
    /** @private */
    private _lightmapped;
    /** @private */
    private _lightmapSizeMultiplier;
    /**
     * Mark meshes as non-movable (optimization).
     *
     * @type {boolean}
     */
    isStatic: boolean;
    /** @private */
    private _batchGroupId;
    /** @private */
    private _layers;
    /** @private */
    private _renderStyle;
    /**
     * @type {MeshInstance[]}
     * @private
     */
    private _meshInstances;
    /**
     * @type {BoundingBox|null}
     * @private
     */
    private _customAabb;
    /**
     * Used by lightmapper.
     *
     * @type {{x: number, y: number, z: number, uv: number}|null}
     * @ignore
     */
    _area: {
        x: number;
        y: number;
        z: number;
        uv: number;
    } | null;
    /**
     * @type {AssetReference}
     * @private
     */
    private _assetReference;
    /**
     * @type {AssetReference[]}
     * @private
     */
    private _materialReferences;
    /**
     * Material used to render meshes other than asset type. It gets priority when set to
     * something else than defaultMaterial, otherwise materialAssets[0] is used.
     *
     * @type {Material}
     * @private
     */
    private _material;
    /**
     * A reference to the entity to be used as the root bone for any skinned meshes that
     * are rendered by this component.
     *
     * @type {Entity|null}
     * @private
     */
    private _rootBone;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayersChanged;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayerAdded;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayerRemoved;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtSetMeshes;
    /**
     * Sets the render style of this component's {@link MeshInstance}s. Can be:
     *
     * - {@link RENDERSTYLE_SOLID}
     * - {@link RENDERSTYLE_WIREFRAME}
     * - {@link RENDERSTYLE_POINTS}
     *
     * Defaults to {@link RENDERSTYLE_SOLID}.
     *
     * @type {number}
     */
    set renderStyle(renderStyle: number);
    /**
     * Gets the render style of this component's {@link MeshInstance}s.
     *
     * @type {number}
     */
    get renderStyle(): number;
    /**
     * Sets the custom object space bounding box that is used for visibility culling of attached
     * mesh instances. This is an optimization, allowing an oversized bounding box to be specified
     * for skinned characters in order to avoid per frame bounding box computations based on bone
     * positions.
     *
     * @type {BoundingBox|null}
     */
    set customAabb(value: BoundingBox | null);
    /**
     * Gets the custom object space bounding box that is used for visibility culling of attached
     * mesh instances.
     *
     * @type {BoundingBox|null}
     */
    get customAabb(): BoundingBox | null;
    /**
     * Sets the type of the component, determining the source of the geometry to be rendered.
     * The geometry, whether it's a primitive shape or originates from an asset, is rendered
     * using the owning entity's final world transform. This world transform is calculated by
     * concatenating (multiplying) the local transforms (position, rotation, scale) of the
     * entity and all its ancestors in the scene hierarchy. This process positions, orientates,
     * and scales the geometry in world space.
     *
     * Can be one of the following values:
     *
     * - **"asset"**: Renders geometry defined in an {@link Asset} of type `render`. This asset,
     *   assigned to the {@link asset} property, contains one or more {@link MeshInstance}s.
     *   Alternatively, {@link meshInstances} can be set programmatically.
     * - **"box"**: A unit cube (sides of length 1) centered at the local space origin.
     * - **"capsule"**: A shape composed of a cylinder and two hemispherical caps that is aligned
     *   with the local Y-axis. It is centered at the local space origin and has an unscaled height
     *   of 2 and a radius of 0.5.
     * - **"cone"**: A cone aligned with the local Y-axis. It is centered at the local space
     *   origin, with its base in the local XZ plane at Y = -0.5 and its tip at Y = +0.5. It has
     *   an unscaled height of 1 and a base radius of 0.5.
     * - **"cylinder"**: A cylinder aligned with the local Y-axis. It is centered at the local
     *   space origin with an unscaled height of 1 and a radius of 0.5.
     * - **"plane"**: A flat plane in the local XZ plane at Y = 0 (normal along +Y). It is
     *   centered at the local space origin with unscaled dimensions of 1x1 units along local X and
     *   Z axes.
     * - **"sphere"**: A sphere with a radius of 0.5. It is centered at the local space origin and
     *   has poles at Y = -0.5 and Y = +0.5.
     * - **"torus"**: A doughnut shape lying in the local XZ plane at Y = 0. It is centered at
     *   the local space origin with a tube radius of 0.2 and a ring radius of 0.3.
     *
     * @type {'asset'|'box'|'capsule'|'cone'|'cylinder'|'plane'|'sphere'|'torus'}
     */
    set type(value: "asset" | "box" | "capsule" | "cone" | "cylinder" | "plane" | "sphere" | "torus");
    /**
     * Gets the type of the component.
     *
     * @type {'asset'|'box'|'capsule'|'cone'|'cylinder'|'plane'|'sphere'|'torus'}
     */
    get type(): "asset" | "box" | "capsule" | "cone" | "cylinder" | "plane" | "sphere" | "torus";
    /**
     * Sets the array of meshInstances contained in the component.
     *
     * @type {MeshInstance[]}
     */
    set meshInstances(value: MeshInstance[]);
    /**
     * Gets the array of meshInstances contained in the component.
     *
     * @type {MeshInstance[]}
     */
    get meshInstances(): MeshInstance[];
    /**
     * Sets whether the component is affected by the runtime lightmapper. If true, the meshes will
     * be lightmapped after using lightmapper.bake().
     *
     * @type {boolean}
     */
    set lightmapped(value: boolean);
    /**
     * Gets whether the component is affected by the runtime lightmapper.
     *
     * @type {boolean}
     */
    get lightmapped(): boolean;
    /**
     * Sets whether attached meshes will cast shadows for lights that have shadow casting enabled.
     *
     * @type {boolean}
     */
    set castShadows(value: boolean);
    /**
     * Gets whether attached meshes will cast shadows for lights that have shadow casting enabled.
     *
     * @type {boolean}
     */
    get castShadows(): boolean;
    /**
     * Sets whether shadows will be cast on attached meshes.
     *
     * @type {boolean}
     */
    set receiveShadows(value: boolean);
    /**
     * Gets whether shadows will be cast on attached meshes.
     *
     * @type {boolean}
     */
    get receiveShadows(): boolean;
    /**
     * Sets whether mesh instances will cast shadows when rendering lightmaps.
     *
     * @type {boolean}
     */
    set castShadowsLightmap(value: boolean);
    /**
     * Gets whether mesh instances will cast shadows when rendering lightmaps.
     *
     * @type {boolean}
     */
    get castShadowsLightmap(): boolean;
    /**
     * Sets the lightmap resolution multiplier.
     *
     * @type {number}
     */
    set lightmapSizeMultiplier(value: number);
    /**
     * Gets the lightmap resolution multiplier.
     *
     * @type {number}
     */
    get lightmapSizeMultiplier(): number;
    /**
     * Sets the array of layer IDs ({@link Layer#id}) to which the mesh instances belong. Don't
     * push, pop, splice or modify this array. If you want to change it, set a new one instead.
     *
     * @type {number[]}
     */
    set layers(value: number[]);
    /**
     * Gets the array of layer IDs ({@link Layer#id}) to which the mesh instances belong.
     *
     * @type {number[]}
     */
    get layers(): number[];
    /**
     * Sets the batch group for the mesh instances in this component (see {@link BatchGroup}).
     * Default is -1 (no group).
     *
     * @type {number}
     */
    set batchGroupId(value: number);
    /**
     * Gets the batch group for the mesh instances in this component (see {@link BatchGroup}).
     *
     * @type {number}
     */
    get batchGroupId(): number;
    /**
     * Sets the material {@link Material} that will be used to render the component. The material
     * is ignored for renders of type 'asset'.
     *
     * @type {Material}
     */
    set material(value: Material);
    /**
     * Gets the material {@link Material} that will be used to render the component.
     *
     * @type {Material}
     */
    get material(): Material;
    /**
     * Sets the material assets that will be used to render the component. Each material
     * corresponds to the respective mesh instance.
     *
     * @type {Asset[]|number[]}
     */
    set materialAssets(value: Asset[] | number[]);
    /**
     * Gets the material assets that will be used to render the component.
     *
     * @type {Asset[]|number[]}
     */
    get materialAssets(): Asset[] | number[];
    /**
     * Sets the render asset (or asset id) for the render component. This only applies to render components with
     * type 'asset'.
     *
     * @type {Asset|number}
     */
    set asset(value: Asset | number);
    /**
     * Gets the render asset id for the render component.
     *
     * @type {number}
     */
    get asset(): number;
    /**
     * Assign asset id to the component, without updating the component with the new asset.
     * This can be used to assign the asset id to already fully created component.
     *
     * @param {Asset|number} asset - The render asset or asset id to assign.
     * @ignore
     */
    assignAsset(asset: Asset | number): void;
    /**
     * Sets the root bone entity (or entity guid) for the render component.
     *
     * @type {Entity|string|null}
     */
    set rootBone(value: Entity | string | null);
    /**
     * Gets the root bone entity for the render component.
     *
     * @type {Entity|null}
     */
    get rootBone(): Entity | null;
    /** @private */
    private destroyMeshInstances;
    /** @private */
    private addToLayers;
    removeFromLayers(): void;
    /** @private */
    private onRemoveChild;
    /** @private */
    private onInsertChild;
    onRemove(): void;
    materialAsset: any;
    /** @ignore */
    onLayersChanged(oldComp: any, newComp: any): void;
    /** @ignore */
    onLayerAdded(layer: any): void;
    /** @ignore */
    onLayerRemoved(layer: any): void;
    /**
     * Stop rendering {@link MeshInstance}s without removing them from the scene hierarchy. This
     * method sets the {@link MeshInstance#visible} property of every MeshInstance to false. Note,
     * this does not remove the mesh instances from the scene hierarchy or draw call list. So the
     * render component still incurs some CPU overhead.
     */
    hide(): void;
    /**
     * Enable rendering of the component's {@link MeshInstance}s if hidden using
     * {@link RenderComponent#hide}. This method sets the {@link MeshInstance#visible} property on
     * all mesh instances to true.
     */
    show(): void;
    /** @ignore */
    _onRenderAssetAdded(): void;
    /** @ignore */
    _onRenderAssetLoad(): void;
    /** @ignore */
    _onSetMeshes(meshes: any): void;
    /** @ignore */
    _clearSkinInstances(): void;
    /** @ignore */
    _cloneSkinInstances(): void;
    /** @ignore */
    _cloneMeshes(meshes: any): void;
    /** @ignore */
    _onRenderAssetUnload(): void;
    /** @ignore */
    _onRenderAssetRemove(): void;
    /** @ignore */
    _onMaterialAdded(index: any, component: any, asset: any): void;
    /** @ignore */
    _updateMainMaterial(index: any, material: any): void;
    /** @ignore */
    _onMaterialLoad(index: any, component: any, asset: any): void;
    /** @ignore */
    _onMaterialRemove(index: any, component: any, asset: any): void;
    /** @ignore */
    _onMaterialUnload(index: any, component: any, asset: any): void;
    /** @ignore */
    resolveDuplicatedEntityReferenceProperties(oldRender: any, duplicatedIdsMap: any): void;
}

/**
 * The RigidBodyComponent, when combined with a {@link CollisionComponent}, allows your entities
 * to be simulated using realistic physics. A RigidBodyComponent will fall under gravity and
 * collide with other rigid bodies. Using scripts, you can apply forces and impulses to rigid
 * bodies.
 *
 * You should never need to use the RigidBodyComponent constructor directly. To add an
 * RigidBodyComponent to an {@link Entity}, use {@link Entity#addComponent}:
 *
 * ```javascript
 * // Create a static 1x1x1 box-shaped rigid body
 * const entity = new pc.Entity();
 * entity.addComponent('collision'); // Without options, this defaults to a 1x1x1 box shape
 * entity.addComponent('rigidbody'); // Without options, this defaults to a 'static' body
 * ```
 *
 * To create a dynamic sphere with mass of 10, do:
 *
 * ```javascript
 * const entity = new pc.Entity();
 * entity.addComponent('collision', {
 *     type: 'sphere'
 * });
 * entity.addComponent('rigidbody', {
 *     type: 'dynamic',
 *     mass: 10
 * });
 * ```
 *
 * Once the RigidBodyComponent is added to the entity, you can access it via the
 * {@link Entity#rigidbody} property:
 *
 * ```javascript
 * entity.rigidbody.mass = 10;
 * console.log(entity.rigidbody.mass);
 * ```
 *
 * Relevant Engine API examples:
 *
 * - [Falling shapes](https://playcanvas.github.io/#/physics/falling-shapes)
 * - [Vehicle physics](https://playcanvas.github.io/#/physics/vehicle)
 *
 * @hideconstructor
 * @category Physics
 */
declare class RigidBodyComponent extends Component {
    /**
     * Fired when a contact occurs between two rigid bodies. The handler is passed a
     * {@link ContactResult} object containing details of the contact between the two rigid bodies.
     *
     * @event
     * @example
     * entity.rigidbody.on('contact', (result) => {
     *    console.log(`Contact between ${entity.name} and ${result.other.name}`);
     * });
     */
    static EVENT_CONTACT: string;
    /**
     * Fired when two rigid bodies start touching. The handler is passed a {@link ContactResult}
     * object containing details of the contact between the two rigid bodies.
     *
     * @event
     * @example
     * entity.rigidbody.on('collisionstart', (result) => {
     *     console.log(`Collision started between ${entity.name} and ${result.other.name}`);
     * });
     */
    static EVENT_COLLISIONSTART: string;
    /**
     * Fired when two rigid bodies stop touching. The handler is passed an {@link Entity} that
     * represents the other rigid body involved in the collision.
     *
     * @event
     * @example
     * entity.rigidbody.on('collisionend', (other) => {
     *     console.log(`${entity.name} stopped touching ${other.name}`);
     * });
     */
    static EVENT_COLLISIONEND: string;
    /**
     * Fired when a rigid body enters a trigger volume. The handler is passed an {@link Entity}
     * representing the trigger volume that this rigid body entered.
     *
     * @event
     * @example
     * entity.rigidbody.on('triggerenter', (trigger) => {
     *     console.log(`Entity ${entity.name} entered trigger volume ${trigger.name}`);
     * });
     */
    static EVENT_TRIGGERENTER: string;
    /**
     * Fired when a rigid body exits a trigger volume. The handler is passed an {@link Entity}
     * representing the trigger volume that this rigid body exited.
     *
     * @event
     * @example
     * entity.rigidbody.on('triggerleave', (trigger) => {
     *     console.log(`Entity ${entity.name} exited trigger volume ${trigger.name}`);
     * });
     */
    static EVENT_TRIGGERLEAVE: string;
    /** @ignore */
    static onLibraryLoaded(): void;
    /** @ignore */
    static onAppDestroy(): void;
    /** @private */
    private _angularDamping;
    /** @private */
    private _angularFactor;
    /** @private */
    private _angularVelocity;
    /** @private */
    private _body;
    /** @private */
    private _friction;
    /** @private */
    private _group;
    /** @private */
    private _linearDamping;
    /** @private */
    private _linearFactor;
    /** @private */
    private _linearVelocity;
    /** @private */
    private _mask;
    /** @private */
    private _mass;
    /** @private */
    private _restitution;
    /** @private */
    private _rollingFriction;
    /** @private */
    private _simulationEnabled;
    /**
     * @type {BODYTYPE_DYNAMIC|BODYTYPE_KINEMATIC|BODYTYPE_STATIC}
     * @private
     */
    private _type;
    /**
     * Sets the rate at which a body loses angular velocity over time.
     *
     * @type {number}
     */
    set angularDamping(damping: number);
    /**
     * Gets the rate at which a body loses angular velocity over time.
     *
     * @type {number}
     */
    get angularDamping(): number;
    /**
     * Sets the scaling factor for angular movement of the body in each axis. Only valid for rigid
     * bodies of type {@link BODYTYPE_DYNAMIC}. Defaults to 1 in all axes (body can freely rotate).
     *
     * @type {Vec3}
     */
    set angularFactor(factor: Vec3);
    /**
     * Gets the scaling factor for angular movement of the body in each axis.
     *
     * @type {Vec3}
     */
    get angularFactor(): Vec3;
    /**
     * Sets the rotational speed of the body around each world axis.
     *
     * @type {Vec3}
     */
    set angularVelocity(velocity: Vec3);
    /**
     * Gets the rotational speed of the body around each world axis.
     *
     * @type {Vec3}
     */
    get angularVelocity(): Vec3;
    /**
     * Sets the underlying physics engine rigid body object. NOTE(review): typed `any` —
     * presumably an Ammo.js btRigidBody (Ammo types are referenced by _getEntityTransform);
     * confirm before use outside the engine.
     *
     * @ignore
     */
    set body(body: any);
    /**
     * Gets the underlying physics engine rigid body object.
     *
     * @ignore
     */
    get body(): any;
    /**
     * Sets the friction value used when contacts occur between two bodies. A higher value indicates
     * more friction. Should be set in the range 0 to 1. Defaults to 0.5.
     *
     * @type {number}
     */
    set friction(friction: number);
    /**
     * Gets the friction value used when contacts occur between two bodies.
     *
     * @type {number}
     */
    get friction(): number;
    /**
     * Sets the collision group this body belongs to. Combine the group and the mask to prevent bodies
     * colliding with each other. Defaults to 1.
     *
     * @type {number}
     */
    set group(group: number);
    /**
     * Gets the collision group this body belongs to.
     *
     * @type {number}
     */
    get group(): number;
    /**
     * Sets the rate at which a body loses linear velocity over time. Defaults to 0.
     *
     * @type {number}
     */
    set linearDamping(damping: number);
    /**
     * Gets the rate at which a body loses linear velocity over time.
     *
     * @type {number}
     */
    get linearDamping(): number;
    /**
     * Sets the scaling factor for linear movement of the body in each axis. Only valid for rigid
     * bodies of type {@link BODYTYPE_DYNAMIC}. Defaults to 1 in all axes (body can freely move).
     *
     * @type {Vec3}
     */
    set linearFactor(factor: Vec3);
    /**
     * Gets the scaling factor for linear movement of the body in each axis.
     *
     * @type {Vec3}
     */
    get linearFactor(): Vec3;
    /**
     * Sets the speed of the body in a given direction.
     *
     * @type {Vec3}
     */
    set linearVelocity(velocity: Vec3);
    /**
     * Gets the speed of the body in a given direction.
     *
     * @type {Vec3}
     */
    get linearVelocity(): Vec3;
    /**
     * Sets the collision mask, which determines the groups this body collides with. It is a bit
     * field of 16 bits, the first 8 bits are reserved for engine use. Defaults to 65535.
     *
     * @type {number}
     */
    set mask(mask: number);
    /**
     * Gets the collision mask, which determines the groups this body collides with.
     *
     * @type {number}
     */
    get mask(): number;
    /**
     * Sets the mass of the body. This is only relevant for {@link BODYTYPE_DYNAMIC} bodies, other
     * types have infinite mass. Defaults to 1.
     *
     * @type {number}
     */
    set mass(mass: number);
    /**
     * Gets the mass of the body.
     *
     * @type {number}
     */
    get mass(): number;
    /**
     * Sets the value that controls the amount of energy lost when two rigid bodies collide. The
     * calculation multiplies the restitution values for both colliding bodies. A multiplied value
     * of 0 means that all energy is lost in the collision while a value of 1 means that no energy
     * is lost. Should be set in the range 0 to 1. Defaults to 0.
     *
     * @type {number}
     */
    set restitution(restitution: number);
    /**
     * Gets the value that controls the amount of energy lost when two rigid bodies collide.
     *
     * @type {number}
     */
    get restitution(): number;
    /**
     * Sets the torsional friction orthogonal to the contact point. Defaults to 0.
     *
     * @type {number}
     */
    set rollingFriction(friction: number);
    /**
     * Gets the torsional friction orthogonal to the contact point.
     *
     * @type {number}
     */
    get rollingFriction(): number;
    /**
     * Sets the rigid body type, which determines how the body is simulated. Can be:
     *
     * - {@link BODYTYPE_STATIC}: infinite mass and cannot move.
     * - {@link BODYTYPE_DYNAMIC}: simulated according to applied forces.
     * - {@link BODYTYPE_KINEMATIC}: infinite mass and does not respond to forces (can only be
     * moved by setting the position and rotation of the component's {@link Entity}).
     *
     * Defaults to {@link BODYTYPE_STATIC}.
     *
     * @type {BODYTYPE_DYNAMIC|BODYTYPE_KINEMATIC|BODYTYPE_STATIC}
     */
    set type(type: "dynamic" | "kinematic" | "static");
    /**
     * Gets the rigid body type, which determines how the body is simulated.
     *
     * @type {BODYTYPE_DYNAMIC|BODYTYPE_KINEMATIC|BODYTYPE_STATIC}
     */
    get type(): "dynamic" | "kinematic" | "static";
    /**
     * If the Entity has a Collision shape attached then create a rigid body using this shape. This
     * method destroys the existing body.
     *
     * @private
     */
    private createBody;
    /**
     * Returns true if the rigid body is currently actively being simulated. I.e. Not 'sleeping'.
     *
     * @returns {boolean} True if the body is active.
     */
    isActive(): boolean;
    /**
     * Forcibly activate the rigid body simulation. Only affects rigid bodies of type
     * {@link BODYTYPE_DYNAMIC}.
     */
    activate(): void;
    /**
     * Add a body to the simulation.
     *
     * @ignore
     */
    enableSimulation(): void;
    /**
     * Remove a body from the simulation.
     *
     * @ignore
     */
    disableSimulation(): void;
    /**
     * Apply a force to the body at a point. By default, the force is applied at the origin of the
     * body. However, the force can be applied at an offset from this point by specifying a world
     * space vector from the body's origin to the point of application.
     *
     * @overload
     * @param {number} x - X-component of the force in world space.
     * @param {number} y - Y-component of the force in world space.
     * @param {number} z - Z-component of the force in world space.
     * @param {number} [px] - X-component of the relative point at which to apply the force in
     * world space.
     * @param {number} [py] - Y-component of the relative point at which to apply the force in
     * world space.
     * @param {number} [pz] - Z-component of the relative point at which to apply the force in
     * world space.
     * @returns {void}
     * @example
     * // Apply an approximation of gravity at the body's center
     * this.entity.rigidbody.applyForce(0, -10, 0);
     * @example
     * // Apply an approximation of gravity at 1 unit down the world Z from the center of the body
     * this.entity.rigidbody.applyForce(0, -10, 0, 0, 0, 1);
     */
    applyForce(x: number, y: number, z: number, px?: number, py?: number, pz?: number): void;
    /**
     * Apply a force to the body at a point. By default, the force is applied at the origin of the
     * body. However, the force can be applied at an offset from this point by specifying a world
     * space vector from the body's origin to the point of application.
     *
     * @overload
     * @param {Vec3} force - Vector representing the force in world space.
     * @param {Vec3} [relativePoint] - Optional vector representing the relative point at which to
     * apply the force in world space.
     * @returns {void}
     * @example
     * // Calculate a force vector pointing in the world space direction of the entity
     * const force = this.entity.forward.clone().mulScalar(100);
     *
     * // Apply the force at the body's center
     * this.entity.rigidbody.applyForce(force);
     * @example
     * // Apply a force at some relative offset from the body's center
     * // Calculate a force vector pointing in the world space direction of the entity
     * const force = this.entity.forward.clone().mulScalar(100);
     *
     * // Calculate the world space relative offset
     * const relativePoint = new pc.Vec3();
     * const childEntity = this.entity.findByName('Engine');
     * relativePoint.sub2(childEntity.getPosition(), this.entity.getPosition());
     *
     * // Apply the force
     * this.entity.rigidbody.applyForce(force, relativePoint);
     */
    applyForce(force: Vec3, relativePoint?: Vec3): void;
    /**
     * Apply torque (rotational force) to the body.
     *
     * @overload
     * @param {number} x - The x-component of the torque force in world space.
     * @param {number} y - The y-component of the torque force in world space.
     * @param {number} z - The z-component of the torque force in world space.
     * @returns {void}
     * @example
     * entity.rigidbody.applyTorque(0, 10, 0);
     */
    applyTorque(x: number, y: number, z: number): void;
    /**
     * Apply torque (rotational force) to the body.
     *
     * @overload
     * @param {Vec3} torque - Vector representing the torque force in world space.
     * @returns {void}
     * @example
     * const torque = new pc.Vec3(0, 10, 0);
     * entity.rigidbody.applyTorque(torque);
     */
    applyTorque(torque: Vec3): void;
    /**
     * Apply an impulse (instantaneous change of velocity) to the body at a point.
     *
     * @overload
     * @param {number} x - X-component of the impulse in world space.
     * @param {number} y - Y-component of the impulse in world space.
     * @param {number} z - Z-component of the impulse in world space.
     * @param {number} [px] - X-component of the point at which to apply the impulse in the local
     * space of the entity.
     * @param {number} [py] - Y-component of the point at which to apply the impulse in the local
     * space of the entity.
     * @param {number} [pz] - Z-component of the point at which to apply the impulse in the local
     * space of the entity.
     * @returns {void}
     * @example
     * // Apply an impulse along the world space positive y-axis at the entity's position.
     * entity.rigidbody.applyImpulse(0, 10, 0);
     * @example
     * // Apply an impulse along the world space positive y-axis at 1 unit down the positive
     * // z-axis of the entity's local space.
     * entity.rigidbody.applyImpulse(0, 10, 0, 0, 0, 1);
     */
    applyImpulse(x: number, y: number, z: number, px?: number, py?: number, pz?: number): void;
    /**
     * Apply an impulse (instantaneous change of velocity) to the body at a point.
     *
     * @overload
     * @param {Vec3} impulse - Vector representing the impulse in world space.
     * @param {Vec3} [relativePoint] - Optional vector representing the relative point at which to
     * apply the impulse in the local space of the entity.
     * @returns {void}
     * @example
     * // Apply an impulse along the world space positive y-axis at the entity's position.
     * const impulse = new pc.Vec3(0, 10, 0);
     * entity.rigidbody.applyImpulse(impulse);
     * @example
     * // Apply an impulse along the world space positive y-axis at 1 unit down the positive
     * // z-axis of the entity's local space.
     * const impulse = new pc.Vec3(0, 10, 0);
     * const relativePoint = new pc.Vec3(0, 0, 1);
     * entity.rigidbody.applyImpulse(impulse, relativePoint);
     */
    applyImpulse(impulse: Vec3, relativePoint?: Vec3): void;
    /**
     * Apply a torque impulse (rotational force applied instantaneously) to the body.
     *
     * @overload
     * @param {number} x - X-component of the torque impulse in world space.
     * @param {number} y - Y-component of the torque impulse in world space.
     * @param {number} z - Z-component of the torque impulse in world space.
     * @returns {void}
     * @example
     * entity.rigidbody.applyTorqueImpulse(0, 10, 0);
     */
    applyTorqueImpulse(x: number, y: number, z: number): void;
    /**
     * Apply a torque impulse (rotational force applied instantaneously) to the body.
     *
     * @overload
     * @param {Vec3} torque - Vector representing the torque impulse in world space.
     * @returns {void}
     * @example
     * const torque = new pc.Vec3(0, 10, 0);
     * entity.rigidbody.applyTorqueImpulse(torque);
     */
    applyTorqueImpulse(torque: Vec3): void;
    /**
     * Returns true if the rigid body is of type {@link BODYTYPE_STATIC}.
     *
     * @returns {boolean} True if static.
     */
    isStatic(): boolean;
    /**
     * Returns true if the rigid body is of type {@link BODYTYPE_STATIC} or {@link BODYTYPE_KINEMATIC}.
     *
     * @returns {boolean} True if static or kinematic.
     */
    isStaticOrKinematic(): boolean;
    /**
     * Returns true if the rigid body is of type {@link BODYTYPE_KINEMATIC}.
     *
     * @returns {boolean} True if kinematic.
     */
    isKinematic(): boolean;
    /**
     * Writes an entity transform into an Ammo.btTransform but ignoring scale.
     *
     * @param {object} transform - The ammo transform to write the entity transform to.
     * @private
     */
    private _getEntityTransform;
    /**
     * Set the rigid body transform to be the same as the Entity transform. This must be called
     * after any Entity transformation functions (e.g. {@link Entity#setPosition}) are called in
     * order to update the rigid body to match the Entity.
     *
     * @private
     */
    private syncEntityToBody;
    /**
     * Sets an entity's transform to match that of the world transformation matrix of a dynamic
     * rigid body's motion state.
     *
     * @private
     */
    private _updateDynamic;
    /**
     * Writes the entity's world transformation matrix into the motion state of a kinematic body.
     *
     * @private
     */
    private _updateKinematic;
    /**
     * Teleport an entity to a new world space position, optionally setting orientation. This
     * function should only be called for rigid bodies that are dynamic.
     *
     * @overload
     * @param {number} x - X-coordinate of the new world space position.
     * @param {number} y - Y-coordinate of the new world space position.
     * @param {number} z - Z-coordinate of the new world space position.
     * @param {number} [rx] - X-rotation of the world space Euler angles in degrees.
     * @param {number} [ry] - Y-rotation of the world space Euler angles in degrees.
     * @param {number} [rz] - Z-rotation of the world space Euler angles in degrees.
     * @returns {void}
     * @example
     * // Teleport the entity to the origin
     * entity.rigidbody.teleport(0, 0, 0);
     * @example
     * // Teleport the entity to world space coordinate [1, 2, 3] and reset orientation
     * entity.rigidbody.teleport(1, 2, 3, 0, 0, 0);
     */
    teleport(x: number, y: number, z: number, rx?: number, ry?: number, rz?: number): void;
    /**
     * Teleport an entity to a new world space position, optionally setting orientation. This
     * function should only be called for rigid bodies that are dynamic.
     *
     * @overload
     * @param {Vec3} position - Vector holding the new world space position.
     * @param {Vec3} [angles] - Vector holding the new world space Euler angles in degrees.
     * @returns {void}
     * @example
     * // Teleport the entity to the origin
     * entity.rigidbody.teleport(pc.Vec3.ZERO);
     * @example
     * // Teleport the entity to world space coordinate [1, 2, 3] and reset orientation
     * const position = new pc.Vec3(1, 2, 3);
     * entity.rigidbody.teleport(position, pc.Vec3.ZERO);
     */
    teleport(position: Vec3, angles?: Vec3): void;
    /**
     * Teleport an entity to a new world space position, optionally setting orientation. This
     * function should only be called for rigid bodies that are dynamic.
     *
     * @overload
     * @param {Vec3} position - Vector holding the new world space position.
     * @param {Quat} [rotation] - Quaternion holding the new world space rotation.
     * @returns {void}
     * @example
     * // Teleport the entity to the origin
     * entity.rigidbody.teleport(pc.Vec3.ZERO);
     * @example
     * // Teleport the entity to world space coordinate [1, 2, 3] and reset orientation
     * const position = new pc.Vec3(1, 2, 3);
     * entity.rigidbody.teleport(position, pc.Quat.IDENTITY);
     */
    teleport(position: Vec3, rotation?: Quat): void;
}

/**
 * Data container class for the {@link ScreenComponent}.
 */
declare class ScreenComponentData {
    /** Whether the component is enabled. */
    enabled: boolean;
}

/**
 * An ordered list-type data structure that can provide item look up by key and can also return a list.
 *
 * @ignore
 */
declare class IndexedList {
    /**
     * @type {object[]}
     * @private
     */
    private _list;
    /**
     * @type {Object<string, number>}
     * @private
     */
    private _index;
    /**
     * Add a new item into the list with an index key.
     *
     * @param {string} key - Key used to look up item in index.
     * @param {object} item - Item to be stored.
     */
    push(key: string, item: object): void;
    /**
     * Test whether a key has been added to the index.
     *
     * @param {string} key - The key to test.
     * @returns {boolean} Returns true if key is in the index, false if not.
     */
    has(key: string): boolean;
    /**
     * Return the item indexed by a key.
     *
     * @param {string} key - The key of the item to retrieve.
     * @returns {object|null} The item stored at key. Returns null if key is not in the index.
     */
    get(key: string): object | null;
    /**
     * Remove the item indexed by key from the list.
     *
     * @param {string} key - The key at which to remove the item.
     * @returns {boolean} Returns true if the key exists and an item was removed, returns false if
     * no item was removed.
     */
    remove(key: string): boolean;
    /**
     * Returns the list of items.
     *
     * @returns {object[]} The list of items.
     */
    list(): object[];
    /**
     * Remove all items from the list.
     */
    clear(): void;
}

/**
 * Manages creation of {@link ScreenComponent}s.
 *
 * @category User Interface
 */
declare class ScreenComponentSystem extends ComponentSystem {
    /** The component system identifier. */
    id: string;
    /** The component class managed by this system. */
    ComponentType: typeof ScreenComponent;
    /** The data class associated with this system. */
    DataType: typeof ScreenComponentData;
    /** Names of the component properties handled by this system. */
    schema: string[];
    /** The current window resolution. */
    windowResolution: Vec2;
    /**
     * Pending draw-order sync operations, keyed by id (see {@link ScreenComponentSystem#queueDrawOrderSync}).
     *
     * @ignore
     */
    _drawOrderSyncQueue: IndexedList;
    initializeComponentData(component: any, data: any, properties: any): void;
    _updateDescendantElements(entity: any, screenEntity: any): void;
    _onUpdate(dt: any): void;
    _onResize(width: any, height: any): void;
    cloneComponent(entity: any, clone: any): Component;
    onRemoveComponent(entity: any, component: any): void;
    /**
     * Processes the queued draw-order sync operations.
     *
     * @ignore
     */
    processDrawOrderSyncQueue(): void;
    /**
     * Queue a draw-order sync operation. NOTE(review): the id key presumably coalesces repeated
     * requests for the same screen via the IndexedList — confirm in the implementation.
     *
     * @param {number|string} id - Key used to index the queued operation.
     * @param {Function} fn - The callback to invoke when the queue is processed.
     * @param {object} scope - The scope (`this`) for the callback.
     * @ignore
     */
    queueDrawOrderSync(id: any, fn: any, scope: any): void;
}

/**
 * A ScreenComponent defines a rectangular area where user interfaces can be constructed. Screens
 * can either be 2D (screen space) or 3D (world space) - see {@link screenSpace}. It is possible to
 * create an {@link Entity} hierarchy underneath an Entity with a ScreenComponent to create complex
 * user interfaces using the following components:
 *
 * - {@link ButtonComponent}
 * - {@link ElementComponent}
 * - {@link LayoutChildComponent}
 * - {@link LayoutGroupComponent}
 * - {@link ScrollbarComponent}
 * - {@link ScrollViewComponent}
 *
 * You should never need to use the ScreenComponent constructor directly. To add a ScreenComponent
 * to an {@link Entity}, use {@link Entity#addComponent}:
 *
 * ```javascript
 * const entity = new pc.Entity();
 * entity.addComponent('screen', {
 *     referenceResolution: new pc.Vec2(1280, 720),
 *     screenSpace: false
 * });
 * ```
 *
 * Once the ScreenComponent is added to the entity, you can access it via the {@link Entity#screen}
 * property:
 *
 * ```javascript
 * entity.screen.scaleBlend = 0.6; // Set the screen's scale blend to 0.6
 *
 * console.log(entity.screen.scaleBlend); // Get the screen's scale blend and print it
 * ```
 *
 * Relevant Engine API examples:
 *
 * - [Screen Space Screen](https://playcanvas.github.io/#/user-interface/text)
 * - [World Space Screen](https://playcanvas.github.io/#/user-interface/world-ui)
 *
 * @hideconstructor
 * @category User Interface
 */
declare class ScreenComponent extends Component {
    /**
     * Create a new ScreenComponent.
     *
     * @param {ScreenComponentSystem} system - The ComponentSystem that created this Component.
     * @param {Entity} entity - The Entity that this Component is attached to.
     */
    constructor(system: ScreenComponentSystem, entity: Entity);
    /** Backing state for the {@link resolution} accessor. @ignore */
    _resolution: Vec2;
    /** Backing state for the {@link referenceResolution} accessor. @ignore */
    _referenceResolution: Vec2;
    /** Backing state for the {@link scaleMode} accessor. @ignore */
    _scaleMode: string;
    /**
     * The current scale factor of the screen. NOTE(review): presumably computed by _updateScale /
     * _calcScale from the resolution and reference resolution — confirm.
     */
    scale: number;
    /** Backing state for the {@link scaleBlend} accessor. @ignore */
    _scaleBlend: number;
    /** Backing state for the {@link priority} accessor. @ignore */
    _priority: number;
    /** Backing state for the {@link screenSpace} accessor. @ignore */
    _screenSpace: boolean;
    /**
     * If true, then elements inside this screen will not be rendered when outside of the
     * screen (only valid when screenSpace is true).
     *
     * @type {boolean}
     */
    cull: boolean;
    /** The screen's projection matrix. @ignore */
    _screenMatrix: Mat4;
    /** The set of elements bound to this screen (see _bindElement/_unbindElement). @ignore */
    _elements: Set<any>;
    /**
     * Set the drawOrder of each child {@link ElementComponent} so that ElementComponents which are
     * last in the hierarchy are rendered on top. Draw Order sync is queued and will be updated by
     * the next update loop.
     */
    syncDrawOrder(): void;
    _recurseDrawOrderSync(e: any, i: any): any;
    _processDrawOrderSync(): void;
    _calcProjectionMatrix(): void;
    _updateScale(): void;
    _calcScale(resolution: any, referenceResolution: any): number;
    _onResize(width: any, height: any): void;
    /**
     * Sets the width and height of the ScreenComponent. When {@link screenSpace} is true, the
     * resolution will always be equal to {@link GraphicsDevice#width} by
     * {@link GraphicsDevice#height}.
     *
     * @type {Vec2}
     */
    set resolution(value: Vec2);
    /**
     * Gets the width and height of the ScreenComponent.
     *
     * @type {Vec2}
     */
    get resolution(): Vec2;
    _bindElement(element: any): void;
    _unbindElement(element: any): void;
    onRemove(): void;
    /**
     * Sets the resolution that the ScreenComponent is designed for. This is only taken into
     * account when {@link screenSpace} is true and {@link scaleMode} is {@link SCALEMODE_BLEND}.
     * If the actual resolution is different, then the ScreenComponent will be scaled according to
     * the {@link scaleBlend} value.
     *
     * @type {Vec2}
     */
    set referenceResolution(value: Vec2);
    /**
     * Gets the resolution that the ScreenComponent is designed for.
     *
     * @type {Vec2}
     */
    get referenceResolution(): Vec2;
    /**
     * Sets whether the ScreenComponent will render its child {@link ElementComponent}s in screen
     * space instead of world space. Enable this to create 2D user interfaces. Defaults to false.
     *
     * @type {boolean}
     */
    set screenSpace(value: boolean);
    /**
     * Gets whether the ScreenComponent will render its child {@link ElementComponent}s in screen
     * space instead of world space.
     *
     * @type {boolean}
     */
    get screenSpace(): boolean;
    /**
     * Sets the scale mode. Can either be {@link SCALEMODE_NONE} or {@link SCALEMODE_BLEND}. See
     * the description of {@link referenceResolution} for more information. Defaults to
     * {@link SCALEMODE_NONE}.
     *
     * @type {string}
     */
    set scaleMode(value: string);
    /**
     * Gets the scale mode.
     *
     * @type {string}
     */
    get scaleMode(): string;
    /**
     * Sets the scale blend. This is a value between 0 and 1 that is used when {@link scaleMode} is
     * equal to {@link SCALEMODE_BLEND}. Scales the ScreenComponent with width as a reference (when
     * value is 0), the height as a reference (when value is 1) or anything in between. Defaults to
     * 0.5.
     *
     * @type {number}
     */
    set scaleBlend(value: number);
    /**
     * Gets the scale blend.
     *
     * @type {number}
     */
    get scaleBlend(): number;
    /**
     * Sets the screen's render priority. Priority determines the order in which ScreenComponents
     * in the same layer are rendered. Number must be an integer between 0 and 127. Priority is set
     * into the top 8 bits of the {@link ElementComponent#drawOrder} property. Defaults to 0.
     *
     * @type {number}
     */
    set priority(value: number);
    /**
     * Gets the screen's render priority.
     *
     * @type {number}
     */
    get priority(): number;
}

/**
 * Helper class used to hold an array of items in a specific order. This array is safe to modify
 * while we loop through it. The class assumes that it holds objects that need to be sorted based
 * on one of their fields.
 *
 * @ignore
 */
declare class SortedLoopArray {
    /**
     * Create a new SortedLoopArray instance.
     *
     * @param {object} args - Arguments.
     * @param {string} args.sortBy - The name of the field that each element in the array is going
     * to be sorted by.
     * @example
     * const array = new pc.SortedLoopArray({ sortBy: 'priority' });
     * array.insert(item); // adds item to the right slot based on item.priority
     * array.append(item); // adds item to the end of the array
     * array.remove(item); // removes item from array
     * for (array.loopIndex = 0; array.loopIndex < array.length; array.loopIndex++) {
     *   // do things with array elements
     *   // safe to remove and add elements into the array while looping
     * }
     */
    constructor(args: {
        sortBy: string;
    });
    /**
     * The internal array that holds the actual array elements.
     *
     * @type {object[]}
     */
    items: object[];
    /**
     * The number of elements in the array.
     *
     * @type {number}
     */
    length: number;
    /**
     * The current index used to loop through the array. This gets modified if we add or remove
     * elements from the array while looping. See the example to see how to loop through this
     * array.
     *
     * @type {number}
     */
    loopIndex: number;
    /** @private */
    private _sortBy;
    /** @private */
    private _sortHandler;
    /**
     * Searches for the right spot to insert the specified item.
     *
     * @param {object} item - The item.
     * @returns {number} The index where to insert the item.
     * @private
     */
    private _binarySearch;
    /**
     * Comparator that orders two items by their `sortBy` field.
     *
     * @param {object} a - The first item.
     * @param {object} b - The second item.
     * @returns {number} A negative, zero or positive number following the standard comparator
     * contract.
     * @ignore
     */
    _doSort(a: any, b: any): number;
    /**
     * Inserts the specified item into the array at the right index based on the 'sortBy' field
     * passed into the constructor. This also adjusts the loopIndex accordingly.
     *
     * @param {object} item - The item to insert.
     */
    insert(item: object): void;
    /**
     * Appends the specified item to the end of the array. Faster than insert() as it does not
     * binary search for the right index. This also adjusts the loopIndex accordingly.
     *
     * @param {object} item - The item to append.
     */
    append(item: object): void;
    /**
     * Removes the specified item from the array.
     *
     * @param {object} item - The item to remove.
     */
    remove(item: object): void;
    /**
     * Sorts elements in the array based on the 'sortBy' field passed into the constructor. This
     * also updates the loopIndex if we are currently looping.
     *
     * WARNING: Be careful if you are sorting while iterating because if after sorting the array
     * element that you are currently processing is moved behind other elements then you might end
     * up iterating over elements more than once!
     */
    sort(): void;
}

/**
 * @import { AppBase } from '../app-base.js'
 * @import { Entity } from '../entity.js'
 */
/**
 * The `Script` class is the fundamental base class for all scripts within PlayCanvas. It provides
 * the minimal interface required for a script to be compatible with both the Engine and the
 * Editor.
 *
 * At its core, a script is simply a collection of methods that are called at various points in the
 * Engine's lifecycle. These methods are:
 *
 * - `Script#initialize` - Called once when the script is initialized.
 * - `Script#postInitialize` - Called once after all scripts have been initialized.
 * - `Script#update` - Called every frame, if the script is enabled.
 * - `Script#postUpdate` - Called every frame, after all scripts have been updated.
 * - `Script#swap` - Called when a script is redefined.
 *
 * These methods are entirely optional, but provide a useful way to manage the lifecycle of a
 * script and perform any necessary setup and cleanup.
 *
 * Below is a simple example of a script that rotates an entity every frame.
 * @example
 * ```javascript
 * import { Script } from 'playcanvas';
 *
 * export class Rotator extends Script {
 *     static scriptName = 'rotator';
 *
 *     update(dt) {
 *         this.entity.rotateLocal(0, 1, 0);
 *     }
 * }
 * ```
 *
 * When this script is attached to an entity, the update will be called every frame, slowly
 * rotating the entity around the Y-axis.
 *
 * For more information on how to create scripts, see the [Scripting Overview](https://developer.playcanvas.com/user-manual/scripting/).
 *
 * @category Script
 */
declare class Script extends EventHandler {
    /**
     * Fired when a script instance becomes enabled.
     *
     * @event
     * @example
     * export class PlayerController extends Script {
     *     static scriptName = 'playerController';
     *     initialize() {
     *         this.on('enable', () => {
     *             // Script Instance is now enabled
     *         });
     *     }
     * };
     */
    static EVENT_ENABLE: string;
    /**
     * Fired when a script instance becomes disabled.
     *
     * @event
     * @example
     * export class PlayerController extends Script {
     *     static scriptName = 'playerController';
     *     initialize() {
     *         this.on('disable', () => {
     *             // Script Instance is now disabled
     *         });
     *     }
     * };
     */
    static EVENT_DISABLE: string;
    /**
     * Fired when a script instance changes state to enabled or disabled. The handler is passed a
     * boolean parameter that states whether the script instance is now enabled or disabled.
     *
     * @event
     * @example
     * export class PlayerController extends Script {
     *     static scriptName = 'playerController';
     *     initialize() {
     *         this.on('state', (enabled) => {
     *             console.log(`Script Instance is now ${enabled ? 'enabled' : 'disabled'}`);
     *         });
     *     }
     * };
     */
    static EVENT_STATE: string;
    /**
     * Fired when a script instance is destroyed and removed from component.
     *
     * @event
     * @example
     * export class PlayerController extends Script {
     *     static scriptName = 'playerController';
     *     initialize() {
     *         this.on('destroy', () => {
     *             // no longer part of the entity
     *             // this is a good place to clean up allocated resources used by the script
     *         });
     *     }
     * };
     */
    static EVENT_DESTROY: string;
    /**
     * Fired when script attributes have changed. This event is available in two forms. They are as
     * follows:
     *
     * 1. `attr` - Fired for any attribute change. The handler is passed the name of the attribute
     * that changed, the value of the attribute before the change and the value of the attribute
     * after the change.
     * 2. `attr:[name]` - Fired for a specific attribute change. The handler is passed the value of
     * the attribute before the change and the value of the attribute after the change.
     *
     * @event
     * @example
     * export class PlayerController extends Script {
     *     static scriptName = 'playerController';
     *     initialize() {
     *         this.on('attr', (name, newValue, oldValue) => {
     *             console.log(`Attribute '${name}' changed from '${oldValue}' to '${newValue}'`);
     *         });
     *     }
     * };
     * @example
     * export class PlayerController extends Script {
     *     static scriptName = 'playerController';
     *     initialize() {
     *         this.on('attr:speed', (newValue, oldValue) => {
     *             console.log(`Attribute 'speed' changed from '${oldValue}' to '${newValue}'`);
     *         });
     *     }
     * };
     */
    static EVENT_ATTR: string;
    /**
     * Fired when a script instance had an exception. The script instance will be automatically
     * disabled. The handler is passed an Error object containing the details of the
     * exception and the name of the method that threw the exception.
     *
     * @event
     * @example
     * export class PlayerController extends Script {
     *     static scriptName = 'playerController';
     *     initialize() {
     *         this.on('error', (err, method) => {
     *             // caught an exception
     *             console.log(err.stack);
     *         });
     *     }
     * };
     */
    static EVENT_ERROR: string;
    /**
     * Backing store for the {@link Script.scriptName} accessor.
     *
     * @type {string|null}
     * @private
     */
    private static __name;
    /**
     * Derives the registered script name for a script type. Presumably falls back to deriving a
     * name from the constructor when {@link Script.scriptName} was not set — TODO confirm against
     * the implementation.
     *
     * @param {*} constructorFn - The constructor function of the script type.
     * @returns {string} The script name.
     * @private
     */
    private static __getScriptName;
    /**
     * Sets the unique name of the script.
     *
     * @type {string|null}
     */
    static set scriptName(value: string | null);
    /**
     * Gets the unique name of the script.
     *
     * @type {string|null}
     */
    static get scriptName(): string | null;
    /**
     * Create a new Script instance.
     *
     * @param {object} args - The input arguments object.
     * @param {AppBase} args.app - The {@link AppBase} that is running the script.
     * @param {Entity} args.entity - The {@link Entity} that the script is attached to.
     */
    constructor(args: {
        app: AppBase;
        entity: Entity;
    });
    /**
     * The {@link AppBase} that the instance of this script belongs to.
     *
     * @type {AppBase}
     */
    app: AppBase;
    /**
     * The {@link Entity} that the instance of this script belongs to.
     *
     * @type {Entity}
     */
    entity: Entity;
    /**
     * Backing flag for the {@link Script#enabled} accessor.
     *
     * @private
     */
    private _enabled;
    /**
     * Enabled state from a previous evaluation — presumably used to detect enable/disable
     * transitions; TODO confirm against the implementation.
     *
     * @private
     */
    private _enabledOld;
    /**
     * NOTE(review): presumably true once `initialize` has run — confirm against implementation.
     *
     * @private
     */
    private _initialized;
    /**
     * NOTE(review): presumably true once `postInitialize` has run — confirm against implementation.
     *
     * @private
     */
    private _postInitialized;
    /**
     * True once the script instance has been destroyed and removed from its component.
     *
     * @private
     */
    private __destroyed;
    /**
     * Reference to the script type this instance was created from — TODO confirm semantics.
     *
     * @private
     */
    private __scriptType;
    /**
     * The order in the script component that the methods of this script instance will run
     * relative to other script instances in the component.
     *
     * @type {number}
     * @private
     */
    private __executionOrder;
    /**
     * True if the instance of this script is in running state. False when script is not running,
     * because the Entity or any of its parents are disabled or the {@link ScriptComponent} is
     * disabled or the Script Instance is disabled. When disabled, no update methods will be called
     * on each tick. `initialize` and `postInitialize` methods will run once when the script
     * instance is in the `enabled` state during an app tick.
     *
     * @type {boolean}
     */
    set enabled(value: boolean);
    get enabled(): boolean;
    /**
     * @typedef {object} ScriptInitializationArgs
     * @property {boolean} [enabled] - True if the script instance is in running state.
     * @property {AppBase} app - The {@link AppBase} that is running the script.
     * @property {Entity} entity - The {@link Entity} that the script is attached to.
     */
    /**
     * Initializes the core state of the script instance (app, entity and enabled state).
     *
     * @param {ScriptInitializationArgs} args - The input arguments object.
     * @protected
     */
    protected initScript(args: {
        /**
         * - True if the script instance is in running state.
         */
        enabled?: boolean;
        /**
         * - The {@link AppBase} that is running the script.
         */
        app: AppBase;
        /**
         * - The {@link Entity} that the script is attached to.
         */
        entity: Entity;
    }): void;
}

/**
 * A WebGL implementation of the Buffer.
 *
 * @ignore
 */
declare class WebglBuffer {
    /**
     * The underlying WebGL buffer object — presumably a `WebGLBuffer` once allocated; TODO
     * confirm against the implementation.
     */
    bufferId: any;
    /**
     * Frees the GPU resources associated with this buffer.
     *
     * @param {*} device - The graphics device that owns the buffer.
     */
    destroy(device: any): void;
    /**
     * Gets whether the underlying WebGL buffer has been created.
     */
    get initialized(): boolean;
    /**
     * Called when the WebGL context is lost, so GPU object references can be dropped.
     */
    loseContext(): void;
    /**
     * Uploads buffer data to the GPU — presumably creating the WebGL buffer on first use; TODO
     * confirm against the implementation.
     *
     * @param {*} device - The graphics device.
     * @param {*} usage - The buffer usage hint.
     * @param {*} target - The WebGL buffer binding target.
     * @param {*} storage - The source data to upload.
     */
    unlock(device: any, usage: any, target: any, storage: any): void;
}

/**
 * A WebGL implementation of the VertexBuffer.
 *
 * @ignore
 */
declare class WebglVertexBuffer extends WebglBuffer {
    /**
     * Vertex array object associated with this buffer — presumably a cached `WebGLVertexArrayObject`;
     * TODO confirm against the implementation.
     */
    vao: any;
    /**
     * Uploads the vertex buffer's data to the GPU.
     *
     * @param {*} vertexBuffer - The vertex buffer providing the data to upload.
     */
    unlock(vertexBuffer: any): void;
}

/**
 * A WebGL implementation of the IndexBuffer.
 *
 * @ignore
 */
declare class WebglIndexBuffer extends WebglBuffer {
    /**
     * @param {*} indexBuffer - The index buffer this implementation belongs to.
     */
    constructor(indexBuffer: any);
    /**
     * WebGL element type for the indices — presumably one of `gl.UNSIGNED_BYTE`,
     * `gl.UNSIGNED_SHORT` or `gl.UNSIGNED_INT`; TODO confirm against the implementation.
     */
    glFormat: any;
    /**
     * Uploads the index buffer's data to the GPU.
     *
     * @param {*} indexBuffer - The index buffer providing the data to upload.
     */
    unlock(indexBuffer: any): void;
}

/**
 * A WebGL implementation of the Shader.
 *
 * @ignore
 */
declare class WebglShader {
    /**
     * @param {*} shader - The shader this WebGL implementation belongs to.
     */
    constructor(shader: any);
    /**
     * Duration of the shader compilation — presumably in milliseconds; TODO confirm units against
     * the implementation.
     */
    compileDuration: number;
    /**
     * Free the WebGL resources associated with a shader.
     *
     * @param {Shader} shader - The shader to free.
     */
    destroy(shader: Shader): void;
    /** The linked WebGL program object. */
    glProgram: WebGLProgram;
    /** Resets the per-shader state of this implementation. */
    init(): void;
    /** Uniform information extracted from the linked program — TODO confirm element shape. */
    uniforms: any[];
    /** Sampler uniform information extracted from the linked program — TODO confirm element shape. */
    samplers: any[];
    /** Vertex attribute information extracted from the linked program — TODO confirm element shape. */
    attributes: any[];
    /** The compiled WebGL vertex shader object. */
    glVertexShader: WebGLShader;
    /** The compiled WebGL fragment shader object. */
    glFragmentShader: WebGLShader;
    /**
     * Dispose the shader when the context has been lost.
     */
    loseContext(): void;
    /**
     * Restore shader after the context has been obtained.
     *
     * @param {WebglGraphicsDevice} device - The graphics device.
     * @param {Shader} shader - The shader to restore.
     */
    restoreContext(device: WebglGraphicsDevice, shader: Shader): void;
    /**
     * Compile shader programs.
     *
     * @param {WebglGraphicsDevice} device - The graphics device.
     * @param {Shader} shader - The shader to compile.
     */
    compile(device: WebglGraphicsDevice, shader: Shader): void;
    /**
     * Link shader programs. This is called at a later stage, to allow many shaders to compile in parallel.
     *
     * @param {WebglGraphicsDevice} device - The graphics device.
     * @param {Shader} shader - The shader to link.
     */
    link(device: WebglGraphicsDevice, shader: Shader): void;
    /**
     * Compiles an individual shader.
     *
     * @param {WebglGraphicsDevice} device - The graphics device.
     * @param {string} src - The shader source code.
     * @param {boolean} isVertexShader - True if the shader is a vertex shader, false if it is a
     * fragment shader.
     * @returns {WebGLShader|null} The compiled shader, or null if the device is lost.
     * @private
     */
    private _compileShaderSource;
    /**
     * Link the shader, and extract its attributes and uniform information.
     *
     * @param {WebglGraphicsDevice} device - The graphics device.
     * @param {Shader} shader - The shader to query.
     * @returns {boolean} True if the shader was successfully queried and false otherwise.
     */
    finalize(device: WebglGraphicsDevice, shader: Shader): boolean;
    /**
     * Check the compilation status of a shader.
     *
     * @param {WebglGraphicsDevice} device - The graphics device.
     * @param {Shader} shader - The shader to query.
     * @param {WebGLShader} glShader - The WebGL shader.
     * @param {string} source - The shader source code.
     * @param {string} shaderType - The shader type. Can be 'vertex' or 'fragment'.
     * @returns {boolean} True if the shader compiled successfully, false otherwise.
     * @private
     */
    private _isCompiled;
    /**
     * Check the linking status of a shader.
     *
     * @param {WebglGraphicsDevice} device - The graphics device.
     * @returns {boolean} True if the shader is already linked, false otherwise. Note that unless the
     * device supports the KHR_parallel_shader_compile extension, this will always return true.
     */
    isLinked(device: WebglGraphicsDevice): boolean;
    /**
     * Truncate the WebGL shader compilation log to just include the error line plus the 5 lines
     * before and after it.
     *
     * @param {string} src - The shader source code.
     * @param {string} infoLog - The info log returned from WebGL on a failed shader compilation.
     * @returns {Array} An array where the first element is the code surrounding the first
     * detected error (the error line plus its neighboring lines), and the second element an
     * object storing the error message, line number and complete shader source.
     * @private
     */
    private _processError;
}

/**
 * WebGL implementation of DrawCommands.
 *
 * @ignore
 */
declare class WebglDrawCommands {
    /**
     * @param {number} indexSizeBytes - Size of index in bytes (1, 2 or 4). 0 for non-indexed.
     */
    constructor(indexSizeBytes: number);
    /**
     * Size of an index in bytes (1, 2 or 4), or 0 for non-indexed draws.
     *
     * @type {number}
     */
    indexSizeBytes: number;
    /**
     * Per-draw index/vertex counts (structure-of-arrays layout), allocated by {@link allocate}.
     *
     * @type {Int32Array|null}
     */
    glCounts: Int32Array | null;
    /**
     * Per-draw first index/vertex offsets in bytes, allocated by {@link allocate}.
     *
     * @type {Int32Array|null}
     */
    glOffsetsBytes: Int32Array | null;
    /**
     * Per-draw instance counts, allocated by {@link allocate}.
     *
     * @type {Int32Array|null}
     */
    glInstanceCounts: Int32Array | null;
    /**
     * Allocate SoA arrays for multi-draw.
     * @param {number} maxCount - Number of sub-draws.
     */
    allocate(maxCount: number): void;
    /**
     * Write a single draw entry.
     * @param {number} i - Draw index.
     * @param {number} indexOrVertexCount - Count of indices/vertices.
     * @param {number} instanceCount - Instance count.
     * @param {number} firstIndexOrVertex - First index/vertex.
     */
    add(i: number, indexOrVertexCount: number, instanceCount: number, firstIndexOrVertex: number): void;
    /**
     * Calculate total primitives for stats (profiler builds only).
     * @param {number} count - Number of active draws.
     * @returns {number} Total primitive count.
     */
    update(count: number): number;
}

/**
 * A WebGL implementation of the Texture.
 *
 * @ignore
 */
declare class WebglTexture {
    /**
     * @param {*} texture - The texture this WebGL implementation belongs to.
     */
    constructor(texture: any);
    /** The underlying WebGL texture object — presumably a `WebGLTexture`; TODO confirm. */
    _glTexture: any;
    /** The WebGL texture binding target (e.g. TEXTURE_2D, TEXTURE_CUBE_MAP) — TODO confirm values. */
    _glTarget: any;
    /** The WebGL pixel format of the texture. */
    _glFormat: any;
    /** The WebGL internal (sized) format of the texture. */
    _glInternalFormat: any;
    /** The WebGL pixel data type of the texture. */
    _glPixelType: any;
    /** Tracks whether the GPU texture object has been created — TODO confirm semantics. */
    _glCreated: any;
    /** Bit flags of texture parameters that changed and need re-applying. */
    dirtyParameterFlags: number;
    /** @type {Texture} */
    texture: Texture;
    /**
     * Frees the GPU resources associated with this texture.
     *
     * @param {*} device - The graphics device that owns the texture.
     */
    destroy(device: any): void;
    /** Called when the WebGL context is lost, so GPU object references can be dropped. */
    loseContext(): void;
    /**
     * Records that a texture parameter changed — presumably sets the corresponding bit in
     * {@link dirtyParameterFlags}; TODO confirm.
     *
     * @param {*} flag - The parameter flag that changed.
     */
    propertyChanged(flag: any): void;
    /**
     * Creates the GPU resources for the texture.
     *
     * @param {*} device - The graphics device.
     * @param {*} texture - The texture to initialize.
     */
    initialize(device: any, texture: any): void;
    /**
     * @param {WebglGraphicsDevice} device - The device.
     * @param {Texture} texture - The texture to update.
     */
    upload(device: WebglGraphicsDevice, texture: Texture): void;
    /**
     * @param {WebglGraphicsDevice} device - The graphics device.
     * @param {Texture} texture - The texture.
     */
    uploadImmediate(device: WebglGraphicsDevice, texture: Texture): void;
    /**
     * Asynchronously reads back a rectangle of texture data.
     *
     * @param {*} x - The x-coordinate of the rectangle.
     * @param {*} y - The y-coordinate of the rectangle.
     * @param {*} width - The width of the rectangle.
     * @param {*} height - The height of the rectangle.
     * @param {*} options - Read options — TODO confirm supported fields.
     * @returns {Promise<any>} A promise resolving with the read data.
     */
    read(x: any, y: any, width: any, height: any, options: any): Promise<any>;
    /**
     * Writes data into a rectangle of the texture.
     *
     * @param {*} x - The x-coordinate of the rectangle.
     * @param {*} y - The y-coordinate of the rectangle.
     * @param {*} width - The width of the rectangle.
     * @param {*} height - The height of the rectangle.
     * @param {*} data - The source data to write.
     */
    write(x: any, y: any, width: any, height: any, data: any): any;
}

/**
 * A WebGL implementation of the RenderTarget.
 *
 * @ignore
 */
declare class WebglRenderTarget {
    /** The main WebGL framebuffer used for rendering. */
    _glFrameBuffer: any;
    /** The WebGL renderbuffer used as the depth(/stencil) attachment — TODO confirm. */
    _glDepthBuffer: any;
    /** Framebuffer used as the target when resolving MSAA — TODO confirm. */
    _glResolveFrameBuffer: any;
    /**
     * A list of framebuffers created when MSAA and MRT are used together, one for each color buffer.
     * This allows color buffers to be resolved separately.
     *
     * @type {FramebufferPair[]}
     */
    colorMrtFramebuffers: FramebufferPair[];
    /** Multi-sampled color renderbuffers, one per color attachment — TODO confirm. */
    _glMsaaColorBuffers: any[];
    /** Multi-sampled depth renderbuffer. */
    _glMsaaDepthBuffer: any;
    /**
     * Key used to store _glMsaaDepthBuffer in the cache.
     */
    msaaDepthBufferKey: any;
    /**
     * The supplied single-sampled framebuffer for rendering. Undefined represents no supplied
     * framebuffer. Null represents the default framebuffer. A value represents a user-supplied
     * framebuffer.
     */
    suppliedColorFramebuffer: any;
    /** Backing flag for the {@link initialized} accessor. */
    _isInitialized: boolean;
    /**
     * Frees the GPU resources associated with this render target.
     *
     * @param {*} device - The graphics device that owns the render target.
     */
    destroy(device: any): void;
    /** Gets whether the render target's GPU resources have been created. */
    get initialized(): boolean;
    /**
     * Creates the framebuffers and attachments for the render target.
     *
     * @param {*} device - The graphics device.
     * @param {*} target - The render target to initialize.
     */
    init(device: any, target: any): void;
    /**
     * Creates the per-color-buffer framebuffer pairs used when MSAA and MRT are combined.
     *
     * @param {*} device - The graphics device.
     * @param {*} target - The render target.
     * @param {*} colorBufferCount - The number of color buffers.
     */
    _createMsaaMrtFramebuffers(device: any, target: any, colorBufferCount: any): void;
    /**
     * Checks the completeness status of the currently bound WebGLFramebuffer object.
     *
     * @param {WebglGraphicsDevice} device - The graphics device.
     * @param {RenderTarget} target - The render target.
     * @param {string} [type] - An optional type string to append to the error message.
     * @private
     */
    private _checkFbo;
    /** Called when the WebGL context is lost, so GPU object references can be dropped. */
    loseContext(): void;
    /**
     * Blits from a source to a destination framebuffer — presumably via `gl.blitFramebuffer`;
     * TODO confirm.
     *
     * @param {*} device - The graphics device.
     * @param {*} src - The source framebuffer.
     * @param {*} dst - The destination framebuffer.
     * @param {*} target - The render target.
     * @param {*} mask - The WebGL buffer mask selecting which buffers to blit.
     */
    internalResolve(device: any, src: any, dst: any, target: any, mask: any): void;
    /**
     * Resolves the multi-sampled buffers into the single-sampled ones.
     *
     * @param {*} device - The graphics device.
     * @param {*} target - The render target.
     * @param {*} color - True to resolve the color buffer(s).
     * @param {*} depth - True to resolve the depth buffer.
     */
    resolve(device: any, target: any, color: any, depth: any): void;
}
/**
 * A private class representing a pair of framebuffers, when MSAA is used.
 *
 * @ignore
 */
declare class FramebufferPair {
    /**
     * @param {WebGLFramebuffer} msaaFB - Multi-sampled rendering framebuffer.
     * @param {WebGLFramebuffer} resolveFB - Single-sampled resolve framebuffer.
     */
    constructor(msaaFB: WebGLFramebuffer, resolveFB: WebGLFramebuffer);
    /**
     * Multi-sampled rendering framebuffer.
     *
     * @type {WebGLFramebuffer|null}
     */
    msaaFB: WebGLFramebuffer | null;
    /**
     * Single-sampled resolve framebuffer.
     *
     * @type {WebGLFramebuffer|null}
     */
    resolveFB: WebGLFramebuffer | null;
    /**
     * Destroys both framebuffers of the pair.
     *
     * @param {WebGLRenderingContext} gl - The WebGL rendering context.
     */
    destroy(gl: WebGLRenderingContext): void;
}

/**
 * @import { UploadStream } from '../upload-stream.js'
 * @import { Texture } from '../texture.js'
 */
/**
 * WebGL implementation of UploadStream.
 * Can use either simple direct texture uploads or optimized PBO strategy with orphaning.
 *
 * @ignore
 */
declare class WebglUploadStream {
    /**
     * @param {UploadStream} uploadStream - The upload stream.
     */
    constructor(uploadStream: UploadStream);
    /**
     * Available PBOs ready for immediate use.
     *
     * @type {Array<{pbo: WebGLBuffer, size: number}>}
     */
    availablePBOs: Array<{
        pbo: WebGLBuffer;
        size: number;
    }>;
    /**
     * PBOs currently in use by the GPU.
     *
     * @type {Array<{pbo: WebGLBuffer, size: number, sync: WebGLSync}>}
     */
    pendingPBOs: Array<{
        pbo: WebGLBuffer;
        size: number;
        sync: WebGLSync;
    }>;
    /** The upload stream this implementation belongs to. */
    uploadStream: UploadStream;
    /**
     * When true, the direct (non-PBO) upload path is used — TODO confirm exact semantics against
     * the implementation.
     */
    useSingleBuffer: boolean;
    /** Frees all GPU resources held by this upload stream. */
    destroy(): void;
    /**
     * Handles device lost event by clearing all PBO and sync object arrays.
     *
     * @protected
     */
    protected _onDeviceLost(): void;
    /**
     * Update PBOs: poll completed ones and remove undersized buffers.
     *
     * @param {number} minByteSize - Minimum size for buffers to keep. Smaller buffers are destroyed.
     */
    update(minByteSize: number): void;
    /**
     * Upload data to a texture using PBOs (optimized) or direct upload (simple).
     *
     * @param {Uint8Array|Uint32Array|Float32Array} data - The data to upload.
     * @param {Texture} target - The target texture.
     * @param {number} offset - The element offset in the target. Must be a multiple of texture width.
     * @param {number} size - The number of elements to upload. Must be a multiple of texture width.
     */
    upload(data: Uint8Array | Uint32Array | Float32Array, target: Texture, offset: number, size: number): void;
    /**
     * Direct texture upload (simple, blocking).
     *
     * @param {Uint8Array|Uint32Array|Float32Array} data - The data to upload.
     * @param {Texture} target - The target texture.
     * @param {number} offset - The element offset in the target.
     * @param {number} size - The number of elements to upload.
     * @private
     */
    private uploadDirect;
    /**
     * PBO-based upload with orphaning (optimized, potentially non-blocking).
     *
     * @param {Uint8Array|Uint32Array|Float32Array} data - The data to upload.
     * @param {import('../texture.js').Texture} target - The target texture.
     * @param {number} offset - The element offset in the target.
     * @param {number} size - The number of elements to upload.
     * @private
     */
    private uploadPBO;
}

/**
 * WebglGraphicsDevice extends the base {@link GraphicsDevice} to provide rendering capabilities
 * utilizing the WebGL 2.0 specification.
 *
 * @category Graphics
 */
declare class WebglGraphicsDevice extends GraphicsDevice {
    /**
     * Creates a new WebglGraphicsDevice instance.
     *
     * @param {HTMLCanvasElement} canvas - The canvas to which the graphics device will render.
     * @param {object} [options] - Options passed when creating the WebGL context.
     * @param {boolean} [options.alpha] - Boolean that indicates if the canvas contains an
     * alpha buffer. Defaults to true.
     * @param {boolean} [options.depth] - Boolean that indicates that the drawing buffer is
     * requested to have a depth buffer of at least 16 bits. Defaults to true.
     * @param {boolean} [options.stencil] - Boolean that indicates that the drawing buffer is
     * requested to have a stencil buffer of at least 8 bits. Defaults to true.
     * @param {boolean} [options.antialias] - Boolean that indicates whether or not to perform
     * anti-aliasing if possible. Defaults to true.
     * @param {boolean} [options.premultipliedAlpha] - Boolean that indicates that the page
     * compositor will assume the drawing buffer contains colors with pre-multiplied alpha.
     * Defaults to true.
     * @param {boolean} [options.preserveDrawingBuffer] - If the value is true the buffers will not
     * be cleared and will preserve their values until cleared or overwritten by the author.
     * Defaults to false.
     * @param {'default'|'high-performance'|'low-power'} [options.powerPreference] - A hint to the
     * user agent indicating what configuration of GPU is suitable for the WebGL context. Possible
     * values are:
     *
     * - 'default': Let the user agent decide which GPU configuration is most suitable. This is the
     * default value.
     * - 'high-performance': Prioritizes rendering performance over power consumption.
     * - 'low-power': Prioritizes power saving over rendering performance.
     *
     * Defaults to 'default'.
     * @param {boolean} [options.failIfMajorPerformanceCaveat] - Boolean that indicates if a
     * context will be created if the system performance is low or if no hardware GPU is available.
     * Defaults to false.
     * @param {boolean} [options.desynchronized] - Boolean that hints the user agent to reduce the
     * latency by desynchronizing the canvas paint cycle from the event loop. Defaults to false.
     * @param {boolean} [options.xrCompatible] - Boolean that hints to the user agent to use a
     * compatible graphics adapter for an immersive XR device.
     * @param {WebGL2RenderingContext} [options.gl] - The rendering context
     * to use. If not specified, a new context will be created.
     */
    constructor(canvas: HTMLCanvasElement, options?: {
        alpha?: boolean;
        depth?: boolean;
        stencil?: boolean;
        antialias?: boolean;
        premultipliedAlpha?: boolean;
        preserveDrawingBuffer?: boolean;
        powerPreference?: "default" | "high-performance" | "low-power";
        failIfMajorPerformanceCaveat?: boolean;
        desynchronized?: boolean;
        xrCompatible?: boolean;
        gl?: WebGL2RenderingContext;
    });
    /**
     * The WebGL2 context managed by the graphics device.
     *
     * @type {WebGL2RenderingContext}
     * @ignore
     */
    gl: WebGL2RenderingContext;
    /**
     * WebGLFramebuffer object that represents the backbuffer of the device for a rendering frame.
     * When null, this is a framebuffer created when the device was created, otherwise it is a
     * framebuffer supplied by the XR session.
     *
     * @ignore
     */
    _defaultFramebuffer: any;
    /**
     * True if the default framebuffer has changed since the last frame.
     *
     * @ignore
     */
    _defaultFramebufferChanged: boolean;
    _contextLostHandler: (event: any) => void;
    _contextRestoredHandler: () => void;
    forceDisableMultisampling: boolean;
    isWebGL2: boolean;
    _deviceType: string;
    _tempEnableSafariTextureUnitWorkaround: boolean;
    _tempMacChromeBlitFramebufferWorkaround: boolean;
    supportsImageBitmap: boolean;
    _samplerTypes: Set<35679 | 35682 | 36289 | 36293 | 36298 | 36299 | 36303 | 36306 | 36307 | 36311 | 35678 | 35680>;
    glAddress: (10497 | 33071 | 33648)[];
    glBlendEquation: (32775 | 32776 | 32774 | 32778 | 32779)[];
    glBlendFunctionColor: (0 | 1 | 768 | 769 | 770 | 771 | 772 | 773 | 774 | 775 | 776 | 32769 | 32770)[];
    glBlendFunctionAlpha: (0 | 1 | 768 | 769 | 770 | 771 | 772 | 773 | 774 | 775 | 776 | 32771 | 32772)[];
    glComparison: (512 | 513 | 514 | 515 | 516 | 517 | 518 | 519)[];
    glStencilOp: (0 | 7680 | 7681 | 7682 | 7683 | 5386 | 34055 | 34056)[];
    glClearFlag: number[];
    glCull: number[];
    glFrontFace: (2304 | 2305)[];
    glFilter: (9728 | 9729 | 9984 | 9985 | 9986 | 9987)[];
    glPrimitive: (0 | 2 | 1 | 3 | 4 | 5 | 6)[];
    glType: (5131 | 5120 | 5121 | 5122 | 5123 | 5124 | 5125 | 5126)[];
    pcUniformType: {};
    targetToSlot: {};
    commitFunction: {}[];
    constantTexSource: ScopeId;
    createBackbuffer(frameBuffer: any): void;
    updateBackbufferFormat(framebuffer: any): void;
    updateBackbuffer(): void;
    createVertexBufferImpl(vertexBuffer: any, format: any): WebglVertexBuffer;
    createIndexBufferImpl(indexBuffer: any): WebglIndexBuffer;
    createShaderImpl(shader: any): WebglShader;
    createDrawCommandImpl(drawCommands: any): WebglDrawCommands;
    createTextureImpl(texture: any): WebglTexture;
    createRenderTargetImpl(renderTarget: any): WebglRenderTarget;
    createUploadStreamImpl(uploadStream: any): WebglUploadStream;
    pushMarker(name: any): void;
    popMarker(): void;
    /**
     * Query the precision supported by ints and floats in vertex and fragment shaders. Note that
     * getShaderPrecisionFormat is not guaranteed to be present (such as some instances of the
     * default Android browser). In this case, assume highp is available.
     *
     * @returns {"highp"|"mediump"|"lowp"} The highest precision supported by the WebGL context.
     * @ignore
     */
    getPrecision(): "highp" | "mediump" | "lowp";
    getExtension(...args: any[]): ANGLE_instanced_arrays;
    get extDisjointTimerQuery(): ANGLE_instanced_arrays;
    _extDisjointTimerQuery: ANGLE_instanced_arrays;
    /**
     * Initialize the extensions provided by the WebGL context.
     *
     * @ignore
     */
    initializeExtensions(): void;
    supportedExtensions: string[];
    extColorBufferFloat: ANGLE_instanced_arrays;
    extColorBufferHalfFloat: ANGLE_instanced_arrays;
    extDebugRendererInfo: ANGLE_instanced_arrays;
    extTextureFloatLinear: ANGLE_instanced_arrays;
    extFloatBlend: ANGLE_instanced_arrays;
    extTextureFilterAnisotropic: ANGLE_instanced_arrays;
    extParallelShaderCompile: ANGLE_instanced_arrays;
    extMultiDraw: ANGLE_instanced_arrays;
    extCompressedTextureETC1: ANGLE_instanced_arrays;
    extCompressedTextureETC: ANGLE_instanced_arrays;
    extCompressedTexturePVRTC: ANGLE_instanced_arrays;
    extCompressedTextureS3TC: ANGLE_instanced_arrays;
    extCompressedTextureS3TC_SRGB: ANGLE_instanced_arrays;
    extCompressedTextureATC: ANGLE_instanced_arrays;
    extCompressedTextureASTC: ANGLE_instanced_arrays;
    extTextureCompressionBPTC: ANGLE_instanced_arrays;
    /**
     * Query the capabilities of the WebGL context.
     *
     * @ignore
     */
    initializeCapabilities(): void;
    maxPrecision: "highp" | "mediump" | "lowp";
    supportsMsaa: boolean;
    maxRenderBufferSize: any;
    maxTextures: any;
    maxCombinedTextures: any;
    maxVertexTextures: any;
    vertexUniformsCount: any;
    fragmentUniformsCount: any;
    unmaskedRenderer: any;
    unmaskedVendor: any;
    supportsGpuParticles: boolean;
    supportsAreaLights: boolean;
    cullFace: any;
    stencil: any;
    stencilFuncFront: any;
    stencilFuncBack: any;
    stencilRefFront: any;
    stencilRefBack: any;
    stencilMaskFront: any;
    stencilMaskBack: any;
    stencilFailFront: any;
    stencilFailBack: any;
    stencilZfailFront: any;
    stencilZfailBack: any;
    stencilZpassFront: any;
    stencilZpassBack: any;
    stencilWriteMaskFront: any;
    stencilWriteMaskBack: any;
    alphaToCoverage: any;
    raster: any;
    depthBiasEnabled: boolean;
    clearDepth: any;
    clearColor: Color;
    clearStencil: any;
    unpackFlipY: any;
    unpackPremultiplyAlpha: any;
    unpackAlignment: any;
    initTextureUnits(count?: number): void;
    textureUnits: any[];
    _vaoMap: Map<any, any>;
    boundVao: any;
    activeFramebuffer: any;
    feedback: WebGLTransformFeedback;
    transformFeedbackBuffer: any;
    textureUnit: any;
    /**
     * Set the active rectangle for rendering on the specified device.
     *
     * @param {number} x - The pixel space x-coordinate of the bottom left corner of the viewport.
     * @param {number} y - The pixel space y-coordinate of the bottom left corner of the viewport.
     * @param {number} w - The width of the viewport in pixels.
     * @param {number} h - The height of the viewport in pixels.
     */
    setViewport(x: number, y: number, w: number, h: number): void;
    /**
     * Set the active scissor rectangle on the specified device.
     *
     * @param {number} x - The pixel space x-coordinate of the bottom left corner of the scissor rectangle.
     * @param {number} y - The pixel space y-coordinate of the bottom left corner of the scissor rectangle.
     * @param {number} w - The width of the scissor rectangle in pixels.
     * @param {number} h - The height of the scissor rectangle in pixels.
     */
    setScissor(x: number, y: number, w: number, h: number): void;
    /**
     * Binds the specified framebuffer object.
     *
     * @param {WebGLFramebuffer | null} fb - The framebuffer to bind.
     * @ignore
     */
    setFramebuffer(fb: WebGLFramebuffer | null): void;
    /**
     * Copies source render target into destination render target. Mostly used by post-effects.
     *
     * @param {RenderTarget} [source] - The source render target. Defaults to frame buffer.
     * @param {RenderTarget} [dest] - The destination render target. Defaults to frame buffer.
     * @param {boolean} [color] - If true, will copy the color buffer. Defaults to false.
     * @param {boolean} [depth] - If true, will copy the depth buffer. Defaults to false.
     * @returns {boolean} True if the copy was successful, false otherwise.
     */
    copyRenderTarget(source?: RenderTarget, dest?: RenderTarget, color?: boolean, depth?: boolean): boolean;
    /**
     * Start a render pass.
     *
     * @param {RenderPass} renderPass - The render pass to start.
     * @ignore
     */
    startRenderPass(renderPass: RenderPass): void;
    /**
     * End a render pass.
     *
     * @param {RenderPass} renderPass - The render pass to end.
     * @ignore
     */
    endRenderPass(renderPass: RenderPass): void;
    set defaultFramebuffer(value: any);
    get defaultFramebuffer(): any;
    /**
     * Marks the beginning of a block of rendering. Internally, this function binds the render
     * target currently set on the device. This function should be matched with a call to
     * {@link GraphicsDevice#updateEnd}. Calls to {@link GraphicsDevice#updateBegin} and
     * {@link GraphicsDevice#updateEnd} must not be nested.
     *
     * @ignore
     */
    updateBegin(): void;
    /**
     * Marks the end of a block of rendering. This function should be called after a matching call
     * to {@link GraphicsDevice#updateBegin}. Calls to {@link GraphicsDevice#updateBegin} and
     * {@link GraphicsDevice#updateEnd} must not be nested.
     *
     * @ignore
     */
    updateEnd(): void;
    /**
     * Updates a texture's vertical flip.
     *
     * @param {boolean} flipY - True to flip the texture vertically.
     * @ignore
     */
    setUnpackFlipY(flipY: boolean): void;
    /**
     * Updates a texture to have its RGB channels premultiplied by its alpha channel or not.
     *
     * @param {boolean} premultiplyAlpha - True to premultiply the alpha channel against the RGB
     * channels.
     * @ignore
     */
    setUnpackPremultiplyAlpha(premultiplyAlpha: boolean): void;
    /**
     * Sets the byte alignment for unpacking pixel data during texture uploads.
     *
     * @param {number} alignment - The alignment in bytes. Must be 1, 2, 4, or 8.
     * @ignore
     */
    setUnpackAlignment(alignment: number): void;
    /**
     * Activate the specified texture unit.
     *
     * @param {number} textureUnit - The texture unit to activate.
     * @ignore
     */
    activeTexture(textureUnit: number): void;
    /**
     * If the texture is not already bound on the currently active texture unit, bind it.
     *
     * @param {Texture} texture - The texture to bind.
     * @ignore
     */
    bindTexture(texture: Texture): void;
    /**
     * If the texture is not bound on the specified texture unit, active the texture unit and bind
     * the texture to it.
     *
     * @param {Texture} texture - The texture to bind.
     * @param {number} textureUnit - The texture unit to activate and bind the texture to.
     * @ignore
     */
    bindTextureOnUnit(texture: Texture, textureUnit: number): void;
    /**
     * Update the texture parameters for a given texture if they have changed.
     *
     * @param {Texture} texture - The texture to update.
     * @ignore
     */
    setTextureParameters(texture: Texture): void;
    /**
     * Sets the specified texture on the specified texture unit.
     *
     * @param {Texture} texture - The texture to set.
     * @param {number} textureUnit - The texture unit to set the texture on.
     * @ignore
     */
    setTexture(texture: Texture, textureUnit: number): void;
    /**
     * Creates a vertex array object from the supplied vertex buffers.
     *
     * @param {*} vertexBuffers - The vertex buffers to create the vertex array object from.
     * @returns {*} The vertex array object.
     * @ignore
     */
    createVertexArray(vertexBuffers: any): any;
    /**
     * Unbinds the currently bound vertex array object.
     *
     * @ignore
     */
    unbindVertexArray(): void;
    /**
     * Sets up index buffer state for subsequent draw calls.
     *
     * @param {*} indexBuffer - The index buffer to set up.
     * @ignore
     */
    setBuffers(indexBuffer: any): void;
    /**
     * Issues multi-draw commands as a loop of individual draw calls — presumably a fallback used
     * when native multi-draw support is unavailable (confirm against implementation).
     *
     * @ignore
     */
    _multiDrawLoopFallback(mode: any, primitive: any, indexBuffer: any, numInstances: any, drawCommands: any): void;
    /**
     * Submits a graphics primitive for rendering.
     *
     * @param {*} primitive - The primitive describing what to render.
     * @param {*} indexBuffer - The index buffer to use, if any.
     * @param {*} numInstances - The number of instances to render.
     * @param {*} drawCommands - Optional multi-draw commands.
     * @param {boolean} [first] - True if this is the first draw call of a sequence.
     * @param {boolean} [last] - True if this is the last draw call of a sequence.
     * @ignore
     */
    draw(primitive: any, indexBuffer: any, numInstances: any, drawCommands: any, first?: boolean, last?: boolean): void;
    /**
     * Clears the frame buffer of the currently set render target.
     *
     * @param {object} [options] - Optional options object that controls the behavior of the clear
     * operation defined as follows:
     * @param {number[]} [options.color] - The color to clear the color buffer to in the range 0 to
     * 1 for each component.
     * @param {number} [options.depth] - The depth value to clear the depth buffer to in the
     * range 0 to 1. Defaults to 1.
     * @param {number} [options.flags] - The buffers to clear (the types being color, depth and
     * stencil). Can be any bitwise combination of:
     *
     * - {@link CLEARFLAG_COLOR}
     * - {@link CLEARFLAG_DEPTH}
     * - {@link CLEARFLAG_STENCIL}
     *
     * @param {number} [options.stencil] - The stencil value to clear the stencil buffer to.
     * Defaults to 0.
     * @example
     * // Clear color buffer to black and depth buffer to 1
     * device.clear();
     *
     * // Clear just the color buffer to red
     * device.clear({
     *     color: [1, 0, 0, 1],
     *     flags: pc.CLEARFLAG_COLOR
     * });
     *
     * // Clear color buffer to yellow and depth to 1.0
     * device.clear({
     *     color: [1, 1, 0, 1],
     *     depth: 1,
     *     flags: pc.CLEARFLAG_COLOR | pc.CLEARFLAG_DEPTH
     * });
     */
    clear(options?: {
        color?: number[];
        depth?: number;
        flags?: number;
        stencil?: number;
    }): void;
    submit(): void;
    /**
     * Reads a block of pixels from a specified rectangle of the current color framebuffer into an
     * ArrayBufferView object.
     *
     * @param {number} x - The x-coordinate of the rectangle's lower-left corner.
     * @param {number} y - The y-coordinate of the rectangle's lower-left corner.
     * @param {number} w - The width of the rectangle, in pixels.
     * @param {number} h - The height of the rectangle, in pixels.
     * @param {ArrayBufferView} pixels - The ArrayBufferView object that holds the returned pixel
     * data.
     * @ignore
     */
    readPixels(x: number, y: number, w: number, h: number, pixels: ArrayBufferView): void;
    clientWaitAsync(flags: any, interval_ms: any): Promise<any>;
    /**
     * Asynchronously reads a block of pixels from a specified rectangle of the current color framebuffer
     * into an ArrayBufferView object.
     *
     * @param {number} x - The x-coordinate of the rectangle's lower-left corner.
     * @param {number} y - The y-coordinate of the rectangle's lower-left corner.
     * @param {number} w - The width of the rectangle, in pixels.
     * @param {number} h - The height of the rectangle, in pixels.
     * @param {ArrayBufferView} pixels - The ArrayBufferView object that holds the returned pixel
     * data.
     * @param {boolean} [forceRgba] - If true, forces RGBA/UNSIGNED_BYTE format for guaranteed
     * WebGL support. Used for reading non-RGBA 8-bit normalized textures. Defaults to false.
     * @ignore
     */
    readPixelsAsync(x: number, y: number, w: number, h: number, pixels: ArrayBufferView, forceRgba?: boolean): Promise<ArrayBufferView<ArrayBufferLike>>;
    /**
     * Asynchronously reads a rectangular region of pixel data from a texture.
     *
     * @param {*} texture - The texture to read from.
     * @param {*} x - The x-coordinate of the region's origin.
     * @param {*} y - The y-coordinate of the region's origin.
     * @param {*} width - The width of the region, in pixels.
     * @param {*} height - The height of the region, in pixels.
     * @param {*} options - Read options (implementation-defined).
     * @returns {Promise<*>} Promise that resolves with the read result.
     * @ignore
     */
    readTextureAsync(texture: any, x: any, y: any, width: any, height: any, options: any): Promise<any>;
    /**
     * Asynchronously writes pixel data into a rectangular region of a texture.
     *
     * @param {*} texture - The texture to write to.
     * @param {*} x - The x-coordinate of the region's origin.
     * @param {*} y - The y-coordinate of the region's origin.
     * @param {*} width - The width of the region, in pixels.
     * @param {*} height - The height of the region, in pixels.
     * @param {*} data - The pixel data to write.
     * @returns {Promise<void>} Promise that resolves when the write completes.
     * @ignore
     */
    writeTextureAsync(texture: any, x: any, y: any, width: any, height: any, data: any): Promise<void>;
    /**
     * Enables or disables alpha to coverage.
     *
     * @param {boolean} state - True to enable alpha to coverage and false to disable it.
     * @ignore
     */
    setAlphaToCoverage(state: boolean): void;
    /**
     * Sets the output vertex buffer. It will be written to by a shader with transform feedback
     * varyings.
     *
     * @param {VertexBuffer} tf - The output vertex buffer.
     * @ignore
     */
    setTransformFeedbackBuffer(tf: VertexBuffer): void;
    /**
     * Toggles the rasterization render state. Useful with transform feedback, when you only need
     * to process the data without drawing.
     *
     * @param {boolean} on - True to enable rasterization and false to disable it.
     * @ignore
     */
    setRaster(on: boolean): void;
    /**
     * Enables or disables the stencil test.
     *
     * @ignore
     */
    setStencilTest(enable: any): void;
    /**
     * Sets the stencil comparison function, reference value and mask for both faces.
     *
     * @ignore
     */
    setStencilFunc(func: any, ref: any, mask: any): void;
    /**
     * Sets the stencil comparison function, reference value and mask for front faces.
     *
     * @ignore
     */
    setStencilFuncFront(func: any, ref: any, mask: any): void;
    /**
     * Sets the stencil comparison function, reference value and mask for back faces.
     *
     * @ignore
     */
    setStencilFuncBack(func: any, ref: any, mask: any): void;
    /**
     * Sets the stencil operations (on fail, depth fail and depth pass) and write mask for both
     * faces.
     *
     * @ignore
     */
    setStencilOperation(fail: any, zfail: any, zpass: any, writeMask: any): void;
    /**
     * Sets the stencil operations (on fail, depth fail and depth pass) and write mask for front
     * faces.
     *
     * @ignore
     */
    setStencilOperationFront(fail: any, zfail: any, zpass: any, writeMask: any): void;
    /**
     * Sets the stencil operations (on fail, depth fail and depth pass) and write mask for back
     * faces.
     *
     * @ignore
     */
    setStencilOperationBack(fail: any, zfail: any, zpass: any, writeMask: any): void;
    /**
     * Sets the blend state.
     *
     * @ignore
     */
    setBlendState(blendState: any): void;
    /**
     * Sets the stencil state for front and back faces.
     *
     * @ignore
     */
    setStencilState(stencilFront: any, stencilBack: any): void;
    /**
     * Sets the depth state.
     *
     * @ignore
     */
    setDepthState(depthState: any): void;
    /**
     * Sets the face culling mode.
     *
     * @ignore
     */
    setCullMode(cullMode: any): void;
    /**
     * Sets which winding order is treated as front-facing.
     *
     * @ignore
     */
    setFrontFace(frontFace: any): void;
    /**
     * Sets the active shader to be used during subsequent draw calls.
     *
     * @param {Shader} shader - The shader to assign to the device.
     * @param {boolean} [asyncCompile] - If true, rendering will be skipped until the shader is
     * compiled, otherwise the rendering will wait for the shader compilation to finish. Defaults
     * to false.
     */
    setShader(shader: Shader, asyncCompile?: boolean): void;
    activateShader(): void;
    /**
     * Frees memory from all vertex array objects ever allocated with this device.
     *
     * @ignore
     */
    clearVertexArrayObjectCache(): void;
    debugLoseContext(sleep?: number): void;
}

/**
 * Callback used by {@link Mouse#enablePointerLock} and {@link Mouse#disablePointerLock}.
 */
type LockMouseCallback = () => void;
/**
 * @callback LockMouseCallback
 * Callback used by {@link Mouse#enablePointerLock} and {@link Mouse#disablePointerLock}. Invoked
 * with no arguments when the pointer lock request succeeds or fails.
 * @returns {void}
 */
/**
 * Manages mouse input by tracking button states and dispatching events. Extends {@link EventHandler}
 * to fire `mousedown`, `mouseup`, `mousemove` and `mousewheel` events (see {@link MouseEvent}).
 *
 * Allows the state of mouse buttons to be queried to check if they are currently pressed or were
 * pressed/released since the last update. Provides methods to enable/disable pointer lock for
 * raw mouse movement input and control over the context menu. The Mouse instance must be attached
 * to a DOM element before it can detect mouse events.
 *
 * Your application's Mouse instance is managed and accessible via {@link AppBase#mouse}.
 *
 * @category Input
 */
declare class Mouse extends EventHandler {
    /**
     * Fired when the mouse is moved. The handler is passed a {@link MouseEvent}.
     *
     * @event
     * @example
     * app.mouse.on('mousemove', (e) => {
     *     console.log(`Current mouse position is: ${e.x}, ${e.y}`);
     * });
     */
    static EVENT_MOUSEMOVE: string;
    /**
     * Fired when a mouse button is pressed. The handler is passed a {@link MouseEvent}.
     *
     * @event
     * @example
     * app.mouse.on('mousedown', (e) => {
     *     console.log(`The ${e.button} button was pressed at position: ${e.x}, ${e.y}`);
     * });
     */
    static EVENT_MOUSEDOWN: string;
    /**
     * Fired when a mouse button is released. The handler is passed a {@link MouseEvent}.
     *
     * @event
     * @example
     * app.mouse.on('mouseup', (e) => {
     *     console.log(`The ${e.button} button was released at position: ${e.x}, ${e.y}`);
     * });
     */
    static EVENT_MOUSEUP: string;
    /**
     * Fired when a mouse wheel is moved. The handler is passed a {@link MouseEvent}.
     *
     * @event
     * @example
     * app.mouse.on('mousewheel', (e) => {
     *     console.log(`The mouse wheel was moved by ${e.wheelDelta}`);
     * });
     */
    static EVENT_MOUSEWHEEL: string;
    /**
     * Check if the mouse pointer has been locked, using {@link Mouse#enablePointerLock}.
     *
     * @returns {boolean} True if locked.
     */
    static isPointerLocked(): boolean;
    /**
     * Create a new Mouse instance.
     *
     * @param {Element} [element] - The Element that the mouse events are attached to.
     */
    constructor(element?: Element);
    /** Last known mouse x coordinate. @private */
    private _lastX;
    /** Last known mouse y coordinate. @private */
    private _lastY;
    /** Current mouse button states. @private */
    private _buttons;
    /** Mouse button states as of the previous {@link Mouse#update}. @private */
    private _lastbuttons;
    /** The element mouse events are attached to, if any. @private */
    private _target;
    /** True while attached to an element. @private */
    private _attached;
    /** Internal handler for browser mouseup events. @ignore */
    _upHandler: any;
    /** Internal handler for browser mousedown events. @ignore */
    _downHandler: any;
    /** Internal handler for browser mousemove events. @ignore */
    _moveHandler: any;
    /** Internal handler for browser wheel events. @ignore */
    _wheelHandler: any;
    /** Internal handler used to suppress the context menu. @ignore */
    _contextMenuHandler: (event: any) => void;
    /**
     * Attach mouse events to an Element.
     *
     * @param {Element} element - The DOM element to attach the mouse to.
     */
    attach(element: Element): void;
    /**
     * Remove mouse events from the element that it is attached to.
     */
    detach(): void;
    /**
     * Disable the context menu usually activated with right-click.
     */
    disableContextMenu(): void;
    /**
     * Enable the context menu usually activated with right-click. This option is active by
     * default.
     */
    enableContextMenu(): void;
    /**
     * Request that the browser hides the mouse cursor and locks the mouse to the element. Allowing
     * raw access to mouse movement input without risking the mouse exiting the element. Notes:
     *
     * - In some browsers this will only work when the browser is running in fullscreen mode. See
     * {@link https://developer.mozilla.org/en-US/docs/Web/API/Fullscreen_API Fullscreen API} for
     * more details.
     * - Enabling pointer lock can only be initiated by a user action e.g. in the event handler for
     * a mouse or keyboard input.
     *
     * @param {LockMouseCallback} [success] - Function called if the request for mouse lock is
     * successful.
     * @param {LockMouseCallback} [error] - Function called if the request for mouse lock is
     * unsuccessful.
     */
    enablePointerLock(success?: LockMouseCallback, error?: LockMouseCallback): void;
    /**
     * Return control of the mouse cursor to the user.
     *
     * @param {LockMouseCallback} [success] - Function called when the mouse lock is disabled.
     */
    disablePointerLock(success?: LockMouseCallback): void;
    /**
     * Update method, should be called once per frame.
     */
    update(): void;
    /**
     * Returns true if the mouse button is currently pressed.
     *
     * @param {number} button - The mouse button to test. Can be:
     *
     * - {@link MOUSEBUTTON_LEFT}
     * - {@link MOUSEBUTTON_MIDDLE}
     * - {@link MOUSEBUTTON_RIGHT}
     *
     * @returns {boolean} True if the mouse button is currently pressed.
     */
    isPressed(button: number): boolean;
    /**
     * Returns true if the mouse button was pressed this frame (since the last call to update).
     *
     * @param {number} button - The mouse button to test. Can be:
     *
     * - {@link MOUSEBUTTON_LEFT}
     * - {@link MOUSEBUTTON_MIDDLE}
     * - {@link MOUSEBUTTON_RIGHT}
     *
     * @returns {boolean} True if the mouse button was pressed since the last update.
     */
    wasPressed(button: number): boolean;
    /**
     * Returns true if the mouse button was released this frame (since the last call to update).
     *
     * @param {number} button - The mouse button to test. Can be:
     *
     * - {@link MOUSEBUTTON_LEFT}
     * - {@link MOUSEBUTTON_MIDDLE}
     * - {@link MOUSEBUTTON_RIGHT}
     *
     * @returns {boolean} True if the mouse button was released since the last update.
     */
    wasReleased(button: number): boolean;
    /** Processes a browser mouseup event. @private */
    _handleUp(event: any): void;
    /** Processes a browser mousedown event. @private */
    _handleDown(event: any): void;
    /** Processes a browser mousemove event. @private */
    _handleMove(event: any): void;
    /** Processes a browser wheel event. @private */
    _handleWheel(event: any): void;
    /**
     * Calculates the coordinates of an event relative to the attached target element.
     *
     * @private
     */
    _getTargetCoords(event: any): {
        x: number;
        y: number;
    };
}

/**
 * The MouseEvent object is passed into all event handlers registered on the {@link Mouse}. The
 * events are:
 *
 * - {@link Mouse.EVENT_MOUSEDOWN}
 * - {@link Mouse.EVENT_MOUSEUP}
 * - {@link Mouse.EVENT_MOUSEMOVE}
 * - {@link Mouse.EVENT_MOUSEWHEEL}
 *
 * @category Input
 */
declare class MouseEvent {
    /**
     * Create a new MouseEvent instance.
     *
     * @param {Mouse} mouse - The Mouse device that is firing this event.
     * @param {globalThis.MouseEvent|globalThis.WheelEvent} event - The original browser event that fired.
     */
    constructor(mouse: Mouse, event: globalThis.MouseEvent | globalThis.WheelEvent);
    /**
     * The x coordinate of the mouse pointer relative to the element {@link Mouse} is attached to.
     *
     * @type {number}
     */
    x: number;
    /**
     * The y coordinate of the mouse pointer relative to the element {@link Mouse} is attached to.
     *
     * @type {number}
     */
    y: number;
    /**
     * The change in x coordinate since the last mouse event.
     *
     * @type {number}
     */
    dx: number;
    /**
     * The change in y coordinate since the last mouse event.
     *
     * @type {number}
     */
    dy: number;
    /**
     * The mouse button associated with this event. Can be:
     *
     * - {@link MOUSEBUTTON_LEFT}
     * - {@link MOUSEBUTTON_MIDDLE}
     * - {@link MOUSEBUTTON_RIGHT}
     *
     * @type {number}
     */
    button: number;
    /**
     * A value representing the amount the mouse wheel has moved, only valid for
     * {@link Mouse.EVENT_MOUSEWHEEL} events.
     *
     * @type {number}
     */
    wheelDelta: number;
    /**
     * The element that the mouse was fired from.
     *
     * @type {Element}
     */
    element: Element;
    /**
     * True if the ctrl key was pressed when this event was fired.
     *
     * @type {boolean}
     */
    ctrlKey: boolean;
    /**
     * True if the alt key was pressed when this event was fired.
     *
     * @type {boolean}
     */
    altKey: boolean;
    /**
     * True if the shift key was pressed when this event was fired.
     *
     * @type {boolean}
     */
    shiftKey: boolean;
    /**
     * True if the meta key was pressed when this event was fired.
     *
     * @type {boolean}
     */
    metaKey: boolean;
    /**
     * The original browser event.
     *
     * @type {globalThis.MouseEvent|globalThis.WheelEvent}
     */
    event: globalThis.MouseEvent | globalThis.WheelEvent;
    /**
     * The pressed state of the mouse buttons at the time of the event — presumably indexed by the
     * MOUSEBUTTON_* constants (confirm against implementation).
     *
     * @type {boolean[]}
     * @ignore
     */
    buttons: boolean[];
}

/**
 * Manages touch input by handling and dispatching touch events. Extends {@link EventHandler}
 * to fire `touchstart`, `touchend`, `touchmove`, and `touchcancel` events (see {@link TouchEvent}).
 *
 * Detects and processes touch interactions with the attached DOM element, allowing applications
 * to respond to common touch gestures. The TouchDevice instance must be attached to a DOM element
 * before it can detect touch events.
 *
 * Your application's TouchDevice instance is managed and accessible via {@link AppBase#touch}.
 *
 * @category Input
 */
declare class TouchDevice extends EventHandler {
    /**
     * Fired when a touch starts. The handler is passed a {@link TouchEvent}.
     *
     * @event
     * @example
     * app.touch.on('touchstart', (e) => {
     *     console.log(`Touch started at position: ${e.x}, ${e.y}`);
     * });
     */
    static EVENT_TOUCHSTART: string;
    /**
     * Fired when a touch ends. The handler is passed a {@link TouchEvent}.
     *
     * @event
     * @example
     * app.touch.on('touchend', (e) => {
     *     console.log(`Touch ended at position: ${e.x}, ${e.y}`);
     * });
     */
    static EVENT_TOUCHEND: string;
    /**
     * Fired when a touch moves. The handler is passed a {@link TouchEvent}.
     *
     * @event
     * @example
     * app.touch.on('touchmove', (e) => {
     *     console.log(`Touch moved to position: ${e.x}, ${e.y}`);
     * });
     */
    static EVENT_TOUCHMOVE: string;
    /**
     * Fired when a touch is interrupted in some way. The exact reasons for canceling a touch can
     * vary from device to device. For example, a modal alert pops up during the interaction; the
     * touch point leaves the document area, or there are more touch points than the device
     * supports, in which case the earliest touch point is canceled. The handler is passed a
     * {@link TouchEvent}.
     *
     * @event
     * @example
     * app.touch.on('touchcancel', (e) => {
     *     console.log(`Touch canceled at position: ${e.x}, ${e.y}`);
     * });
     */
    static EVENT_TOUCHCANCEL: string;
    /**
     * Create a new touch device and attach it to an element.
     *
     * @param {Element} element - The element to attach listen for events on.
     */
    constructor(element: Element);
    /** The element touch events are attached to. @ignore */
    _element: Element;
    /** Internal handler for browser touchstart events. @ignore */
    _startHandler: any;
    /** Internal handler for browser touchend events. @ignore */
    _endHandler: any;
    /** Internal handler for browser touchmove events. @ignore */
    _moveHandler: any;
    /** Internal handler for browser touchcancel events. @ignore */
    _cancelHandler: any;
    /**
     * Attach a device to an element in the DOM. If the device is already attached to an element
     * this method will detach it first.
     *
     * @param {Element} element - The element to attach to.
     */
    attach(element: Element): void;
    /**
     * Detach a device from the element it is attached to.
     */
    detach(): void;
    /** Processes a browser touchstart event. @private */
    _handleTouchStart(e: any): void;
    /** Processes a browser touchend event. @private */
    _handleTouchEnd(e: any): void;
    /** Processes a browser touchmove event. @private */
    _handleTouchMove(e: any): void;
    /** Processes a browser touchcancel event. @private */
    _handleTouchCancel(e: any): void;
}

/**
 * @import { TouchDevice } from './touch-device.js'
 */
/**
 * This function takes a browser Touch object and returns the coordinates of the touch relative to
 * the target DOM element.
 *
 * @param {globalThis.Touch} touch - The browser Touch object.
 * @returns {{ x: number, y: number }} The coordinates of the touch relative to the touch.target
 * DOM element.
 * @category Input
 */
declare function getTouchTargetCoords(touch: globalThis.Touch): {
    x: number;
    y: number;
};
/**
 * An instance of a single point touch on a {@link TouchDevice}.
 *
 * @category Input
 */
declare class Touch {
    /**
     * Create a new Touch object from the browser Touch.
     *
     * @param {globalThis.Touch} touch - The browser Touch object.
     */
    constructor(touch: globalThis.Touch);
    /**
     * The identifier of the touch.
     *
     * @type {number}
     */
    id: number;
    /**
     * The x coordinate relative to the element that the TouchDevice is attached to.
     *
     * @type {number}
     */
    x: number;
    /**
     * The y coordinate relative to the element that the TouchDevice is attached to.
     *
     * @type {number}
     */
    y: number;
    /**
     * The target DOM element of the touch event.
     *
     * @type {Element}
     */
    target: Element;
    /**
     * The original browser Touch object.
     *
     * @type {globalThis.Touch}
     */
    touch: globalThis.Touch;
}
/**
 * The TouchEvent object is passed into all event handlers registered on the {@link TouchDevice}.
 * The events are:
 *
 * - {@link TouchDevice.EVENT_TOUCHSTART}
 * - {@link TouchDevice.EVENT_TOUCHEND}
 * - {@link TouchDevice.EVENT_TOUCHMOVE}
 * - {@link TouchDevice.EVENT_TOUCHCANCEL}
 *
 * @category Input
 */
declare class TouchEvent {
    /**
     * Create a new TouchEvent instance. It is created from an existing browser event.
     *
     * @param {TouchDevice} device - The source device of the touch events.
     * @param {globalThis.TouchEvent} event - The original browser TouchEvent.
     */
    constructor(device: TouchDevice, event: globalThis.TouchEvent);
    /**
     * The target DOM element that the event was fired from.
     *
     * @type {Element}
     */
    element: Element;
    /**
     * The original browser TouchEvent.
     *
     * @type {globalThis.TouchEvent}
     */
    event: globalThis.TouchEvent;
    /**
     * A list of all touches currently in contact with the device.
     *
     * @type {Touch[]}
     */
    touches: Touch[];
    /**
     * A list of touches that have changed since the last event.
     *
     * @type {Touch[]}
     */
    changedTouches: Touch[];
    /**
     * Get a touch from one of the touch lists by its id. It is useful to access touches by their
     * id so that you can be sure you are referencing the same touch.
     *
     * @param {number} id - The identifier of the touch.
     * @param {Touch[]} list - An array of touches to search.
     * @returns {Touch|null} The {@link Touch} object or null.
     */
    getTouchById(id: number, list: Touch[]): Touch | null;
}

/**
 * Handles mouse and touch events for {@link ElementComponent}s. When input events occur on an
 * ElementComponent this fires the appropriate events on the ElementComponent.
 *
 * @category User Interface
 */
declare class ElementInput {
    /**
     * Builds the corners used for hit-testing an element, applying the supplied scale — presumed
     * from the name; confirm against implementation.
     *
     * @ignore
     */
    static buildHitCorners(element: any, screenOrWorldCorners: any, scale: any): any;
    /**
     * Calculates an element's scale factor into screen space — presumed from the name; confirm
     * against implementation.
     *
     * @ignore
     */
    static calculateScaleToScreen(element: any): Vec3;
    /**
     * Calculates an element's scale factor into world space — presumed from the name; confirm
     * against implementation.
     *
     * @ignore
     */
    static calculateScaleToWorld(element: any): Vec3;
    /**
     * Create a new ElementInput instance.
     *
     * @param {Element} domElement - The DOM element.
     * @param {object} [options] - Optional arguments.
     * @param {boolean} [options.useMouse] - Whether to allow mouse input. Defaults to true.
     * @param {boolean} [options.useTouch] - Whether to allow touch input. Defaults to true.
     * @param {boolean} [options.useXr] - Whether to allow XR input sources. Defaults to true.
     */
    constructor(domElement: Element, options?: {
        useMouse?: boolean;
        useTouch?: boolean;
        useXr?: boolean;
    });
    _app: any;
    _attached: boolean;
    _target: Element;
    _enabled: boolean;
    _lastX: number;
    _lastY: number;
    // Bound DOM event handlers — presumably registered in attach() and removed in detach().
    _upHandler: any;
    _downHandler: any;
    _moveHandler: any;
    _wheelHandler: any;
    _touchstartHandler: any;
    _touchendHandler: any;
    _touchcancelHandler: any;
    _touchmoveHandler: any;
    _sortHandler: any;
    // Tracked elements and per-input interaction state.
    _elements: any[];
    _hoveredElement: any;
    _pressedElement: any;
    _touchedElements: {};
    _touchesForWhichTouchLeaveHasFired: {};
    _selectedElements: {};
    _selectedPressedElements: {};
    _useMouse: boolean;
    _useTouch: boolean;
    _useXr: boolean;
    _selectEventsAttached: boolean;
    _clickedEntities: {};
    /** Sets whether the ElementInput is enabled. */
    set enabled(value: boolean);
    /** Gets whether the ElementInput is enabled. */
    get enabled(): boolean;
    /** Sets the application this ElementInput is associated with. @ignore */
    set app(value: any);
    /** Gets the application this ElementInput is associated with. @ignore */
    get app(): any;
    /**
     * Attach mouse and touch events to a DOM element.
     *
     * @param {Element} domElement - The DOM element.
     */
    attach(domElement: Element): void;
    /**
     * Attaches XR select event listeners — presumed from the related _onSelect* handlers; confirm
     * against implementation.
     *
     * @ignore
     */
    attachSelectEvents(): void;
    /**
     * Remove mouse and touch events from the DOM element that it is attached to.
     */
    detach(): void;
    /**
     * Add a {@link ElementComponent} to the internal list of ElementComponents that are being
     * checked for input.
     *
     * @param {ElementComponent} element - The ElementComponent to add.
     */
    addElement(element: ElementComponent): void;
    /**
     * Remove a {@link ElementComponent} from the internal list of ElementComponents that are being
     * checked for input.
     *
     * @param {ElementComponent} element - The ElementComponent to remove.
     */
    removeElement(element: ElementComponent): void;
    // Internal mouse/touch/XR event processing and hit-testing helpers — see the element-input
    // implementation for details.
    _handleUp(event: any): void;
    _handleDown(event: any): void;
    _handleMove(event: any): void;
    _handleWheel(event: any): void;
    _determineTouchedElements(event: any): {};
    _handleTouchStart(event: any): void;
    _handleTouchEnd(event: any): void;
    _handleTouchMove(event: any): void;
    _onElementMouseEvent(eventType: any, event: any): void;
    _onXrStart(): void;
    _onXrEnd(): void;
    _onXrUpdate(): void;
    _onXrInputRemove(inputSource: any): void;
    _onSelectStart(inputSource: any, event: any): void;
    _onSelectEnd(inputSource: any, event: any): void;
    _onElementSelectEvent(eventType: any, inputSource: any, event: any): void;
    _fireEvent(name: any, evt: any): void;
    _calcMouseCoords(event: any): void;
    _sortElements(a: any, b: any): any;
    _getTargetElementByCoords(camera: any, x: any, y: any): any;
    _getTargetElementByRay(ray: any, camera: any): any;
    _getTargetElement(camera: any, rayScreen: any, ray3d: any): any;
    _calculateRayScreen(x: any, y: any, camera: any, ray: any): boolean;
    _calculateRay3d(x: any, y: any, camera: any, ray: any): boolean;
    _checkElement(ray: any, element: any, screen: any): number;
}
/**
 * Represents an input event fired on a {@link ElementComponent}. When an event is raised on an
 * ElementComponent it bubbles up to its parent ElementComponents unless we call stopPropagation().
 *
 * @category User Interface
 */
declare class ElementInputEvent {
    /**
     * Create a new ElementInputEvent instance.
     *
     * @param {MouseEvent|TouchEvent} event - MouseEvent or TouchEvent that was originally raised.
     * @param {ElementComponent} element - The ElementComponent that this event was originally
     * raised on.
     * @param {CameraComponent} camera - The CameraComponent that this event was originally raised
     * via.
     */
    constructor(event: MouseEvent | TouchEvent, element: ElementComponent, camera: CameraComponent);
    /**
     * MouseEvent or TouchEvent that was originally raised.
     *
     * @type {MouseEvent|TouchEvent}
     */
    event: MouseEvent | TouchEvent;
    /**
     * The ElementComponent that this event was originally raised on.
     *
     * @type {ElementComponent}
     */
    element: ElementComponent;
    /**
     * The CameraComponent that this event was originally raised via.
     *
     * @type {CameraComponent}
     */
    camera: CameraComponent;
    /** Internal flag tracking whether propagation of this event has been stopped. @ignore */
    _stopPropagation: boolean;
    /**
     * Stop propagation of the event to parent {@link ElementComponent}s. This also stops
     * propagation of the event to other event listeners of the original DOM Event.
     */
    stopPropagation(): void;
}
/**
 * Represents a Mouse event fired on a {@link ElementComponent}.
 *
 * @category User Interface
 */
declare class ElementMouseEvent extends ElementInputEvent {
    /**
     * Create an instance of an ElementMouseEvent.
     *
     * @param {MouseEvent} event - The MouseEvent that
     * was originally raised.
     * @param {ElementComponent} element - The
     * ElementComponent that this event was originally raised on.
     * @param {CameraComponent} camera - The
     * CameraComponent that this event was originally raised via.
     * @param {number} x - The x coordinate.
     * @param {number} y - The y coordinate.
     * @param {number} lastX - The last x coordinate.
     * @param {number} lastY - The last y coordinate.
     */
    constructor(event: MouseEvent, element: ElementComponent, camera: CameraComponent, x: number, y: number, lastX: number, lastY: number);
    /**
     * The x coordinate.
     *
     * @type {number}
     */
    x: number;
    /**
     * The y coordinate.
     *
     * @type {number}
     */
    y: number;
    /**
     * Whether the ctrl key was pressed.
     *
     * @type {boolean}
     */
    ctrlKey: boolean;
    /**
     * Whether the alt key was pressed.
     *
     * @type {boolean}
     */
    altKey: boolean;
    /**
     * Whether the shift key was pressed.
     *
     * @type {boolean}
     */
    shiftKey: boolean;
    /**
     * Whether the meta key was pressed.
     *
     * @type {boolean}
     */
    metaKey: boolean;
    /**
     * The mouse button.
     *
     * @type {number}
     */
    button: number;
    /**
     * The amount of horizontal movement of the cursor.
     *
     * @type {number}
     */
    dx: number;
    /**
     * The amount of vertical movement of the cursor.
     *
     * @type {number}
     */
    dy: number;
    /**
     * The amount of the wheel movement.
     *
     * @type {number}
     */
    wheelDelta: number;
}
/**
 * Represents a XRInputSourceEvent fired on a {@link ElementComponent}.
 *
 * @category User Interface
 */
declare class ElementSelectEvent extends ElementInputEvent {
    /**
     * Create an instance of an ElementSelectEvent.
     *
     * @param {XRInputSourceEvent} event - The XRInputSourceEvent that was originally raised.
     * @param {ElementComponent} element - The
     * ElementComponent that this event was originally raised on.
     * @param {CameraComponent} camera - The
     * CameraComponent that this event was originally raised via.
     * @param {XrInputSource} inputSource - The XR input source
     * that this event was originally raised from.
     */
    constructor(event: XRInputSourceEvent, element: ElementComponent, camera: CameraComponent, inputSource: XrInputSource);
    /**
     * The XR input source that this event was originally raised from.
     *
     * @type {XrInputSource}
     */
    inputSource: XrInputSource;
}
/**
 * Represents a TouchEvent fired on a {@link ElementComponent}.
 *
 * @category User Interface
 */
declare class ElementTouchEvent extends ElementInputEvent {
    /**
     * Create an instance of an ElementTouchEvent.
     *
     * @param {TouchEvent} event - The TouchEvent that was originally raised.
     * @param {ElementComponent} element - The
     * ElementComponent that this event was originally raised on.
     * @param {CameraComponent} camera - The
     * CameraComponent that this event was originally raised via.
     * @param {number} x - The x coordinate of the touch that triggered the event.
     * @param {number} y - The y coordinate of the touch that triggered the event.
     * @param {Touch} touch - The touch object that triggered the event.
     */
    constructor(event: TouchEvent, element: ElementComponent, camera: CameraComponent, x: number, y: number, touch: Touch);
    /**
     * The Touch objects representing all current points of contact with the surface,
     * regardless of target or changed status.
     *
     * @type {Touch[]}
     */
    touches: Touch[];
    /**
     * The Touch objects representing individual points of contact whose states changed between
     * the previous touch event and this one.
     *
     * @type {Touch[]}
     */
    changedTouches: Touch[];
    /**
     * The x coordinate of the touch that triggered the event.
     *
     * @type {number}
     */
    x: number;
    /**
     * The y coordinate of the touch that triggered the event.
     *
     * @type {number}
     */
    y: number;
    /**
     * The touch object that triggered the event.
     *
     * @type {Touch}
     */
    touch: Touch;
}

/**
 * Manages keyboard input by tracking key states and dispatching events. Extends {@link EventHandler}
 * in order to fire `keydown` and `keyup` events (see {@link KeyboardEvent}).
 *
 * Allows the state of individual keys to be queried to check if they are currently pressed or were
 * pressed/released since the last update. The class automatically handles browser visibility
 * changes and window blur events by clearing key states. The Keyboard instance must be attached to
 * a DOM element before it can detect key events.
 *
 * Your application's Keyboard instance is managed and accessible via {@link AppBase#keyboard}.
 *
 * @category Input
 */
declare class Keyboard extends EventHandler {
    /**
     * Fired when a key is pressed. The handler is passed a {@link KeyboardEvent}.
     *
     * @event
     * @example
     * const onKeyDown = (e) => {
     *     if (e.key === pc.KEY_SPACE) {
     *         // space key pressed
     *     }
     *     e.event.preventDefault(); // Use original browser event to prevent browser action.
     * };
     *
     * app.keyboard.on('keydown', onKeyDown, this);
     */
    static EVENT_KEYDOWN: string;
    /**
     * Fired when a key is released. The handler is passed a {@link KeyboardEvent}.
     *
     * @event
     * @example
     * const onKeyUp = (e) => {
     *     if (e.key === pc.KEY_SPACE) {
     *         // space key released
     *     }
     *     e.event.preventDefault(); // Use original browser event to prevent browser action.
     * };
     *
     * app.keyboard.on('keyup', onKeyUp, this);
     */
    static EVENT_KEYUP: string;
    /**
     * Create a new Keyboard instance.
     *
     * @param {Element|Window} [element] - Element to attach Keyboard to. Note that elements like
     * &lt;div&gt; can't accept focus by default. To use keyboard events on an element like this it
     * must have a value of 'tabindex' e.g. tabindex="0". See
     * [here](https://www.w3.org/WAI/GL/WCAG20/WD-WCAG20-TECHS/SCR29.html) for more details.
     * @param {object} [options] - Optional options object.
     * @param {boolean} [options.preventDefault] - Call preventDefault() in key event handlers.
     * This stops the default action of the event occurring. e.g. Ctrl+T will not open a new
     * browser tab.
     * @param {boolean} [options.stopPropagation] - Call stopPropagation() in key event handlers.
     * This stops the event bubbling up the DOM so no parent handlers will be notified of the
     * event.
     * @example
     * // attach keyboard listeners to the window
     * const keyboard = new pc.Keyboard(window);
     */
    constructor(element?: Element | Window, options?: {
        preventDefault?: boolean;
        stopPropagation?: boolean;
    });
    /** @private */
    private _element;
    /** @private */
    private _keymap;
    /** @private */
    private _lastmap;
    // Bound browser event handlers, stored so they can be removed again in detach().
    _keyDownHandler: any;
    _keyUpHandler: any;
    _keyPressHandler: any;
    _visibilityChangeHandler: any;
    _windowBlurHandler: any;
    // Option flags copied from the constructor's options object (see constructor docs).
    preventDefault: boolean;
    stopPropagation: boolean;
    /**
     * Attach the keyboard event handlers to an Element.
     *
     * @param {Element|Window} element - The element to listen for keyboard events on.
     */
    attach(element: Element | Window): void;
    /**
     * Detach the keyboard event handlers from the element it is attached to.
     */
    detach(): void;
    /**
     * Convert a key code into a key identifier.
     *
     * @param {number} keyCode - The key code.
     * @returns {string} The key identifier.
     * @private
     */
    private toKeyIdentifier;
    /**
     * Process the browser keydown event.
     *
     * @param {globalThis.KeyboardEvent} event - The browser keyboard event.
     * @private
     */
    private _handleKeyDown;
    /**
     * Process the browser keyup event.
     *
     * @param {globalThis.KeyboardEvent} event - The browser keyboard event.
     * @private
     */
    private _handleKeyUp;
    /**
     * Process the browser keypress event.
     *
     * @param {globalThis.KeyboardEvent} event - The browser keyboard event.
     * @private
     */
    private _handleKeyPress;
    /**
     * Handle the browser visibilitychange event.
     *
     * @private
     */
    private _handleVisibilityChange;
    /**
     * Handle the browser blur event.
     *
     * @private
     */
    private _handleWindowBlur;
    /**
     * Called once per frame to update internal state.
     *
     * @ignore
     */
    update(): void;
    /**
     * Return true if the key is currently down.
     *
     * @param {number} key - The keyCode of the key to test. See the KEY_* constants.
     * @returns {boolean} True if the key is currently pressed, false if not.
     */
    isPressed(key: number): boolean;
    /**
     * Returns true if the key was pressed since the last update.
     *
     * @param {number} key - The keyCode of the key to test. See the KEY_* constants.
     * @returns {boolean} True if the key was pressed.
     */
    wasPressed(key: number): boolean;
    /**
     * Returns true if the key was released since the last update.
     *
     * @param {number} key - The keyCode of the key to test. See the KEY_* constants.
     * @returns {boolean} True if the key was released.
     */
    wasReleased(key: number): boolean;
}

/**
 * Input handler for accessing GamePad input.
 *
 * @category Input
 */
declare class GamePads extends EventHandler {
    /**
     * Fired when a gamepad is connected. The handler is passed the {@link GamePad} object that was
     * connected.
     *
     * @event
     * @example
     * const onPadConnected = (pad) => {
     *     if (!pad.mapping) {
     *         // Map the gamepad as the system could not find the proper map.
     *     } else {
     *         // Make the gamepad pulse.
     *     }
     * };
     *
     * app.gamepads.on("gamepadconnected", onPadConnected, this);
     */
    static EVENT_GAMEPADCONNECTED: string;
    /**
     * Fired when a gamepad is disconnected. The handler is passed the {@link GamePad} object that
     * was disconnected.
     *
     * @event
     * @example
     * const onPadDisconnected = (pad) => {
     *     // Pause the game.
     * };
     *
     * app.gamepads.on("gamepaddisconnected", onPadDisconnected, this);
     */
    static EVENT_GAMEPADDISCONNECTED: string;
    /**
     * Whether gamepads are supported by this device.
     *
     * @type {boolean}
     */
    gamepadsSupported: boolean;
    /**
     * The list of current gamepads.
     *
     * @type {GamePad[]}
     */
    current: GamePad[];
    /**
     * The list of previous button states.
     *
     * @type {boolean[][]}
     * @private
     */
    private _previous;
    // Bound browser event handlers, stored so they can be removed again in destroy().
    _ongamepadconnectedHandler: any;
    _ongamepaddisconnectedHandler: any;
    /**
     * Sets the threshold for axes to return values. Must be between 0 and 1.
     *
     * @type {number}
     * @ignore
     */
    set deadZone(value: number);
    /**
     * Gets the threshold for axes to return values.
     *
     * @type {number}
     * @ignore
     */
    get deadZone(): number;
    /**
     * Gets the list of previous button states.
     *
     * @type {boolean[][]}
     * @ignore
     */
    get previous(): boolean[][];
    /**
     * Callback function when a gamepad is connecting.
     *
     * @param {GamepadEvent} event - The event containing the connecting gamepad.
     * @private
     */
    private _ongamepadconnected;
    /**
     * Callback function when a gamepad is disconnecting.
     *
     * @param {GamepadEvent} event - The event containing the disconnecting gamepad.
     * @private
     */
    private _ongamepaddisconnected;
    /**
     * Update the previous state of the gamepads. This must be called every frame for
     * `wasPressed`, `wasReleased` and `wasTouched` to work.
     *
     * @ignore
     */
    update(): void;
    /**
     * Poll for the latest data from the gamepad API.
     *
     * @param {GamePad[]} [pads] - An optional array used to receive the gamepads mapping. This
     * array will be returned by this function.
     * @returns {GamePad[]} An array of gamepads and mappings for the model of gamepad that is
     * attached.
     * @example
     * const gamepads = new pc.GamePads();
     * const pads = gamepads.poll();
     */
    poll(pads?: GamePad[]): GamePad[];
    /**
     * Destroy the event listeners.
     *
     * @ignore
     */
    destroy(): void;
    /**
     * Retrieve the order for buttons and axes for given HTML5 Gamepad.
     *
     * @param {Gamepad} pad - The HTML5 Gamepad object.
     * @returns {object} Object defining the order of buttons and axes for given HTML5 Gamepad.
     */
    getMap(pad: Gamepad): object;
    /**
     * Returns true if the button on the pad requested is pressed.
     *
     * @param {number} orderIndex - The order index of the pad to check, use constants {@link PAD_1}, {@link PAD_2}, etc. For gamepad index call the function from the pad.
     * @param {number} button - The button to test, use constants {@link PAD_FACE_1}, etc.
     * @returns {boolean} True if the button is pressed.
     */
    isPressed(orderIndex: number, button: number): boolean;
    /**
     * Returns true if the button was pressed since the last frame.
     *
     * @param {number} orderIndex - The index of the pad to check, use constants {@link PAD_1}, {@link PAD_2}, etc. For gamepad index call the function from the pad.
     * @param {number} button - The button to test, use constants {@link PAD_FACE_1}, etc.
     * @returns {boolean} True if the button was pressed since the last frame.
     */
    wasPressed(orderIndex: number, button: number): boolean;
    /**
     * Returns true if the button was released since the last frame.
     *
     * @param {number} orderIndex - The index of the pad to check, use constants {@link PAD_1}, {@link PAD_2}, etc. For gamepad index call the function from the pad.
     * @param {number} button - The button to test, use constants {@link PAD_FACE_1}, etc.
     * @returns {boolean} True if the button was released since the last frame.
     */
    wasReleased(orderIndex: number, button: number): boolean;
    /**
     * Get the value of one of the analog axes of the pad.
     *
     * @param {number} orderIndex - The index of the pad to check, use constants {@link PAD_1}, {@link PAD_2}, etc. For gamepad index call the function from the pad.
     * @param {number} axis - The axis to get the value of, use constants {@link PAD_L_STICK_X}, etc.
     * @returns {number} The value of the axis between -1 and 1.
     */
    getAxis(orderIndex: number, axis: number): number;
    /**
     * Make the gamepad vibrate.
     *
     * @param {number} orderIndex - The index of the pad to check, use constants {@link PAD_1}, {@link PAD_2}, etc. For gamepad index call the function from the pad.
     * @param {number} intensity - Intensity for the vibration in the range 0 to 1.
     * @param {number} duration - Duration for the vibration in milliseconds.
     * @param {object} [options] - Options for special vibration pattern.
     * @param {number} [options.startDelay] - Delay before the pattern starts, in milliseconds. Defaults to 0.
     * @param {number} [options.strongMagnitude] - Intensity for strong actuators in the range 0 to 1. Defaults to intensity.
     * @param {number} [options.weakMagnitude] - Intensity for weak actuators in the range 0 to 1. Defaults to intensity.
     * @returns {Promise<boolean>} Return a Promise resulting in true if the pulse was successfully completed.
     */
    pulse(orderIndex: number, intensity: number, duration: number, options?: {
        startDelay?: number;
        strongMagnitude?: number;
        weakMagnitude?: number;
    }): Promise<boolean>;
    /**
     * Make all gamepads vibrate.
     *
     * @param {number} intensity - Intensity for the vibration in the range 0 to 1.
     * @param {number} duration - Duration for the vibration in milliseconds.
     * @param {object} [options] - Options for special vibration pattern.
     * @param {number} [options.startDelay] - Delay before the pattern starts, in milliseconds. Defaults to 0.
     * @param {number} [options.strongMagnitude] - Intensity for strong actuators in the range 0 to 1. Defaults to intensity.
     * @param {number} [options.weakMagnitude] - Intensity for weak actuators in the range 0 to 1. Defaults to intensity.
     * @returns {Promise<boolean[]>} Return a Promise resulting in an array of booleans defining if the pulse was successfully completed for every gamepads.
     */
    pulseAll(intensity: number, duration: number, options?: {
        startDelay?: number;
        strongMagnitude?: number;
        weakMagnitude?: number;
    }): Promise<boolean[]>;
    /**
     * Find a connected {@link GamePad} from its identifier.
     *
     * @param {string} id - The identifier to search for.
     * @returns {GamePad|null} The {@link GamePad} with the matching identifier or null if no gamepad is found or the gamepad is not connected.
     */
    findById(id: string): GamePad | null;
    /**
     * Find a connected {@link GamePad} from its device index.
     *
     * @param {number} index - The device index to search for.
     * @returns {GamePad|null} The {@link GamePad} with the matching device index or null if no gamepad is found or the gamepad is not connected.
     */
    findByIndex(index: number): GamePad | null;
}
/**
 * A GamePad stores information about a gamepad from the Gamepad API.
 *
 * @category Input
 */
declare class GamePad {
    /**
     * Create a new GamePad Instance.
     *
     * @param {Gamepad} gamepad - The original Gamepad API gamepad.
     * @param {object} map - The buttons and axes map.
     * @ignore
     */
    constructor(gamepad: Gamepad, map: object);
    /**
     * The compiled mapping to reduce lookup delay when retrieving buttons.
     *
     * @type {object}
     * @private
     */
    private _compiledMapping;
    /**
     * The identifier for the gamepad. Its structure depends on device.
     *
     * @type {string}
     */
    id: string;
    /**
     * The index for this controller. A gamepad that is disconnected and reconnected will retain the same index.
     *
     * @type {number}
     */
    index: number;
    /**
     * The buttons present on the GamePad. Order is provided by API, use {@link GamePad#buttons} instead.
     *
     * @type {GamePadButton[]}
     * @private
     */
    private _buttons;
    /**
     * The axes values from the GamePad. Order is provided by API, use {@link GamePad#axes} instead.
     *
     * @type {number[]}
     * @private
     */
    private _axes;
    /**
     * Previous value for the analog axes present on the gamepad. Values are between -1 and 1.
     *
     * @type {number[]}
     * @private
     */
    private _previousAxes;
    /**
     * The gamepad mapping detected by the browser. Value is either "standard", "xr-standard", "" or "custom". When empty string, you may need to update the mapping yourself. "custom" means you updated the mapping.
     *
     * @type {string}
     */
    mapping: string;
    /**
     * The buttons and axes map.
     *
     * @type {object}
     */
    map: object;
    /**
     * The hand this gamepad is usually handled on. Only relevant for XR pads. Value is either "left", "right" or "none".
     *
     * @type {string}
     */
    hand: string;
    /**
     * The original Gamepad API gamepad.
     *
     * @type {Gamepad}
     * @ignore
     */
    pad: Gamepad;
    /**
     * Gets whether the gamepad is connected.
     *
     * @type {boolean}
     */
    get connected(): boolean;
    /**
     * Compile the buttons mapping to reduce lookup delay.
     *
     * @private
     */
    private _compileMapping;
    /**
     * Update the existing GamePad Instance.
     *
     * @param {Gamepad} gamepad - The original Gamepad API gamepad.
     * @ignore
     */
    update(gamepad: Gamepad): this;
    /**
     * Update the map for this gamepad.
     *
     * @param {object} map - The new mapping for this gamepad.
     * @param {string[]} map.buttons - Buttons mapping for this gamepad.
     * @param {string[]} map.axes - Axes mapping for this gamepad.
     * @param {object} [map.synthesizedButtons] - Information about buttons to pull from axes for this gamepad. Requires definition of axis index, min value and max value.
     * @param {"custom"} [map.mapping] - New mapping format. Will be forced into "custom".
     * @example
     * this.pad.updateMap({
     *     buttons: [
     *         'PAD_FACE_1',
     *         'PAD_FACE_2',
     *         'PAD_FACE_3',
     *         'PAD_FACE_4',
     *         'PAD_L_SHOULDER_1',
     *         'PAD_R_SHOULDER_1',
     *         'PAD_L_SHOULDER_2',
     *         'PAD_R_SHOULDER_2',
     *         'PAD_SELECT',
     *         'PAD_START',
     *         'PAD_L_STICK_BUTTON',
     *         'PAD_R_STICK_BUTTON',
     *         'PAD_VENDOR'
     *     ],
     *     axes: [
     *         'PAD_L_STICK_X',
     *         'PAD_L_STICK_Y',
     *         'PAD_R_STICK_X',
     *         'PAD_R_STICK_Y'
     *     ],
     *     synthesizedButtons: {
     *         PAD_UP: { axis: 0, min: 0, max: 1 },
     *         PAD_DOWN: { axis: 0, min: -1, max: 0 },
     *         PAD_LEFT: { axis: 0, min: -1, max: 0 },
     *         PAD_RIGHT: { axis: 0, min: 0, max: 1 }
     *     }
     * });
     */
    updateMap(map: {
        buttons: string[];
        axes: string[];
        synthesizedButtons?: object;
        mapping?: "custom";
    }): void;
    /**
     * Reset gamepad mapping to default.
     */
    resetMap(): void;
    /**
     * Gets the values from analog axes present on the GamePad. Values are between -1 and 1.
     *
     * @type {number[]}
     */
    get axes(): number[];
    /**
     * Gets the buttons present on the GamePad.
     *
     * @type {GamePadButton[]}
     */
    get buttons(): GamePadButton[];
    /**
     * Make the gamepad vibrate.
     *
     * @param {number} intensity - Intensity for the vibration in the range 0 to 1.
     * @param {number} duration - Duration for the vibration in milliseconds.
     * @param {object} [options] - Options for special vibration pattern.
     * @param {number} [options.startDelay] - Delay before the pattern starts, in milliseconds. Defaults to 0.
     * @param {number} [options.strongMagnitude] - Intensity for strong actuators in the range 0 to 1. Defaults to intensity.
     * @param {number} [options.weakMagnitude] - Intensity for weak actuators in the range 0 to 1. Defaults to intensity.
     * @returns {Promise<boolean>} Return a Promise resulting in true if the pulse was successfully completed.
     */
    pulse(intensity: number, duration: number, options?: {
        startDelay?: number;
        strongMagnitude?: number;
        weakMagnitude?: number;
    }): Promise<boolean>;
    /**
     * Retrieve a button from its index.
     *
     * @param {number} index - The index to return the button for.
     * @returns {GamePadButton} The button for the searched index. May be a placeholder if none found.
     */
    getButton(index: number): GamePadButton;
    /**
     * Returns true if the button is pressed.
     *
     * @param {number} button - The button to test, use constants {@link PAD_FACE_1}, etc.
     * @returns {boolean} True if the button is pressed.
     */
    isPressed(button: number): boolean;
    /**
     * Return true if the button was pressed since the last update.
     *
     * @param {number} button - The button to test, use constants {@link PAD_FACE_1}, etc.
     * @returns {boolean} Return true if the button was pressed, false if not.
     */
    wasPressed(button: number): boolean;
    /**
     * Return true if the button was released since the last update.
     *
     * @param {number} button - The button to test, use constants {@link PAD_FACE_1}, etc.
     * @returns {boolean} Return true if the button was released, false if not.
     */
    wasReleased(button: number): boolean;
    /**
     * Returns true if the button is touched.
     *
     * @param {number} button - The button to test, use constants {@link PAD_FACE_1}, etc.
     * @returns {boolean} True if the button is touched.
     */
    isTouched(button: number): boolean;
    /**
     * Return true if the button was touched since the last update.
     *
     * @param {number} button - The button to test, use constants {@link PAD_FACE_1}, etc.
     * @returns {boolean} Return true if the button was touched, false if not.
     */
    wasTouched(button: number): boolean;
    /**
     * Returns the value of a button between 0 and 1, with 0 representing a button that is not pressed, and 1 representing a button that is fully pressed.
     *
     * @param {number} button - The button to retrieve, use constants {@link PAD_FACE_1}, etc.
     * @returns {number} The value of the button between 0 and 1.
     */
    getValue(button: number): number;
    /**
     * Get the value of one of the analog axes of the pad.
     *
     * @param {number} axis - The axis to get the value of, use constants {@link PAD_L_STICK_X}, etc.
     * @returns {number} The value of the axis between -1 and 1.
     */
    getAxis(axis: number): number;
}
/**
 * A GamePadButton stores information about a button from the Gamepad API.
 *
 * @category Input
 */
declare class GamePadButton {
    /**
     * Create a new GamePadButton instance.
     *
     * @param {number|GamepadButton} current - The original Gamepad API gamepad button.
     * @param {number|GamepadButton} [previous] - The previous Gamepad API gamepad button.
     * @ignore
     */
    constructor(current: number | GamepadButton, previous?: number | GamepadButton);
    /**
     * The value for the button between 0 and 1, with 0 representing a button that is not pressed, and 1 representing a button that is fully pressed.
     *
     * @type {number}
     */
    value: number;
    /**
     * Whether the button is currently down.
     *
     * @type {boolean}
     */
    pressed: boolean;
    /**
     * Whether the button is currently touched.
     *
     * @type {boolean}
     */
    touched: boolean;
    /**
     * Whether the button was pressed since the last update.
     *
     * @type {boolean}
     */
    wasPressed: boolean;
    /**
     * Whether the button was released since the last update.
     *
     * @type {boolean}
     */
    wasReleased: boolean;
    /**
     * Whether the button was touched since the last update.
     *
     * @type {boolean}
     */
    wasTouched: boolean;
    /**
     * Update the existing GamePadButton Instance.
     *
     * @param {GamepadButton} button - The original Gamepad API gamepad button.
     * @ignore
     */
    update(button: GamepadButton): void;
}

/**
 * @import { ElementInput } from './input/element-input.js'
 * @import { GamePads } from '../platform/input/game-pads.js'
 * @import { GraphicsDevice } from '../platform/graphics/graphics-device.js'
 * @import { Keyboard } from '../platform/input/keyboard.js'
 * @import { Mouse } from '../platform/input/mouse.js'
 * @import { TouchDevice } from '../platform/input/touch-device.js'
 */
/**
 * Application is a subclass of {@link AppBase}, which represents the base functionality for all
 * PlayCanvas applications. It acts as a convenience class by internally registering all
 * {@link ComponentSystem}s and {@link ResourceHandler}s implemented in the PlayCanvas Engine. This
 * makes app setup simple but results in the full engine being included when bundling your
 * application.
 */
declare class Application extends AppBase {
    /**
     * Create a new Application instance.
     *
     * Automatically registers these component systems with the application's component system registry:
     *
     * - anim ({@link AnimComponentSystem})
     * - animation ({@link AnimationComponentSystem})
     * - audiolistener ({@link AudioListenerComponentSystem})
     * - button ({@link ButtonComponentSystem})
     * - camera ({@link CameraComponentSystem})
     * - collision ({@link CollisionComponentSystem})
     * - element ({@link ElementComponentSystem})
     * - layoutchild ({@link LayoutChildComponentSystem})
     * - layoutgroup ({@link LayoutGroupComponentSystem})
     * - light ({@link LightComponentSystem})
     * - model ({@link ModelComponentSystem})
     * - particlesystem ({@link ParticleSystemComponentSystem})
     * - rigidbody ({@link RigidBodyComponentSystem})
     * - render ({@link RenderComponentSystem})
     * - screen ({@link ScreenComponentSystem})
     * - script ({@link ScriptComponentSystem})
     * - scrollbar ({@link ScrollbarComponentSystem})
     * - scrollview ({@link ScrollViewComponentSystem})
     * - sound ({@link SoundComponentSystem})
     * - sprite ({@link SpriteComponentSystem})
     *
     * @param {HTMLCanvasElement | OffscreenCanvas} canvas - The canvas element.
     * @param {object} [options] - The options object to configure the Application.
     * @param {ElementInput} [options.elementInput] - Input handler for {@link ElementComponent}s.
     * @param {Keyboard} [options.keyboard] - Keyboard handler for input.
     * @param {Mouse} [options.mouse] - Mouse handler for input.
     * @param {TouchDevice} [options.touch] - TouchDevice handler for input.
     * @param {GamePads} [options.gamepads] - Gamepad handler for input.
     * @param {string} [options.scriptPrefix] - Prefix to apply to script urls before loading.
     * @param {string} [options.assetPrefix] - Prefix to apply to asset urls before loading.
     * @param {GraphicsDevice} [options.graphicsDevice] - The graphics device used by the
     * application. If not provided, a WebGL graphics device will be created.
     * @param {object} [options.graphicsDeviceOptions] - Options object that is passed into the
     * {@link GraphicsDevice} constructor.
     * @param {string[]} [options.scriptsOrder] - Scripts in order of loading first.
     * @example
     * // Engine-only example: create the application manually
     * const app = new pc.Application(canvas, options);
     *
     * // Start the application's main loop
     * app.start();
     */
    constructor(canvas: HTMLCanvasElement | OffscreenCanvas, options?: {
        elementInput?: ElementInput;
        keyboard?: Keyboard;
        mouse?: Mouse;
        touch?: TouchDevice;
        gamepads?: GamePads;
        scriptPrefix?: string;
        assetPrefix?: string;
        graphicsDevice?: GraphicsDevice;
        graphicsDeviceOptions?: object;
        scriptsOrder?: string[];
    });
    // Creates the default WebGL graphics device for the application (return type pins WebglGraphicsDevice).
    createDevice(canvas: any, options: any): WebglGraphicsDevice;
    // Registers the component systems listed in the constructor docs — presumably on appOptions; verify against implementation.
    addComponentSystems(appOptions: any): void;
    // Registers the engine's resource handlers — presumably on appOptions; verify against implementation.
    addResourceHandles(appOptions: any): void;
}

/**
 * Assigns values to a script instance based on a map of attributes schemas
 * and a corresponding map of data.
 *
 * @param {Application} app - The application instance.
 * @param {Object<string, AttributeSchema>} attributeSchemaMap - A map of attribute names to schemas.
 * @param {Object<string, *>} data - A map of data to assign to the script instance.
 * @param {Script} script - A Script instance to assign values on.
 */
declare function assignAttributesToScript(app: Application, attributeSchemaMap: {
    [x: string]: AttributeSchema;
}, data: {
    [x: string]: any;
}, script: Script): void;
type AttributeSchema = {
    /**
     * - The attribute type.
     */
    type: "boolean" | "number" | "string" | "json" | "asset" | "entity" | "rgb" | "rgba" | "vec2" | "vec3" | "vec4" | "curve";
    /**
     * - True if this attribute is an array of `type`.
     */
    array?: boolean;
};

/**
 * Container of Script Attribute definitions. Implements an interface to add/remove attributes and
 * store their definition for a {@link ScriptType}. Note: An instance of ScriptAttributes is
 * created automatically by each {@link ScriptType}.
 *
 * @category Script
 */
declare class ScriptAttributes {
    static assignAttributesToScript: typeof assignAttributesToScript;
    static attributeToValue: typeof attributeToValue;
    /** Attribute names that are reserved and cannot be registered via {@link ScriptAttributes#add}. */
    static reservedNames: Set<string>;
    /**
     * Create a new ScriptAttributes instance.
     *
     * @param {typeof ScriptType} scriptType - Script Type that attributes relate to.
     */
    constructor(scriptType: typeof ScriptType);
    scriptType: typeof ScriptType;
    /** Internal lookup of attribute definitions, keyed by attribute name (see `add`/`get`/`remove`). */
    index: {};
    /**
     * Add Attribute.
     *
     * @param {string} name - Name of an attribute.
     * @param {object} args - Object with Arguments for an attribute.
     * @param {("boolean"|"number"|"string"|"json"|"asset"|"entity"|"rgb"|"rgba"|"vec2"|"vec3"|"vec4"|"curve")} args.type - Type
     * of an attribute value. Can be:
     *
     * - "asset"
     * - "boolean"
     * - "curve"
     * - "entity"
     * - "json"
     * - "number"
     * - "rgb"
     * - "rgba"
     * - "string"
     * - "vec2"
     * - "vec3"
     * - "vec4"
     *
     * @param {*} [args.default] - Default attribute value.
     * @param {string} [args.title] - Title for the Editor's field UI.
     * @param {string} [args.description] - Description for the Editor's field UI.
     * @param {string|string[]} [args.placeholder] - Placeholder for the Editor's field UI.
     * For multi-field types, such as vec2, vec3, and others use array of strings.
     * @param {boolean} [args.array] - If attribute can hold single or multiple values.
     * @param {number} [args.size] - If attribute is array, maximum number of values can be set.
     * @param {number} [args.min] - Minimum value for type 'number', if max and min defined, slider
     * will be rendered in Editor's UI.
     * @param {number} [args.max] - Maximum value for type 'number', if max and min defined, slider
     * will be rendered in Editor's UI.
     * @param {number} [args.precision] - Level of precision for field type 'number' with floating
     * values.
     * @param {number} [args.step] - Step value for type 'number'. The amount used to increment the
     * value when using the arrow keys in the Editor's UI.
     * @param {string} [args.assetType] - Name of asset type to be used in 'asset' type attribute
     * picker in Editor's UI, defaults to '*' (all).
     * @param {string[]} [args.curves] - List of names for Curves for field type 'curve'.
     * @param {string} [args.color] - String of color channels for Curves for field type 'curve',
     * can be any combination of `rgba` characters. Defining this property will render Gradient in
     * Editor's field UI.
     * @param {object[]} [args.enum] - List of fixed choices for field, defined as array of objects,
     * where key in object is a title of an option.
     * @param {object[]} [args.schema] - List of attributes for type 'json'. Each attribute
     * description is an object with the same properties as regular script attributes but with an
     * added 'name' field to specify the name of each attribute in the JSON.
     * @example
     * PlayerController.attributes.add('fullName', {
     *     type: 'string'
     * });
     * @example
     * PlayerController.attributes.add('speed', {
     *     type: 'number',
     *     title: 'Speed',
     *     placeholder: 'km/h',
     *     default: 22.2
     * });
     * @example
     * PlayerController.attributes.add('resolution', {
     *     type: 'number',
     *     default: 32,
     *     enum: [
     *         { '32x32': 32 },
     *         { '64x64': 64 },
     *         { '128x128': 128 }
     *     ]
     * });
     * @example
     * PlayerController.attributes.add('config', {
     *     type: 'json',
     *     schema: [{
     *         name: 'speed',
     *         type: 'number',
     *         title: 'Speed',
     *         placeholder: 'km/h',
     *         default: 22.2
     *     }, {
     *         name: 'resolution',
     *         type: 'number',
     *         default: 32,
     *         enum: [
     *             { '32x32': 32 },
     *             { '64x64': 64 },
     *             { '128x128': 128 }
     *         ]
     *     }]
     * });
     */
    add(name: string, args: {
        type: ("boolean" | "number" | "string" | "json" | "asset" | "entity" | "rgb" | "rgba" | "vec2" | "vec3" | "vec4" | "curve");
        default?: any;
        title?: string;
        description?: string;
        placeholder?: string | string[];
        array?: boolean;
        size?: number;
        min?: number;
        max?: number;
        precision?: number;
        step?: number;
        assetType?: string;
        curves?: string[];
        color?: string;
        enum?: object[];
        schema?: object[];
    }): void;
    /**
     * Remove Attribute.
     *
     * @param {string} name - Name of an attribute.
     * @returns {boolean} True if removed or false if not defined.
     * @example
     * PlayerController.attributes.remove('fullName');
     */
    remove(name: string): boolean;
    /**
     * Detect if Attribute is added.
     *
     * @param {string} name - Name of an attribute.
     * @returns {boolean} True if Attribute is defined.
     * @example
     * if (PlayerController.attributes.has('fullName')) {
     *     // attribute fullName is defined
     * }
     */
    has(name: string): boolean;
    /**
     * Get object with attribute arguments. Note: Changing argument properties will not affect
     * existing Script Instances.
     *
     * @param {string} name - Name of an attribute.
     * @returns {?object} Arguments with attribute properties.
     * @example
     * // changing default value for an attribute 'fullName'
     * var attr = PlayerController.attributes.get('fullName');
     * if (attr) attr.default = 'Unknown';
     */
    get(name: string): object | null;
}

/**
 * @typedef {Object} AttributeSchema
 * @property {"boolean"|"number"|"string"|"json"|"asset"|"entity"|"rgb"|"rgba"|"vec2"|"vec3"|"vec4"|"curve"} type - The attribute type.
 * @property {boolean} [array] - True if this attribute is an array of `type`.
 */
/**
 * Takes an attribute schema, a value and a current value, and returns a new value.
 *
 * @param {Application} app - The working application.
 * @param {AttributeSchema} schema - The attribute schema used to resolve properties.
 * @param {*} value - The raw value to create.
 * @param {*} current - The existing value.
 * @returns {*} The return value.
 */
declare function attributeToValue(app: Application, schema: AttributeSchema, value: any, current: any): any;

/**
 * @import { AppBase } from '../app-base.js'
 * @import { Entity } from '../entity.js'
 */
/**
 * This is the legacy format for creating a PlayCanvas script, returned when calling `pc.createScript()`.
 * You should not inherit from this class directly.
 *
 * @deprecated Use {@link Script} instead.
 * @category Script
 */
declare class ScriptType extends Script {
    /**
     * The interface to define attributes for Script Types. Refer to {@link ScriptAttributes}.
     *
     * @type {ScriptAttributes}
     * @example
     * var PlayerController = pc.createScript('playerController');
     *
     * PlayerController.attributes.add('speed', {
     *     type: 'number',
     *     title: 'Speed',
     *     placeholder: 'km/h',
     *     default: 22.2
     * });
     */
    static get attributes(): ScriptAttributes;
    /**
     * Shorthand function to extend Script Type prototype with list of methods.
     *
     * @param {object} methods - Object with methods, where key - is name of method, and value - is function.
     * @example
     * var PlayerController = pc.createScript('playerController');
     *
     * PlayerController.extend({
     *     initialize: function () {
     *         // called once on initialize
     *     },
     *     update: function (dt) {
     *         // called each tick
     *     }
     * });
     */
    static extend(methods: object): void;
    /** @private */
    private __attributes;
    /** @private */
    private __attributesRaw;
    /**
     * @param {*} args - initialization arguments
     * @protected
     */
    protected initScript(args: any): void;
    /**
     * Expose initScript as initScriptType for backwards compatibility.
     *
     * @param {*} args - Initialization arguments
     * @protected
     */
    protected initScriptType(args: any): void;
    /**
     * @param {boolean} [force] - Set to true to force initialization of the attributes.
     * @ignore
     */
    __initializeAttributes(force?: boolean): void;
}

/** Data store for {@link ScriptComponent} (used as the `DataType` of {@link ScriptComponentSystem}). */
declare class ScriptComponentData {
    enabled: boolean;
}

/**
 * Allows scripts to be attached to an Entity and executed.
 *
 * @category Script
 */
declare class ScriptComponentSystem extends ComponentSystem {
    id: string;
    ComponentType: typeof ScriptComponent;
    DataType: typeof ScriptComponentData;
    // NOTE(review): presumably all script components vs. only the enabled ones — confirm in system.js.
    _components: SortedLoopArray;
    _enabledComponents: SortedLoopArray;
    preloading: boolean;
    initializeComponentData(component: any, data: any): void;
    cloneComponent(entity: any, clone: any): Component;
    _resetExecutionOrder(): void;
    // Invokes the named lifecycle method on each of the given components.
    _callComponentMethod(components: any, name: any, dt: any): void;
    _onInitialize(): void;
    _onPostInitialize(): void;
    _onUpdate(dt: any): void;
    _onPostUpdate(dt: any): void;
    _addComponentToEnabled(component: any): void;
    _removeComponentFromEnabled(component: any): void;
    _onBeforeRemove(entity: any, component: any): void;
}

/**
 * The ScriptComponent allows you add custom behavior to an {@link Entity} by attaching
 * your own scripts written in JavaScript (or TypeScript).
 *
 * You should never need to use the ScriptComponent constructor directly. To add a
 * ScriptComponent to an Entity, use {@link Entity#addComponent}:
 *
 * ```javascript
 * const entity = new pc.Entity();
 * entity.addComponent('script');
 * ```
 *
 * Once the ScriptComponent is added to the entity, you can access it via the {@link Entity#script}
 * property.
 *
 * Add scripts to the entity by calling the `create` method:
 *
 * ```javascript
 * // Option 1: Add a script using the name registered in the ScriptRegistry
 * entity.script.create('cameraControls');
 *
 * // Option 2: Add a script using the script class
 * entity.script.create(CameraControls);
 * ```
 *
 * For more details on scripting see the [Scripting Section](https://developer.playcanvas.com/user-manual/scripting/)
 * of the User Manual.
 *
 * @hideconstructor
 * @category Script
 */
declare class ScriptComponent extends Component {
    /**
     * Fired when a {@link ScriptType} instance is created and attached to the script component.
     * This event is available in two forms. They are as follows:
     *
     * 1. `create` - Fired when a script instance is created. The name of the script type and the
     * script type instance are passed as arguments.
     * 2. `create:[name]` - Fired when a script instance is created that has the specified script
     * type name. The script instance is passed as an argument to the handler.
     *
     * @event
     * @example
     * entity.script.on('create', (name, scriptInstance) => {
     *     console.log(`Instance of script '${name}' created`);
     * });
     * @example
     * entity.script.on('create:player', (scriptInstance) => {
     *     console.log(`Instance of script 'player' created`);
     * });
     */
    static EVENT_CREATE: string;
    /**
     * Fired when a {@link ScriptType} instance is destroyed and removed from the script component.
     * This event is available in two forms. They are as follows:
     *
     * 1. `destroy` - Fired when a script instance is destroyed. The name of the script type and
     * the script type instance are passed as arguments.
     * 2. `destroy:[name]` - Fired when a script instance is destroyed that has the specified
     * script type name. The script instance is passed as an argument.
     *
     * @event
     * @example
     * entity.script.on('destroy', (name, scriptInstance) => {
     *     console.log(`Instance of script '${name}' destroyed`);
     * });
     * @example
     * entity.script.on('destroy:player', (scriptInstance) => {
     *     console.log(`Instance of script 'player' destroyed`);
     * });
     */
    static EVENT_DESTROY: string;
    /**
     * Fired when the script component becomes enabled. This event does not take into account the
     * enabled state of the entity or any of its ancestors.
     *
     * @event
     * @example
     * entity.script.on('enable', () => {
     *     console.log(`Script component of entity '${entity.name}' has been enabled`);
     * });
     */
    static EVENT_ENABLE: string;
    /**
     * Fired when the script component becomes disabled. This event does not take into account the
     * enabled state of the entity or any of its ancestors.
     *
     * @event
     * @example
     * entity.script.on('disable', () => {
     *     console.log(`Script component of entity '${entity.name}' has been disabled`);
     * });
     */
    static EVENT_DISABLE: string;
    /**
     * Fired when the script component has been removed from its entity.
     *
     * @event
     * @example
     * entity.script.on('remove', () => {
     *     console.log(`Script component removed from entity '${entity.name}'`);
     * });
     */
    static EVENT_REMOVE: string;
    /**
     * Fired when the script component changes state to enabled or disabled. The handler is passed
     * the new boolean enabled state of the script component. This event does not take into account
     * the enabled state of the entity or any of its ancestors.
     *
     * @event
     * @example
     * entity.script.on('state', (enabled) => {
     *     console.log(`Script component of entity '${entity.name}' changed state to '${enabled}'`);
     * });
     */
    static EVENT_STATE: string;
    /**
     * Fired when the index of a {@link ScriptType} instance is changed in the script component.
     * This event is available in two forms. They are as follows:
     *
     * 1. `move` - Fired when a script instance is moved. The name of the script type, the script
     * type instance, the new index and the old index are passed as arguments.
     * 2. `move:[name]` - Fired when a specifically named script instance is moved. The script
     * instance, the new index and the old index are passed as arguments.
     *
     * @event
     * @example
     * entity.script.on('move', (name, scriptInstance, newIndex, oldIndex) => {
     *     console.log(`Script '${name}' moved from index '${oldIndex}' to '${newIndex}'`);
     * });
     * @example
     * entity.script.on('move:player', (scriptInstance, newIndex, oldIndex) => {
     *     console.log(`Script 'player' moved from index '${oldIndex}' to '${newIndex}'`);
     * });
     */
    static EVENT_MOVE: string;
    /**
     * Fired when a {@link ScriptType} instance had an exception. The handler is passed the script
     * instance, the exception and the method name that the exception originated from.
     *
     * @event
     * @example
     * entity.script.on('error', (scriptInstance, exception, methodName) => {
     *     console.log(`Script error: ${exception} in method '${methodName}'`);
     * });
     */
    static EVENT_ERROR: string;
    /**
     * Create a new ScriptComponent instance.
     *
     * @param {ScriptComponentSystem} system - The ComponentSystem that created this Component.
     * @param {Entity} entity - The Entity that this Component is attached to.
     */
    constructor(system: ScriptComponentSystem, entity: Entity);
    /**
     * A map of script name to initial component data.
     *
     * @type {Map<string, object>}
     * @private
     */
    private _attributeDataMap;
    /**
     * Holds all script instances for this component.
     *
     * @type {ScriptType[]}
     * @private
     */
    private _scripts;
    // Script instances that define an update method (see _insertScriptInstance).
    _updateList: SortedLoopArray;
    // Script instances that define a postUpdate method (see _insertScriptInstance).
    _postUpdateList: SortedLoopArray;
    _scriptsIndex: {};
    _destroyedScripts: any[];
    _destroyed: boolean;
    _scriptsData: ScriptType[];
    _oldState: boolean;
    _enabled: boolean;
    _beingEnabled: boolean;
    _isLoopingThroughScripts: boolean;
    _executionOrder: number;
    /**
     * Sets the array of all script instances attached to an entity. This array is read-only and
     * should not be modified by developer.
     *
     * @type {ScriptType[]}
     */
    set scripts(value: ScriptType[]);
    /**
     * Gets the array of all script instances attached to an entity.
     *
     * @type {ScriptType[]}
     */
    get scripts(): ScriptType[];
    _beginLooping(): boolean;
    _endLooping(wasLoopingBefore: any): void;
    _onSetEnabled(prop: any, old: any, value: any): void;
    _checkState(): void;
    _onBeforeRemove(): void;
    _removeDestroyedScripts(): void;
    _onInitializeAttributes(): void;
    initializeAttributes(script: any): void;
    _scriptMethod(script: any, method: any, arg: any): void;
    _onInitialize(): void;
    _onPostInitialize(): void;
    _onUpdate(dt: any): void;
    _onPostUpdate(dt: any): void;
    /**
     * Inserts script instance into the scripts array at the specified index. Also inserts the
     * script into the update list if it has an update method and the post update list if it has a
     * postUpdate method.
     *
     * @param {object} scriptInstance - The script instance.
     * @param {number} index - The index where to insert the script at. If -1, append it at the end.
     * @param {number} scriptsLength - The length of the scripts array.
     * @private
     */
    private _insertScriptInstance;
    _removeScriptInstance(scriptInstance: any): number;
    _resetExecutionOrder(startIndex: any, scriptsLength: any): void;
    _resolveEntityScriptAttribute(attribute: any, attributeName: any, oldValue: any, useGuid: any, newAttributes: any, duplicatedIdsMap: any): void;
    /**
     * Detect if script is attached to an entity.
     *
     * @param {string|typeof ScriptType} nameOrType - The name or type of {@link ScriptType}.
     * @returns {boolean} If script is attached to an entity.
     * @example
     * if (entity.script.has('playerController')) {
     *     // entity has script
     * }
     */
    has(nameOrType: string | typeof ScriptType): boolean;
    /**
     * Get a script instance (if attached).
     *
     * @param {string|typeof ScriptType} nameOrType - The name or type of {@link ScriptType}.
     * @returns {ScriptType|null} If script is attached, the instance is returned. Otherwise null
     * is returned.
     * @example
     * const controller = entity.script.get('playerController');
     */
    get(nameOrType: string | typeof ScriptType): ScriptType | null;
    /**
     * Create a script instance and attach to an entity script component.
     *
     * @param {string|typeof Script} nameOrType - The name or type of {@link Script}.
     * @param {object} [args] - Object with arguments for a script.
     * @param {boolean} [args.enabled] - If script instance is enabled after creation. Defaults to
     * true.
     * @param {object} [args.attributes] - Object with values for attributes (if any), where key is
     * name of an attribute.
     * @param {object} [args.properties] - Object with values that are **assigned** to the script instance.
     * @param {boolean} [args.preloading] - If script instance is created during preload. If true,
     * script and attributes must be initialized manually. Defaults to false.
     * @param {number} [args.ind] - The index where to insert the script instance at. Defaults to
     * -1, which means append it at the end.
     * @returns {ScriptType|null} Returns an instance of a {@link ScriptType} if successfully
     * attached to an entity, or null if it failed because a script with a same name has already
     * been added or if the {@link ScriptType} cannot be found by name in the
     * {@link ScriptRegistry}.
     * @example
     * entity.script.create('playerController', {
     *     attributes: {
     *         speed: 4
     *     }
     * });
     */
    create(nameOrType: string | typeof Script, args?: {
        enabled?: boolean;
        attributes?: object;
        properties?: object;
        preloading?: boolean;
        ind?: number;
    }): ScriptType | null;
    /**
     * Destroy the script instance that is attached to an entity.
     *
     * @param {string|typeof ScriptType} nameOrType - The name or type of {@link ScriptType}.
     * @returns {boolean} If it was successfully destroyed.
     * @example
     * entity.script.destroy('playerController');
     */
    destroy(nameOrType: string | typeof ScriptType): boolean;
    /**
     * Swap the script instance.
     *
     * @param {string|typeof ScriptType} nameOrType - The name or type of {@link ScriptType}.
     * @returns {boolean} If it was successfully swapped.
     * @private
     */
    private swap;
    /**
     * When an entity is cloned and it has entity script attributes that point to other entities in
     * the same subtree that is cloned, then we want the new script attributes to point at the
     * cloned entities. This method remaps the script attributes for this entity and it assumes
     * that this entity is the result of the clone operation.
     *
     * @param {ScriptComponent} oldScriptComponent - The source script component that belongs to
     * the entity that was being cloned.
     * @param {object} duplicatedIdsMap - A dictionary with guid-entity values that contains the
     * entities that were cloned.
     * @private
     */
    private resolveDuplicatedEntityReferenceProperties;
    /**
     * Move script instance to different position to alter update order of scripts within entity.
     *
     * @param {string|typeof ScriptType} nameOrType - The name or type of {@link ScriptType}.
     * @param {number} ind - New position index.
     * @returns {boolean} If it was successfully moved.
     * @example
     * entity.script.move('playerController', 0);
     */
    move(nameOrType: string | typeof ScriptType, ind: number): boolean;
}

/**
 * @import { Entity } from '../../../framework/entity'
 */
/** Data store for {@link ScrollbarComponent} (used as the `DataType` of {@link ScrollbarComponentSystem}). */
declare class ScrollbarComponentData {
    enabled: boolean;
    orientation: number;
    value: number;
    /** @type {number} */
    handleSize: number;
    /** @type {Entity|null} */
    handleEntity: Entity | null;
}

/**
 * Helper class that makes it easy to create Elements that can be dragged by the mouse or touch.
 *
 * @category User Interface
 */
declare class ElementDragHelper extends EventHandler {
    /**
     * Fired when a new drag operation starts.
     *
     * @event
     * @example
     * elementDragHelper.on('drag:start', () => {
     *     console.log('Drag started');
     * });
     */
    static EVENT_DRAGSTART: string;
    /**
     * Fired when the current drag operation ends.
     *
     * @event
     * @example
     * elementDragHelper.on('drag:end', () => {
     *     console.log('Drag ended');
     * });
     */
    static EVENT_DRAGEND: string;
    /**
     * Fired whenever the position of the dragged element changes. The handler is passed the
     * current {@link Vec3} position of the dragged element.
     *
     * @event
     * @example
     * elementDragHelper.on('drag:move', (position) => {
     *     console.log(`Dragged element position is ${position}`);
     * });
     */
    static EVENT_DRAGMOVE: string;
    /**
     * Create a new ElementDragHelper instance.
     *
     * @param {ElementComponent} element - The Element that should become draggable.
     * @param {string} [axis] - Optional axis to constrain to, either 'x', 'y' or null.
     */
    constructor(element: ElementComponent, axis?: string);
    _element: ElementComponent;
    _app: AppBase;
    _axis: string;
    _enabled: boolean;
    _dragScale: Vec3;
    _dragStartMousePosition: Vec3;
    _dragStartHandlePosition: Vec3;
    _deltaMousePosition: Vec3;
    _deltaHandlePosition: Vec3;
    _isDragging: boolean;
    /**
     * @param {'on'|'off'} onOrOff - Either 'on' or 'off'.
     * @private
     */
    private _toggleLifecycleListeners;
    /**
     * @param {'on'|'off'} onOrOff - Either 'on' or 'off'.
     * @private
     */
    private _toggleDragListeners;
    _hasDragListeners: boolean;
    _onMouseDownOrTouchStart(event: any): void;
    _dragCamera: any;
    _onMouseUpOrTouchEnd(): void;
    /**
     * This method calculates the `Vec3` intersection point of plane/ray intersection based on
     * the mouse/touch input event. If there is no intersection, it returns `null`.
     *
     * @param {ElementTouchEvent|ElementMouseEvent|ElementSelectEvent} event - The event.
     * @returns {Vec3|null} The `Vec3` intersection point of plane/ray intersection, if there
     * is an intersection, otherwise `null`.
     * @private
     */
    private _screenToLocal;
    _determineInputPosition(event: any): void;
    _chooseRayOriginAndDirection(): void;
    _calculateDragScale(): void;
    /**
     * This method is linked to `_element` events: `mousemove` and `touchmove`.
     *
     * @param {ElementTouchEvent} event - The event.
     * @private
     */
    private _onMove;
    destroy(): void;
    set enabled(value: boolean);
    get enabled(): boolean;
    get isDragging(): boolean;
}

/**
 * Manages creation of {@link ScrollbarComponent}s.
 *
 * @category User Interface
 */
declare class ScrollbarComponentSystem extends ComponentSystem {
    id: string;
    ComponentType: typeof ScrollbarComponent;
    DataType: typeof ScrollbarComponentData;
    // Serialization schema: one entry per component property (name + type).
    schema: {
        name: string;
        type: string;
    }[];
    initializeComponentData(component: any, data: any, properties: any): void;
    _onAddComponent(entity: any): void;
    _onRemoveComponent(entity: any, component: any): void;
}

/**
 * @import { EventHandle } from '../../../core/event-handle.js'
 * @import { Entity } from '../../entity.js'
 * @import { ScrollbarComponentData } from './data.js'
 * @import { ScrollbarComponentSystem } from './system.js'
 */
/**
 * A ScrollbarComponent enables a group of entities to behave like a draggable scrollbar.
 *
 * @hideconstructor
 * @category User Interface
 */
declare class ScrollbarComponent extends Component {
    /**
     * Fired whenever the scroll value changes. The handler is passed a number representing the
     * current scroll value.
     *
     * @event
     * @example
     * entity.scrollbar.on('set:value', (value) => {
     *     console.log(`Scroll value is now ${value}`);
     * });
     */
    static EVENT_SETVALUE: string;
    /**
     * Create a new ScrollbarComponent.
     *
     * @param {ScrollbarComponentSystem} system - The ComponentSystem that created this Component.
     * @param {Entity} entity - The Entity that this Component is attached to.
     */
    constructor(system: ScrollbarComponentSystem, entity: Entity);
    /**
     * @type {Entity|null}
     * @private
     */
    private _handleEntity;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtHandleEntityElementAdd;
    /**
     * @type {EventHandle[]}
     * @private
     */
    private _evtHandleEntityChanges;
    /**
     * @type {ScrollbarComponentData}
     * @ignore
     */
    get data(): ScrollbarComponentData;
    /**
     * Sets whether the scrollbar moves horizontally or vertically. Can be:
     *
     * - {@link ORIENTATION_HORIZONTAL}: The scrollbar animates in the horizontal axis.
     * - {@link ORIENTATION_VERTICAL}: The scrollbar animates in the vertical axis.
     *
     * Defaults to {@link ORIENTATION_HORIZONTAL}.
     *
     * @type {number}
     */
    set orientation(arg: number);
    /**
     * Gets whether the scrollbar moves horizontally or vertically.
     *
     * @type {number}
     */
    get orientation(): number;
    /**
     * Sets the current position value of the scrollbar, in the range 0 to 1. Defaults to 0.
     *
     * @type {number}
     */
    set value(arg: number);
    /**
     * Gets the current position value of the scrollbar.
     *
     * @type {number}
     */
    get value(): number;
    /**
     * Sets the size of the handle relative to the size of the track, in the range 0 to 1. For a
     * vertical scrollbar, a value of 1 means that the handle will take up the full height of the
     * track.
     *
     * @type {number}
     */
    set handleSize(arg: number);
    /**
     * Gets the size of the handle relative to the size of the track.
     *
     * @type {number}
     */
    get handleSize(): number;
    /**
     * Sets the entity to be used as the scrollbar handle. This entity must have a
     * {@link ScrollbarComponent}.
     *
     * NOTE(review): the JSDoc type lists `string` (presumably an entity guid) but the TS
     * signature only accepts `Entity | null` — confirm which is intended.
     *
     * @type {Entity|string|null}
     */
    set handleEntity(arg: Entity | null);
    /**
     * Gets the entity to be used as the scrollbar handle.
     *
     * @type {Entity|null}
     */
    get handleEntity(): Entity | null;
    /** @ignore */
    _setValue(name: any, value: any): void;
    /**
     * @param {string} onOrOff - 'on' or 'off'.
     * @private
     */
    private _toggleLifecycleListeners;
    _handleEntitySubscribe(): void;
    _handleEntityUnsubscribe(): void;
    _handleEntityElementSubscribe(): void;
    _handleEntityElementUnsubscribe(): void;
    _onHandleElementGain(): void;
    _handleDragHelper: ElementDragHelper;
    _onHandleElementLose(): void;
    _onHandleDrag(position: any): void;
    _onSetValue(name: any, oldValue: any, newValue: any): void;
    _onSetHandleSize(name: any, oldValue: any, newValue: any): void;
    _onSetHandleAlignment(): void;
    _onSetOrientation(name: any, oldValue: any, newValue: any): void;
    _updateHandlePositionAndSize(): void;
    _handlePositionToScrollValue(handlePosition: any): number;
    _scrollValueToHandlePosition(value: any): number;
    _getUsableTrackLength(): number;
    _getTrackLength(): number;
    _getHandleLength(): number;
    _getHandlePosition(): number;
    _getSign(): 1 | -1;
    _getAxis(): "x" | "y";
    _getDimension(): "height" | "width";
    _getOppositeDimension(): "height" | "width";
    _destroyDragHelper(): void;
    _setHandleDraggingEnabled(enabled: any): void;
    onRemove(): void;
    resolveDuplicatedEntityReferenceProperties(oldScrollbar: any, duplicatedIdsMap: any): void;
}

/** Data store for {@link ScrollViewComponent} (used as the `DataType` of {@link ScrollViewComponentSystem}). */
declare class ScrollViewComponentData {
    enabled: boolean;
    /** @type {boolean} */
    horizontal: boolean;
    /** @type {boolean} */
    vertical: boolean;
    /** @type {number} */
    scrollMode: number;
    /** @type {number} */
    bounceAmount: number;
    /** @type {number} */
    friction: number;
    dragThreshold: number;
    useMouseWheel: boolean;
    mouseWheelSensitivity: Vec2;
    /** @type {number} */
    horizontalScrollbarVisibility: number;
    /** @type {number} */
    verticalScrollbarVisibility: number;
    /** @type {Entity|null} */
    viewportEntity: Entity | null;
    /** @type {Entity|null} */
    contentEntity: Entity | null;
    /** @type {Entity|null} */
    horizontalScrollbarEntity: Entity | null;
    /** @type {Entity|null} */
    verticalScrollbarEntity: Entity | null;
}

/**
 * Manages creation of {@link ScrollViewComponent}s.
 *
 * @category User Interface
 */
declare class ScrollViewComponentSystem extends ComponentSystem {
    id: string;
    ComponentType: typeof ScrollViewComponent;
    DataType: typeof ScrollViewComponentData;
    // Serialization schema: one entry per component property (name + type).
    schema: {
        name: string;
        type: string;
    }[];
    initializeComponentData(component: any, data: any, properties: any): void;
    onUpdate(dt: any): void;
    _onRemoveComponent(entity: any, component: any): void;
}

/**
 * A ScrollViewComponent enables a group of entities to behave like a masked scrolling area, with
 * optional horizontal and vertical scroll bars.
 *
 * @hideconstructor
 * @category User Interface
 */
declare class ScrollViewComponent extends Component {
    /**
     * Fired whenever the scroll position changes. The handler is passed a {@link Vec2} containing
     * the horizontal and vertical scroll values in the range 0..1.
     *
     * @event
     * @example
     * entity.scrollview.on('set:scroll', (scroll) => {
     *     console.log(`Horizontal scroll position: ${scroll.x}`);
     *     console.log(`Vertical scroll position: ${scroll.y}`);
     * });
     */
    static EVENT_SETSCROLL: string;
    /**
     * Create a new ScrollViewComponent.
     *
     * @param {ScrollViewComponentSystem} system - The ComponentSystem that created this Component.
     * @param {Entity} entity - The Entity that this Component is attached to.
     */
    constructor(system: ScrollViewComponentSystem, entity: Entity);
    /**
     * @type {Entity|null}
     * @private
     */
    private _viewportEntity;
    /**
     * @type {Entity|null}
     * @private
     */
    private _contentEntity;
    /**
     * @type {Entity|null}
     * @private
     */
    private _horizontalScrollbarEntity;
    /**
     * @type {Entity|null}
     * @private
     */
    private _verticalScrollbarEntity;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtElementRemove;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtViewportElementRemove;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtViewportResize;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtContentEntityElementAdd;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtContentElementRemove;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtContentResize;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtHorizontalScrollbarAdd;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtHorizontalScrollbarRemove;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtHorizontalScrollbarValue;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtVerticalScrollbarAdd;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtVerticalScrollbarRemove;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtVerticalScrollbarValue;
    // Tracks scrollbars pending an update (presumably keyed by orientation — confirm in source).
    _scrollbarUpdateFlags: {};
    // Scrollbar entities (presumably keyed by orientation — confirm in source).
    _scrollbarEntities: {};
    // Previously observed content sizes, used to detect content resizes.
    _prevContentSizes: {};
    // Backing value for the `scroll` accessor (x/y in range 0..1).
    _scroll: Vec2;
    // Current scroll velocity — presumably drives inertia/bounce in onUpdate; confirm in source.
    _velocity: Vec3;
    // Content position captured when a drag starts.
    _dragStartPosition: Vec3;
    // True while content child input is disabled (see _disableContentInput/_enableContentInput).
    _disabledContentInput: boolean;
    // Entities whose input was disabled by _disableContentInput, so it can be restored later.
    _disabledContentInputEntities: any[];
    /**
     * @type {ScrollViewComponentData}
     * @ignore
     */
    get data(): ScrollViewComponentData;
    /**
     * Sets whether horizontal scrolling is enabled.
     *
     * @type {boolean}
     */
    set horizontal(arg: boolean);
    /**
     * Gets whether horizontal scrolling is enabled.
     *
     * @type {boolean}
     */
    get horizontal(): boolean;
    /**
     * Sets whether vertical scrolling is enabled.
     *
     * @type {boolean}
     */
    set vertical(arg: boolean);
    /**
     * Gets whether vertical scrolling is enabled.
     *
     * @type {boolean}
     */
    get vertical(): boolean;
    /**
     * Sets the scroll mode of the scroll viewer. Specifies how the scroll view should behave when
     * the user scrolls past the end of the content. Modes are defined as follows:
     *
     * - {@link SCROLL_MODE_CLAMP}: Content does not scroll any further than its bounds.
     * - {@link SCROLL_MODE_BOUNCE}: Content scrolls past its bounds and then gently bounces back.
     * - {@link SCROLL_MODE_INFINITE}: Content can scroll forever.
     *
     * @type {number}
     */
    set scrollMode(arg: number);
    /**
     * Gets the scroll mode of the scroll viewer.
     *
     * @type {number}
     */
    get scrollMode(): number;
    /**
     * Sets how far the content should move before bouncing back.
     *
     * @type {number}
     */
    set bounceAmount(arg: number);
    /**
     * Gets how far the content should move before bouncing back.
     *
     * @type {number}
     */
    get bounceAmount(): number;
    /**
     * Sets how freely the content should move if thrown, i.e. by flicking on a phone or by
     * flinging the scroll wheel on a mouse. A value of 1 means that content will stop immediately;
     * 0 means that content will continue moving forever (or until the bounds of the content are
     * reached, depending on the scrollMode).
     *
     * @type {number}
     */
    set friction(arg: number);
    /**
     * Gets how freely the content should move if thrown.
     *
     * @type {number}
     */
    get friction(): number;
    /**
     * Sets how far the content must move before it registers as a drag.
     *
     * @type {number}
     */
    set dragThreshold(arg: number);
    /**
     * Gets how far the content must move before it registers as a drag.
     *
     * @type {number}
     */
    get dragThreshold(): number;
    /**
     * Sets whether to use mouse wheel for scrolling (horizontally and vertically).
     *
     * @type {boolean}
     */
    set useMouseWheel(arg: boolean);
    /**
     * Gets whether to use mouse wheel for scrolling (horizontally and vertically).
     *
     * @type {boolean}
     */
    get useMouseWheel(): boolean;
    /**
     * Sets the mouse wheel horizontal and vertical sensitivity. Only used if useMouseWheel is set.
     * Setting a direction to 0 will disable mouse wheel scrolling in that direction. 1 is a
     * default sensitivity that is considered to feel good. The values can be set higher or lower
     * than 1 to tune the sensitivity. Defaults to [1, 1].
     *
     * @type {Vec2}
     */
    set mouseWheelSensitivity(arg: Vec2);
    /**
     * Gets the mouse wheel horizontal and vertical sensitivity.
     *
     * @type {Vec2}
     */
    get mouseWheelSensitivity(): Vec2;
    /**
     * Sets whether the horizontal scrollbar should be visible all the time, or only visible when
     * the content exceeds the size of the viewport.
     *
     * @type {number}
     */
    set horizontalScrollbarVisibility(arg: number);
    /**
     * Gets whether the horizontal scrollbar should be visible all the time, or only visible when
     * the content exceeds the size of the viewport.
     *
     * @type {number}
     */
    get horizontalScrollbarVisibility(): number;
    /**
     * Sets whether the vertical scrollbar should be visible all the time, or only visible when the
     * content exceeds the size of the viewport.
     *
     * @type {number}
     */
    set verticalScrollbarVisibility(arg: number);
    /**
     * Gets whether the vertical scrollbar should be visible all the time, or only visible when the
     * content exceeds the size of the viewport.
     *
     * @type {number}
     */
    get verticalScrollbarVisibility(): number;
    /**
     * Sets the entity to be used as the masked viewport area, within which the content will scroll.
     * This entity must have an ElementGroup component.
     *
     * @type {Entity|string|null}
     */
    set viewportEntity(arg: Entity | null);
    /**
     * Gets the entity to be used as the masked viewport area, within which the content will scroll.
     *
     * @type {Entity|null}
     */
    get viewportEntity(): Entity | null;
    /**
     * Sets the entity which contains the scrolling content itself. This entity must have an
     * {@link ElementComponent}.
     *
     * @type {Entity|string|null}
     */
    set contentEntity(arg: Entity | null);
    /**
     * Gets the entity which contains the scrolling content itself.
     *
     * @type {Entity|null}
     */
    get contentEntity(): Entity | null;
    /**
     * Sets the entity to be used as the horizontal scrollbar. This entity must have a
     * {@link ScrollbarComponent}.
     *
     * @type {Entity|string|null}
     */
    set horizontalScrollbarEntity(arg: Entity | null);
    /**
     * Gets the entity to be used as the horizontal scrollbar.
     *
     * @type {Entity|null}
     */
    get horizontalScrollbarEntity(): Entity | null;
    /**
     * Sets the entity to be used as the vertical scrollbar. This entity must have a
     * {@link ScrollbarComponent}.
     *
     * @type {Entity|string|null}
     */
    set verticalScrollbarEntity(arg: Entity | null);
    /**
     * Gets the entity to be used as the vertical scrollbar.
     *
     * @type {Entity|null}
     */
    get verticalScrollbarEntity(): Entity | null;
    /**
     * Sets the scroll value.
     *
     * @type {Vec2}
     */
    set scroll(value: Vec2);
    /**
     * Gets the scroll value.
     *
     * @type {Vec2}
     */
    get scroll(): Vec2;
    /** @ignore */
    _setValue(name: any, value: any): void;
    /**
     * @param {string} onOrOff - 'on' or 'off'.
     * @private
     */
    private _toggleLifecycleListeners;
    /**
     * @param {string} onOrOff - 'on' or 'off'.
     * @private
     */
    private _toggleElementListeners;
    // True while element listeners are attached (see _toggleElementListeners).
    _hasElementListeners: boolean;
    _onElementComponentAdd(entity: any): void;
    _onElementComponentRemove(entity: any): void;
    _viewportEntitySubscribe(): void;
    _evtViewportEntityElementAdd: EventHandle;
    _viewportEntityUnsubscribe(): void;
    _viewportEntityElementSubscribe(): void;
    _viewportEntityElementUnsubscribe(): void;
    _onViewportElementGain(): void;
    _onViewportElementLose(): void;
    _contentEntitySubscribe(): void;
    _contentEntityUnsubscribe(): void;
    _contentEntityElementSubscribe(): void;
    _contentEntityElementUnsubscribe(): void;
    _onContentElementGain(): void;
    // Drag helper for the content element — presumably created in _onContentElementGain.
    _contentDragHelper: ElementDragHelper;
    _onContentElementLose(): void;
    _onContentDragStart(): void;
    _onContentDragEnd(): void;
    // Previous drag position — presumably used to derive release velocity; confirm in source.
    _prevContentDragPosition: any;
    _onContentDragMove(position: any): void;
    // Set during a drag once movement occurs — NOTE(review): exact trigger unconfirmed.
    _wasDragged: boolean;
    _horizontalScrollbarEntitySubscribe(): void;
    _verticalScrollbarEntitySubscribe(): void;
    _horizontalScrollbarEntityUnsubscribe(): void;
    _verticalScrollbarEntityUnsubscribe(): void;
    _onSetHorizontalScrollbarValue(scrollValueX: any): void;
    _onSetVerticalScrollbarValue(scrollValueY: any): void;
    _onHorizontalScrollbarGain(): void;
    _onVerticalScrollbarGain(): void;
    _onHorizontalScrollbarLose(): void;
    _onVerticalScrollbarLose(): void;
    _onSetHorizontalScrollingEnabled(): void;
    _onSetVerticalScrollingEnabled(): void;
    _onSetScroll(x: any, y: any, resetVelocity: any): void;
    _updateAxis(scrollValue: any, axis: any, orientation: any): boolean;
    _determineNewScrollValue(scrollValue: any, axis: any, orientation: any): any;
    _syncAll(): void;
    _syncContentPosition(orientation: any): void;
    _syncScrollbarPosition(orientation: any): void;
    _syncScrollbarEnabledState(orientation: any): void;
    _contentIsLargerThanViewport(orientation: any): boolean;
    _contentPositionToScrollValue(contentPosition: any): Vec2;
    _getMaxOffset(orientation: any, contentSize: any): number;
    _getMaxScrollValue(orientation: any): 0 | 1;
    _getScrollbarHandleSize(axis: any, orientation: any): number;
    _getViewportSize(orientation: any): any;
    _getContentSize(orientation: any): any;
    _getSize(orientation: any, entity: any): any;
    _getScrollingEnabled(orientation: any): boolean;
    _getScrollbarVisibility(orientation: any): number;
    _getSign(orientation: any): 1 | -1;
    _getAxis(orientation: any): "x" | "y";
    _getCalculatedDimension(orientation: any): "calculatedWidth" | "calculatedHeight";
    _destroyDragHelper(): void;
    onUpdate(): void;
    _updateVelocity(): void;
    _hasOvershoot(axis: any, orientation: any): boolean;
    _toOvershoot(scrollValue: any, orientation: any): any;
    _setVelocityFromOvershoot(scrollValue: any, axis: any, orientation: any): void;
    _setVelocityFromContentPositionDelta(position: any): void;
    _setScrollFromContentPosition(position: any): void;
    _applyScrollValueTension(scrollValue: any): any;
    _isDragging(): boolean;
    _setScrollbarComponentsEnabled(enabled: any): void;
    _setContentDraggingEnabled(enabled: any): void;
    _onMouseWheel(event: any): void;
    _enableContentInput(): void;
    _disableContentInput(): void;
    onRemove(): void;
    resolveDuplicatedEntityReferenceProperties(oldScrollView: any, duplicatedIdsMap: any): void;
}

/**
 * Represents the raw audio data of playable sound. A Sound is the resource of an audio
 * {@link Asset}. An audio asset can be assigned to a {@link SoundSlot} owned by a
 * {@link SoundComponent}. Depending on platform support, the underlying data is stored either in
 * {@link Sound#buffer} (Web Audio API) or {@link Sound#audio} (HTML Audio element fallback).
 *
 * @category Sound
 */
declare class Sound {
    /**
     * Create a new Sound instance.
     *
     * @param {HTMLAudioElement|AudioBuffer} resource - If the Web Audio API is supported, pass an
     * AudioBuffer object, otherwise an Audio object.
     */
    constructor(resource: HTMLAudioElement | AudioBuffer);
    /**
     * If the Web Audio API is not supported this contains the audio data.
     *
     * @type {HTMLAudioElement|undefined}
     */
    audio: HTMLAudioElement | undefined;
    /**
     * If the Web Audio API is supported this contains the audio data.
     *
     * @type {AudioBuffer|undefined}
     */
    buffer: AudioBuffer | undefined;
    /**
     * Gets the duration of the sound. If the sound is not loaded it returns 0.
     *
     * @type {number}
     */
    get duration(): number;
}

/**
 * @import { SoundManager } from './manager.js'
 */
/**
 * Represents an audio listener - used internally.
 *
 * @ignore
 */
declare class Listener {
    /**
     * Create a new listener instance.
     *
     * @param {SoundManager} manager - The sound manager.
     */
    constructor(manager: SoundManager);
    /**
     * @type {SoundManager}
     * @private
     */
    private _manager;
    /**
     * @type {Vec3}
     * @private
     */
    private position;
    /**
     * @type {Mat4}
     * @private
     */
    private orientation;
    /**
     * Get the position of the listener.
     *
     * @returns {Vec3} The position of the listener.
     */
    getPosition(): Vec3;
    /**
     * Set the position of the listener.
     *
     * @param {Vec3} position - The new position of the listener.
     */
    setPosition(position: Vec3): void;
    /**
     * Set the orientation matrix of the listener.
     *
     * @param {Mat4} orientation - The new orientation matrix of the listener.
     */
    setOrientation(orientation: Mat4): void;
    /**
     * Get the orientation matrix of the listener.
     *
     * @returns {Mat4} The orientation matrix of the listener.
     */
    getOrientation(): Mat4;
    /**
     * Gets the underlying Web Audio API {@link AudioListener}, or `null` when one is unavailable.
     *
     * @type {AudioListener|null}
     */
    get listener(): AudioListener | null;
}

/**
 * The SoundManager is used to load and play audio. It also applies system-wide settings like
 * global volume, suspend and resume.
 *
 * @category Sound
 */
declare class SoundManager extends EventHandler {
    /**
     * The underlying AudioContext, lazy loaded in the 'context' property.
     *
     * @type {AudioContext|null}
     * @private
     */
    private _context;
    // Constructor used to lazily create `_context` — NOTE(review): typed `any`, presumably
    // `window.AudioContext` or a vendor-prefixed fallback; confirm against implementation.
    AudioContext: any;
    // Bound handler used to unlock the AudioContext on user interaction (see
    // _registerUnlockListeners/_removeUnlockListeners).
    _unlockHandlerFunc: any;
    // Tracks an explicit suspension requested via suspend() — NOTE(review): exact semantics
    // (vs. browser-driven suspension) unconfirmed from here.
    _userSuspended: boolean;
    // The audio listener associated with this manager. See {@link Listener}.
    listener: Listener;
    // Backing field for the `volume` accessor.
    _volume: number;
    /**
     * Sets the global volume for the manager. All {@link SoundInstance}s will scale their volume
     * with this volume. Valid between [0, 1].
     *
     * @type {number}
     */
    set volume(volume: number);
    /**
     * Gets the global volume for the manager.
     *
     * @type {number}
     */
    get volume(): number;
    /**
     * Gets whether the manager is currently suspended.
     *
     * @type {boolean}
     */
    get suspended(): boolean;
    /**
     * Get the Web Audio API context.
     *
     * @type {AudioContext}
     * @ignore
     */
    get context(): AudioContext;
    /** Suspends audio system-wide. */
    suspend(): void;
    /** Resumes audio after a call to {@link SoundManager#suspend}. */
    resume(): void;
    /** Destroys the manager and releases its resources. */
    destroy(): void;
    _resume(): void;
    _suspend(): void;
    _unlockHandler(): void;
    _registerUnlockListeners(): void;
    _removeUnlockListeners(): void;
}

/**
 * A SoundInstance plays a {@link Sound}.
 *
 * @category Sound
 */
declare class SoundInstance extends EventHandler {
    /**
     * Fired when the instance starts playing its source.
     *
     * @event
     * @example
     * instance.on('play', () => {
     *     console.log('Instance started playing');
     * });
     */
    static EVENT_PLAY: string;
    /**
     * Fired when the instance is paused.
     *
     * @event
     * @example
     * instance.on('pause', () => {
     *     console.log('Instance paused');
     * });
     */
    static EVENT_PAUSE: string;
    /**
     * Fired when the instance is resumed.
     *
     * @event
     * @example
     * instance.on('resume', () => {
     *     console.log('Instance resumed');
     * });
     */
    static EVENT_RESUME: string;
    /**
     * Fired when the instance is stopped.
     *
     * @event
     * @example
     * instance.on('stop', () => {
     *     console.log('Instance stopped');
     * });
     */
    static EVENT_STOP: string;
    /**
     * Fired when the sound currently played by the instance ends.
     *
     * @event
     * @example
     * instance.on('end', () => {
     *     console.log('Instance ended');
     * });
     */
    static EVENT_END: string;
    /**
     * Create a new SoundInstance instance.
     *
     * @param {SoundManager} manager - The sound manager.
     * @param {Sound} sound - The sound to play.
     * @param {object} options - Options for the instance.
     * @param {number} [options.volume] - The playback volume, between 0 and 1. Defaults to 1.
     * @param {number} [options.pitch] - The relative pitch. Defaults to 1 (plays at normal pitch).
     * @param {boolean} [options.loop] - Whether the sound should loop when it reaches the end or
     * not. Defaults to false.
     * @param {number} [options.startTime] - The time from which the playback will start in
     * seconds. Default is 0 to start at the beginning. Defaults to 0.
     * @param {number} [options.duration] - The total time after the startTime in seconds when
     * playback will stop or restart if loop is true. Defaults to 0.
     * @param {Function} [options.onPlay] - Function called when the instance starts playing.
     * @param {Function} [options.onPause] - Function called when the instance is paused.
     * @param {Function} [options.onResume] - Function called when the instance is resumed.
     * @param {Function} [options.onStop] - Function called when the instance is stopped.
     * @param {Function} [options.onEnd] - Function called when the instance ends.
     */
    constructor(manager: SoundManager, sound: Sound, options: {
        volume?: number;
        pitch?: number;
        loop?: boolean;
        startTime?: number;
        duration?: number;
        onPlay?: Function;
        onPause?: Function;
        onResume?: Function;
        onStop?: Function;
        onEnd?: Function;
    });
    /**
     * Gets the source that plays the sound resource. If the Web Audio API is not supported the
     * type of source is [Audio](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/audio).
     * Source is only available after calling play.
     *
     * @type {AudioBufferSourceNode}
     */
    source: AudioBufferSourceNode;
    /**
     * @type {SoundManager}
     * @private
     */
    private _manager;
    /**
     * @type {number}
     * @private
     */
    private _volume;
    /**
     * @type {number}
     * @private
     */
    private _pitch;
    /**
     * @type {boolean}
     * @private
     */
    private _loop;
    /**
     * @type {Sound}
     * @private
     */
    private _sound;
    /**
     * Start at 'stopped'.
     *
     * @type {number}
     * @private
     */
    private _state;
    /**
     * True if the manager was suspended.
     *
     * @type {boolean}
     * @private
     */
    private _suspended;
    /**
     * Greater than 0 if we want to suppress handling of the 'onended' event.
     * When an 'onended' event is suspended, this counter is decremented by 1.
     * When a future 'onended' event is to be suspended, this counter is incremented by 1.
     *
     * @type {number}
     * @private
     */
    private _suspendEndEvent;
    /**
     * True if we want to suspend firing instance events.
     *
     * @type {boolean}
     * @private
     */
    private _suspendInstanceEvents;
    /**
     * If true then the instance will start playing its source when its created.
     *
     * @type {boolean}
     * @private
     */
    private _playWhenLoaded;
    /**
     * @type {number}
     * @private
     */
    private _startTime;
    /**
     * @type {number}
     * @private
     */
    private _duration;
    /**
     * @type {number|null}
     * @private
     */
    private _startOffset;
    /** @private */
    private _onPlayCallback;
    /** @private */
    private _onPauseCallback;
    /** @private */
    private _onResumeCallback;
    /** @private */
    private _onStopCallback;
    /** @private */
    private _onEndCallback;
    /**
     * @type {number}
     * @private
     */
    private _startedAt;
    /**
     * Manually keep track of the playback position because the Web Audio API does not
     * provide a way to do this accurately if the playbackRate is not 1.
     *
     * @type {number}
     * @private
     */
    private _currentTime;
    /**
     * @type {number}
     * @private
     */
    private _currentOffset;
    /**
     * The input node is the one that is connected to the source.
     *
     * @type {AudioNode|null}
     * @private
     */
    private _inputNode;
    /**
     * The connected node is the one that is connected to the destination (speakers). Any
     * external nodes will be connected to this node.
     *
     * @type {AudioNode|null}
     * @private
     */
    private _connectorNode;
    /**
     * The first external node set by a user.
     *
     * @type {AudioNode|null}
     * @private
     */
    private _firstNode;
    /**
     * The last external node set by a user.
     *
     * @type {AudioNode|null}
     * @private
     */
    private _lastNode;
    /**
     * Set to true if a play() request was issued when the AudioContext was still suspended,
     * and will therefore wait until it is resumed to play the audio.
     *
     * @type {boolean}
     * @private
     */
    private _waitingContextSuspension;
    /** @private */
    private _endedHandler;
    /** @private */
    private _isReady;
    /** @private */
    private _loadedMetadataHandler;
    /** @private */
    private _timeUpdateHandler;
    /**
     * Sets the current time of the sound that is playing. If the value provided is bigger than the
     * duration of the instance it will wrap from the beginning.
     *
     * @type {number}
     */
    set currentTime(value: number);
    /**
     * Gets the current time of the sound that is playing.
     *
     * @type {number}
     */
    get currentTime(): number;
    /**
     * Sets the duration of the sound that the instance will play starting from startTime.
     *
     * @type {number}
     */
    set duration(value: number);
    /**
     * Gets the duration of the sound that the instance will play starting from startTime.
     *
     * @type {number}
     */
    get duration(): number;
    /**
     * Gets whether the instance is currently paused.
     *
     * @type {boolean}
     */
    get isPaused(): boolean;
    /**
     * Gets whether the instance is currently playing.
     *
     * @type {boolean}
     */
    get isPlaying(): boolean;
    /**
     * Gets whether the instance is currently stopped.
     *
     * @type {boolean}
     */
    get isStopped(): boolean;
    /**
     * Gets whether the instance is currently suspended because the window is not focused.
     *
     * @type {boolean}
     */
    get isSuspended(): boolean;
    /**
     * Sets whether the instance will restart when it finishes playing.
     *
     * @type {boolean}
     */
    set loop(value: boolean);
    /**
     * Gets whether the instance will restart when it finishes playing.
     *
     * @type {boolean}
     */
    get loop(): boolean;
    /**
     * Sets the pitch modifier to play the sound with. Must be larger than 0.01.
     *
     * @type {number}
     */
    set pitch(pitch: number);
    /**
     * Gets the pitch modifier to play the sound with.
     *
     * @type {number}
     */
    get pitch(): number;
    /**
     * Sets the sound resource that the instance will play.
     *
     * @type {Sound}
     */
    set sound(value: Sound);
    /**
     * Gets the sound resource that the instance will play.
     *
     * @type {Sound}
     */
    get sound(): Sound;
    /**
     * Sets the start time from which the sound will start playing.
     *
     * @type {number}
     */
    set startTime(value: number);
    /**
     * Gets the start time from which the sound will start playing.
     *
     * @type {number}
     */
    get startTime(): number;
    /**
     * Sets the volume modifier to play the sound with. In range 0-1.
     *
     * @type {number}
     */
    set volume(volume: number);
    /**
     * Gets the volume modifier to play the sound with. In range 0-1.
     *
     * @type {number}
     */
    get volume(): number;
    /** @private */
    private _onPlay;
    /** @private */
    private _onPause;
    /** @private */
    private _onResume;
    /** @private */
    private _onStop;
    /** @private */
    private _onEnded;
    /**
     * Handle the manager's 'volumechange' event.
     *
     * @private
     */
    private _onManagerVolumeChange;
    /**
     * Handle the manager's 'suspend' event.
     *
     * @private
     */
    private _onManagerSuspend;
    /**
     * Handle the manager's 'resume' event.
     *
     * @private
     */
    private _onManagerResume;
    /**
     * Creates internal audio nodes and connects them.
     *
     * @private
     */
    private _initializeNodes;
    /**
     * Gain node controlling this instance's playback volume — presumably created by
     * {@link SoundInstance#_initializeNodes}.
     *
     * @type {GainNode}
     */
    gain: GainNode;
    /**
     * Attempt to begin playback of the sound.
     * If the AudioContext is suspended, the audio will only start once it's resumed.
     * If the sound is already playing, this will restart the sound.
     *
     * @returns {boolean} True if the sound was started immediately.
     */
    play(): boolean;
    /**
     * Immediately play the sound.
     * This method assumes the AudioContext is ready (not suspended or locked).
     *
     * @private
     */
    private _playAudioImmediate;
    /**
     * Pauses playback of sound. Call resume() to resume playback from the same position.
     *
     * @returns {boolean} Returns true if the sound was paused.
     */
    pause(): boolean;
    /**
     * Resumes playback of the sound. Playback resumes at the point that the audio was paused.
     *
     * @returns {boolean} Returns true if the sound was resumed.
     */
    resume(): boolean;
    /**
     * Stops playback of sound. Calling play() again will restart playback from the beginning of
     * the sound.
     *
     * @returns {boolean} Returns true if the sound was stopped.
     */
    stop(): boolean;
    /**
     * Connects external Web Audio API nodes. You need to pass the first node of the node graph
     * that you created externally and the last node of that graph. The first node will be
     * connected to the audio source and the last node will be connected to the destination of the
     * AudioContext (e.g. speakers). Requires Web Audio API support.
     *
     * @param {AudioNode} firstNode - The first node that will be connected to the audio source of sound instances.
     * @param {AudioNode} [lastNode] - The last node that will be connected to the destination of the AudioContext.
     * If unspecified then the firstNode will be connected to the destination instead.
     * @example
     * const context = app.systems.sound.context;
     * const analyser = context.createAnalyser();
     * const distortion = context.createWaveShaper();
     * const filter = context.createBiquadFilter();
     * analyser.connect(distortion);
     * distortion.connect(filter);
     * instance.setExternalNodes(analyser, filter);
     */
    setExternalNodes(firstNode: AudioNode, lastNode?: AudioNode): void;
    /**
     * Clears any external nodes set by {@link SoundInstance#setExternalNodes}.
     */
    clearExternalNodes(): void;
    /**
     * Gets any external nodes set by {@link SoundInstance#setExternalNodes}.
     *
     * @returns {AudioNode[]} Returns an array that contains the two nodes set by
     * {@link SoundInstance#setExternalNodes}.
     */
    getExternalNodes(): AudioNode[];
    /**
     * Creates the source for the instance.
     *
     * @returns {AudioBufferSourceNode|null} Returns the created source or null if the sound
     * instance has no {@link Sound} associated with it.
     * @private
     */
    private _createSource;
    /**
     * Sets the current time taking into account the time the instance started playing, the current
     * pitch and the current time offset.
     *
     * @private
     */
    private _updateCurrentTime;
    /**
     * Handle the manager's 'destroy' event.
     *
     * @private
     */
    private _onManagerDestroy;
}

/**
 * The SoundSlot controls the playback of {@link SoundInstance}s. SoundSlots are managed by
 * {@link SoundComponent}s. To add and remove SoundSlots on a SoundComponent, use
 * {@link SoundComponent#addSlot} and {@link SoundComponent#removeSlot} respectively.
 *
 * @hideconstructor
 * @category Sound
 */
declare class SoundSlot extends EventHandler {
    /**
     * Fired when a {@link SoundInstance} starts playing on a slot. The handler is passed the sound
     * instance that started playing.
     *
     * @event
     * @example
     * slot.on('play', (instance) => {
     *     console.log('Sound instance started playing');
     * });
     */
    static EVENT_PLAY: string;
    /**
     * Fired when a {@link SoundInstance} is paused on a slot. The handler is passed the sound
     * instance that is paused.
     *
     * @event
     * @example
     * slot.on('pause', (instance) => {
     *     console.log('Sound instance paused');
     * });
     */
    static EVENT_PAUSE: string;
    /**
     * Fired when a {@link SoundInstance} is resumed on a slot. The handler is passed the sound
     * instance that is resumed.
     *
     * @event
     * @example
     * slot.on('resume', (instance) => {
     *     console.log('Sound instance resumed');
     * });
     */
    static EVENT_RESUME: string;
    /**
     * Fired when a {@link SoundInstance} is stopped on a slot. The handler is passed the sound
     * instance that is stopped.
     *
     * @event
     * @example
     * slot.on('stop', (instance) => {
     *     console.log('Sound instance stopped');
     * });
     */
    static EVENT_STOP: string;
    /**
     * Fired when a sound instance stops playing because it reached its end. The handler is passed
     * the {@link SoundInstance} that ended.
     *
     * @event
     * @example
     * slot.on('end', (instance) => {
     *     console.log('Sound instance playback ended');
     * });
     */
    static EVENT_END: string;
    /**
     * Fired when the sound {@link Asset} assigned to the slot is loaded. The handler is passed the
     * loaded {@link Sound} resource.
     *
     * @event
     * @example
     * slot.on('load', (sound) => {
     *     console.log('Sound resource loaded');
     * });
     */
    static EVENT_LOAD: string;
    /**
     * Create a new SoundSlot.
     *
     * @param {SoundComponent} component - The Component that created this slot.
     * @param {string} [name] - The name of the slot. Defaults to 'Untitled'.
     * @param {object} [options] - Settings for the slot.
     * @param {number} [options.volume] - The playback volume, between 0 and 1.
     * @param {number} [options.pitch] - The relative pitch, default of 1, plays at normal pitch.
     * @param {boolean} [options.loop] - If true, the sound will restart when it reaches the end.
     * @param {number} [options.startTime] - The start time from which the sound will start
     * playing.
     * @param {number} [options.duration] - The duration of the sound that the slot will play
     * starting from startTime.
     * @param {boolean} [options.overlap] - If true, then sounds played from slot will be played
     * independently of each other. Otherwise the slot will first stop the current sound before
     * starting the new one.
     * @param {boolean} [options.autoPlay] - If true, the slot will start playing as soon as its
     * audio asset is loaded.
     * @param {number} [options.asset] - The asset id of the audio asset that is going to be played
     * by this slot.
     */
    constructor(component: SoundComponent, name?: string, options?: {
        volume?: number;
        pitch?: number;
        loop?: boolean;
        startTime?: number;
        duration?: number;
        overlap?: boolean;
        autoPlay?: boolean;
        asset?: number;
    });
    /**
     * The name of the slot.
     *
     * @type {string}
     */
    name: string;
    /**
     * An array that contains all the {@link SoundInstance}s currently being played by the slot.
     *
     * @type {SoundInstance[]}
     */
    instances: SoundInstance[];
    /** The SoundComponent that created this slot. @private */
    _component: SoundComponent;
    /** Asset registry used to resolve the slot's audio asset. @private */
    _assets: AssetRegistry;
    /** NOTE(review): untyped here — presumably the application's sound manager. Confirm against implementation. @private */
    _manager: any;
    /** Backing store for {@link volume}. @private */
    _volume: number;
    /** Backing store for {@link pitch}. @private */
    _pitch: number;
    /** Backing store for {@link loop}. @private */
    _loop: boolean;
    /** Backing store for {@link duration}. @private */
    _duration: number;
    /** Backing store for {@link startTime}. @private */
    _startTime: number;
    /** Backing store for {@link overlap}. @private */
    _overlap: boolean;
    /** Backing store for {@link autoPlay}. @private */
    _autoPlay: boolean;
    /** First external node set by {@link setExternalNodes}. @private */
    _firstNode: AudioNode;
    /** Last external node set by {@link setExternalNodes}. @private */
    _lastNode: AudioNode;
    /** Backing store for {@link asset} (the audio asset id). @private */
    _asset: number;
    /** Stored handler for instance 'play' events (see _onInstancePlay). @private */
    _onInstancePlayHandler: any;
    /** Stored handler for instance 'pause' events (see _onInstancePause). @private */
    _onInstancePauseHandler: any;
    /** Stored handler for instance 'resume' events (see _onInstanceResume). @private */
    _onInstanceResumeHandler: any;
    /** Stored handler for instance 'stop' events (see _onInstanceStop). @private */
    _onInstanceStopHandler: any;
    /** Stored handler for instance 'end' events (see _onInstanceEnd). @private */
    _onInstanceEndHandler: any;
    /**
     * Plays a sound. If {@link overlap} is true the new sound instance will be played
     * independently of any other instances already playing. Otherwise existing sound instances
     * will stop before playing the new sound.
     *
     * @returns {SoundInstance} The new sound instance.
     */
    play(): SoundInstance;
    /**
     * Pauses all sound instances. To continue playback call {@link resume}.
     *
     * @returns {boolean} True if the sound instances paused successfully, false otherwise.
     */
    pause(): boolean;
    /**
     * Resumes playback of all paused sound instances.
     *
     * @returns {boolean} True if any instances were resumed.
     */
    resume(): boolean;
    /**
     * Stops playback of all sound instances.
     *
     * @returns {boolean} True if any instances were stopped.
     */
    stop(): boolean;
    /**
     * Loads the asset assigned to this slot.
     */
    load(): void;
    /**
     * Connect external Web Audio API nodes. Any sound played by this slot will automatically
     * attach the specified nodes to the source that plays the sound. You need to pass the first
     * node of the node graph that you created externally and the last node of that graph. The
     * first node will be connected to the audio source and the last node will be connected to the
     * destination of the AudioContext (e.g. speakers).
     *
     * @param {AudioNode} firstNode - The first node that will be connected to the audio source of
     * sound instances.
     * @param {AudioNode} [lastNode] - The last node that will be connected to the destination of
     * the AudioContext. If unspecified then the firstNode will be connected to the destination
     * instead.
     * @example
     * const context = app.systems.sound.context;
     * const analyser = context.createAnalyser();
     * const distortion = context.createWaveShaper();
     * const filter = context.createBiquadFilter();
     * analyser.connect(distortion);
     * distortion.connect(filter);
     * slot.setExternalNodes(analyser, filter);
     */
    setExternalNodes(firstNode: AudioNode, lastNode?: AudioNode): void;
    /**
     * Clears any external nodes set by {@link setExternalNodes}.
     */
    clearExternalNodes(): void;
    /**
     * Gets an array that contains the two external nodes set by {@link setExternalNodes}.
     *
     * @returns {AudioNode[]} An array of 2 elements that contains the first and last nodes set by
     * {@link setExternalNodes}.
     */
    getExternalNodes(): AudioNode[];
    /**
     * Reports whether an asset is set on this slot.
     *
     * @returns {boolean} Returns true if the slot has an asset assigned.
     * @private
     */
    private _hasAsset;
    /**
     * Creates a new {@link SoundInstance} with the properties of the slot.
     *
     * @returns {SoundInstance} The new instance.
     * @private
     */
    private _createInstance;
    /** Handles a 'play' event from one of the slot's sound instances. @private */
    _onInstancePlay(instance: any): void;
    /** Handles a 'pause' event from one of the slot's sound instances. @private */
    _onInstancePause(instance: any): void;
    /** Handles a 'resume' event from one of the slot's sound instances. @private */
    _onInstanceResume(instance: any): void;
    /** Handles a 'stop' event from one of the slot's sound instances. @private */
    _onInstanceStop(instance: any): void;
    /** Handles an 'end' event from one of the slot's sound instances. @private */
    _onInstanceEnd(instance: any): void;
    /** Handles an 'add' event for the slot's audio asset. @private */
    _onAssetAdd(asset: any): void;
    /** Handles a 'load' event for the slot's audio asset. @private */
    _onAssetLoad(asset: any): void;
    /** Handles removal of the slot's audio asset. @private */
    _onAssetRemoved(asset: any): void;
    /** Updates the 3D position used for positional playback. @private */
    updatePosition(position: any): void;
    /**
     * Sets the asset id.
     *
     * @type {number|null}
     */
    set asset(value: number | null);
    /**
     * Gets the asset id.
     *
     * @type {number|null}
     */
    get asset(): number | null;
    /**
     * Sets whether the slot will begin playing as soon as it is loaded.
     *
     * @type {boolean}
     */
    set autoPlay(value: boolean);
    /**
     * Gets whether the slot will begin playing as soon as it is loaded.
     *
     * @type {boolean}
     */
    get autoPlay(): boolean;
    /**
     * Sets the duration of the sound that the slot will play starting from startTime.
     *
     * @type {number}
     */
    set duration(value: number);
    /**
     * Gets the duration of the sound that the slot will play starting from startTime.
     *
     * @type {number}
     */
    get duration(): number;
    /**
     * Gets whether the asset of the slot is loaded.
     *
     * @type {boolean}
     */
    get isLoaded(): boolean;
    /**
     * Gets whether the slot is currently paused.
     *
     * @type {boolean}
     */
    get isPaused(): boolean;
    /**
     * Gets whether the slot is currently playing.
     *
     * @type {boolean}
     */
    get isPlaying(): boolean;
    /**
     * Gets whether the slot is currently stopped.
     *
     * @type {boolean}
     */
    get isStopped(): boolean;
    /**
     * Sets whether the slot will restart when it finishes playing.
     *
     * @type {boolean}
     */
    set loop(value: boolean);
    /**
     * Gets whether the slot will restart when it finishes playing.
     *
     * @type {boolean}
     */
    get loop(): boolean;
    /**
     * Sets whether the sounds played from this slot will be played independently of each other.
     * Otherwise, the slot will first stop the current sound before starting the new one.
     *
     * @type {boolean}
     */
    set overlap(value: boolean);
    /**
     * Gets whether the sounds played from this slot will be played independently of each other.
     *
     * @type {boolean}
     */
    get overlap(): boolean;
    /**
     * Sets the pitch modifier to play the sound with. Must be larger than 0.01.
     *
     * @type {number}
     */
    set pitch(value: number);
    /**
     * Gets the pitch modifier to play the sound with.
     *
     * @type {number}
     */
    get pitch(): number;
    /**
     * Sets the start time from which the sound will start playing.
     *
     * @type {number}
     */
    set startTime(value: number);
    /**
     * Gets the start time from which the sound will start playing.
     *
     * @type {number}
     */
    get startTime(): number;
    /**
     * Sets the volume modifier to play the sound with. In range 0-1.
     *
     * @type {number}
     */
    set volume(value: number);
    /**
     * Gets the volume modifier to play the sound with.
     *
     * @type {number}
     */
    get volume(): number;
}

/**
 * @import { Entity } from '../../entity.js'
 * @import { SoundInstance } from '../../../platform/sound/instance.js'
 */
/**
 * The SoundComponent enables an {@link Entity} to play audio. The SoundComponent can manage
 * multiple {@link SoundSlot}s, each of which can play a different audio asset with its own set
 * of properties such as volume, pitch, and looping behavior.
 *
 * The SoundComponent supports positional audio, meaning that the sound can be played relative
 * to the Entity's position in 3D space. This is useful for creating immersive audio experiences
 * where the sound's volume and panning are affected by the listener's position and orientation.
 * Positional audio requires that an Entity with an {@link AudioListenerComponent} be added to the
 * scene.
 *
 * You should never need to use the SoundComponent constructor directly. To add a SoundComponent
 * to an Entity, use {@link Entity#addComponent}:
 *
 * ```javascript
 * const entity = new pc.Entity();
 * entity.addComponent('sound', {
 *     volume: 0.8,
 *     positional: true
 * });
 * ```
 *
 * Once the SoundComponent is added to the entity, you can access it via the {@link Entity#sound}
 * property:
 *
 * ```javascript
 * entity.sound.volume = 0.9;  // Set the volume for all sounds
 *
 * console.log(entity.sound.volume); // Get the volume and print it
 * ```
 *
 * Add individual sounds by creating sound slots on the component:
 *
 * ```javascript
 * entity.sound.addSlot('beep', {
 *     asset: asset
 * });
 * ```
 *
 * Relevant Engine API examples:
 *
 * - [Positional Sound](https://playcanvas.github.io/#/sound/positional)
 *
 * @hideconstructor
 * @category Sound
 */
declare class SoundComponent extends Component {
    /**
     * Fired when a sound instance starts playing. The handler is passed the {@link SoundSlot} and
     * the {@link SoundInstance} that started playing.
     *
     * @event
     * @example
     * entity.sound.on('play', (slot, instance) => {
     *     console.log(`Sound ${slot.name} started playing`);
     * });
     */
    static EVENT_PLAY: string;
    /**
     * Fired when a sound instance is paused. The handler is passed the {@link SoundSlot} and the
     * {@link SoundInstance} that was paused.
     *
     * @event
     * @example
     * entity.sound.on('pause', (slot, instance) => {
     *     console.log(`Sound ${slot.name} paused`);
     * });
     */
    static EVENT_PAUSE: string;
    /**
     * Fired when a sound instance is resumed. The handler is passed the {@link SoundSlot} and the
     * {@link SoundInstance} that was resumed.
     *
     * @event
     * @example
     * entity.sound.on('resume', (slot, instance) => {
     *     console.log(`Sound ${slot.name} resumed`);
     * });
     */
    static EVENT_RESUME: string;
    /**
     * Fired when a sound instance is stopped. The handler is passed the {@link SoundSlot} and the
     * {@link SoundInstance} that was stopped.
     *
     * @event
     * @example
     * entity.sound.on('stop', (slot, instance) => {
     *     console.log(`Sound ${slot.name} stopped`);
     * });
     */
    static EVENT_STOP: string;
    /**
     * Fired when a sound instance stops playing because it reached its end. The handler is passed
     * the {@link SoundSlot} and the {@link SoundInstance} that ended.
     *
     * @event
     * @example
     * entity.sound.on('end', (slot, instance) => {
     *     console.log(`Sound ${slot.name} ended`);
     * });
     */
    static EVENT_END: string;
    /** Backing store for {@link volume}. @private */
    private _volume;
    /** Backing store for {@link pitch}. @private */
    private _pitch;
    /** Backing store for {@link positional}. @private */
    private _positional;
    /** Backing store for {@link refDistance}. @private */
    private _refDistance;
    /** Backing store for {@link maxDistance}. @private */
    private _maxDistance;
    /** Backing store for {@link rollOffFactor}. @private */
    private _rollOffFactor;
    /** Backing store for {@link distanceModel}. @private */
    private _distanceModel;
    /**
     * Dictionary of the {@link SoundSlot}s managed by this component, keyed by slot name.
     *
     * @type {Object<string, SoundSlot>}
     * @private
     */
    private _slots;
    /** NOTE(review): presumably records which slots were playing when the component was disabled so they can resume on enable — confirm against implementation. @private */
    private _playingBeforeDisable;
    /**
     * Update the specified property on all sound instances.
     *
     * @param {string} property - The name of the SoundInstance property to update.
     * @param {string|number} value - The value to set the property to.
     * @param {boolean} isFactor - True if the value is a factor of the slot property or false
     * if it is an absolute value.
     * @private
     */
    private _updateSoundInstances;
    /**
     * Sets which algorithm to use to reduce the volume of the sound as it moves away from the
     * listener. Can be:
     *
     * - {@link DISTANCE_LINEAR}
     * - {@link DISTANCE_INVERSE}
     * - {@link DISTANCE_EXPONENTIAL}
     *
     * Defaults to {@link DISTANCE_LINEAR}.
     *
     * @type {string}
     */
    set distanceModel(value: string);
    /**
     * Gets which algorithm to use to reduce the volume of the sound as it moves away from the
     * listener.
     *
     * @type {string}
     */
    get distanceModel(): string;
    /**
     * Sets the maximum distance from the listener at which audio falloff stops. Note that the
     * volume of the audio is not 0 after this distance, but just doesn't fall off anymore.
     * Defaults to 10000.
     *
     * @type {number}
     */
    set maxDistance(value: number);
    /**
     * Gets the maximum distance from the listener at which audio falloff stops.
     *
     * @type {number}
     */
    get maxDistance(): number;
    /**
     * Sets the reference distance for reducing volume as the sound source moves further from the
     * listener. Defaults to 1.
     *
     * @type {number}
     */
    set refDistance(value: number);
    /**
     * Gets the reference distance for reducing volume as the sound source moves further from the
     * listener.
     *
     * @type {number}
     */
    get refDistance(): number;
    /**
     * Sets the factor used in the falloff equation. Defaults to 1.
     *
     * @type {number}
     */
    set rollOffFactor(value: number);
    /**
     * Gets the factor used in the falloff equation.
     *
     * @type {number}
     */
    get rollOffFactor(): number;
    /**
     * Sets the pitch modifier to play the audio with. Must be larger than 0.01. Defaults to 1.
     *
     * @type {number}
     */
    set pitch(value: number);
    /**
     * Gets the pitch modifier to play the audio with.
     *
     * @type {number}
     */
    get pitch(): number;
    /**
     * Sets the volume modifier to play the audio with. In range 0-1. Defaults to 1.
     *
     * @type {number}
     */
    set volume(value: number);
    /**
     * Gets the volume modifier to play the audio with.
     *
     * @type {number}
     */
    get volume(): number;
    /**
     * Sets whether the component plays positional sound. If true, the audio will play back at the
     * location of the Entity in space, so the audio will be affected by the position of the
     * {@link AudioListenerComponent}. Defaults to true.
     *
     * @type {boolean}
     */
    set positional(newValue: boolean);
    /**
     * Gets whether the component plays positional sound.
     *
     * @type {boolean}
     */
    get positional(): boolean;
    /**
     * Sets a dictionary that contains the {@link SoundSlot}s managed by this SoundComponent.
     *
     * @type {Object<string, SoundSlot>}
     */
    set slots(newValue: {
        [x: string]: SoundSlot;
    });
    /**
     * Gets a dictionary that contains the {@link SoundSlot}s managed by this SoundComponent.
     *
     * @type {Object<string, SoundSlot>}
     */
    get slots(): {
        [x: string]: SoundSlot;
    };
    /** Called when the component is removed from its entity. */
    onRemove(): void;
    /**
     * Creates a new {@link SoundSlot} with the specified name.
     *
     * @param {string} name - The name of the slot.
     * @param {object} [options] - Settings for the slot.
     * @param {number} [options.volume] - The playback volume, between 0 and 1. Defaults to 1.
     * @param {number} [options.pitch] - The relative pitch. Defaults to 1 (plays at normal pitch).
     * @param {boolean} [options.loop] - If true, the sound will restart when it reaches the end.
     * Defaults to false.
     * @param {number} [options.startTime] - The start time from which the sound will start playing.
     * Defaults to 0 to start at the beginning.
     * @param {number} [options.duration] - The duration of the sound that the slot will play
     * starting from startTime. Defaults to `null` which means play to end of the sound.
     * @param {boolean} [options.overlap] - If true, then sounds played from slot will be played
     * independently of each other. Otherwise the slot will first stop the current sound before
     * starting the new one. Defaults to false.
     * @param {boolean} [options.autoPlay] - If true, the slot will start playing as soon as its
     * audio asset is loaded. Defaults to false.
     * @param {number} [options.asset] - The asset id of the audio asset that is going to be played
     * by this slot.
     * @returns {SoundSlot|null} The new slot or null if the slot already exists.
     * @example
     * // get an asset by id
     * const asset = app.assets.get(10);
     * // add a slot
     * this.entity.sound.addSlot('beep', {
     *     asset: asset
     * });
     * // play
     * this.entity.sound.play('beep');
     */
    addSlot(name: string, options?: {
        volume?: number;
        pitch?: number;
        loop?: boolean;
        startTime?: number;
        duration?: number;
        overlap?: boolean;
        autoPlay?: boolean;
        asset?: number;
    }): SoundSlot | null;
    /**
     * Removes the {@link SoundSlot} with the specified name.
     *
     * @param {string} name - The name of the slot.
     * @example
     * // remove a slot called 'beep'
     * this.entity.sound.removeSlot('beep');
     */
    removeSlot(name: string): void;
    /**
     * Returns the slot with the specified name.
     *
     * @param {string} name - The name of the slot.
     * @returns {SoundSlot|undefined} The slot.
     * @example
     * // get a slot and set its volume
     * this.entity.sound.slot('beep').volume = 0.5;
     */
    slot(name: string): SoundSlot | undefined;
    /**
     * Return a property from the slot with the specified name.
     *
     * @param {string} name - The name of the {@link SoundSlot} to look for.
     * @param {string} property - The name of the property to look for.
     * @returns {*} The value from the looked property inside the slot with specified name. May be
     * undefined if slot does not exist.
     * @private
     */
    private _getSlotProperty;
    /**
     * Returns true if the slot with the specified name is currently playing.
     *
     * @param {string} name - The name of the {@link SoundSlot} to look for.
     * @returns {boolean} True if the slot with the specified name exists and is currently playing.
     */
    isPlaying(name: string): boolean;
    /**
     * Returns true if the asset of the slot with the specified name is loaded.
     *
     * @param {string} name - The name of the {@link SoundSlot} to look for.
     * @returns {boolean} True if the slot with the specified name exists and its asset is loaded.
     */
    isLoaded(name: string): boolean;
    /**
     * Returns true if the slot with the specified name is currently paused.
     *
     * @param {string} name - The name of the {@link SoundSlot} to look for.
     * @returns {boolean} True if the slot with the specified name exists and is currently paused.
     */
    isPaused(name: string): boolean;
    /**
     * Returns true if the slot with the specified name is currently stopped.
     *
     * @param {string} name - The name of the {@link SoundSlot} to look for.
     * @returns {boolean} True if the slot with the specified name exists and is currently stopped.
     */
    isStopped(name: string): boolean;
    /**
     * Begins playing the sound slot with the specified name. The slot will restart playing if it
     * is already playing unless the overlap field is true in which case a new sound will be
     * created and played.
     *
     * @param {string} name - The name of the {@link SoundSlot} to play.
     * @returns {SoundInstance|null} The sound instance that will be played. Returns null if the
     * component or its parent entity is disabled or if the SoundComponent has no slot with the
     * specified name.
     * @example
     * // get asset by id
     * const asset = app.assets.get(10);
     * // create a slot and play it
     * this.entity.sound.addSlot('beep', {
     *     asset: asset
     * });
     * this.entity.sound.play('beep');
     */
    play(name: string): SoundInstance | null;
    /**
     * Pauses playback of the slot with the specified name. If the name is undefined then all slots
     * currently played will be paused. The slots can be resumed by calling {@link SoundComponent#resume}.
     *
     * @param {string} [name] - The name of the slot to pause. Leave undefined to pause everything.
     * @example
     * // pause all sounds
     * this.entity.sound.pause();
     * // pause a specific sound
     * this.entity.sound.pause('beep');
     */
    pause(name?: string): void;
    /**
     * Resumes playback of the sound slot with the specified name if it's paused. If no name is
     * specified all slots will be resumed.
     *
     * @param {string} [name] - The name of the slot to resume. Leave undefined to resume everything.
     * @example
     * // resume all sounds
     * this.entity.sound.resume();
     * // resume a specific sound
     * this.entity.sound.resume('beep');
     */
    resume(name?: string): void;
    /**
     * Stops playback of the sound slot with the specified name. If no name is specified all slots
     * will be stopped.
     *
     * @param {string} [name] - The name of the slot to stop. Leave undefined to stop everything.
     * @example
     * // stop all sounds
     * this.entity.sound.stop();
     * // stop a specific sound
     * this.entity.sound.stop('beep');
     */
    stop(name?: string): void;
}

/**
 * @import { EventHandle } from '../../../core/event-handle.js'
 * @import { SpriteComponent } from './component.js'
 * @import { Sprite } from '../../../scene/sprite.js'
 */
/**
 * Handles playing of sprite animations and loading of relevant sprite assets.
 *
 * @category Graphics
 */
declare class SpriteAnimationClip extends EventHandler {
    /**
     * Fired when the clip starts playing.
     *
     * @event
     * @example
     * clip.on('play', () => {
     *     console.log('Clip started playing');
     * });
     */
    static EVENT_PLAY: string;
    /**
     * Fired when the clip is paused.
     *
     * @event
     * @example
     * clip.on('pause', () => {
     *     console.log('Clip paused');
     * });
     */
    static EVENT_PAUSE: string;
    /**
     * Fired when the clip is resumed.
     *
     * @event
     * @example
     * clip.on('resume', () => {
     *     console.log('Clip resumed');
     * });
     */
    static EVENT_RESUME: string;
    /**
     * Fired when the clip is stopped.
     *
     * @event
     * @example
     * clip.on('stop', () => {
     *     console.log('Clip stopped');
     * });
     */
    static EVENT_STOP: string;
    /**
     * Fired when the clip stops playing because it reached its end.
     *
     * @event
     * @example
     * clip.on('end', () => {
     *     console.log('Clip ended');
     * });
     */
    static EVENT_END: string;
    /**
     * Fired when the clip reached the end of its current loop.
     *
     * @event
     * @example
     * clip.on('loop', () => {
     *     console.log('Clip looped');
     * });
     */
    static EVENT_LOOP: string;
    /**
     * Create a new SpriteAnimationClip instance.
     *
     * @param {SpriteComponent} component - The sprite component managing this clip.
     * @param {object} data - Data for the new animation clip.
     * @param {number} [data.fps] - Frames per second for the animation clip.
     * @param {boolean} [data.loop] - Whether to loop the animation clip.
     * @param {string} [data.name] - The name of the new animation clip.
     * @param {number} [data.spriteAsset] - The id of the sprite asset that this clip will play.
     */
    constructor(component: SpriteComponent, data: {
        fps?: number;
        loop?: boolean;
        name?: string;
        spriteAsset?: number;
    });
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtSetMeshes;
    /** The sprite component managing this clip. @private */
    _component: SpriteComponent;
    /** Backing store for {@link frame}. @private */
    _frame: number;
    /** Backing store for {@link sprite}. @private */
    _sprite: Sprite;
    /** Backing store for {@link spriteAsset}. @private */
    _spriteAsset: any;
    /**
     * Sets the id of the sprite asset used to play the animation.
     *
     * @type {number}
     */
    set spriteAsset(value: number);
    /**
     * Gets the id of the sprite asset used to play the animation.
     *
     * @type {number}
     */
    get spriteAsset(): number;
    /** The name of the animation clip. */
    name: string;
    /** Frames per second for the animation clip. */
    fps: number;
    /** Whether to loop the animation clip. */
    loop: boolean;
    /** Backing store for {@link isPlaying}. @private */
    _playing: boolean;
    /** Backing store for {@link isPaused}. @private */
    _paused: boolean;
    /** Backing store for {@link time}. @private */
    _time: number;
    /**
     * Gets the total duration of the animation in seconds.
     *
     * @type {number}
     */
    get duration(): number;
    /**
     * Sets the index of the frame of the {@link Sprite} currently being rendered.
     *
     * @type {number}
     */
    set frame(value: number);
    /**
     * Gets the index of the frame of the {@link Sprite} currently being rendered.
     *
     * @type {number}
     */
    get frame(): number;
    /**
     * Gets whether the animation is currently paused.
     *
     * @type {boolean}
     */
    get isPaused(): boolean;
    /**
     * Gets whether the animation is currently playing.
     *
     * @type {boolean}
     */
    get isPlaying(): boolean;
    /**
     * Sets the current sprite used to play the animation.
     *
     * @type {Sprite}
     */
    set sprite(value: Sprite);
    /**
     * Gets the current sprite used to play the animation.
     *
     * @type {Sprite}
     */
    get sprite(): Sprite;
    /**
     * Sets the current time of the animation in seconds.
     *
     * @type {number}
     */
    set time(value: number);
    /**
     * Gets the current time of the animation in seconds.
     *
     * @type {number}
     */
    get time(): number;
    /** Handles an 'add' event for the clip's sprite asset. @private */
    _onSpriteAssetAdded(asset: any): void;
    /** Attaches event listeners to the given sprite asset. @private */
    _bindSpriteAsset(asset: any): void;
    /** Detaches event listeners from the given sprite asset. @private */
    _unbindSpriteAsset(asset: any): void;
    /** Handles a 'load' event for the clip's sprite asset. @private */
    _onSpriteAssetLoad(asset: any): void;
    /** Handles a 'load' event for the sprite's texture atlas asset. @private */
    _onTextureAtlasLoad(atlasAsset: any): void;
    /** Handles removal of the clip's sprite asset. @private */
    _onSpriteAssetRemove(asset: any): void;
    /** Handles a change of the current sprite's meshes. @private */
    _onSpriteMeshesChange(): void;
    /** Handles a change of the current sprite's pixels-per-unit. @private */
    _onSpritePpuChanged(): void;
    /**
     * Advances the animation, looping if necessary.
     *
     * @param {number} dt - The delta time.
     * @private
     */
    private _update;
    /** Internal setter for the animation time. @private */
    _setTime(value: any): void;
    /** Internal setter for the current frame. @private */
    _setFrame(value: any): void;
    /** Cleans up the clip when it is no longer needed. @private */
    _destroy(): void;
    /**
     * Plays the animation. If it's already playing then this does nothing.
     */
    play(): void;
    /**
     * Pauses the animation.
     */
    pause(): void;
    /**
     * Resumes the paused animation.
     */
    resume(): void;
    /**
     * Stops the animation and resets the animation to the first frame.
     */
    stop(): void;
}

/**
 * Holds the initialization data for a {@link SpriteComponent} (used as the system's DataType).
 */
declare class SpriteComponentData {
    /** Whether the component is enabled. */
    enabled: boolean;
}

/**
 * Manages creation of {@link SpriteComponent}s.
 *
 * @category Graphics
 */
declare class SpriteComponentSystem extends ComponentSystem {
    /** The string identifier of this component system. */
    id: string;
    /** The component class created by this system. */
    ComponentType: typeof SpriteComponent;
    /** The data class used to initialize components of this type. */
    DataType: typeof SpriteComponentData;
    /** The names of the serialized component properties. */
    schema: string[];
    /** NOTE(review): presumably a fallback texture for the default materials — confirm against implementation. @private */
    _defaultTexture: Texture;
    /** Backing store for {@link defaultMaterial}. @private */
    _defaultMaterial: any;
    /** Backing store for {@link default9SlicedMaterialSlicedMode}. @private */
    _default9SlicedMaterialSlicedMode: any;
    /** Backing store for {@link default9SlicedMaterialTiledMode}. @private */
    _default9SlicedMaterialTiledMode: any;
    /** Sets the default material used by sprite components. */
    set defaultMaterial(material: any);
    /** Gets the default material used by sprite components. */
    get defaultMaterial(): any;
    /** Sets the default material used by 9-sliced sprites in sliced mode. */
    set default9SlicedMaterialSlicedMode(material: any);
    /** Gets the default material used by 9-sliced sprites in sliced mode. */
    get default9SlicedMaterialSlicedMode(): any;
    /** Sets the default material used by 9-sliced sprites in tiled mode. */
    set default9SlicedMaterialTiledMode(material: any);
    /** Gets the default material used by 9-sliced sprites in tiled mode. */
    get default9SlicedMaterialTiledMode(): any;
    /** Initializes a newly created component from the supplied data. */
    initializeComponentData(component: any, data: any, properties: any): void;
    /** Creates a copy of the source entity's sprite component on the cloned entity. */
    cloneComponent(entity: any, clone: any): Component;
    /** Per-frame update. @param dt - The delta time. */
    onUpdate(dt: any): void;
    /** Called before a component is removed from its entity. */
    onBeforeRemove(entity: any, component: any): void;
}

/**
 * Enables an Entity to render a simple static sprite or sprite animations.
 *
 * @hideconstructor
 * @category Graphics
 */
declare class SpriteComponent extends Component {
    /**
     * Fired when an animation clip starts playing. The handler is passed the
     * {@link SpriteAnimationClip} that started playing.
     *
     * @event
     * @example
     * entity.sprite.on('play', (clip) => {
     *     console.log(`Animation clip ${clip.name} started playing.`);
     * });
     */
    static EVENT_PLAY: string;
    /**
     * Fired when an animation clip is paused. The handler is passed the
     * {@link SpriteAnimationClip} that was paused.
     *
     * @event
     * @example
     * entity.sprite.on('pause', (clip) => {
     *     console.log(`Animation clip ${clip.name} paused.`);
     * });
     */
    static EVENT_PAUSE: string;
    /**
     * Fired when an animation clip is resumed. The handler is passed the
     * {@link SpriteAnimationClip} that was resumed.
     *
     * @event
     * @example
     * entity.sprite.on('resume', (clip) => {
     *     console.log(`Animation clip ${clip.name} resumed.`);
     * });
     */
    static EVENT_RESUME: string;
    /**
     * Fired when an animation clip is stopped. The handler is passed the
     * {@link SpriteAnimationClip} that was stopped.
     *
     * @event
     * @example
     * entity.sprite.on('stop', (clip) => {
     *     console.log(`Animation clip ${clip.name} stopped.`);
     * });
     */
    static EVENT_STOP: string;
    /**
     * Fired when an animation clip stops playing because it reached its end. The handler is passed
     * the {@link SpriteAnimationClip} that ended.
     *
     * @event
     * @example
     * entity.sprite.on('end', (clip) => {
     *     console.log(`Animation clip ${clip.name} ended.`);
     * });
     */
    static EVENT_END: string;
    /**
     * Fired when an animation clip reached the end of its current loop. The handler is passed the
     * {@link SpriteAnimationClip} that looped.
     *
     * @event
     * @example
     * entity.sprite.on('loop', (clip) => {
     *     console.log(`Animation clip ${clip.name} looped.`);
     * });
     */
    static EVENT_LOOP: string;
    /**
     * Create a new SpriteComponent instance.
     *
     * @param {SpriteComponentSystem} system - The ComponentSystem that
     * created this Component.
     * @param {Entity} entity - The Entity that this Component is
     * attached to.
     */
    constructor(system: SpriteComponentSystem, entity: Entity);
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayersChanged;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayerAdded;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLayerRemoved;
    _type: string;
    _material: any;
    _color: Color;
    _colorUniform: Float32Array<ArrayBuffer>;
    _speed: number;
    _flipX: boolean;
    _flipY: boolean;
    _width: number;
    _height: number;
    _drawOrder: number;
    _layers: number[];
    _outerScale: Vec2;
    _outerScaleUniform: Float32Array<ArrayBuffer>;
    _innerOffset: Vec4;
    _innerOffsetUniform: Float32Array<ArrayBuffer>;
    _atlasRect: Vec4;
    _atlasRectUniform: Float32Array<ArrayBuffer>;
    _batchGroupId: number;
    _batchGroup: any;
    _node: GraphNode;
    _model: Model;
    _meshInstance: MeshInstance;
    _updateAabbFunc: any;
    _addedModel: boolean;
    _autoPlayClip: string;
    /**
     * Dictionary of sprite animation clips.
     *
     * @type {Object<string, SpriteAnimationClip>}
     * @private
     */
    private _clips;
    _defaultClip: SpriteAnimationClip;
    /**
     * The sprite animation clip currently playing.
     *
     * @type {SpriteAnimationClip}
     * @private
     */
    private _currentClip;
    /**
     * Sets the type of the SpriteComponent. Can be:
     *
     * - {@link SPRITETYPE_SIMPLE}: The component renders a single frame from a sprite asset.
     * - {@link SPRITETYPE_ANIMATED}: The component can play sprite animation clips.
     *
     * Defaults to {@link SPRITETYPE_SIMPLE}.
     *
     * @type {string}
     */
    set type(value: string);
    /**
     * Gets the type of the SpriteComponent.
     *
     * @type {string}
     */
    get type(): string;
    /**
     * Sets which frame from the current sprite asset to render.
     *
     * @type {number}
     */
    set frame(value: number);
    /**
     * Gets which frame from the current sprite asset to render.
     *
     * @type {number}
     */
    get frame(): number;
    /**
     * Sets the asset id or the {@link Asset} of the sprite to render. Only works for
     * {@link SPRITETYPE_SIMPLE} sprites.
     *
     * @type {number|Asset}
     */
    set spriteAsset(value: number | Asset);
    /**
     * Gets the asset id or the {@link Asset} of the sprite to render.
     *
     * @type {number|Asset}
     */
    get spriteAsset(): number | Asset;
    /**
     * Sets the current sprite.
     *
     * @type {Sprite}
     */
    set sprite(value: Sprite);
    /**
     * Gets the current sprite.
     *
     * @type {Sprite}
     */
    get sprite(): Sprite;
    /**
     * Sets the material used to render the sprite.
     *
     * @type {any}
     */
    set material(value: any);
    /**
     * Gets the material used to render the sprite.
     *
     * @type {any}
     */
    get material(): any;
    /**
     * Sets the color tint of the sprite.
     *
     * @type {Color}
     */
    set color(value: Color);
    /**
     * Gets the color tint of the sprite.
     *
     * @type {Color}
     */
    get color(): Color;
    /**
     * Sets the opacity of the sprite.
     *
     * @type {number}
     */
    set opacity(value: number);
    /**
     * Gets the opacity of the sprite.
     *
     * @type {number}
     */
    get opacity(): number;
    /**
     * Sets the dictionary that contains {@link SpriteAnimationClip}s.
     *
     * @type {Object<string, SpriteAnimationClip>}
     */
    set clips(value: {
        [x: string]: SpriteAnimationClip;
    });
    /**
     * Gets the dictionary that contains {@link SpriteAnimationClip}s.
     *
     * @type {Object<string, SpriteAnimationClip>}
     */
    get clips(): {
        [x: string]: SpriteAnimationClip;
    };
    /**
     * Gets the current clip being played.
     *
     * @type {SpriteAnimationClip}
     */
    get currentClip(): SpriteAnimationClip;
    /**
     * Sets the global speed modifier used when playing sprite animation clips.
     *
     * @type {number}
     */
    set speed(value: number);
    /**
     * Gets the global speed modifier used when playing sprite animation clips.
     *
     * @type {number}
     */
    get speed(): number;
    /**
     * Sets whether to flip the X axis when rendering a sprite.
     *
     * @type {boolean}
     */
    set flipX(value: boolean);
    /**
     * Gets whether to flip the X axis when rendering a sprite.
     *
     * @type {boolean}
     */
    get flipX(): boolean;
    /**
     * Sets whether to flip the Y axis when rendering a sprite.
     *
     * @type {boolean}
     */
    set flipY(value: boolean);
    /**
     * Gets whether to flip the Y axis when rendering a sprite.
     *
     * @type {boolean}
     */
    get flipY(): boolean;
    /**
     * Sets the width of the sprite when rendering using 9-Slicing. The width and height are only
     * used when the render mode of the sprite asset is Sliced or Tiled.
     *
     * @type {number}
     */
    set width(value: number);
    /**
     * Gets the width of the sprite when rendering using 9-Slicing.
     *
     * @type {number}
     */
    get width(): number;
    /**
     * Sets the height of the sprite when rendering using 9-Slicing. The width and height are only
     * used when the render mode of the sprite asset is Sliced or Tiled.
     *
     * @type {number}
     */
    set height(value: number);
    /**
     * Gets the height of the sprite when rendering using 9-Slicing.
     *
     * @type {number}
     */
    get height(): number;
    /**
     * Sets the batch group for the sprite (see {@link BatchGroup}). Default is -1 (no group).
     *
     * @type {number}
     */
    set batchGroupId(value: number);
    /**
     * Gets the batch group for the sprite.
     *
     * @type {number}
     */
    get batchGroupId(): number;
    /**
     * Sets the name of the clip to play automatically when the component is enabled.
     *
     * @type {string}
     */
    set autoPlayClip(value: string);
    /**
     * Gets the name of the clip to play automatically when the component is enabled.
     *
     * @type {string}
     */
    get autoPlayClip(): string;
    /**
     * Sets the draw order of the component. A higher value means that the component will be
     * rendered on top of other components in the same layer. This is not used unless the layer's
     * sort order is set to {@link SORTMODE_MANUAL}.
     *
     * @type {number}
     */
    set drawOrder(value: number);
    /**
     * Gets the draw order of the component.
     *
     * @type {number}
     */
    get drawOrder(): number;
    /**
     * Sets the array of layer IDs ({@link Layer#id}) to which this sprite should belong.
     *
     * @type {number[]}
     */
    set layers(value: number[]);
    /**
     * Gets the array of layer IDs ({@link Layer#id}) to which this sprite belongs.
     *
     * @type {number[]}
     */
    get layers(): number[];
    // NOTE(review): the members below are undocumented in the source; summaries
    // are inferred from names — verify against the SpriteComponent implementation.
    /** Gets the bounding box of the sprite's mesh instance. @type {BoundingBox} */
    get aabb(): BoundingBox;
    /** Called when the component is destroyed; tears down internal state. @ignore */
    onDestroy(): void;
    /** Shows the internal model (adds it to the assigned layers). @ignore */
    _showModel(): void;
    /** Hides the internal model (removes it from the assigned layers). @ignore */
    _hideModel(): void;
    /** Updates internal state to display the given sprite frame. @ignore */
    _showFrame(frame: any): void;
    /** Recomputes the internal node's transform. @ignore */
    _updateTransform(): void;
    /** Custom AABB update callback used by the mesh instance. @ignore */
    _updateAabb(aabb: any): any;
    /** Plays the {@link SpriteComponent#autoPlayClip} clip when appropriate. @ignore */
    _tryAutoPlay(): void;
    /** Handler for layer-composition change events. @ignore */
    _onLayersChanged(oldComp: any, newComp: any): void;
    /** Handler invoked when a layer is added to the composition. @ignore */
    _onLayerAdded(layer: any): void;
    /** Handler invoked when a layer is removed from the composition. @ignore */
    _onLayerRemoved(layer: any): void;
    /** Removes the internal model's mesh instances from all assigned layers. @ignore */
    removeModelFromLayers(): void;
    /**
     * Creates and adds a new {@link SpriteAnimationClip} to the component's clips.
     *
     * @param {object} data - Data for the new animation clip.
     * @param {string} [data.name] - The name of the new animation clip.
     * @param {number} [data.fps] - Frames per second for the animation clip.
     * @param {boolean} [data.loop] - Whether to loop the animation clip.
     * @param {number|Asset} [data.spriteAsset] - The asset id or
     * the {@link Asset} of the sprite that this clip will play.
     * @returns {SpriteAnimationClip} The new clip that was added.
     */
    addClip(data: {
        name?: string;
        fps?: number;
        loop?: boolean;
        spriteAsset?: number | Asset;
    }): SpriteAnimationClip;
    /**
     * Removes a clip by name.
     *
     * @param {string} name - The name of the animation clip to remove.
     */
    removeClip(name: string): void;
    /**
     * Get an animation clip by name.
     *
     * @param {string} name - The name of the clip.
     * @returns {SpriteAnimationClip} The clip.
     */
    clip(name: string): SpriteAnimationClip;
    /**
     * Plays a sprite animation clip by name. If the animation clip is already playing then this
     * will do nothing.
     *
     * @param {string} name - The name of the clip to play.
     * @returns {SpriteAnimationClip} The clip that started playing.
     */
    play(name: string): SpriteAnimationClip;
    /**
     * Pauses the current animation clip.
     */
    pause(): void;
    /**
     * Resumes the current paused animation clip.
     */
    resume(): void;
    /**
     * Stops the current animation clip and resets it to the first frame.
     */
    stop(): void;
}

/**
 * The Entity is a core primitive of a PlayCanvas application. Generally speaking, any object in
 * your application will be represented by an Entity, along with a set of {@link Component}s. Each
 * component enables a particular capability. For example, the {@link RenderComponent} enables an
 * entity to render a 3D model, and the {@link ScriptComponent} enables an entity to run code that
 * implements custom behavior.
 *
 * Entity is a subclass of {@link GraphNode} which allows entities to form a tree-like hierarchy
 * (based on parent/child relationships). The root of the entity hierarchy can be queried with
 * {@link AppBase#root}. Entities inherit a 3D transform from {@link GraphNode} which allows them
 * to be positioned, rotated and scaled.
 */
declare class Entity extends GraphNode {
    /**
     * Fired after the entity is destroyed.
     *
     * @event
     * @example
     * entity.on('destroy', (e) => {
     *     console.log(`Entity ${e.name} has been destroyed`);
     * });
     */
    static EVENT_DESTROY: string;
    /**
     * Create a new Entity.
     *
     * @param {string} [name] - The non-unique name of the entity, default is "Untitled".
     * @param {AppBase} [app] - The application the entity belongs to, default is the current
     * application.
     * @example
     * const entity = new pc.Entity();
     *
     * // Add a Component to the Entity
     * entity.addComponent('camera', {
     *     fov: 45,
     *     nearClip: 1,
     *     farClip: 10000
     * });
     *
     * // Add the Entity into the scene graph
     * app.root.addChild(entity);
     *
     * // Move the entity
     * entity.translate(10, 0, 0);
     *
     * // Or translate it by setting its position directly
     * const p = entity.getPosition();
     * entity.setPosition(p.x + 10, p.y, p.z);
     *
     * // Change the entity's rotation in local space
     * const e = entity.getLocalEulerAngles();
     * entity.setLocalEulerAngles(e.x, e.y + 90, e.z);
     *
     * // Or use rotateLocal
     * entity.rotateLocal(0, 90, 0);
     */
    constructor(name?: string, app?: AppBase);
    /**
     * Gets the {@link AnimComponent} attached to this entity.
     *
     * @type {AnimComponent|undefined}
     * @readonly
     */
    readonly anim: AnimComponent | undefined;
    /**
     * Gets the {@link AnimationComponent} attached to this entity.
     *
     * @type {AnimationComponent|undefined}
     * @readonly
     */
    readonly animation: AnimationComponent | undefined;
    /**
     * Gets the {@link AudioListenerComponent} attached to this entity.
     *
     * @type {AudioListenerComponent|undefined}
     * @readonly
     */
    readonly audiolistener: AudioListenerComponent | undefined;
    /**
     * Gets the {@link ButtonComponent} attached to this entity.
     *
     * @type {ButtonComponent|undefined}
     * @readonly
     */
    readonly button: ButtonComponent | undefined;
    /**
     * Gets the {@link CameraComponent} attached to this entity.
     *
     * @type {CameraComponent|undefined}
     * @readonly
     */
    readonly camera: CameraComponent | undefined;
    /**
     * Gets the {@link CollisionComponent} attached to this entity.
     *
     * @type {CollisionComponent|undefined}
     * @readonly
     */
    readonly collision: CollisionComponent | undefined;
    /**
     * Gets the {@link ElementComponent} attached to this entity.
     *
     * @type {ElementComponent|undefined}
     * @readonly
     */
    readonly element: ElementComponent | undefined;
    /**
     * Gets the {@link GSplatComponent} attached to this entity.
     *
     * @type {GSplatComponent|undefined}
     * @readonly
     */
    readonly gsplat: GSplatComponent | undefined;
    /**
     * Gets the {@link LayoutChildComponent} attached to this entity.
     *
     * @type {LayoutChildComponent|undefined}
     * @readonly
     */
    readonly layoutchild: LayoutChildComponent | undefined;
    /**
     * Gets the {@link LayoutGroupComponent} attached to this entity.
     *
     * @type {LayoutGroupComponent|undefined}
     * @readonly
     */
    readonly layoutgroup: LayoutGroupComponent | undefined;
    /**
     * Gets the {@link LightComponent} attached to this entity.
     *
     * @type {LightComponent|undefined}
     * @readonly
     */
    readonly light: LightComponent | undefined;
    /**
     * Gets the {@link ModelComponent} attached to this entity.
     *
     * @type {ModelComponent|undefined}
     * @readonly
     */
    readonly model: ModelComponent | undefined;
    /**
     * Gets the {@link ParticleSystemComponent} attached to this entity.
     *
     * @type {ParticleSystemComponent|undefined}
     * @readonly
     */
    readonly particlesystem: ParticleSystemComponent | undefined;
    /**
     * Gets the {@link RenderComponent} attached to this entity.
     *
     * @type {RenderComponent|undefined}
     * @readonly
     */
    readonly render: RenderComponent | undefined;
    /**
     * Gets the {@link RigidBodyComponent} attached to this entity.
     *
     * @type {RigidBodyComponent|undefined}
     * @readonly
     */
    readonly rigidbody: RigidBodyComponent | undefined;
    /**
     * Gets the {@link ScreenComponent} attached to this entity.
     *
     * @type {ScreenComponent|undefined}
     * @readonly
     */
    readonly screen: ScreenComponent | undefined;
    /**
     * Gets the {@link ScriptComponent} attached to this entity.
     *
     * @type {ScriptComponent|undefined}
     * @readonly
     */
    readonly script: ScriptComponent | undefined;
    /**
     * Gets the {@link ScrollbarComponent} attached to this entity.
     *
     * @type {ScrollbarComponent|undefined}
     * @readonly
     */
    readonly scrollbar: ScrollbarComponent | undefined;
    /**
     * Gets the {@link ScrollViewComponent} attached to this entity.
     *
     * @type {ScrollViewComponent|undefined}
     * @readonly
     */
    readonly scrollview: ScrollViewComponent | undefined;
    /**
     * Gets the {@link SoundComponent} attached to this entity.
     *
     * @type {SoundComponent|undefined}
     * @readonly
     */
    readonly sound: SoundComponent | undefined;
    /**
     * Gets the {@link SpriteComponent} attached to this entity.
     *
     * @type {SpriteComponent|undefined}
     * @readonly
     */
    readonly sprite: SpriteComponent | undefined;
    /**
     * Component storage.
     *
     * @type {Object<string, Component>}
     * @ignore
     */
    c: {
        [x: string]: Component;
    };
    /**
     * @type {AppBase}
     * @private
     */
    private _app;
    /**
     * Used by component systems to speed up destruction.
     *
     * @type {boolean}
     * @ignore
     */
    _destroying: boolean;
    /**
     * @type {string|null}
     * @private
     */
    private _guid;
    /**
     * Used to differentiate between the entities of a template root instance, which have it set to
     * true, and the cloned instance entities (set to false).
     *
     * @type {boolean}
     * @ignore
     */
    _template: boolean;
    /**
     * Create a new component and add it to the entity. Use this to add functionality to the entity
     * like rendering a model, playing sounds and so on.
     *
     * @param {string} type - The name of the component to add. Valid strings are:
     *
     * - "anim" - see {@link AnimComponent}
     * - "animation" - see {@link AnimationComponent}
     * - "audiolistener" - see {@link AudioListenerComponent}
     * - "button" - see {@link ButtonComponent}
     * - "camera" - see {@link CameraComponent}
     * - "collision" - see {@link CollisionComponent}
     * - "element" - see {@link ElementComponent}
     * - "gsplat" - see {@link GSplatComponent}
     * - "layoutchild" - see {@link LayoutChildComponent}
     * - "layoutgroup" - see {@link LayoutGroupComponent}
     * - "light" - see {@link LightComponent}
     * - "model" - see {@link ModelComponent}
     * - "particlesystem" - see {@link ParticleSystemComponent}
     * - "render" - see {@link RenderComponent}
     * - "rigidbody" - see {@link RigidBodyComponent}
     * - "screen" - see {@link ScreenComponent}
     * - "script" - see {@link ScriptComponent}
     * - "scrollbar" - see {@link ScrollbarComponent}
     * - "scrollview" - see {@link ScrollViewComponent}
     * - "sound" - see {@link SoundComponent}
     * - "sprite" - see {@link SpriteComponent}
     *
     * @param {object} [data] - The initialization data for the specific component type. Refer to
     * each specific component's API reference page for details on valid values for this parameter.
     * @returns {Component|null} The new Component that was attached to the entity or null if there
     * was an error.
     * @example
     * const entity = new pc.Entity();
     *
     * // Add a light component with default properties
     * entity.addComponent("light");
     *
     * // Add a camera component with some specified properties
     * entity.addComponent("camera", {
     *     fov: 45,
     *     clearColor: new pc.Color(1, 0, 0)
     * });
     */
    addComponent(type: string, data?: object): Component | null;
    /**
     * Remove a component from the Entity.
     *
     * @param {string} type - The name of the Component type.
     * @example
     * const entity = new pc.Entity();
     * entity.addComponent("light"); // add new light component
     *
     * entity.removeComponent("light"); // remove light component
     */
    removeComponent(type: string): void;
    /**
     * Search the entity and all of its descendants for the first component of specified type.
     *
     * @param {string} type - The name of the component type to retrieve.
     * @returns {Component} A component of specified type, if the entity or any of its descendants
     * has one. Returns undefined otherwise.
     * @example
     * // Get the first found light component in the hierarchy tree that starts with this entity
     * const light = entity.findComponent("light");
     */
    findComponent(type: string): Component;
    /**
     * Search the entity and all of its descendants for all components of specified type.
     *
     * @param {string} type - The name of the component type to retrieve.
     * @returns {Component[]} All components of specified type in the entity or any of its
     * descendants. Returns empty array if none found.
     * @example
     * // Get all light components in the hierarchy tree that starts with this entity
     * const lights = entity.findComponents("light");
     */
    findComponents(type: string): Component[];
    /**
     * Search the entity and all of its descendants for the first script instance of specified type.
     *
     * @param {string|typeof ScriptType} nameOrType - The name or type of {@link ScriptType}.
     * @returns {ScriptType|undefined} A script instance of specified type, if the entity or any of
     * its descendants has one. Returns undefined otherwise.
     * @example
     * // Get the first found "playerController" instance in the hierarchy tree that starts with this entity
     * const controller = entity.findScript("playerController");
     */
    findScript(nameOrType: string | typeof ScriptType): ScriptType | undefined;
    /**
     * Search the entity and all of its descendants for all script instances of specified type.
     *
     * @param {string|typeof ScriptType} nameOrType - The name or type of {@link ScriptType}.
     * @returns {ScriptType[]} All script instances of specified type in the entity or any of its
     * descendants. Returns empty array if none found.
     * @example
     * // Get all "playerController" instances in the hierarchy tree that starts with this entity
     * const controllers = entity.findScripts("playerController");
     */
    findScripts(nameOrType: string | typeof ScriptType): ScriptType[];
    /**
     * Get the GUID value for this Entity.
     *
     * @returns {string} The GUID of the Entity.
     * @ignore
     */
    getGuid(): string;
    /**
     * Set the GUID value for this Entity. Note that it is unlikely that you should need to change
     * the GUID value of an Entity at run-time. Doing so will corrupt the graph this Entity is in.
     *
     * @param {string} guid - The GUID to assign to the Entity.
     * @ignore
     */
    setGuid(guid: string): void;
    /** @private */
    private _onHierarchyStatePostChanged;
    /**
     * Find a descendant of this entity with the GUID.
     *
     * @param {string} guid - The GUID to search for.
     * @returns {Entity|null} The entity with the matching GUID or null if no entity is found.
     */
    findByGuid(guid: string): Entity | null;
    /**
     * Create a deep copy of the Entity. Duplicate the full Entity hierarchy, with all Components
     * and all descendants. Note, this Entity is not in the hierarchy and must be added manually.
     *
     * @returns {this} A new Entity which is a deep copy of the original.
     * @example
     * const e = this.entity.clone();
     *
     * // Add clone as a sibling to the original
     * this.entity.parent.addChild(e);
     */
    clone(): this;
    /**
     * Returns this entity's components as an array in a sorted order — the sort
     * criterion is defined in the implementation, not visible from this declaration.
     *
     * @returns {Component[]} The sorted components.
     * @ignore
     */
    _getSortedComponents(): Component[];
    /**
     * @param {Object<string, Entity>} duplicatedIdsMap - A map of original entity GUIDs to cloned
     * entities.
     * @returns {this} A new Entity which is a deep copy of the original.
     * @private
     */
    private _cloneRecursively;
}

/**
 * @import { FramePass } from '../platform/graphics/frame-pass.js'
 * @import { RenderPass } from '../platform/graphics/render-pass.js'
 * @import { RenderTarget } from '../platform/graphics/render-target.js'
 * @import { Texture } from '../platform/graphics/texture.js'
 */
/**
 * A frame graph represents a single rendering frame as a sequence of frame passes.
 *
 * @ignore
 */
declare class FrameGraph {
    /** @type {FramePass[]} */
    renderPasses: FramePass[];
    /**
     * Map used during frame graph compilation. It maps a render target to its previous occurrence.
     *
     * @type {Map<RenderTarget, RenderPass>}
     */
    renderTargetMap: Map<RenderTarget, RenderPass>;
    /**
     * Add a frame pass to the frame.
     *
     * @param {FramePass} renderPass - The frame pass to add.
     */
    addRenderPass(renderPass: FramePass): void;
    /** Clears the recorded passes, preparing the graph for a new frame. */
    reset(): void;
    /** Compiles the frame graph — presumably links passes via renderTargetMap; verify in implementation. */
    compile(): void;
    /** Executes the frame's render passes on the given graphics device. */
    render(device: any): void;
}

declare class LightsBuffer {
    // NOTE(review): this class is undocumented in the source; the summaries below
    // are inferred from member names — verify against the LightsBuffer implementation.
    constructor(device: any);
    /** Whether area-light data is included when packing lights. */
    areaLightsEnabled: boolean;
    /** The graphics device used to create GPU resources. */
    device: any;
    /** Whether cookie data is included when packing lights. */
    cookiesEnabled: boolean;
    /** Whether shadow data is included when packing lights. */
    shadowsEnabled: boolean;
    /** Maximum number of lights the buffer can hold. */
    maxLights: number;
    /** Float view of the CPU-side per-light data. */
    lightsFloat: Float32Array<ArrayBuffer>;
    /** Unsigned-integer view of the CPU-side per-light data. */
    lightsUint: Uint32Array<ArrayBuffer>;
    /** Texture holding packed light data for shader access. */
    lightsTexture: Texture;
    /** Resolved uniform id used to bind lightsTexture — presumably. */
    _lightsTextureId: any;
    /** Inverse of the maximum light color component — presumably used to normalize colors. */
    invMaxColorValue: number;
    /** Inverse of the maximum light attenuation — presumably used for normalization. */
    invMaxAttenuation: number;
    /** Minimum corner of the bounds set via setBounds. */
    boundsMin: Vec3;
    /** Size (delta) of the bounds set via setBounds. */
    boundsDelta: Vec3;
    /** Releases resources owned by the buffer. */
    destroy(): void;
    /** Creates a texture with the given dimensions, format and name. */
    createTexture(device: any, width: any, height: any, format: any, name: any): Texture;
    /** Stores the bounds (minimum corner and size) used for light-position normalization. */
    setBounds(min: any, delta: any): void;
    /** Uploads the CPU-side light data to the GPU texture. */
    uploadTextures(): void;
    /** Updates shader uniforms related to the lights buffer. */
    updateUniforms(): void;
    /** Computes a spot light's direction into `direction`. */
    getSpotDirection(direction: any, spot: any): void;
    /** Returns the area-light size data for the given light. */
    getLightAreaSizes(light: any): Float32Array<ArrayBuffer>;
    /** Packs a single light's data into the buffer at the given index. */
    addLightData(light: any, lightIndex: any): void;
}

declare class WorldClusters {
    // NOTE(review): this class is undocumented in the source; the summaries below
    // are inferred from member names — verify against the WorldClusters implementation.
    constructor(device: any);
    /** @type {Texture} */
    clusterTexture: Texture;
    /** The graphics device used to create GPU resources. */
    device: any;
    /** Name of this cluster set. */
    name: string;
    /** Internal counter — purpose not visible from this declaration. */
    reportCount: number;
    /** Minimum corner of the evaluated bounds (see evaluateBounds). */
    boundsMin: Vec3;
    /** Maximum corner of the evaluated bounds. */
    boundsMax: Vec3;
    /** Size of the evaluated bounds (max - min) — presumably. */
    boundsDelta: Vec3;
    /** Backing field for the `cells` accessor. */
    _cells: Vec3;
    /** Cell index limits derived from _cells — presumably. */
    _cellsLimit: Vec3;
    /** Sets the cluster grid cell counts. */
    set cells(value: Vec3);
    /** Gets the cluster grid cell counts. */
    get cells(): Vec3;
    /** Sets the maximum number of lights stored per cell. */
    set maxCellLightCount(count: any);
    /** Gets the maximum number of lights stored per cell. */
    get maxCellLightCount(): any;
    /** Lights collected for the current update (see collectLights). */
    _usedLights: ClusterLight[];
    /** Buffer holding packed per-light data. */
    lightsBuffer: LightsBuffer;
    /** Backing field for the `maxCellLightCount` accessor. */
    _maxCellLightCount: any;
    /** True when the cell configuration changed and needs a rebuild — presumably. */
    _cellsDirty: boolean;
    /** Releases resources owned by this instance. */
    destroy(): void;
    /** Releases the cluster texture. */
    releaseClusterTexture(): void;
    /** Resolves the uniform ids used when updating shader uniforms. */
    registerUniforms(device: any): void;
    _numClusteredLightsId: any;
    _clusterMaxCellsId: any;
    _clusterWorldTextureId: any;
    _clusterBoundsMinId: any;
    _clusterBoundsMinData: Float32Array<ArrayBuffer>;
    _clusterBoundsDeltaId: any;
    _clusterBoundsDeltaData: Float32Array<ArrayBuffer>;
    _clusterCellsCountByBoundsSizeId: any;
    _clusterCellsCountByBoundsSizeData: Float32Array<ArrayBuffer>;
    _clusterCellsDotId: any;
    _clusterCellsDotData: Int32Array<ArrayBuffer>;
    _clusterCellsMaxId: any;
    _clusterCellsMaxData: Int32Array<ArrayBuffer>;
    _clusterTextureWidthId: any;
    /** Applies lighting parameters (e.g. cell configuration) to this instance. */
    updateParams(lightingParams: any): void;
    /** Rebuilds cell-related CPU data when the configuration changed — presumably. */
    updateCells(): void;
    /** CPU-side cluster data uploaded to clusterTexture — presumably. */
    clusters: Uint8ClampedArray<ArrayBuffer>;
    /** Per-cell light counts — presumably. */
    counts: Int32Array<ArrayBuffer>;
    /** Uploads CPU-side cluster data to the GPU. */
    uploadTextures(): void;
    /** Updates shader uniforms for this cluster set. */
    updateUniforms(): void;
    /** Computes the min/max cell range affected by a clustered light. */
    evalLightCellMinMax(clusteredLight: any, min: any, max: any): void;
    /** Collects the lights to be clustered into _usedLights. */
    collectLights(lights: any): void;
    /** Evaluates the world bounds of the collected lights. */
    evaluateBounds(): void;
    /** Rebuilds the clusters from the collected lights. */
    updateClusters(lightingParams: any): void;
    /** Full update: collect lights, evaluate bounds and rebuild clusters — presumably. */
    update(lights: any, lightingParams?: any): void;
    /** Makes this cluster set active for rendering (binds uniforms) — presumably. */
    activate(): void;
}

/**
 * A light paired with min/max bounds — presumably the range of cluster cells it
 * affects (cf. {@link WorldClusters#evalLightCellMinMax}).
 *
 * @ignore
 */
declare class ClusterLight {
    /** The clustered light. */
    light: any;
    /** Minimum bounds affected by the light. */
    min: Vec3;
    /** Maximum bounds affected by the light. */
    max: Vec3;
}

/**
 * A class managing instances of world clusters used by the renderer for layers with
 * unique sets of clustered lights.
 *
 * @ignore
 */
declare class WorldClustersAllocator {
    /**
     * Create a new instance.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device.
     */
    constructor(graphicsDevice: GraphicsDevice);
    /**
     * Empty cluster with no lights.
     *
     * @type {WorldClusters|null}
     */
    _empty: WorldClusters | null;
    /**
     * All allocated clusters
     *
     * @type {WorldClusters[]}
     */
    _allocated: WorldClusters[];
    /**
     * Render actions with all unique light clusters. The key is the hash of lights on a layer, the
     * value is a render action with unique light clusters.
     *
     * @type {Map<number, RenderAction>}
     */
    _clusters: Map<number, RenderAction>;
    device: GraphicsDevice;
    /** Releases all owned clusters. */
    destroy(): void;
    /** Gets the number of allocated clusters — presumably `_allocated.length`. */
    get count(): number;
    /** Gets the shared empty cluster (no lights) — presumably created on demand. */
    get empty(): WorldClusters;
    /** Assigns unique light clusters to the render passes' render actions. */
    assign(renderPasses: any): void;
    /** Updates the assigned clusters for the frame using the given lighting parameters. */
    update(renderPasses: any, lighting: any): void;
}

/**
 * Blue noise based random numbers API.
 *
 * @ignore
 */
declare class BlueNoise {
    /**
     * @param {number} [seed] - Initial seed; default defined in the implementation.
     */
    constructor(seed?: number);
    /** Current position in the noise sequence — semantics inferred from name. */
    seed: number;
    /** Advances the internal sequence state. @ignore */
    _next(): void;
    /** Returns the next scalar noise value. */
    value(): number;
    /**
     * Returns a Vec4 of noise values.
     *
     * @param {Vec4} [dest] - Optional destination vector to fill — presumably a new
     * one is created when omitted.
     */
    vec4(dest?: Vec4): Vec4;
}

/**
 * Helper that caches derived fisheye projection values from a normalized slider value, camera FOV,
 * and projection matrix. Each consumer (renderer, culling, future skydome) creates its own instance
 * and calls {@link FisheyeProjection#update} when it needs current values. The instance only
 * mutates its own cached fields, with no external side effects.
 *
 * Uses the generalized fisheye model g(θ) = k·tan(θ/k), where k controls the projection
 * characteristic: k=1 is rectilinear perspective, lower k increases barrel distortion.
 *
 * @ignore
 */
declare class FisheyeProjection {
    /**
     * Whether fisheye is active (t > 0).
     *
     * @type {boolean}
     */
    enabled: boolean;
    /**
     * The fisheye k parameter controlling projection curvature.
     *
     * @type {number}
     */
    k: number;
    /**
     * Precomputed 1/k to avoid per-splat division in shaders.
     *
     * @type {number}
     */
    invK: number;
    /**
     * Scale factor blending from edge-fit (1.0) to corner-fit (sqrt(2)) based on t.
     *
     * @type {number}
     */
    cornerScale: number;
    /**
     * Fisheye-adjusted horizontal projection scale for NDC conversion.
     *
     * @type {number}
     */
    projMat00: number;
    /**
     * Fisheye-adjusted vertical projection scale for NDC conversion.
     *
     * @type {number}
     */
    projMat11: number;
    /**
     * Maximum viewing angle before singularity, used for cone culling.
     *
     * @type {number}
     */
    maxTheta: number;
    /** Cached `t` input from the last update, for change detection. @private */
    private _lastT;
    /** Cached FOV input from the last update, for change detection. @private */
    private _lastFov;
    /** Cached horizontal projection scale input from the last update. @private */
    private _lastP00;
    /** Cached vertical projection scale input from the last update. @private */
    private _lastP11;
    /**
     * Recomputes all derived fisheye values. Short-circuits if inputs haven't changed.
     *
     * @param {number} t - Normalized fisheye slider value in [0, 1]. 0 = rectilinear, 1 = max distortion.
     * @param {number} fov - Camera vertical FOV in degrees.
     * @param {import('../../core/math/mat4.js').Mat4} projMatrix - The camera's projection matrix.
     */
    update(t: number, fov: number, projMatrix: Mat4): void;
}

/**
 * @import { StorageBuffer } from '../../platform/graphics/storage-buffer.js'
 * @import { ShaderMaterial } from '../materials/shader-material.js'
 * @import { Layer } from '../layer.js'
 * @import { GraphNode } from '../graph-node.js'
 * @import { GraphicsDevice } from '../../platform/graphics/graphics-device.js'
 * @import { GSplatWorkBuffer } from './gsplat-work-buffer.js'
 * @import { FogParams } from '../fog-params.js'
 */
/**
 * Base class for splat renderers. Holds common state shared by all renderer
 * implementations (instanced-quad, compute-based, etc.). Derived classes
 * implement the actual rendering strategy.
 *
 * @ignore
 */
declare class GSplatRenderer {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {GraphNode} node - The graph node.
     * @param {GraphNode} cameraNode - The camera node.
     * @param {Layer} layer - The layer to add mesh instances to.
     * @param {GSplatWorkBuffer} workBuffer - The work buffer containing splat data.
     */
    constructor(device: GraphicsDevice, node: GraphNode, cameraNode: GraphNode, layer: Layer, workBuffer: GSplatWorkBuffer);
    /** @type {GraphicsDevice} */
    device: GraphicsDevice;
    /** @type {GraphNode} */
    node: GraphNode;
    /** @type {GraphNode} */
    cameraNode: GraphNode;
    /** @type {Layer} */
    layer: Layer;
    /** @type {GSplatWorkBuffer} */
    workBuffer: GSplatWorkBuffer;
    /** @type {number|undefined} */
    renderMode: number | undefined;
    /**
     * Cached work buffer format version for detecting extra stream changes.
     *
     * @type {number}
     * @protected
     */
    protected _workBufferFormatVersion: number;
    /**
     * Fisheye projection helper shared by all renderer paths.
     * The manager calls update() during culling; renderers read the computed values
     * when binding uniforms.
     *
     * @type {FisheyeProjection}
     * @ignore
     */
    fisheyeProj: FisheyeProjection;
    destroy(): void;
    /**
     * Sets the render mode for this renderer.
     *
     * @param {number} renderMode - Bitmask flags controlling render passes (GSPLAT_FORWARD, GSPLAT_SHADOW, or both).
     */
    setRenderMode(renderMode: number): void;
    /**
     * Returns the material used by this renderer, or null if not applicable.
     *
     * @type {ShaderMaterial|null}
     */
    get material(): ShaderMaterial | null;
    /**
     * Sets the data source providing format and texture access. The base implementation updates
     * the workBuffer and notifies derived classes of the format change. Derived classes (e.g.
     * the compute renderer) may override this to decouple from the work buffer entirely.
     *
     * The source object must provide:
     * - `format` — a {@link GSplatFormat} describing the texture streams and shader read code.
     * - `getTexture(name)` — a function returning a {@link Texture} for a given stream name.
     *
     * @param {object} source - The data source (typically a {@link GSplatWorkBuffer}).
     */
    setDataSource(source: object): void;
    /**
     * Called when the work buffer format has changed. Derived classes reconfigure
     * their rendering resources (materials, pipelines, bindings, etc.).
     */
    onWorkBufferFormatChanged(): void;
    /**
     * Updates the renderer with the current splat count and texture size.
     *
     * @param {number} count - The number of visible splats.
     * @param {number} textureSize - The work buffer texture size.
     */
    update(count: number, textureSize: number): void;
    /**
     * Configures the renderer to use GPU-sorted data for rendering.
     *
     * @param {number} drawSlot - The indirect draw slot index.
     * @param {StorageBuffer} sortedIds - Buffer containing sorted visible splat IDs.
     * @param {StorageBuffer} numSplatsBuffer - Buffer containing the visible splat count.
     * @param {number} textureSize - The work buffer texture size.
     */
    setGpuSortedRendering(drawSlot: number, sortedIds: StorageBuffer, numSplatsBuffer: StorageBuffer, textureSize: number): void;
    /**
     * Switches the renderer to CPU-sorted rendering mode (the counterpart to GPU-sorted
     * rendering).
     */
    setCpuSortedRendering(): void;
    /**
     * Binds the current sort-order data (texture or storage buffer) used for CPU-sorted
     * rendering.
     */
    setOrderData(): void;
    /**
     * Per-frame update of the renderer, handling material syncing and parameter updates.
     *
     * @param {object} params - The gsplat parameters.
     * @param {number} [exposure] - Scene exposure value.
     * @param {FogParams} [fogParams] - Fog parameters.
     */
    frameUpdate(params: object, exposure?: number, fogParams?: FogParams): void;
    /**
     * Updates the overdraw visualization mode based on the supplied parameters.
     *
     * @param {object} params - The gsplat parameters.
     */
    updateOverdrawMode(params: object): void;
    /**
     * Populates the supplied include map with the tonemapping, gamma, decode and gsplatOutput
     * shader chunks needed by compute tile-count shaders.
     *
     * @param {Map<string, string>} cincludes - The shader includes map to populate.
     * @protected
     */
    protected _createTonemapIncludes(cincludes: Map<string, string>): void;
}

/**
 * A general-purpose 1D block allocator backed by a doubly-linked list with segregated free-list
 * buckets. Manages a linear address space where contiguous blocks can be allocated and freed.
 * Supports incremental defragmentation and automatic growth. Offsets and sizes are expressed in
 * caller-defined abstract units.
 *
 * Free blocks are organized into power-of-2 size buckets for best-fit allocation, which reduces
 * fragmentation compared to a single first-fit free list.
 *
 * @ignore
 */
declare class BlockAllocator {
    /**
     * Create a new BlockAllocator.
     *
     * @param {number} [capacity] - Initial address space capacity. Defaults to 0.
     * @param {number} [growMultiplier] - Multiplicative growth factor for auto-grow in
     * {@link BlockAllocator#updateAllocation}. Defaults to 1.1 (10% extra).
     */
    constructor(capacity?: number, growMultiplier?: number);
    /**
     * Head of the main list (all blocks, offset-ordered).
     *
     * @type {MemBlock|null}
     * @private
     */
    private _headAll;
    /**
     * Tail of the main list.
     *
     * @type {MemBlock|null}
     * @private
     */
    private _tailAll;
    /**
     * Segregated free-list bucket heads. Each entry is the head of a doubly-linked list of free
     * blocks whose size falls in that power-of-2 range. Bucket i covers sizes [2^i, 2^(i+1)).
     * The array grows dynamically as larger free blocks appear.
     *
     * @type {Array<MemBlock|null>}
     * @private
     */
    private _freeBucketHeads;
    /**
     * Pool of recycled MemBlock objects.
     *
     * @type {MemBlock[]}
     * @private
     */
    private _pool;
    /**
     * Total address space.
     *
     * @type {number}
     * @private
     */
    private _capacity;
    /**
     * Sum of all allocated block sizes.
     *
     * @type {number}
     * @private
     */
    private _usedSize;
    /**
     * Sum of all free region sizes.
     *
     * @type {number}
     * @private
     */
    private _freeSize;
    /**
     * Number of free regions. Maintained in O(1) to support the fragmentation metric.
     *
     * @type {number}
     * @private
     */
    private _freeRegionCount;
    /**
     * Multiplicative growth factor used by {@link BlockAllocator#updateAllocation}.
     * When growing, the new capacity is at least `capacity * growMultiplier`.
     *
     * @type {number}
     * @private
     */
    private _growMultiplier;
    /**
     * Total address space capacity.
     *
     * @type {number}
     */
    get capacity(): number;
    /**
     * Total size of all allocated blocks.
     *
     * @type {number}
     */
    get usedSize(): number;
    /**
     * Total size of all free regions.
     *
     * @type {number}
     */
    get freeSize(): number;
    /**
     * Fragmentation ratio in the range [0, 1]. Returns 0 when all free space is one contiguous
     * block (ideal), and approaches 1 when free space is split into many pieces. Computed O(1)
     * from the internally maintained free region count.
     *
     * @type {number}
     */
    get fragmentation(): number;
    /**
     * Compute the bucket index for a given block size. Uses floor(log2(size)) via the CLZ
     * intrinsic for integer math.
     *
     * @param {number} size - Block size (must be > 0).
     * @returns {number} Bucket index.
     * @private
     */
    private _bucketFor;
    /**
     * Add a free block to the appropriate size bucket. Prepends to the bucket list for O(1)
     * insertion. Grows the bucket array if needed.
     *
     * @param {MemBlock} block - The free block to add.
     * @private
     */
    private _addToBucket;
    /**
     * Remove a free block from its current size bucket.
     *
     * @param {MemBlock} block - The free block to remove.
     * @private
     */
    private _removeFromBucket;
    /**
     * Move a free block to the correct bucket after its size changed (e.g. due to merging or
     * splitting). Only performs the remove+add if the bucket actually changed.
     *
     * @param {MemBlock} block - The free block whose size has changed.
     * @private
     */
    private _rebucket;
    /**
     * Obtain a MemBlock from the pool or create a new one.
     *
     * @param {number} offset - The offset.
     * @param {number} size - The size.
     * @param {boolean} free - Whether the block is free.
     * @returns {MemBlock} The block.
     * @private
     */
    private _obtain;
    /**
     * Return a MemBlock to the pool.
     *
     * @param {MemBlock} block - The block to release.
     * @private
     */
    private _release;
    /**
     * Insert a block into the main list after a given node.
     *
     * @param {MemBlock} block - The block to insert.
     * @param {MemBlock|null} after - Insert after this node (null = insert at head).
     * @private
     */
    private _insertAfterInMainList;
    /**
     * Remove a block from the main list.
     *
     * @param {MemBlock} block - The block to remove.
     * @private
     */
    private _removeFromMainList;
    /**
     * Find the best-fit free block for the requested size using segregated buckets. Scans the
     * target bucket for the smallest block >= size (best-fit), then falls through to higher
     * buckets where any block is guaranteed large enough (first-fit).
     *
     * @param {number} size - Minimum size needed.
     * @returns {MemBlock|null} The best fitting free block, or null.
     * @private
     */
    private _findFreeBlock;
    /**
     * Allocate a contiguous block of the given size.
     *
     * @param {number} size - The number of units to allocate. Must be > 0.
     * @returns {MemBlock|null} A MemBlock handle, or null if no space is available.
     */
    allocate(size: number): MemBlock | null;
    /**
     * Free a previously allocated block. Adjacent free regions are merged automatically.
     *
     * @param {MemBlock} block - The block to free (must have been returned by
     * {@link BlockAllocator#allocate}).
     */
    free(block: MemBlock): void;
    /**
     * Grow the address space. Only increases capacity, never decreases.
     *
     * @param {number} newCapacity - The new capacity. Must be > current capacity.
     */
    grow(newCapacity: number): void;
    /**
     * Defragment the allocator by moving allocated blocks to reduce fragmentation.
     *
     * When maxMoves is 0, performs a full compaction in a single O(n) pass: all allocated blocks
     * are packed contiguously from offset 0 and a single free block is placed at the end.
     *
     * When maxMoves > 0, performs incremental defragmentation in two phases:
     * - Phase 1 (up to maxMoves/2): relocates the last allocated block to the first fitting free
     *   gap (maximizes tail free space).
     * - Phase 2 (up to maxMoves/2): slides allocated blocks left into adjacent free gaps
     *   (cleans up interior fragmentation).
     *
     * @param {number} [maxMoves] - Maximum number of block moves. 0 = full compaction. Defaults
     * to 0.
     * @param {Set<MemBlock>} [result] - Optional Set to receive moved blocks. Defaults to a new
     * Set.
     * @returns {Set<MemBlock>} The set of MemBlocks that were moved.
     */
    defrag(maxMoves?: number, result?: Set<MemBlock>): Set<MemBlock>;
    /**
     * Full compaction: single-pass, pack all allocated blocks from offset 0.
     *
     * @param {Set<MemBlock>} result - Set to receive moved blocks.
     * @private
     */
    private _defragFull;
    /**
     * Incremental defragmentation with two phases.
     *
     * @param {number} maxMoves - Maximum total moves.
     * @param {Set<MemBlock>} result - Set to receive moved blocks.
     * @private
     */
    private _defragIncremental;
    /**
     * Move an allocated block to a free gap. The block's offset is updated in-place so caller
     * handles stay valid.
     *
     * @param {MemBlock} block - The allocated block to move.
     * @param {MemBlock} gap - The free gap to move into (must be >= block size).
     * @private
     */
    private _moveBlock;
    /**
     * Batch update: free a set of blocks and allocate new ones. Handles growth and compaction
     * internally when allocations cannot be satisfied.
     *
     * The `toAllocate` array is modified in-place: each numeric size entry is replaced with the
     * allocated {@link MemBlock}.
     *
     * @param {MemBlock[]} toFree - Blocks to release.
     * @param {Array<number|MemBlock>} toAllocate - Sizes to allocate. Modified in-place: numbers
     * are replaced with MemBlock instances.
     * @returns {boolean} True if a full defrag was performed (all existing blocks have new
     * offsets and must be re-rendered), false if only incremental allocations were made.
     */
    updateAllocation(toFree: MemBlock[], toAllocate: Array<number | MemBlock>): boolean;
}
/**
 * A node in the {@link BlockAllocator}'s linked list, representing either an allocated block or a
 * free region. Callers receive MemBlock instances as handles from {@link BlockAllocator#allocate}
 * and must not modify any properties directly.
 *
 * Note: a block's offset may change when the allocator defragments (the allocator updates it
 * in-place so handles stay valid), so read {@link MemBlock#offset} fresh rather than caching it.
 *
 * @ignore
 */
declare class MemBlock {
    /**
     * Position in the address space.
     *
     * @type {number}
     * @private
     */
    private _offset;
    /**
     * Size of this block.
     *
     * @type {number}
     * @private
     */
    private _size;
    /**
     * True if this is a free region, false if allocated.
     *
     * @type {boolean}
     * @private
     */
    private _free;
    /**
     * Previous node in the main (all-nodes) list.
     *
     * @type {MemBlock|null}
     * @private
     */
    private _prev;
    /**
     * Next node in the main (all-nodes) list.
     *
     * @type {MemBlock|null}
     * @private
     */
    private _next;
    /**
     * Previous node in the bucket free-list.
     *
     * @type {MemBlock|null}
     * @private
     */
    private _prevFree;
    /**
     * Next node in the bucket free-list.
     *
     * @type {MemBlock|null}
     * @private
     */
    private _nextFree;
    /**
     * Index of the size bucket this free block belongs to, or -1 if not in any bucket.
     *
     * @type {number}
     * @private
     */
    private _bucket;
    /**
     * The current offset of this block in the address space. May change after defragmentation.
     *
     * @type {number}
     */
    get offset(): number;
    /**
     * The size of this block.
     *
     * @type {number}
     */
    get size(): number;
}

/**
 * A versioned snapshot of the set of splats managed by the gsplat system, together with their
 * work-buffer allocations, sorting state and bounds grouping. The allocator and allocation map
 * are persistent objects owned by the GSplatManager and shared across world states.
 */
declare class GSplatWorldState {
    /**
     * @param {import('../../platform/graphics/graphics-device.js').GraphicsDevice} device - The graphics device.
     * @param {number} version - The version number.
     * @param {GSplatInfo[]} splats - The splats for this world state.
     * @param {BlockAllocator} allocator - Persistent block allocator (owned by GSplatManager).
     * @param {Map<number, MemBlock>} allocationMap - Persistent allocId-to-MemBlock map (owned by GSplatManager).
     */
    constructor(device: GraphicsDevice, version: number, splats: GSplatInfo[], allocator: BlockAllocator, allocationMap: Map<number, MemBlock>);
    /**
     * The version of the world state.
     *
     * @type {number}
     */
    version: number;
    /**
     * Whether the sort parameters have been set on the sorter.
     *
     * @type {boolean}
     */
    sortParametersSet: boolean;
    /**
     * Whether the world state has been sorted before.
     *
     * @type {boolean}
     */
    sortedBefore: boolean;
    /**
     * An array of all splats managed by this world state.
     *
     * @type {GSplatInfo[]}
     */
    splats: GSplatInfo[];
    /**
     * The texture size of the work buffer.
     *
     * @type {number}
     */
    textureSize: number;
    /**
     * Total number of active splats across all placements.
     *
     * @type {number}
     */
    totalActiveSplats: number;
    /**
     * Total number of intervals across all placements. Each placement contributes
     * either its interval count (intervals.length / 2) or 1 if it has no intervals.
     *
     * @type {number}
     */
    totalIntervals: number;
    /**
     * Deduplicated list of splat groups sharing the same parent placement. Multiple child
     * placements (e.g. octree file nodes) that reference the same parent share a single
     * set of bounding spheres and a single world transform, so they are grouped together.
     * Each entry contains a representative splat, the starting index into the bounds/transforms
     * textures (boundsBaseIndex), and the number of bounding sphere entries for the group.
     *
     * @type {Array<{splat: GSplatInfo, boundsBaseIndex: number, numBoundsEntries: number}>}
     */
    boundsGroups: Array<{
        splat: GSplatInfo;
        boundsBaseIndex: number;
        numBoundsEntries: number;
    }>;
    /**
     * Files to decrement when this state becomes active.
     * Array of tuples: [octree, fileIndex]
     *
     * @type {Array<[GSplatOctree, number]>}
     */
    pendingReleases: Array<[GSplatOctree, number]>;
    /**
     * Splats that need to be rendered to the work buffer. Contains newly allocated or
     * re-allocated splats, or all splats when fullRebuild is true.
     *
     * @type {GSplatInfo[]}
     */
    needsUpload: GSplatInfo[];
    /**
     * AllocIds of splats in needsUpload, for fast membership checks during merge.
     *
     * @type {Set<number>}
     */
    needsUploadIds: Set<number>;
    /**
     * Reverse map from allocId to the GSplatInfo that owns it, for efficient merge lookups
     * in cleanupOldWorldStates without scanning all splats.
     *
     * @type {Map<number, GSplatInfo>}
     */
    allocIdToSplat: Map<number, GSplatInfo>;
    /**
     * True when the allocator grew or defragmented, meaning all block offsets may have
     * changed and every splat must be re-rendered to the work buffer.
     *
     * @type {boolean}
     */
    fullRebuild: boolean;
    destroy(): void;
    /**
     * Populates module-scope scratch arrays with allocations to free/create by diffing the
     * current splat set against the existing allocation map.
     *
     * @param {GSplatInfo[]} splats - Active splats for this state.
     * @param {Map<number, MemBlock>} allocationMap - Persistent allocId-to-MemBlock map.
     * @private
     */
    private computeAllocationDiff;
    /**
     * Process a single allocId/size pair: mark as seen, check for size changes, and
     * queue allocations or frees as needed.
     *
     * @param {number} allocId - The allocation identifier.
     * @param {number} size - Required size for this allocation.
     * @param {Map<number, MemBlock>} allocationMap - Persistent allocId-to-MemBlock map.
     * @private
     */
    private _diffAlloc;
    /**
     * Executes pending allocation changes via the BlockAllocator, runs incremental defrag,
     * derives the texture size, and releases scratch arrays.
     *
     * @param {import('../../platform/graphics/graphics-device.js').GraphicsDevice} device - The graphics device.
     * @param {BlockAllocator} allocator - The block allocator.
     * @param {Map<number, MemBlock>} allocationMap - Persistent allocId-to-MemBlock map.
     * @returns {{ fullRebuild: boolean, changedAllocIds: Set<number>|null }} Whether a full
     * rebuild was triggered and the set of changed allocation ids.
     * @private
     */
    private applyAllocations;
    /**
     * Assigns work-buffer offsets to each splat from allocated blocks and builds the
     * needsUpload list for splats that require re-rendering.
     *
     * @param {GSplatInfo[]} splats - Active splats for this state.
     * @param {Map<number, MemBlock>} allocationMap - Persistent allocId-to-MemBlock map.
     * @param {boolean} fullRebuild - Whether all splats must be re-rendered.
     * @param {Set<number>|null} changedAllocIds - Allocation ids that were newly allocated or moved.
     * @private
     */
    private assignSplatOffsets;
    /**
     * Builds boundsGroups by grouping splats that share a parentPlacementId, assigns
     * sequential boundsBaseIndex to each group, and propagates it back to splats.
     *
     * @param {GSplatInfo[]} splats - Active splats for this state.
     * @private
     */
    private buildBoundsGroups;
}

/**
 * Sorter for unified gsplat rendering. Offloads splat sorting to a Web Worker and delivers
 * results back for application on subsequent frames. NOTE(review): summary inferred from member
 * names and types below — confirm against the implementation.
 */
declare class GSplatUnifiedSorter extends EventHandler {
    /**
     * @param {Scene} [scene] - The scene to fire sort timing events on.
     */
    constructor(scene?: Scene);
    /**
     * The Web Worker that performs the sorting off the main thread.
     *
     * @type {Worker}
     */
    worker: Worker;
    /**
     * Length of the sort order buffer. NOTE(review): units (element count vs bytes) not visible
     * here — confirm in the implementation.
     *
     * @type {number}
     */
    bufferLength: number;
    /**
     * Pool of order-data buffers available for reuse (repopulated via releaseOrderData).
     *
     * @type {any[]}
     */
    availableOrderData: any[];
    /**
     * Presumably the number of sort jobs currently in flight on the worker — verify against the
     * implementation.
     *
     * @type {number}
     */
    jobsInFlight: number;
    /**
     * Presumably true when a new world state version is pending — verify against the
     * implementation.
     *
     * @type {boolean}
     */
    hasNewVersion: boolean;
    /**
     * Pending sorted result to be applied next frame. If multiple sorted results are received from
     * the worker, the latest result is stored here.
     *
     * @type {{ count: number, version: number, orderData: Uint32Array }|null}
     */
    pendingSorted: {
        count: number;
        version: number;
        orderData: Uint32Array;
    } | null;
    /** @type {Set<number>} */
    centersSet: Set<number>;
    /** True once destroy() has been called. @type {boolean} */
    _destroyed: boolean;
    /** @type {Scene|null} */
    scene: Scene | null;
    /**
     * Handles a sorted-result message from the worker; the latest result is kept in
     * {@link GSplatUnifiedSorter#pendingSorted}.
     *
     * @param {*} message - The message posted by the worker.
     */
    onSorted(message: any): void;
    /**
     * Applies the pending sorted result, if one is available.
     */
    applyPendingSorted(): void;
    /**
     * Returns an order-data buffer to the reuse pool.
     *
     * @param {*} orderData - The order data to recycle.
     */
    releaseOrderData(orderData: any): void;
    /**
     * Destroys the sorter and releases its resources.
     */
    destroy(): void;
    /**
     * Adds or removes centers from the sorter.
     *
     * @param {number} id - The id of the centers.
     * @param {Float32Array|null} centers - The centers buffer.
     */
    setCenters(id: number, centers: Float32Array | null): void;
    /**
     * Updates centers in the worker based on current splats.
     * Adds new centers and removes centers no longer needed.
     *
     * @param {GSplatInfo[]} splats - Array of active splat infos.
     */
    updateCentersForSplats(splats: GSplatInfo[]): void;
    /**
     * Sets sort parameters data for sorting of splats.
     *
     * @param {object} payload - The sort parameters payload to send.
     */
    setSortParameters(payload: object): void;
    /**
     * Sends sorting parameters to the sorter. Called every frame that sorting is needed.
     *
     * @param {object} params - The sorting parameters - per-splat directions, offsets, scales, AABBs.
     * @param {boolean} radialSorting - Whether to use radial distance sorting.
     */
    setSortParams(params: object, radialSorting: boolean): void;
}

/**
 * A utility class for computing camera-relative bin weights used in GSplat sorting.
 * Pre-allocates a single interleaved Float32Array that is reused across frames.
 * Used by both GPU (GSplatSortKeyCompute) and CPU worker sorting paths.
 *
 * This class is stringified and injected into the worker blob, so it must be
 * fully self-contained: no imports, and all constants as static properties.
 *
 * @ignore
 */
declare class GSplatSortBinWeights {
    /**
     * Number of bins for camera-relative precision weighting.
     *
     * @type {number}
     */
    static get NUM_BINS(): number;
    /**
     * Weight tiers for camera-relative precision (distance from camera bin -> weight multiplier).
     * Closer bins get more precision for better visual quality near the camera.
     *
     * @type {Array<{maxDistance: number, weight: number}>}
     */
    static get WEIGHT_TIERS(): Array<{
        maxDistance: number;
        weight: number;
    }>;
    /**
     * Computes the camera bin index based on sort mode and distance range.
     *
     * @param {boolean} radialSort - Whether using radial sort mode.
     * @param {number} minDist - Minimum distance.
     * @param {number} range - Distance range (maxDist - minDist).
     * @returns {number} The camera bin index (0 to NUM_BINS-1).
     */
    static computeCameraBin(radialSort: boolean, minDist: number, range: number): number;
    /**
     * Pre-allocated interleaved array [base0, divider0, base1, divider1, ...].
     *
     * @type {Float32Array}
     */
    binWeights: Float32Array;
    /**
     * Pre-computed weight lookup table by distance from camera (constant).
     *
     * @type {Float32Array}
     */
    weightByDistance: Float32Array;
    /**
     * Pre-allocated scratch array for bits per bin calculation.
     *
     * @type {Float32Array}
     */
    bitsPerBin: Float32Array;
    /**
     * The cameraBin value from the last compute call, used to skip recomputation when inputs
     * are unchanged.
     *
     * @type {number}
     */
    lastCameraBin: number;
    /**
     * The bucketCount value from the last compute call, used to skip recomputation when inputs
     * are unchanged.
     *
     * @type {number}
     */
    lastBucketCount: number;
    /**
     * Computes bin weights for the given camera bin and bucket count.
     * Results are cached - returns immediately if inputs haven't changed.
     *
     * @param {number} cameraBin - The bin index where the camera is located (0 to NUM_BINS-1).
     * @param {number} bucketCount - Total number of sorting buckets (typically 2^numBits).
     * @returns {Float32Array} The same binWeights array with computed values.
     */
    compute(cameraBin: number, bucketCount: number): Float32Array;
}

/**
 * A class for generating GPU sort keys from GSplat world-space positions using compute shaders.
 * Supports both linear (forward vector) and radial (distance) sorting modes with camera-relative
 * bin weighting for precision optimization near the camera.
 *
 * @ignore
 */
declare class GSplatSortKeyCompute {
    /**
     * Creates a new GSplatSortKeyCompute instance.
     *
     * @param {GraphicsDevice} device - The graphics device (must support compute).
     */
    constructor(device: GraphicsDevice);
    /**
     * The graphics device.
     *
     * @type {GraphicsDevice}
     */
    device: GraphicsDevice;
    /**
     * Allocated capacity for sort keys (grow-only).
     *
     * @type {number}
     */
    allocatedCount: number;
    /**
     * Output sort keys storage buffer.
     *
     * @type {StorageBuffer|null}
     */
    keysBuffer: StorageBuffer | null;
    /**
     * Storage buffer for combined bin weights (binBase + binDivider).
     *
     * @type {StorageBuffer|null}
     */
    binWeightsBuffer: StorageBuffer | null;
    /**
     * Current compute instance.
     *
     * @type {Compute|null}
     */
    compute: Compute | null;
    /**
     * Whether the current compute instance is for radial sorting.
     *
     * @type {boolean}
     */
    computeRadialSort: boolean;
    /**
     * Whether the current compute instance uses indirect sort (with compaction).
     *
     * @type {boolean}
     */
    computeUseIndirectSort: boolean;
    /**
     * Bind group format for the compute shader (without compaction).
     *
     * @type {BindGroupFormat|null}
     */
    bindGroupFormat: BindGroupFormat | null;
    /**
     * Bind group format for the compute shader (with indirect sort + compaction).
     *
     * @type {BindGroupFormat|null}
     */
    bindGroupFormatIndirect: BindGroupFormat | null;
    /**
     * Uniform buffer format.
     *
     * @type {UniformBufferFormat|null}
     */
    uniformBufferFormat: UniformBufferFormat | null;
    /**
     * Shared bin weights utility for computing camera-relative precision weighting.
     *
     * @type {GSplatSortBinWeights}
     */
    binWeightsUtil: GSplatSortBinWeights;
    /**
     * Reusable scratch array for the camera position uniform.
     *
     * @type {Float32Array}
     */
    cameraPositionData: Float32Array;
    /**
     * Reusable scratch array for the camera direction uniform.
     *
     * @type {Float32Array}
     */
    cameraDirectionData: Float32Array;
    /**
     * Destroys all resources.
     */
    destroy(): void;
    /**
     * Gets or creates the compute instance for the specified sort mode.
     * Destroys and recreates the compute instance if the mode changes.
     *
     * @param {boolean} computeRadialSort - Whether to get the radial sort variant.
     * @param {boolean} computeUseIndirectSort - Whether indirect dispatch with compaction is used.
     * @returns {Compute} The compute instance.
     * @private
     */
    private _getCompute;
    /**
     * Creates the bind group formats for the compute shaders.
     *
     * @private
     */
    private _createBindGroupFormat;
    /**
     * Ensures the keys buffer has at least the required capacity.
     *
     * @param {number} elementCount - Required number of elements.
     * @private
     */
    private _ensureCapacity;
    /**
     * Generates sort keys from the work buffer using direct dispatch (no culling/compaction).
     *
     * @param {GSplatWorkBuffer} workBuffer - The work buffer containing world-space splat data.
     * @param {GraphNode} cameraNode - The camera node for position and direction.
     * @param {boolean} computeRadialSort - Whether to use radial sorting mode.
     * @param {number} elementCount - Number of splats to process.
     * @param {number} numBits - Number of bits for sort keys (determines bucket count).
     * @param {number} minDist - Minimum distance value for normalization.
     * @param {number} maxDist - Maximum distance value for normalization.
     * @returns {StorageBuffer} The storage buffer containing generated sort keys.
     */
    generate(workBuffer: GSplatWorkBuffer, cameraNode: GraphNode, computeRadialSort: boolean, elementCount: number, numBits: number, minDist: number, maxDist: number): StorageBuffer;
    /**
     * Generates sort keys using indirect dispatch. Only `visibleCount` threads are launched
     * (GPU-determined), reducing key generation work proportionally to the culled fraction.
     *
     * @param {GSplatWorkBuffer} workBuffer - The work buffer containing world-space splat data.
     * @param {GraphNode} cameraNode - The camera node for position and direction.
     * @param {boolean} computeRadialSort - Whether to use radial sorting mode.
     * @param {number} maxElementCount - Maximum number of splats (buffer allocation size).
     * @param {number} numBits - Number of bits for sort keys.
     * @param {number} minDist - Minimum distance value for normalization.
     * @param {number} maxDist - Maximum distance value for normalization.
     * @param {StorageBuffer} compactedSplatIds - Compacted visible splat IDs.
     * @param {StorageBuffer} sortElementCountBuffer - GPU-written buffer containing visible count.
     * @param {number} dispatchSlot - Slot index in the device's indirect dispatch buffer.
     * @returns {StorageBuffer} The storage buffer containing generated sort keys.
     */
    generateIndirect(workBuffer: GSplatWorkBuffer, cameraNode: GraphNode, computeRadialSort: boolean, maxElementCount: number, numBits: number, minDist: number, maxDist: number, compactedSplatIds: StorageBuffer, sortElementCountBuffer: StorageBuffer, dispatchSlot: number): StorageBuffer;
}

/**
 * Helper class for recursive parallel prefix sum (scan) operations.
 * Uses the Blelloch algorithm with up-sweep and down-sweep phases.
 *
 * @ignore
 */
declare class PrefixSumKernel {
    /**
     * Creates a new PrefixSumKernel instance.
     * Call resize() to initialize passes with the desired count.
     *
     * @param {GraphicsDevice} device - The graphics device.
     */
    constructor(device: GraphicsDevice);
    /**
     * The graphics device.
     *
     * @type {GraphicsDevice}
     */
    device: GraphicsDevice;
    /**
     * List of pipeline passes (scan + add_block for each level).
     *
     * @type {Array<{scanCompute: Compute, addBlockCompute: Compute|null, blockSumBuffer: StorageBuffer, dispatchX: number, dispatchY: number, count: number, allocatedCount: number}>}
     */
    passes: Array<{
        scanCompute: Compute;
        addBlockCompute: Compute | null;
        blockSumBuffer: StorageBuffer;
        dispatchX: number;
        dispatchY: number;
        count: number;
        allocatedCount: number;
    }>;
    /**
     * Uniform buffer format (shared across all passes).
     *
     * @type {UniformBufferFormat|null}
     */
    _uniformBufferFormat: UniformBufferFormat | null;
    /**
     * Bind group format (shared across all passes).
     *
     * @type {BindGroupFormat|null}
     */
    _bindGroupFormat: BindGroupFormat | null;
    /**
     * Scan shader (shared across passes, element count is a uniform).
     *
     * @type {Shader|null}
     */
    _scanShader: Shader | null;
    /**
     * Add block shader (shared across passes, element count is a uniform).
     *
     * @type {Shader|null}
     */
    _addBlockShader: Shader | null;
    /**
     * Destroys the kernel and releases resources.
     */
    destroy(): void;
    /**
     * Creates bind group format and shaders (called once in constructor).
     *
     * @private
     */
    private _createFormatsAndShaders;
    /**
     * Recursively creates passes for the prefix sum.
     *
     * @param {StorageBuffer} dataBuffer - Buffer containing data to scan.
     * @param {number} count - Number of elements.
     * @private
     */
    private createPassesRecursive;
    /**
     * Creates a shader for prefix sum operations.
     *
     * @param {string} name - Shader name.
     * @param {string} entryPoint - Entry point function name.
     * @returns {Shader} The created shader.
     * @private
     */
    private _createShader;
    /**
     * Find optimal dispatch dimensions to minimize unused workgroups.
     *
     * @param {number} workgroupCount - Total workgroups needed.
     * @returns {{x: number, y: number}} Dispatch dimensions.
     * @private
     */
    private findOptimalDispatchSize;
    /**
     * Resizes the kernel for a new element count. Grows capacity internally if needed.
     *
     * @param {StorageBuffer} dataBuffer - The buffer to perform prefix sum on.
     * @param {number} count - New element count.
     */
    resize(dataBuffer: StorageBuffer, count: number): void;
    /**
     * Destroys passes but keeps shaders and formats.
     *
     * @ignore
     */
    destroyPasses(): void;
    /**
     * Counts how many recursive passes are needed for a given element count.
     *
     * @param {number} count - Element count.
     * @returns {number} Number of passes needed.
     * @private
     */
    private _countPassesNeeded;
    /**
     * Dispatches all prefix sum passes in sequence.
     *
     * @param {GraphicsDevice} device - The graphics device.
     */
    dispatch(device: GraphicsDevice): void;
}

/**
 * A compute-based GPU radix sort implementation using 4-bit radix (16 buckets).
 * Provides stable sorting of 32-bit unsigned integer keys, returning sorted indices.
 * WebGPU only.
 *
 * **Performance characteristics:**
 * - 4 passes for 16-bit keys, 8 passes for 32-bit keys
 * - Each pass processes 4 bits (16 buckets)
 * - Workgroup size: 16x16 = 256 threads, 8 elements per thread = 2048 elements/workgroup
 *
 * **Algorithm (per pass):**
 * 1. **Histogram**: Each thread extracts 4-bit digits from its elements and
 *    contributes to a per-workgroup histogram using shared memory atomics.
 * 2. **Prefix Sum**: Hierarchical Blelloch scan on block histograms to compute
 *    global offsets for each (digit, workgroup) pair.
 * 3. **Ranked Scatter**: Re-reads keys in rounds, computes local ranks using
 *    per-digit 256-bit bitmasks and hardware popcount, then scatters using:
 *    `position = global_prefix[digit][workgroup] + cumulative_local_rank`
 *
 * Based on "Fast 4-way parallel radix sorting on GPUs" algorithm, implemented
 * following [WebGPU-Radix-Sort](https://github.com/kishimisu/WebGPU-Radix-Sort)
 * by kishimisu (MIT License).
 *
 * @example
 * // Create the radix sort instance (reusable)
 * const radixSort = new ComputeRadixSort(device);
 *
 * // Create a storage buffer with keys to sort
 * const keys = new Uint32Array([5, 2, 8, 1, 9, 3]);
 * const keysBuffer = new StorageBuffer(device, keys.byteLength, BUFFERUSAGE_COPY_DST);
 * keysBuffer.write(keys);
 *
 * // Sort and get indices buffer (keys with values [5,2,8,1,9,3] → indices [3,1,5,0,2,4])
 * const sortedIndices = radixSort.sort(keysBuffer, keys.length, 16); // 16-bit sort
 *
 * // Use sortedIndices buffer in subsequent GPU operations
 * // Clean up when done
 * radixSort.destroy();
 *
 * @category Graphics
 * @ignore
 */
declare class ComputeRadixSort {
    /**
     * Creates a new ComputeRadixSort instance.
     *
     * @param {GraphicsDevice} device - The graphics device (must support compute).
     */
    constructor(device: GraphicsDevice);
    /**
     * The graphics device.
     *
     * @type {GraphicsDevice}
     */
    device: GraphicsDevice;
    /**
     * Current element count.
     *
     * @type {number}
     */
    _elementCount: number;
    /**
     * Number of workgroups for current sort.
     *
     * @type {number}
     */
    _workgroupCount: number;
    /**
     * Allocated workgroup capacity. Tracks the last allocated size; reallocation is triggered
     * when the effective workgroup count (derived from element count and capacity) differs.
     *
     * @type {number}
     */
    _allocatedWorkgroupCount: number;
    /**
     * Minimum element capacity for internal buffers. When set, `_allocateBuffers` uses
     * `max(elementCount, capacity)` as the effective size. The caller can lower this value
     * to request shrinkage; actual reallocation is deferred to the next sort call.
     * After allocation, this is updated to reflect the effective element count.
     *
     * @type {number}
     */
    capacity: number;
    /**
     * Current number of bits for which passes are created.
     *
     * @type {number}
     */
    _numBits: number;
    /**
     * Internal keys buffer 0 (ping-pong).
     *
     * @type {StorageBuffer|null}
     */
    _keys0: StorageBuffer | null;
    /**
     * Internal keys buffer 1 (ping-pong).
     *
     * @type {StorageBuffer|null}
     */
    _keys1: StorageBuffer | null;
    /**
     * Internal values/indices buffer 0 (ping-pong).
     *
     * @type {StorageBuffer|null}
     */
    _values0: StorageBuffer | null;
    /**
     * Internal values/indices buffer 1 (ping-pong).
     *
     * @type {StorageBuffer|null}
     */
    _values1: StorageBuffer | null;
    /**
     * Block sums buffer (16 per workgroup).
     *
     * @type {StorageBuffer|null}
     */
    _blockSums: StorageBuffer | null;
    /**
     * Output sorted indices buffer.
     *
     * @type {StorageBuffer|null}
     */
    _sortedIndices: StorageBuffer | null;
    /**
     * Prefix sum kernel for block sums.
     *
     * @type {PrefixSumKernel|null}
     */
    _prefixSumKernel: PrefixSumKernel | null;
    /**
     * Dispatch dimensions for the sort's compute passes.
     *
     * @type {Vec2}
     */
    _dispatchSize: Vec2;
    /**
     * Cached bind group format for histogram shader (created lazily for current mode).
     *
     * @type {BindGroupFormat|null}
     */
    _histogramBindGroupFormat: BindGroupFormat | null;
    /**
     * Cached bind group format for reorder shader (created lazily for current mode).
     *
     * @type {BindGroupFormat|null}
     */
    _reorderBindGroupFormat: BindGroupFormat | null;
    /**
     * Uniform buffer format for runtime uniforms.
     *
     * @type {UniformBufferFormat|null}
     */
    _uniformBufferFormat: UniformBufferFormat | null;
    /**
     * Cached compute passes. Each entry contains {histogramCompute, reorderCompute} for one pass.
     *
     * @type {Array<{histogramCompute: Compute, reorderCompute: Compute}>}
     */
    _passes: Array<{
        histogramCompute: Compute;
        reorderCompute: Compute;
    }>;
    /**
     * Whether the current passes are for indirect sort mode.
     *
     * @type {boolean}
     */
    _indirect: boolean;
    /**
     * Whether the current passes expect caller-supplied initial values on pass 0.
     *
     * @type {boolean}
     */
    _hasInitialValues: boolean;
    /**
     * Whether the last pass skips writing sorted keys (only values are written).
     * When true, `sortedKeys` will contain stale data after sorting.
     *
     * @type {boolean}
     */
    _skipLastPassKeyWrite: boolean;
    /**
     * Destroys the ComputeRadixSort instance and releases all resources.
     */
    destroy(): void;
    /**
     * Destroys all cached passes and their shaders.
     *
     * @private
     */
    private _destroyPasses;
    /**
     * Destroys internal buffers (not passes or bind group formats).
     *
     * @private
     */
    private _destroyBuffers;
    /**
     * Gets the sorted indices (or values) buffer.
     *
     * @type {StorageBuffer|null}
     */
    get sortedIndices(): StorageBuffer | null;
    /**
     * Gets the sorted keys buffer after the last sort operation. The keys end up
     * in one of the internal ping-pong buffers depending on the number of passes.
     *
     * @type {StorageBuffer|null}
     */
    get sortedKeys(): StorageBuffer | null;
    /**
     * Ensures bind group formats exist for the given mode. Destroys and recreates
     * them if switching between direct and indirect modes.
     *
     * @param {boolean} indirect - Whether to create indirect sort formats.
     * @private
     */
    private _ensureBindGroupFormats;
    /**
     * Creates cached compute passes for all bit offsets.
     *
     * @param {number} numBits - Number of bits to sort.
     * @param {boolean} indirect - Whether to create indirect sort passes.
     * @param {boolean} hasInitialValues - Whether pass 0 reads from caller-supplied initial values.
     * @param {boolean} skipLastPassKeyWrite - Whether the last pass skips writing keys.
     * @private
     */
    private _createPasses;
    /**
     * Allocates or resizes internal buffers and creates passes if needed.
     *
     * @param {number} elementCount - Number of elements to sort.
     * @param {number} numBits - Number of bits to sort.
     * @param {boolean} indirect - Whether passes should use indirect dispatch.
     * @param {boolean} hasInitialValues - Whether pass 0 reads caller-supplied initial values.
     * @param {boolean} skipLastPassKeyWrite - Whether the last pass skips writing keys.
     * @private
     */
    private _allocateBuffers;
    /**
     * Creates a shader with constants embedded.
     *
     * @param {string} name - Shader name.
     * @param {string} source - Shader source.
     * @param {number} currentBit - Current bit offset for this pass.
     * @param {boolean} isFirstPass - Whether this is the first pass (uses GID for indices).
     * @param {BindGroupFormat} bindGroupFormat - Bind group format.
     * @param {boolean} indirect - Whether to add the USE_INDIRECT_SORT define.
     * @returns {Shader} The created shader.
     * @private
     */
    private _createShader;
    /**
     * Executes the GPU radix sort using direct dispatch.
     *
     * @param {StorageBuffer} keysBuffer - Input storage buffer containing u32 keys.
     * @param {number} elementCount - Number of elements to sort.
     * @param {number} [numBits] - Number of bits to sort (must be multiple of 4). Defaults to 16.
     * @param {StorageBuffer} [initialValues] - Optional buffer of initial values for pass 0.
     * When provided, the sort produces output values derived from this buffer instead of
     * sequential indices. The buffer is only read, never modified.
     * @param {boolean} [skipLastPassKeyWrite] - When true, the last pass skips writing sorted
     * keys for a small performance gain. Only use when sorted keys are not needed after sorting.
     * @returns {StorageBuffer} Storage buffer containing sorted indices (or values if
     * initialValues was provided). The returned buffer is internally managed and is released
     * by {@link ComputeRadixSort#destroy}; do not destroy it externally.
     */
    sort(keysBuffer: StorageBuffer, elementCount: number, numBits?: number, initialValues?: StorageBuffer, skipLastPassKeyWrite?: boolean): StorageBuffer;
    /**
     * Executes the GPU radix sort using indirect dispatch. Only sorts `visibleCount`
     * elements (GPU-written) instead of the full buffer, reducing sort cost proportionally.
     *
     * @param {StorageBuffer} keysBuffer - Input storage buffer containing u32 keys.
     * @param {number} maxElementCount - Maximum number of elements (buffer allocation size).
     * @param {number} numBits - Number of bits to sort (must be multiple of 4).
     * @param {number} dispatchSlot - Slot index in the device's indirect dispatch buffer.
     * @param {StorageBuffer} sortElementCountBuffer - GPU-written buffer containing visible count.
     * @param {StorageBuffer} [initialValues] - Optional buffer of initial values for pass 0.
     * When provided, the sort produces output values derived from this buffer instead of
     * sequential indices. The buffer is only read, never modified.
     * @param {boolean} [skipLastPassKeyWrite] - When true, the last pass skips writing sorted
     * keys for a small performance gain. Only use when sorted keys are not needed after sorting.
     * @returns {StorageBuffer} Storage buffer containing sorted values. The returned buffer is
     * internally managed and is released by {@link ComputeRadixSort#destroy}; do not destroy
     * it externally.
     */
    sortIndirect(keysBuffer: StorageBuffer, maxElementCount: number, numBits: number, dispatchSlot: number, sortElementCountBuffer: StorageBuffer, initialValues?: StorageBuffer, skipLastPassKeyWrite?: boolean): StorageBuffer;
    /**
     * Shared execution logic for both direct and indirect radix sort.
     *
     * @param {StorageBuffer} keysBuffer - Input keys buffer.
     * @param {number} elementCount - Number of elements (or max elements for indirect).
     * @param {number} numBits - Number of bits to sort.
     * @param {boolean} indirect - Whether to use indirect dispatch.
     * @param {number} dispatchSlot - Indirect dispatch slot index (-1 for direct).
     * @param {StorageBuffer|null} sortElementCountBuffer - GPU-written element count (null for direct).
     * @param {StorageBuffer} [initialValues] - Optional initial values buffer for pass 0.
     * @param {boolean} [skipLastPassKeyWrite] - When true, the last pass skips writing sorted
     * keys for a small performance gain. Only use when sorted keys are not needed after sorting.
     * @returns {StorageBuffer} Storage buffer containing sorted values.
     * @private
     */
    private _execute;
}

/**
 * Interval-based GPU stream compaction for the GSplat GPU sort path. Replaces the
 * per-pixel flag+scatter approach with an O(numIntervals) cull pass and a
 * workgroup-per-interval scatter pass. Always active when GPU sorting is enabled,
 * regardless of the culling toggle.
 *
 * Pipeline (see {@link GSplatIntervalCompaction#dispatchCompact}): cull+count writes
 * per-interval splat counts, a prefix sum over the counts produces output offsets,
 * and a scatter pass expands visible intervals into the compacted splat ID buffer.
 *
 * @ignore
 */
declare class GSplatIntervalCompaction {
    /**
     * @param {GraphicsDevice} device - The graphics device (must support compute).
     */
    constructor(device: GraphicsDevice);
    /** @type {GraphicsDevice} */
    device: GraphicsDevice;
    /**
     * Compacted splat IDs produced by the scatter pass (output of the pipeline).
     *
     * @type {StorageBuffer|null}
     */
    compactedSplatIds: StorageBuffer | null;
    /**
     * Interval metadata uploaded from the world state by
     * {@link GSplatIntervalCompaction#uploadIntervals}.
     *
     * @type {StorageBuffer|null}
     */
    intervalsBuffer: StorageBuffer | null;
    /**
     * Per-interval splat counts written by the cull pass (0 for culled intervals);
     * input to the prefix sum.
     *
     * @type {StorageBuffer|null}
     */
    countBuffer: StorageBuffer | null;
    /**
     * Prefix sum kernel run over the count buffer to produce scatter offsets and
     * the visible count.
     *
     * @type {PrefixSumKernel|null}
     */
    prefixSumKernel: PrefixSumKernel | null;
    /**
     * Splat count buffer. NOTE(review): exact contents not visible from this
     * declaration — presumably the total active splat count for the shaders; confirm
     * against the implementation.
     *
     * @type {StorageBuffer|null}
     */
    numSplatsBuffer: StorageBuffer | null;
    /**
     * GPU-written buffer holding the visible element count, consumed by the indirect
     * radix sort (see ComputeRadixSort#sortIndirect).
     *
     * @type {StorageBuffer|null}
     */
    sortElementCountBuffer: StorageBuffer | null;
    /**
     * Allocated capacity (in splats) of the compacted output buffer.
     *
     * @type {number}
     */
    allocatedCompactedCount: number;
    /**
     * Allocated capacity (in intervals) of the interval metadata buffer.
     *
     * @type {number}
     */
    allocatedIntervalCount: number;
    /**
     * Allocated size of the count buffer.
     *
     * @type {number}
     */
    allocatedCountBufferSize: number;
    /**
     * World state version for which intervals were last uploaded. Avoids redundant
     * uploads when sortGpu is called repeatedly with the same world state.
     *
     * @type {number}
     */
    _uploadedVersion: number;
    /**
     * Cached cull compute for the perspective (frustum planes) variant, created lazily.
     *
     * @type {Compute|null}
     */
    _cullComputePerspective: Compute | null;
    /**
     * Cached cull compute for the fisheye (cone) variant, created lazily.
     *
     * @type {Compute|null}
     */
    _cullComputeFisheye: Compute | null;
    /**
     * Compute pass expanding visible intervals into compactedSplatIds.
     *
     * @type {Compute|null}
     */
    _scatterCompute: Compute | null;
    /**
     * Compute pass writing indirect draw/dispatch arguments from the visible count.
     *
     * @type {Compute|null}
     */
    _writeIndirectArgsCompute: Compute | null;
    /** @type {BindGroupFormat|null} */
    _cullBindGroupFormatPerspective: BindGroupFormat | null;
    /** @type {BindGroupFormat|null} */
    _cullBindGroupFormatFisheye: BindGroupFormat | null;
    /** @type {BindGroupFormat|null} */
    _scatterBindGroupFormat: BindGroupFormat | null;
    /** @type {BindGroupFormat|null} */
    _writeArgsBindGroupFormat: BindGroupFormat | null;
    /** @type {UniformBufferFormat|null} */
    _scatterUniformBufferFormat: UniformBufferFormat | null;
    /** @type {UniformBufferFormat|null} */
    _writeArgsUniformBufferFormat: UniformBufferFormat | null;
    /**
     * Destroys the instance and releases all GPU resources.
     */
    destroy(): void;
    /**
     * Destroys the cached cull pass resources.
     *
     * @private
     */
    private _destroyCullPass;
    /**
     * Creates the uniform buffer formats for the scatter and write-args passes.
     *
     * @private
     */
    private _createUniformBufferFormats;
    /**
     * Creates a cull compute pass for the given mode.
     *
     * @param {boolean} fisheye - Whether to create the fisheye (cone) variant.
     * @returns {{ compute: Compute, bindGroupFormat: BindGroupFormat }} The created compute and bind group format.
     * @private
     */
    private _createCullPass;
    /**
     * Returns the cached cull Compute for the given mode, lazily creating it on first use.
     *
     * @param {boolean} fisheye - Whether fisheye is active.
     * @returns {Compute} The cached Compute instance.
     * @private
     */
    private _getCullCompute;
    /**
     * Creates the scatter compute pass.
     *
     * @private
     */
    private _createScatterCompute;
    /**
     * Creates the compute pass that writes indirect draw/dispatch arguments.
     *
     * @private
     */
    private _createWriteIndirectArgsCompute;
    /**
     * Ensures all buffers have sufficient capacity.
     *
     * @param {number} numIntervals - Number of intervals.
     * @param {number} totalActiveSplats - Total active splats (max compacted output size).
     * @private
     */
    private _ensureCapacity;
    /**
     * Builds and uploads interval metadata from the world state. Called once per
     * world state change (not every frame).
     *
     * @param {GSplatWorldState} worldState - The world state to extract intervals from.
     */
    uploadIntervals(worldState: GSplatWorldState): void;
    /**
     * Runs the full interval compaction pipeline: cull+count, prefix sum, scatter.
     *
     * @param {GSplatFrustumCuller} frustumCuller - Frustum culler providing bounds/transforms storage buffers and frustum planes.
     * @param {number} numIntervals - Total number of intervals.
     * @param {number} totalActiveSplats - Total active splats across all intervals.
     * @param {boolean} fisheyeEnabled - Whether fisheye cone culling should be used instead of frustum planes.
     */
    dispatchCompact(frustumCuller: GSplatFrustumCuller, numIntervals: number, totalActiveSplats: number, fisheyeEnabled: boolean): void;
    /**
     * Writes indirect draw and dispatch arguments from the prefix sum visible count.
     *
     * @param {number} drawSlot - Slot index in the device's indirect draw buffer.
     * @param {number} dispatchSlot - Slot index in the device's indirect dispatch buffer.
     * @param {number} numIntervals - Total interval count (index into prefix sum for visible count).
     */
    writeIndirectArgs(drawSlot: number, dispatchSlot: number, numIntervals: number): void;
}

/**
 * GSplatManager manages the rendering of splats using a work buffer, where all active splats are
 * stored and rendered from.
 *
 * Shared culling + compaction (GPU sorting and compute renderer, WebGPU only):
 *   Interval compaction operates on contiguous intervals of splats (one per octree node).
 *   1. Cull + count (compute): each interval's bounding sphere is tested against frustum
 *      planes (or a fisheye cone). The pass writes the interval's splat count (or 0 if
 *      culled) into a count buffer.
 *   2. Prefix sum: exclusive prefix sum over the count buffer produces output offsets.
 *      The last element gives visibleCount.
 *   3. Scatter (compute): one workgroup per interval expands visible intervals into
 *      compactedSplatIds (flat list of work-buffer pixel indices).
 *
 * Raster renderer — GPU sorting (WebGPU, {@link GSplatQuadRenderer}):
 *   Uses shared steps 1-3 above, then:
 *   4. Generate sort keys: an indirect compute dispatch (visibleCount threads) reads each
 *      compactedSplatIds[i] to look up the splat's depth and writes a sort key to keysBuffer.
 *   5. Radix sort: an indirect GPU radix sort over keysBuffer, with compactedSplatIds supplied
 *      as initial values, produces a buffer of sorted splat IDs directly.
 *   6. Render: the vertex shader reads sortedSplatIds[vertexId] → splatId.
 *
 * Raster renderer — CPU sorting (WebGPU and WebGL, {@link GSplatQuadRenderer}):
 *   1. Sort on worker: camera position and splat centers are sent to a web worker which
 *      performs a counting sort and returns the sorted order as orderBuffer.
 *   2. Render: the vertex shader reads orderBuffer[vertexId] → splatId.
 *      No culling or compaction is used.
 *
 * Compute tiled renderer (WebGPU only, {@link GSplatComputeLocalRenderer}):
 *   Uses shared steps 1-3 above, then runs a fully compute-based tiled pipeline:
 *   project splats into a cache, bin into screen tiles, sort per-tile by depth, and rasterize
 *   front-to-back. See {@link GSplatComputeLocalRenderer} for the full pass breakdown.
 *
 * @ignore
 */
declare class GSplatManager {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {GSplatDirector} director - The director.
     * @param {Layer} layer - The layer.
     * @param {GraphNode} cameraNode - The camera node.
     */
    constructor(device: GraphicsDevice, director: GSplatDirector, layer: Layer, cameraNode: GraphNode);
    /** @type {GraphicsDevice} */
    device: GraphicsDevice;
    /** @type {GraphNode} */
    node: GraphNode;
    /** @type {GSplatWorkBuffer} */
    workBuffer: GSplatWorkBuffer;
    /** @type {GSplatRenderer} */
    renderer: GSplatRenderer;
    /**
     * A map of versioned world states, keyed by version.
     *
     * @type {Map<number, GSplatWorldState>}
     */
    worldStates: Map<number, GSplatWorldState>;
    /**
     * The version of the last world state.
     *
     * @type {number}
     */
    lastWorldStateVersion: number;
    /**
     * The currently active renderer mode. Starts as undefined so the first
     * prepareRendererMode() call always creates the appropriate resources.
     *
     * @type {number|undefined}
     */
    activeRenderer: number | undefined;
    /**
     * CPU-based sorter (when not using GPU sorting).
     *
     * @type {GSplatUnifiedSorter|null}
     */
    cpuSorter: GSplatUnifiedSorter | null;
    /**
     * GPU-based key generator (when using GPU sorting).
     *
     * @type {GSplatSortKeyCompute|null}
     */
    keyGenerator: GSplatSortKeyCompute | null;
    /**
     * GPU-based radix sorter (when using GPU sorting).
     *
     * @type {ComputeRadixSort|null}
     */
    gpuSorter: ComputeRadixSort | null;
    /**
     * Interval-based GPU compaction (always-on for GPU sort path).
     *
     * @type {GSplatIntervalCompaction|null}
     */
    intervalCompaction: GSplatIntervalCompaction | null;
    /**
     * Indirect draw slot index for the current frame (-1 when not using indirect draw).
     *
     * @type {number}
     */
    indirectDrawSlot: number;
    /**
     * Indirect dispatch slot index for GPU-sort indirect dispatch args.
     * Slot +0 = key gen, slot +1 = sort. The compute local renderer builds
     * its own indirect args in private buffers and does not use these slots.
     *
     * @type {number}
     */
    indirectDispatchSlot: number;
    /**
     * Total intervals from the last interval compaction dispatch. Needed for
     * writeIndirectArgs to index into the prefix sum buffer for visible count.
     *
     * @type {number}
     */
    lastCompactedNumIntervals: number;
    /** @type {number} */
    sortedVersion: number;
    /**
     * When true, suppresses ready=true in frame:ready until a fullUpdate cycle runs.
     * Only set when octreeInstances exist and params change (dirty).
     *
     * @type {boolean}
     * @private
     */
    private _awaitingLodUpdate;
    /**
     * Cached work buffer format version for detecting extra stream changes.
     *
     * @type {number}
     * @private
     */
    private _workBufferFormatVersion;
    /**
     * Flag set when the work buffer needs a full rebuild due to format changes.
     *
     * @type {boolean}
     * @private
     */
    private _workBufferRebuildRequired;
    /**
     * Number of blocks uploaded to the work buffer this frame.
     *
     * @type {number}
     */
    bufferCopyUploaded: number;
    /**
     * Total number of blocks in the work buffer this frame.
     *
     * @type {number}
     */
    bufferCopyTotal: number;
    /**
     * Tracks placement state changes (format version, modifier hash, numSplats, centersVersion).
     *
     * @type {GSplatPlacementStateTracker}
     * @private
     */
    private _stateTracker;
    /**
     * Tracks last seen centersVersion per resource ID for detecting centers updates.
     *
     * @type {Map<number, number>}
     * @private
     */
    private _centersVersions;
    /** @type {number} */
    framesTillFullUpdate: number;
    /** @type {Vec3} */
    lastLodCameraPos: Vec3;
    /** @type {Vec3} */
    lastLodCameraFwd: Vec3;
    /** @type {number} */
    lastLodCameraFov: number;
    /** @type {Vec3} */
    lastSortCameraPos: Vec3;
    /** @type {Vec3} */
    lastSortCameraFwd: Vec3;
    /** @type {Vec3} */
    lastCullingCameraFwd: Vec3;
    /** @type {Mat4} */
    lastCullingProjMat: Mat4;
    /** @type {boolean} */
    sortNeeded: boolean;
    /**
     * Budget balancer for global splat budget enforcement.
     *
     * @type {GSplatBudgetBalancer}
     * @private
     */
    private _budgetBalancer;
    /**
     * Dynamic scale factor applied to LOD parameters during budget enforcement. Shifts all
     * LOD boundaries uniformly to bring the initial estimate closer to the budget target,
     * reducing balancer work. Applied directly to lodBaseDistance and gently to lodMultiplier.
     * Values > 1 push boundaries outward (more splats), values < 1 pull them inward
     * (fewer splats).
     *
     * @type {number}
     * @private
     */
    private _budgetScale;
    /**
     * Persistent block allocator for work buffer pixel allocations. Grows on demand.
     *
     * @type {BlockAllocator}
     * @private
     */
    private _allocator;
    /**
     * Maps allocId (from GSplatPlacement) to the corresponding MemBlock in the allocator.
     * Shared with GSplatWorldState constructors which mutate it during diff.
     *
     * @type {Map<number, MemBlock>}
     * @private
     */
    private _allocationMap;
    /** @type {Vec3} */
    lastColorUpdateCameraPos: Vec3;
    /** @type {GraphNode} */
    cameraNode: GraphNode;
    /** @type {Scene} */
    scene: Scene;
    /**
     * Layer placements, only non-octree placements are included.
     *
     * @type {GSplatPlacement[]}
     */
    layerPlacements: GSplatPlacement[];
    /** @type {boolean} */
    layerPlacementsDirty: boolean;
    /**
     * True when placements have been added or removed since the last world state was created.
     * Triggers a full work buffer rebuild so boundsBaseIndex stays consistent.
     *
     * @type {boolean}
     * @private
     */
    private _placementSetChanged;
    /** @type {Map<GSplatPlacement, GSplatOctreeInstance>} */
    octreeInstances: Map<GSplatPlacement, GSplatOctreeInstance>;
    /**
     * Octree instances scheduled for destruction. We collect their releases and destroy them
     * when creating the next world state.
     *
     * @type {GSplatOctreeInstance[]}
     */
    octreeInstancesToDestroy: GSplatOctreeInstance[];
    /**
     * Flag set when new octree instances are added, to trigger immediate LOD evaluation.
     *
     * @type {boolean}
     */
    hasNewOctreeInstances: boolean;
    /**
     * Bitmask flags controlling which render passes this manager participates in.
     *
     * @type {number|undefined}
     */
    renderMode: number | undefined;
    director: GSplatDirector;
    layer: Layer;
    destroy(): void;
    _destroyed: boolean;
    /**
     * Destroys GPU sorting resources (key generator, radix sorter, compaction).
     *
     * @private
     */
    private destroyGpuSorting;
    /**
     * Destroys interval compaction resources.
     *
     * @param {boolean} [useCpuSort] - Whether to switch the renderer to CPU-sorted mode.
     * @private
     */
    private destroyIntervalCompaction;
    /**
     * Destroys CPU sorting resources (worker-based sorter).
     *
     * @private
     */
    private destroyCpuSorting;
    /**
     * Creates GPU sorting resources (key generator, radix sorter) if not already present.
     *
     * @private
     */
    private initGpuSorting;
    /**
     * Creates the CPU sorter and prepares it for the current world state. Disables any
     * GPU-side indirect draw and hides the mesh until the first sort result arrives.
     *
     * @private
     */
    private initCpuSorting;
    get material(): ShaderMaterial;
    /**
     * Dispatches compute pick pipeline and returns the configured pick mesh instance.
     * Only works when the local compute renderer is active.
     *
     * @param {object} camera - The camera.
     * @param {number} width - Pick target width.
     * @param {number} height - Pick target height.
     * @returns {import('../mesh-instance.js').MeshInstance|null} The pick mesh instance, or null.
     */
    prepareForPicking(camera: object, width: number, height: number): MeshInstance | null;
    /**
     * Creates the CPU sorter (Web Worker based).
     *
     * @returns {GSplatUnifiedSorter} The created sorter.
     */
    createSorter(): GSplatUnifiedSorter;
    /**
     * Sets the render mode for this manager and its renderer.
     *
     * @param {number} renderMode - Bitmask flags controlling render passes (GSPLAT_FORWARD, GSPLAT_SHADOW, or both).
     * @ignore
     */
    setRenderMode(renderMode: number): void;
    /**
     * True when frustum culling can run (bounds data available).
     *
     * @type {boolean}
     * @private
     */
    private get canCull();
    /**
     * Creates the renderer and sort resources for the given mode. Used at init time.
     *
     * @param {number} mode - The GSPLAT_RENDERER_* constant.
     * @private
     */
    private _createRenderer;
    /**
     * Checks whether the resolved renderer mode has changed and transitions to the new mode.
     * Handles both sort-mode transitions (CPU <-> GPU sort) and full renderer swaps
     * (quad <-> compute).
     *
     * @private
     */
    private prepareRendererMode;
    /**
     * Supply the manager with the placements to use. This is used to update the manager when the
     * layer's placements have changed, called infrequently.
     *
     * @param {GSplatPlacement[]} placements - The placements to reconcile with.
     */
    reconcile(placements: GSplatPlacement[]): void;
    updateWorldState(): void;
    onSorted(count: any, version: any, orderData: any): void;
    /**
     * Rebuilds the work buffer for a world state on its first sort.
     * Resizes buffer, renders changed splats, syncs transforms, and handles pending releases.
     *
     * @param {GSplatWorldState} worldState - The world state to rebuild for.
     * @param {number} count - The number of splats.
     * @param {boolean} [forceFullRebuild] - Force rendering all splats (e.g. format change).
     */
    rebuildWorkBuffer(worldState: GSplatWorldState, count: number, forceFullRebuild?: boolean): void;
    /**
     * Cleans up old world states between the last sorted version and the new version.
     * Merges upload requirements from skipped states into the active state, then
     * decrements ref counts and destroys old states.
     *
     * @param {number} newVersion - The new version to clean up to.
     */
    cleanupOldWorldStates(newVersion: number): void;
    /**
     * Applies incremental work buffer updates for splats that have changed.
     * Detects transform changes and color update thresholds, then batch renders updates.
     * Sets sortNeeded = true when splats move.
     *
     * @param {GSplatWorldState} state - The world state to update.
     */
    applyWorkBufferUpdates(state: GSplatWorldState): void;
    /**
     * Tests if the camera has moved or rotated enough to require LOD update.
     *
     * @returns {boolean} True if camera moved/rotated over thresholds, otherwise false.
     */
    testCameraMovedForLod(): boolean;
    /**
     * Tests if the camera has moved enough to require re-sorting.
     * - For radial sorting: only position matters (rotation doesn't affect sort order)
     * - For directional sorting: only forward direction matters (position doesn't affect sort order)
     *
     * @returns {boolean} True if camera moved enough to require re-sorting, otherwise false.
     */
    testCameraMovedForSort(): boolean;
    /**
     * Tests if the camera frustum has changed since the last sort or compaction. Checks both
     * projection matrix and camera rotation. Used to trigger re-culling/compaction independently of
     * sort-key changes.
     *
     * @returns {boolean} True if the frustum changed.
     */
    testFrustumChanged(): boolean;
    /**
     * Updates the camera tracking state for color accumulation calculations.
     * Called after any render that updates colors (full or color-only).
     */
    updateColorCameraTracking(): void;
    /**
     * Determines the colorization mode for rendering based on debug flags.
     *
     * @returns {Array<number[]>|undefined} Color array for debug visualization, or undefined for normal rendering
     */
    getDebugColors(): Array<number[]> | undefined;
    /**
     * Calculates camera translation delta since last color update.
     * Updates and returns the shared _cameraDeltas object.
     *
     * @returns {{ translationDelta: number }} Shared camera movement deltas object
     */
    calculateColorCameraDeltas(): {
        translationDelta: number;
    };
    /**
     * Fires the frame:ready event with current sorting and loading state.
     */
    fireFrameReadyEvent(): void;
    /**
     * Computes max world-space distance across all octree instances. Used for sqrt-based bucket
     * distribution in budget balancing. Non-octree placements are excluded since they have fixed
     * splat counts and don't participate in LOD-based budget balancing.
     *
     * @returns {number} Maximum world-space distance, minimum 1 to avoid division by zero.
     * @private
     */
    private computeGlobalMaxDistance;
    /**
     * Enforces global splat budget across all octree instances using phased approach.
     *
     * @param {number} budget - Target splat budget from GSplatParams.splatBudget.
     * @private
     */
    private _enforceBudget;
    /**
     * Detects if the work buffer format has been replaced (e.g. dataFormat changed) and
     * recreates the work buffer if needed.
     *
     * @private
     */
    private handleFormatChange;
    update(): number;
    /**
     * Sorts the splats using GPU compute shaders.
     *
     * @param {GSplatWorldState} worldState - The world state to sort.
     */
    sortGpu(worldState: GSplatWorldState): void;
    /**
     * Runs frustum culling and interval compaction on the GPU, then passes the compacted
     * splat ID buffer directly to the local compute renderer (no key generation or radix sort).
     *
     * @param {GSplatWorldState} worldState - The world state to compact.
     * @private
     */
    private compactGpu;
    /**
     * Allocates per-frame indirect draw and dispatch slots and runs writeIndirectArgs
     * for interval compaction.
     *
     * @param {number} numIntervals - Total interval count (index into prefix sum for visible count).
     * @private
     */
    private allocateAndWriteIntervalIndirectArgs;
    /**
     * Generates sort keys and runs GPU radix sort using indirect dispatch
     * (sorting only the visible splat count determined by interval compaction).
     *
     * @param {number} elementCount - Total number of splats.
     * @param {number} roundedNumBits - Number of sort bits (rounded to multiple of 4).
     * @param {number} minDist - Minimum distance for key normalization.
     * @param {number} maxDist - Maximum distance for key normalization.
     * @param {StorageBuffer|null} compactedSplatIds - Compacted splat IDs from interval compaction.
     * @returns {StorageBuffer} The sorted indices buffer.
     * @private
     */
    private dispatchGpuSort;
    /**
     * Applies GPU sort results to the renderer with indirect draw from interval compaction.
     * The sortedIndices buffer already contains actual splat IDs (single indirection) because
     * compactedSplatIds were fed as initial values to the radix sort.
     *
     * @param {GSplatWorldState} worldState - The world state being sorted.
     * @param {StorageBuffer} sortedIndices - Buffer containing sorted splat IDs.
     * @private
     */
    private applyGpuSortResults;
    /**
     * Prepares frustum culling data: updates the GPU transform buffers and computes
     * frustum planes from the camera. The actual culling test runs inline in the
     * interval compaction compute shader.
     *
     * @param {GSplatWorldState} worldState - The world state whose splats provide transforms.
     * @private
     */
    private _runFrustumCulling;
    /**
     * Refreshes indirect draw parameters on non-sort frames.
     * Allocates a new per-frame draw slot and re-runs writeIndirectArgs to write
     * draw args from the visibleCount that was established during the last sort.
     * Does NOT re-run compaction (the compacted buffer must stay stable).
     *
     * @private
     */
    private refreshIndirectDraw;
    /**
     * Computes the min/max effective distances for the current world state.
     *
     * @param {GSplatWorldState} worldState - The world state.
     * @returns {{minDist: number, maxDist: number}} The distance range.
     */
    computeDistanceRange(worldState: GSplatWorldState): {
        minDist: number;
        maxDist: number;
    };
    /**
     * Sorts the splats using CPU worker (asynchronous).
     *
     * @param {GSplatWorldState} lastState - The last world state.
     */
    sortCpu(lastState: GSplatWorldState): void;
    /**
     * Prepares sort parameters data for the sorter worker.
     *
     * @param {GSplatWorldState} worldState - The world state containing all needed data.
     * @returns {object} - Data for sorter worker.
     */
    prepareSortParameters(worldState: GSplatWorldState): object;
}

/**
 * Class responsible for managing {@link GSplatManager} instances for Cameras and their Layers.
 *
 * @ignore
 */
declare class GSplatDirector {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {Renderer} renderer - The renderer.
     * @param {Scene} scene - The scene.
     * @param {EventHandler} eventHandler - Event handler for firing events.
     */
    constructor(device: GraphicsDevice, renderer: Renderer, scene: Scene, eventHandler: EventHandler);
    /**
     * @type {GraphicsDevice}
     */
    device: GraphicsDevice;
    /**
     * Per camera data.
     *
     * @type {Map<Camera, GSplatCameraData>}
     */
    camerasMap: Map<Camera, GSplatCameraData>;
    /**
     * @type {Scene}
     */
    scene: Scene;
    /**
     * @type {EventHandler}
     */
    eventHandler: EventHandler;
    /**
     * @type {Renderer}
     */
    renderer: Renderer;
    /**
     * Releases resources held by the director, including all per-camera data.
     */
    destroy(): void;
    /**
     * Returns the per-camera data for the given camera. Presumably created on first access —
     * confirm in the implementation.
     *
     * @param {Camera} camera - The camera.
     * @returns {GSplatCameraData} The per-camera data.
     */
    getCameraData(camera: any): GSplatCameraData;
    /**
     * Dispatches pick compute for the given camera and layer, returning a ready-to-render
     * pick mesh instance (or null if no gsplat data exists for this camera/layer pair).
     *
     * @param {Camera} camera - The camera.
     * @param {number} width - Pick target width.
     * @param {number} height - Pick target height.
     * @param {Layer} layer - The layer to pick from.
     * @returns {import('../mesh-instance.js').MeshInstance|null} The configured pick mesh instance.
     */
    prepareForPicking(camera: Camera, width: number, height: number, layer: Layer): MeshInstance | null;
    /**
     * Updates the director for the given layer composition cameras and layers.
     *
     * @param {LayerComposition} comp - The layer composition.
     */
    update(comp: LayerComposition): void;
}

/**
 * Per camera data the director keeps track of.
 *
 * @ignore
 */
declare class GSplatCameraData {
    /**
     * @type {Map<Layer, GSplatLayerData>}
     */
    layersMap: Map<Layer, GSplatLayerData>;
    /**
     * Releases all per-layer data held by this camera data.
     */
    destroy(): void;
    /**
     * Removes (and presumably destroys — confirm) the data associated with the given layer.
     *
     * @param {Layer} layer - The layer whose data to remove.
     */
    removeLayerData(layer: any): void;
    /**
     * Returns the per-layer data for the given layer. Presumably created on first access —
     * confirm in the implementation.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {GSplatDirector} director - The director.
     * @param {Layer} layer - The layer.
     * @param {Camera} camera - The camera.
     * @returns {GSplatLayerData} The per-layer data.
     */
    getLayerData(device: any, director: any, layer: any, camera: any): GSplatLayerData;
}

/**
 * Per layer data the director keeps track of.
 *
 * @ignore
 */
declare class GSplatLayerData {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {GSplatDirector} director - The director.
     * @param {Layer} layer - The layer.
     * @param {Camera} camera - The camera.
     */
    constructor(device: GraphicsDevice, director: GSplatDirector, layer: Layer, camera: Camera);
    /**
     * Manager for camera rendering of splats on this layer.
     *
     * @type {GSplatManager|null}
     */
    gsplatManager: GSplatManager | null;
    /**
     * Separate manager, presumably used for shadow rendering of splats — confirm in the
     * implementation.
     *
     * @type {GSplatManager|null}
     */
    gsplatManagerShadow: GSplatManager | null;
    /**
     * Creates a new GSplatManager, sets its render mode, and fires the material:created event.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {GSplatDirector} director - The director.
     * @param {Layer} layer - The layer.
     * @param {GraphNode} cameraNode - The camera node.
     * @param {Camera} camera - The camera.
     * @param {number} renderMode - The render mode flags.
     * @returns {GSplatManager} The created manager.
     * @private
     */
    private createManager;
    /**
     * Updates the manager configuration based on current layer placements.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {GSplatDirector} director - The director.
     * @param {Layer} layer - The layer.
     * @param {Camera} camera - The camera.
     */
    updateConfiguration(device: GraphicsDevice, director: GSplatDirector, layer: Layer, camera: Camera): void;
    /**
     * Releases both managers (regular and shadow) owned by this layer data.
     */
    destroy(): void;
}

/**
 * A shadow map texture together with the render targets used to render into it.
 */
declare class ShadowMap {
    /** Creates a shadow map suitable for the given light. */
    static create(device: any, light: any): ShadowMap;
    /** Creates a shadow map used as an atlas of the given resolution and shadow type. */
    static createAtlas(device: any, resolution: any, shadowType: any): ShadowMap;
    /** Creates a 2D shadow map of the given size. */
    static create2dMap(device: any, size: any, shadowType: any): ShadowMap;
    /** Creates a cubemap shadow map of the given size. */
    static createCubemap(device: any, size: any, shadowType: any): ShadowMap;
    /**
     * @param texture - The backing texture — presumably a Texture; confirm.
     * @param targets - Render targets into the texture — presumably one per face for cubemaps.
     */
    constructor(texture: any, targets: any);
    texture: any;
    // True when this map participates in the shadow map cache — TODO confirm semantics.
    cached: boolean;
    renderTargets: any;
    /** Releases the texture and render targets. */
    destroy(): void;
}

/**
 * Texture atlas that packs shadow maps and cookie textures for local lights into shared
 * textures, managing slot subdivision and assignment.
 */
declare class LightTextureAtlas {
    constructor(device: any);
    device: any;
    // Bumped when the atlas layout changes — TODO confirm exact invalidation semantics.
    version: number;
    shadowAtlasResolution: number;
    shadowAtlas: ShadowMap;
    // Border pixels reserved at slot edges to avoid filtering bleed — confirm.
    shadowEdgePixels: number;
    cookieAtlasResolution: number;
    cookieAtlas: Texture;
    cookieRenderTarget: RenderTarget;
    // Available atlas slots and the current split configuration.
    slots: any[];
    atlasSplit: any[];
    // Per-cube-face offsets used when packing cubemap faces into the 2D atlas.
    cubeSlotsOffsets: Vec2[];
    scissorVec: Vec4;
    destroy(): void;
    destroyShadowAtlas(): void;
    destroyCookieAtlas(): void;
    /** (Re)allocates the shadow atlas texture at the given resolution. */
    allocateShadowAtlas(resolution: any, shadowType?: number): void;
    /** (Re)allocates the cookie atlas texture at the given resolution. */
    allocateCookieAtlas(resolution: any): void;
    /** Resolves the uniform scope ids used by updateUniforms. */
    allocateUniforms(): void;
    _shadowAtlasTextureId: any;
    _shadowAtlasParamsId: any;
    _shadowAtlasParams: Float32Array<ArrayBuffer>;
    _cookieAtlasTextureId: any;
    /** Uploads atlas textures and parameters to their uniforms. */
    updateUniforms(): void;
    /** Subdivides the atlas into slots based on light count and lighting parameters. */
    subdivide(numLights: any, lightingParams: any): void;
    /** Filters local lights that need atlas slots (shadows/cookies) — confirm criteria. */
    collectLights(localLights: any, lightingParams: any): any[];
    /** Configures a light's render data for the given atlas rectangle. */
    setupSlot(light: any, rect: any): void;
    /** Assigns a slot to a light, tracking whether the slot was reassigned. */
    assignSlot(light: any, slotIndex: any, slotReassigned: any): void;
    /** Per-frame update: subdivides and assigns slots for the given local lights. */
    update(localLights: any, lightingParams: any): void;
}

/**
 * Cache of shadow maps, keyed by a string derived from the light, allowing shadow map
 * render textures to be reused between compatible lights.
 */
declare class ShadowMapCache {
    cache: Map<any, any>;
    /** Destroys all cached shadow maps and clears the cache. */
    destroy(): void;
    /** Clears the cache — presumably destroying cached maps; confirm vs destroy(). */
    clear(): void;
    /** Builds the cache key for a light (based on its shadow settings — confirm). */
    getKey(light: any): string;
    /** Returns a cached shadow map for the light, or creates one if none matches. */
    get(device: any, light: any): any;
    /** Returns a shadow map to the cache for later reuse. */
    add(light: any, shadowMap: any): void;
}

/**
 * Shared shadow rendering functionality: culls shadow casters, renders shadow maps and
 * applies VSM blur. Used by the local and directional shadow renderers.
 */
declare class ShadowRenderer {
    /** Creates a camera configured for rendering a shadow map face of the given type. */
    static createShadowCamera(shadowType: any, type: any, face: any): Camera;
    /**
     * @param {Renderer} renderer - The renderer.
     * @param {LightTextureAtlas} lightTextureAtlas - The shadow map atlas.
     */
    constructor(renderer: Renderer, lightTextureAtlas: LightTextureAtlas);
    /**
     * A cache of shadow passes. First index is looked up by light type, second by shadow type.
     *
     * @type {ShaderPassInfo[][]}
     * @private
     */
    private shadowPassCache;
    device: GraphicsDevice;
    /** @type {Renderer} */
    renderer: Renderer;
    /** @type {LightTextureAtlas} */
    lightTextureAtlas: LightTextureAtlas;
    // Uniform scope ids and weights used by the VSM blur passes.
    sourceId: ScopeId;
    pixelOffsetId: ScopeId;
    weightId: ScopeId;
    blurVsmShader: {}[];
    blurVsmWeights: {};
    shadowMapLightRadiusId: ScopeId;
    viewUniformFormat: UniformBufferFormat;
    viewBindGroupFormat: BindGroupFormat;
    // Blend states with color write enabled/disabled, selected per shadow type — confirm.
    blendStateWrite: BlendState;
    blendStateNoWrite: BlendState;
    /** Internal culling of the given mesh instances by the shadow camera. */
    _cullShadowCastersInternal(meshInstances: any, visible: any, camera: any): void;
    /**
     * Culls the list of shadow casters used by the light by the camera, storing visible mesh
     * instances in the specified array.
     *
     * @param {LayerComposition} comp - The layer composition used as a source of shadow casters,
     * if those are not provided directly.
     * @param {Light} light - The light.
     * @param {MeshInstance[]} visible - The array to store visible mesh instances in.
     * @param {Camera} camera - The camera.
     * @param {MeshInstance[]} [casters] - Optional array of mesh instances to use as casters.
     */
    cullShadowCasters(comp: LayerComposition, light: Light, visible: MeshInstance[], camera: Camera, casters?: MeshInstance[]): void;
    /** Comparator used to sort shadow draw calls (by shader — confirm sort key). */
    sortCompareShader(drawCallA: any, drawCallB: any): number;
    /** Sets device render state (blend/depth etc.) for shadow rendering of the light. */
    setupRenderState(device: any, light: any): void;
    /** Dispatches shadow-related uniforms for the given light, camera and face. */
    dispatchUniforms(light: any, shadowCam: any, lightRenderData: any, face: any): void;
    /**
     * @param {Light} light - The light.
     * @returns {number} Index of shadow pass info.
     */
    getShadowPass(light: Light): number;
    /**
     * @param {MeshInstance[]} visibleCasters - Visible mesh instances.
     * @param {Light} light - The light.
     * @param {Camera} camera - The camera.
     */
    submitCasters(visibleCasters: MeshInstance[], light: Light, camera: Camera): void;
    /** Returns truthy when the light's shadow map needs to be (re)rendered this frame. */
    needsShadowRendering(light: any): any;
    /** Returns the light's render data for the given camera and face. */
    getLightRenderData(light: any, camera: any, face: any): any;
    /** Configures the render pass for the shadow camera's render target. */
    setupRenderPass(renderPass: any, shadowCamera: any, clearRenderTarget: any): void;
    /** Prepares rendering of one shadow face; returns the shadow camera — confirm. */
    prepareFace(light: any, camera: any, face: any): any;
    /** Renders one face of the light's shadow map. */
    renderFace(light: any, camera: any, face: any, clear: any): void;
    /** Applies VSM post-processing (blur) for the light if applicable. */
    renderVsm(light: any, camera: any): void;
    /** Returns (and caches) the VSM blur shader for the given mode and filter size. */
    getVsmBlurShader(blurMode: any, filterSize: any): any;
    /** Runs the separable VSM blur over the light's shadow map. */
    applyVsmBlur(light: any, camera: any): void;
    /** Initializes the view uniform buffer / bind group formats. */
    initViewBindGroupFormat(): void;
    /** Per-frame update of shadow rendering state. */
    frameUpdate(): void;
}

/**
 * @import { FrameGraph } from '../../scene/frame-graph.js'
 * @import { GraphicsDevice } from '../../platform/graphics/graphics-device.js'
 * @import { Light } from '../../scene/light.js'
 * @import { Renderer } from './renderer.js'
 * @import { ShadowRenderer } from './shadow-renderer.js'
 */
/**
 * Shadow rendering for local (spot and omni) lights, built on top of {@link ShadowRenderer}.
 */
declare class ShadowRendererLocal {
    constructor(renderer: any, shadowRenderer: any);
    // Lights collected for shadow rendering this frame — TODO confirm lifetime.
    shadowLights: any[];
    /** @type {Renderer} */
    renderer: Renderer;
    /** @type {ShadowRenderer} */
    shadowRenderer: ShadowRenderer;
    /** @type {GraphicsDevice} */
    device: GraphicsDevice;
    /** Culls shadow casters for the given local light. */
    cull(light: any, comp: any, casters?: any): void;
    /** Prepares the given lights for shadow rendering; return value not documented here — confirm. */
    prepareLights(shadowLights: any, lights: any): any;
    /**
     * Prepare render passes for rendering of shadows for local non-clustered lights. Each shadow face
     * is a separate render pass as it renders to a separate render target.
     *
     * @param {FrameGraph} frameGraph - The frame graph.
     * @param {Light[]} localLights - The list of local lights.
     */
    buildNonClusteredRenderPasses(frameGraph: FrameGraph, localLights: Light[]): void;
}

/**
 * A render pass used to render directional shadows.
 *
 * @ignore
 */
declare class RenderPassShadowDirectional extends RenderPass {
    /**
     * @param device - The graphics device.
     * @param shadowRenderer - The shared {@link ShadowRenderer}.
     * @param light - The directional light to render shadows for.
     * @param camera - The camera the shadows are rendered for.
     * @param allCascadesRendering - Whether all cascades render in this pass — confirm semantics.
     */
    constructor(device: any, shadowRenderer: any, light: any, camera: any, allCascadesRendering: any);
    shadowRenderer: any;
    light: any;
    camera: any;
    allCascadesRendering: any;
}

/**
 * Shadow rendering for directional lights (cascaded shadow maps), built on top of
 * {@link ShadowRenderer}.
 */
declare class ShadowRendererDirectional {
    constructor(renderer: any, shadowRenderer: any);
    /** @type {Renderer} */
    renderer: Renderer;
    /** @type {ShadowRenderer} */
    shadowRenderer: ShadowRenderer;
    /** @type {GraphicsDevice} */
    device: GraphicsDevice;
    /** Culls shadow casters for the directional light against the given camera. */
    cull(light: any, comp: any, camera: any, casters?: any): void;
    /** Computes cascade split distances between the near and far distances. */
    generateSplitDistances(light: any, nearDist: any, farDist: any): void;
    /**
     * Create a render pass for directional light shadow rendering for a specified camera.
     *
     * @param {Light} light - The directional light.
     * @param {Camera} camera - The camera.
     * @returns {RenderPassShadowDirectional|null} - The render pass if the shadow rendering is
     * required, or null otherwise.
     */
    getLightRenderPass(light: Light, camera: Camera): RenderPassShadowDirectional | null;
}

/**
 * A render pass used to render cookie textures (both 2D and Cubemap) into the texture atlas.
 *
 * @ignore
 */
declare class RenderPassCookieRenderer extends RenderPass {
    /** Factory creating a pass targeting the given render target. */
    static create(renderTarget: any, cubeSlotsOffsets: any): RenderPassCookieRenderer;
    constructor(device: any, cubeSlotsOffsets: any);
    /** @type {QuadRender|null} */
    _quadRenderer2D: QuadRender | null;
    /** @type {QuadRender|null} */
    _quadRendererCube: QuadRender | null;
    // Lights with cookies collected by filter() for this pass.
    _filteredLights: any[];
    // Forces cookie re-copy (e.g. after device restore) — TODO confirm trigger.
    _forceCopy: boolean;
    /**
     * Event handle for device restored event.
     *
     * @type {EventHandle|null}
     * @private
     */
    private _evtDeviceRestored;
    _cubeSlotsOffsets: any;
    blitTextureId: any;
    invViewProjId: any;
    /** Handler run when the graphics device is restored. */
    onDeviceRestored(): void;
    /** Per-frame update: filters lights and prepares cookie rendering. */
    update(lights: any): void;
    /** Collects lights that have cookies into filteredLights. */
    filter(lights: any, filteredLights: any): void;
    /** Initializes inverse view-projection matrices used for cube cookie rendering. */
    initInvViewProjMatrices(): void;
    /** Lazily created quad renderer for 2D cookies. */
    get quadRenderer2D(): QuadRender;
    /** Lazily created quad renderer for cubemap cookies. */
    get quadRendererCube(): QuadRender;
}

/**
 * A render pass used to render local clustered shadows. This is done inside a single render pass,
 * as all shadows are part of a single render target atlas.
 *
 * @ignore
 */
declare class RenderPassShadowLocalClustered extends RenderPass {
    constructor(device: any, shadowRenderer: any, shadowRendererLocal: any);
    shadowRenderer: any;
    shadowRendererLocal: any;
    /** Per-frame update with the local lights whose shadows render into the atlas. */
    update(localLights: any): void;
}

/**
 * A render pass used to update clustered lighting data - shadows, cookies, world clusters.
 *
 * @ignore
 */
declare class FramePassUpdateClustered extends FramePass {
    constructor(device: any, renderer: any, shadowRenderer: any, shadowRendererLocal: any, lightTextureAtlas: any);
    renderer: any;
    frameGraph: any;
    // Child passes executed as part of this frame pass.
    cookiesRenderPass: RenderPassCookieRenderer;
    shadowRenderPass: RenderPassShadowLocalClustered;
    /** Per-frame update: refreshes cookie and shadow passes based on enabled features. */
    update(frameGraph: any, shadowsEnabled: any, cookiesEnabled: any, lights: any, localLights: any): void;
}

/**
 * A lighting cube represented by 6 colors, one per cube direction. Use for simple lighting on the
 * particle system.
 *
 * @ignore
 */
declare class LightCube {
    // 6 RGB colors packed into a flat array (one per cube face) — confirm layout.
    colors: Float32Array<ArrayBuffer>;
    /** Recomputes the 6 face colors from the ambient light and the given lights. */
    update(ambientLight: any, lights: any): void;
}

/**
 * The base renderer functionality to allow implementation of specialized renderers.
 *
 * @ignore
 */
declare class Renderer {
    /**
     * Create a new instance.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device used by the renderer.
     * @param {Scene} scene - The scene.
     */
    constructor(graphicsDevice: GraphicsDevice, scene: Scene);
    /** @type {boolean} */
    clustersDebugRendered: boolean;
    /** @type {Scene} */
    scene: Scene;
    /**
     * A set of visible mesh instances which need further processing before being rendered, e.g.
     * skinning or morphing. Extracted during culling.
     *
     * @type {Set<MeshInstance>}
     * @protected
     */
    protected processingMeshInstances: Set<MeshInstance>;
    /**
     * @type {WorldClustersAllocator}
     * @ignore
     */
    worldClustersAllocator: WorldClustersAllocator;
    /**
     * A list of all unique lights in the layer composition.
     *
     * @type {Light[]}
     */
    lights: Light[];
    /**
     * A list of all unique local lights (spot & omni) in the layer composition.
     *
     * @type {Light[]}
     */
    localLights: Light[];
    /**
     * A list of unique directional shadow casting lights for each enabled camera. This is generated
     * each frame during light culling.
     *
     * @type {Map<Camera, Array<Light>>}
     */
    cameraDirShadowLights: Map<Camera, Array<Light>>;
    /**
     * A mapping of a directional light to a camera, for which the shadow is currently valid. This
     * is cleared each frame, and updated each time a directional light shadow is rendered for a
     * camera, and allows us to manually schedule shadow passes when a new camera needs a shadow.
     *
     * @type {Map<Light, Camera>}
     */
    dirLightShadows: Map<Light, Camera>;
    /** Blue noise source (feeds the blue-noise jitter uniforms below). */
    blueNoise: BlueNoise;
    /**
     * A gsplat director for unified splat rendering.
     *
     * @type {GSplatDirector|null}
     */
    gsplatDirector: GSplatDirector | null;
    /** @type {GraphicsDevice} */
    device: GraphicsDevice;
    /** Atlas for local light shadow maps and cookies. */
    lightTextureAtlas: LightTextureAtlas;
    /** Cache allowing shadow map textures to be reused between lights. */
    shadowMapCache: ShadowMapCache;
    shadowRenderer: ShadowRenderer;
    _shadowRendererLocal: ShadowRendererLocal;
    _shadowRendererDirectional: ShadowRendererDirectional;
    _renderPassUpdateClustered: FramePassUpdateClustered;
    viewUniformFormat: UniformBufferFormat;
    viewBindGroupFormat: BindGroupFormat;
    // Per-frame profiling timers and counters — TODO confirm units (likely ms) and reset point.
    _skinTime: number;
    _morphTime: number;
    _cullTime: number;
    _shadowMapTime: number;
    _lightClustersTime: number;
    _layerCompositionUpdateTime: number;
    _shadowDrawCalls: number;
    _skinDrawCalls: number;
    _instancedDrawCalls: number;
    _shadowMapUpdates: number;
    _numDrawCallsCulled: number;
    _camerasRendered: number;
    _lightClusters: number;
    _gsplatCount: number;
    // Cached uniform scope ids and scratch arrays used when dispatching view/camera uniforms.
    boneTextureId: ScopeId;
    modelMatrixId: ScopeId;
    normalMatrixId: ScopeId;
    viewInvId: ScopeId;
    viewPos: Float32Array<ArrayBuffer>;
    viewPosId: ScopeId;
    projId: ScopeId;
    projSkyboxId: ScopeId;
    viewId: ScopeId;
    viewId3: ScopeId;
    viewProjId: ScopeId;
    flipYId: ScopeId;
    tbnBasis: ScopeId;
    cameraParams: Float32Array<ArrayBuffer>;
    cameraParamsId: ScopeId;
    viewportSize: Float32Array<ArrayBuffer>;
    viewportSizeId: ScopeId;
    viewIndexId: ScopeId;
    blueNoiseJitterVersion: number;
    blueNoiseJitterVec: Vec4;
    blueNoiseJitterData: Float32Array<ArrayBuffer>;
    blueNoiseJitterId: ScopeId;
    blueNoiseTextureId: ScopeId;
    alphaTestId: ScopeId;
    opacityMapId: ScopeId;
    exposureId: ScopeId;
    morphPositionTex: ScopeId;
    morphNormalTex: ScopeId;
    morphTexParams: ScopeId;
    lightCube: LightCube;
    constantLightCube: ScopeId;
    /** Releases resources owned by the renderer (shadow renderers, atlas, caches — confirm). */
    destroy(): void;
    /**
     * Set up the viewport and the scissor for camera rendering.
     *
     * @param {Camera} camera - The camera containing the viewport information.
     * @param {RenderTarget} [renderTarget] - The render target. NULL for the default one.
     */
    setupViewport(camera: Camera, renderTarget?: RenderTarget): void;
    /** Dispatches camera/view uniforms for the target; return semantics not visible here — confirm. */
    setCameraUniforms(camera: any, target: any): any;
    /**
     * Clears the active render target. If the viewport is already set up, only its area is cleared.
     *
     * @param {Camera} camera - The camera supplying the value to clear to.
     * @param {boolean} [clearColor] - True if the color buffer should be cleared. Uses the value
     * from the camera if not supplied.
     * @param {boolean} [clearDepth] - True if the depth buffer should be cleared. Uses the value
     * from the camera if not supplied.
     * @param {boolean} [clearStencil] - True if the stencil buffer should be cleared. Uses the
     * value from the camera if not supplied.
     */
    clear(camera: Camera, clearColor?: boolean, clearDepth?: boolean, clearStencil?: boolean): void;
    /** Sets cull mode and front-face winding on the device for the draw call. */
    setupCullModeAndFrontFace(cullFaces: any, flipFactor: any, drawCall: any): void;
    /** Sets cull mode on the device for the draw call. */
    setupCullMode(cullFaces: any, flipFactor: any, drawCall: any): void;
    /** Updates the camera's frustum from its current matrices (used for culling). */
    updateCameraFrustum(camera: any): void;
    /** Dispatches base material constants (e.g. alpha test / opacity — confirm) to uniforms. */
    setBaseConstants(device: any, material: any): void;
    /** Updates skin matrices on the CPU for the given draw calls. */
    updateCpuSkinMatrices(drawCalls: any): void;
    /**
     * Update skin matrices ahead of rendering.
     *
     * @param {MeshInstance[]|Set<MeshInstance>} drawCalls - MeshInstances containing skinInstance.
     * @ignore
     */
    updateGpuSkinMatrices(drawCalls: MeshInstance[] | Set<MeshInstance>): void;
    /**
     * Update morphing ahead of rendering.
     *
     * @param {MeshInstance[]|Set<MeshInstance>} drawCalls - MeshInstances containing morphInstance.
     * @ignore
     */
    updateMorphing(drawCalls: MeshInstance[] | Set<MeshInstance>): void;
    /**
     * Update gsplats ahead of rendering.
     *
     * @param {MeshInstance[]|Set<MeshInstance>} drawCalls - MeshInstances containing gsplatInstances.
     * @ignore
     */
    updateGSplats(drawCalls: MeshInstance[] | Set<MeshInstance>): void;
    /**
     * Update draw calls ahead of rendering.
     *
     * @param {MeshInstance[]|Set<MeshInstance>} drawCalls - MeshInstances requiring updates.
     * @ignore
     */
    gpuUpdate(drawCalls: MeshInstance[] | Set<MeshInstance>): void;
    /** Binds the mesh's vertex buffers on the device. */
    setVertexBuffers(device: any, mesh: any): void;
    /** Dispatches morphing state (textures/params) for the morph instance. */
    setMorphing(device: any, morphInstance: any): void;
    /** Dispatches skinning state (bone texture) for the mesh instance. */
    setSkinning(device: any, meshInstance: any): void;
    /** Uploads the view position uniform. */
    dispatchViewPos(position: any): void;
    /** Initializes the view uniform buffer / bind group formats. */
    initViewBindGroupFormat(isClustered: any): void;
    /**
     * Set up uniforms for an XR view.
     */
    setupViewUniforms(view: any, index: any): void;
    /** Creates/updates per-view uniform buffers and bind groups for the view list. */
    setupViewUniformBuffers(viewBindGroups: any, viewUniformFormat: any, viewBindGroupFormat: any, viewList: any): void;
    /** Sets up mesh-level uniform buffers for the shader instance. */
    setupMeshUniformBuffers(shaderInstance: any): void;
    /** Uploads the mesh instance's model (and optionally normal) matrix uniforms. */
    setMeshInstanceMatrices(meshInstance: any, setNormalMatrix?: boolean): void;
    /**
     * @param {Camera} camera - The camera used for culling.
     * @param {MeshInstance[]} drawCalls - Draw calls to cull.
     * @param {CulledInstances} culledInstances - Stores culled instances.
     */
    cull(camera: Camera, drawCalls: MeshInstance[], culledInstances: CulledInstances): void;
    /** Collects unique lights (and local lights) from the layer composition. */
    collectLights(comp: any): void;
    /** Performs visibility culling of the given lights by the camera. */
    cullLights(camera: any, lights: any): void;
    /**
     * Shadow map culling for directional and visible local lights visible meshInstances are
     * collected into light._renderData, and are marked as visible for directional lights also
     * shadow camera matrix is set up.
     *
     * @param {LayerComposition} comp - The layer composition.
     */
    cullShadowmaps(comp: LayerComposition): void;
    /**
     * visibility culling of lights, meshInstances, shadows casters. Also applies
     * `meshInstance.visible`.
     *
     * @param {LayerComposition} comp - The layer composition.
     */
    cullComposition(comp: LayerComposition): void;
    /**
     * @param {MeshInstance[]} drawCalls - Mesh instances.
     * @param {boolean} onlyLitShaders - Limits the update to shaders affected by lighting.
     */
    updateShaders(drawCalls: MeshInstance[], onlyLitShaders: boolean): void;
    /** Uploads per-frame uniforms (e.g. blue-noise jitter — confirm full set). */
    updateFrameUniforms(): void;
    /**
     * @param {LayerComposition} comp - The layer composition to update.
     */
    beginFrame(comp: LayerComposition): void;
    /** Per-frame update of the light texture atlas from the collected local lights. */
    updateLightTextureAtlas(): void;
    /**
     * Updates the layer composition for rendering.
     *
     * @param {LayerComposition} comp - The layer composition to update.
     */
    updateLayerComposition(comp: LayerComposition): void;
    /** Per-frame update hook for the renderer and its sub-renderers. */
    frameUpdate(): void;
}

/**
 * The forward renderer renders {@link Scene}s.
 *
 * @ignore
 */
declare class ForwardRenderer extends Renderer {
    // Debug facilities to skip rendering for a specific camera — TODO confirm usage.
    static skipRenderCamera: any;
    static _skipRenderCounter: number;
    static skipRenderAfter: number;
    // Per-frame profiling counters and timers.
    _forwardDrawCalls: number;
    _materialSwitches: number;
    _depthMapTime: number;
    _forwardTime: number;
    _sortTime: number;
    // Cached uniform scope ids for scene-level constants (fog, ambient, skybox, PCSS samples).
    fogColorId: ScopeId;
    fogStartId: ScopeId;
    fogEndId: ScopeId;
    fogDensityId: ScopeId;
    ambientId: ScopeId;
    skyboxIntensityId: ScopeId;
    cubeMapRotationMatrixId: ScopeId;
    pcssDiskSamplesId: ScopeId;
    pcssSphereSamplesId: ScopeId;
    // Per-light uniform scope ids and scratch values, indexed by light slot.
    lightColorId: any[];
    lightDir: any[];
    lightDirId: any[];
    lightShadowMapId: any[];
    lightShadowMatrixId: any[];
    lightShadowParamsId: any[];
    lightShadowIntensity: any[];
    lightRadiusId: any[];
    lightPos: any[];
    lightPosId: any[];
    lightWidth: any[];
    lightWidthId: any[];
    lightHeight: any[];
    lightHeightId: any[];
    lightInAngleId: any[];
    lightOutAngleId: any[];
    lightCookieId: any[];
    lightCookieIntId: any[];
    lightCookieMatrixId: any[];
    lightCookieOffsetId: any[];
    lightShadowSearchAreaId: any[];
    lightCameraParamsId: any[];
    lightSoftShadowParamsId: any[];
    shadowMatrixPaletteId: any[];
    shadowCascadeDistancesId: any[];
    shadowCascadeCountId: any[];
    shadowCascadeBlendId: any[];
    screenSizeId: ScopeId;
    _screenSize: Float32Array<ArrayBuffer>;
    fogColor: Float32Array<ArrayBuffer>;
    ambientColor: Float32Array<ArrayBuffer>;
    pcssDiskSamples: number[];
    pcssSphereSamples: number[];
    /**
     * @param {Scene} scene - The scene.
     */
    dispatchGlobalLights(scene: Scene): void;
    /** Resolves the per-light uniform scope ids for light slot i. */
    _resolveLight(scope: any, i: any): void;
    /** Sets LTC (area light) uniforms for a directional light. */
    setLTCDirectionalLight(wtm: any, cnt: any, dir: any, campos: any, far: any): void;
    /** Dispatches uniforms for directional lights matching the mask; returns the count used. */
    dispatchDirectLights(dirs: any, mask: any, camera: any): number;
    /** Sets LTC (area light) uniforms for a positional light. */
    setLTCPositionalLight(wtm: any, cnt: any): void;
    /** Dispatches uniforms for an omni light into slot cnt. */
    dispatchOmniLight(scope: any, omni: any, cnt: any): void;
    /** Dispatches uniforms for a spot light into slot cnt. */
    dispatchSpotLight(scope: any, spot: any, cnt: any): void;
    /** Dispatches uniforms for local lights, continuing after usedDirLights slots. */
    dispatchLocalLights(sortedLights: any, mask: any, usedDirLights: any): void;
    /** Prepares per-draw-call materials/shaders ahead of the forward pass. */
    renderForwardPrepareMaterials(camera: any, renderTarget: any, drawCalls: any, sortedLights: any, layer: any, pass: any): {
        drawCalls: any[];
        shaderInstances: any[];
        isNewMaterial: any[];
        lightMaskChanged: any[];
        clear: () => void;
    };
    /** Renders the prepared draw calls (inner loop of the forward pass). */
    renderForwardInternal(camera: any, preparedCalls: any, sortedLights: any, pass: any, drawCallback: any, flipFaces: any, viewBindGroups: any): void;
    /** Prepares materials and renders the given draw calls forward. */
    renderForward(camera: any, renderTarget: any, allDrawCalls: any, sortedLights: any, pass: any, drawCallback: any, layer: any, flipFaces: any, viewBindGroups: any): void;
    /**
     * Forward render mesh instances on a specified layer, using a camera and a render target.
     * Shaders used are based on the shaderPass provided, with optional clustered lighting support.
     *
     * @param {Camera} camera - The camera.
     * @param {RenderTarget|undefined} renderTarget - The render target.
     * @param {Layer} layer - The layer.
     * @param {boolean} transparent - True if transparent sublayer should be rendered, opaque
     * otherwise.
     * @param {number} shaderPass - A type of shader to use during rendering.
     * @param {BindGroup[]} viewBindGroups - An array storing the view level bing groups (can be
     * empty array, and this function populates if per view).
     * @param {object} [options] - Object for passing optional arguments.
     * @param {boolean} [options.clearColor] - True if the color buffer should be cleared.
     * @param {boolean} [options.clearDepth] - True if the depth buffer should be cleared.
     * @param {boolean} [options.clearStencil] - True if the stencil buffer should be cleared.
     * @param {WorldClusters} [options.lightClusters] - The world clusters object to be used for
     * clustered lighting.
     * @param {MeshInstance[]} [options.meshInstances] - The mesh instances to be rendered. Use
     * when layer is not provided.
     * @param {object} [options.splitLights] - The split lights to be used for clustered lighting.
     */
    renderForwardLayer(camera: Camera, renderTarget: RenderTarget | undefined, layer: Layer, transparent: boolean, shaderPass: number, viewBindGroups: BindGroup[], options?: {
        clearColor?: boolean;
        clearDepth?: boolean;
        clearStencil?: boolean;
        lightClusters?: WorldClusters;
        meshInstances?: MeshInstance[];
        splitLights?: object;
    }): void;
    /** Uploads fog uniforms from the given fog parameters. */
    setFogConstants(fogParams: any): void;
    /** Uploads scene-level uniforms (ambient, fog, screen size — confirm full set). */
    setSceneConstants(): void;
    /**
     * Builds a frame graph for the rendering of the whole frame.
     *
     * @param {FrameGraph} frameGraph - The frame-graph that is built.
     * @param {LayerComposition} layerComposition - The layer composition used to build the frame
     * graph.
     * @ignore
     */
    buildFrameGraph(frameGraph: FrameGraph, layerComposition: LayerComposition): void;
    /**
     * @param {FrameGraph} frameGraph - The frame graph.
     * @param {LayerComposition} layerComposition - The layer composition.
     */
    addMainRenderPass(frameGraph: FrameGraph, layerComposition: LayerComposition, renderTarget: any, startIndex: any, endIndex: any): void;
    /**
     * @param {LayerComposition} comp - The layer composition.
     */
    update(comp: LayerComposition): void;
}

/**
 * @import { GraphicsDevice } from '../platform/graphics/graphics-device.js'
 */
/**
 * Records performance-related statistics related to the application.
 */
declare class ApplicationStats {
    /**
     * Create a new ApplicationStats instance.
     *
     * @param {GraphicsDevice} device - The graphics device.
     */
    constructor(device: GraphicsDevice);
    // Per-frame timing and count statistics; timers presumably in milliseconds — confirm.
    frame: {
        fps: number;
        ms: number;
        dt: number;
        updateStart: number;
        updateTime: number;
        renderStart: number;
        renderTime: number;
        physicsStart: number;
        physicsTime: number;
        scriptUpdateStart: number;
        scriptUpdate: number;
        scriptPostUpdateStart: number;
        scriptPostUpdate: number;
        animUpdateStart: number;
        animUpdate: number;
        cullTime: number;
        sortTime: number;
        skinTime: number;
        morphTime: number;
        instancingTime: number;
        triangles: number;
        gsplats: number;
        gsplatSort: number;
        gsplatBufferCopy: number;
        otherPrimitives: number;
        shaders: number;
        materials: number;
        cameras: number;
        shadowMapUpdates: number;
        shadowMapTime: number;
        depthMapTime: number;
        forwardTime: number;
        lightClustersTime: number;
        lightClusters: number;
        _timeToCountFrames: number;
        _fpsAccum: number;
    };
    // Draw call counts broken down by pass type.
    drawCalls: {
        forward: number;
        depth: number;
        shadow: number;
        immediate: number;
        misc: number;
        total: number;
        skinned: number;
        instanced: number;
        removedByInstancing: number;
    };
    misc: {
        renderTargetCreationTime: number;
    };
    // Particle system update statistics.
    particles: {
        updatesPerFrame: number;
        _updatesPerFrame: number;
        frameTime: number;
        _frameTime: number;
    };
    // Shader compilation statistics.
    shaders: {
        vsCompiled: number;
        fsCompiled: number;
        linked: number;
        materialShaders: number;
        compileTime: number;
    };
    // VRAM usage in bytes per resource category — confirm units.
    vram: {
        texShadow: number;
        texAsset: number;
        texLightmap: number;
        tex: number;
        vb: number;
        ib: number;
        ub: number;
        sb: number;
    };
    // GPU timing results — key/value types not visible in this declaration; confirm.
    gpu: Map<any, any>;
    /** Scene statistics (backing source not visible in this declaration). */
    get scene(): any;
    /** Lightmapper statistics (backing source not visible in this declaration). */
    get lightmapper(): any;
    /** Batcher statistics (backing source not visible in this declaration). */
    get batcher(): any;
    /**
     * Called at the end of each frame to reset per-frame statistics.
     *
     * @ignore
     */
    frameEnd(): void;
}

/**
 * Manages the shaders used to post-process baked lightmaps: a dilate pass and an optional
 * denoise pass.
 *
 * NOTE(review): member semantics below are inferred from names — confirm against the
 * implementation.
 *
 * @ignore
 */
declare class LightmapFilters {
    constructor(device: any);
    /** Dilate shader variants (presumably indexed by HDR mode — see getDilate). */
    shaderDilate: any[];
    /** Denoise shader variants (presumably indexed by HDR mode — see getDenoise). */
    shaderDenoise: any[];
    /** The graphics device. */
    device: any;
    /** Uniform/scope id for the source texture (set via setSourceTexture). */
    constantTexSource: any;
    /** Uniform/scope id for the pixel offset (updated in prepare). */
    constantPixelOffset: any;
    /** Pixel offset values uploaded to the shader (sized from texture dimensions in prepare). */
    pixelOffset: Float32Array<ArrayBuffer>;
    /** Denoise sigma values uploaded to the shader (see prepareDenoise). */
    sigmas: Float32Array<ArrayBuffer>;
    /** Uniform/scope id for the sigmas. */
    constantSigmas: any;
    /** Denoise kernel data. */
    kernel: any;
    /** Binds the texture to be filtered. */
    setSourceTexture(texture: any): void;
    /** Prepares per-texture uniforms (pixel offsets) for the given texture dimensions. */
    prepare(textureWidth: any, textureHeight: any): void;
    /** Prepares denoise uniforms for the given filter settings and HDR mode. */
    prepareDenoise(filterRange: any, filterSmoothness: any, bakeHDR: any): void;
    /** Uniform/scope id for the denoise kernel. */
    constantKernel: any;
    /** Uniform/scope id for the denoise normalization factor. */
    bZnorm: any;
    /** Returns the denoise shader for the given HDR mode. */
    getDenoise(bakeHDR: any): any;
    /** Returns the dilate shader for the given device and HDR mode. */
    getDilate(device: any, bakeHDR: any): any;
    /** Recomputes denoise uniform values from the filter settings. */
    evaluateDenoiseUniforms(filterRange: any, filterSmoothness: any): void;
}

/**
 * The lightmapper is used to bake scene lights into textures.
 *
 * @category Graphics
 */
declare class Lightmapper {
    /**
     * Create a new Lightmapper instance.
     *
     * @param {GraphicsDevice} device - The graphics device used by the lightmapper.
     * @param {Entity} root - The root entity of the scene.
     * @param {Scene} scene - The scene to lightmap.
     * @param {ForwardRenderer} renderer - The renderer.
     * @param {AssetRegistry} assets - Registry of assets to lightmap.
     * @ignore
     */
    constructor(device: GraphicsDevice, root: Entity, scene: Scene, renderer: ForwardRenderer, assets: AssetRegistry);
    /** The graphics device used by the lightmapper. */
    device: GraphicsDevice;
    /** The root entity of the scene. */
    root: Entity;
    /** The scene to lightmap. */
    scene: Scene;
    /** The renderer. */
    renderer: ForwardRenderer;
    /** Registry of assets to lightmap. */
    assets: AssetRegistry;
    /** Cache of shadow maps used while baking shadows. */
    shadowMapCache: ShadowMapCache;
    /** Scratch set reused internally to avoid per-bake allocations. */
    _tempSet: Set<any>;
    /** True once one-time bake initialization (initBake) has run. */
    _initCalled: boolean;
    /** Materials used for the bake render passes (see createMaterials). */
    passMaterials: any[];
    /** Material used for the ambient ambient-occlusion pass. */
    ambientAOMaterial: StandardMaterial;
    // NOTE(review): fog/ambientLight appear to back up scene state while baking
    // (see setupScene/restoreScene) — confirm against implementation.
    fog: string;
    ambientLight: Color;
    /** Render targets allocated for lightmap textures. */
    renderTargets: Map<any, any>;
    /** Statistics gathered during the last bake. */
    stats: {
        renderPasses: number;
        lightmapCount: number;
        totalRenderTime: number;
        forwardTime: number;
        fboTime: number;
        shadowMapTime: number;
        compileTime: number;
        shadersLinked: number;
    };
    /** Releases all resources held by the lightmapper. */
    destroy(): void;
    blackTex: Texture;
    /** Camera used to render the bake passes. */
    camera: Camera;
    /** One-time initialization of bake resources. */
    initBake(device: any): void;
    /** True when baking to HDR lightmaps. */
    bakeHDR: boolean;
    /** Post-processing filters (dilate/denoise) applied to baked lightmaps. */
    lightmapFilters: LightmapFilters;
    constantBakeDir: any;
    materials: any[];
    lightingParams: LightingParams;
    worldClusters: WorldClusters;
    shadowLocalClusteredPass: RenderPassShadowLocalClustered;
    /** Cleans up per-bake resources after a bake completes. */
    finishBake(bakeNodes: any): void;
    createMaterialForPass(scene: any, pass: any, addAmbient: any): StandardMaterial;
    createMaterials(device: any, scene: any, passCount: any): void;
    createTexture(size: any, name: any): Texture;
    /** Collects nodes with bakeable models from the hierarchy rooted at `node`. */
    collectModels(node: any, bakeNodes: any, allNodes: any): void;
    prepareShadowCasters(nodes: any): any[];
    updateTransforms(nodes: any): void;
    /** Computes the lightmap resolution to use for a node. */
    calculateLightmapSize(node: any): number;
    setLightmapping(nodes: any, value: any, passCount: any, shaderDefs: any): void;
    /**
     * Generates and applies the lightmaps.
     *
     * @param {Entity[]|null} nodes - An array of entities (with model or render components) to
     * render lightmaps for. If not supplied, the entire scene will be baked.
     * @param {number} [mode] - Baking mode. Can be:
     *
     * - {@link BAKE_COLOR}: single color lightmap
     * - {@link BAKE_COLORDIR}: single color lightmap + dominant light direction (used for
     * bump/specular)
     *
     * Only lights with bakeDir=true will be used for generating the dominant light direction.
     * Defaults to {@link BAKE_COLORDIR}.
     */
    bake(nodes: Entity[] | null, mode?: number): void;
    allocateTextures(bakeNodes: any, passCount: any): void;
    prepareLightsToBake(allLights: any, bakeLights: any): void;
    restoreLights(allLights: any): void;
    /** Configures the scene for baking (paired with restoreScene). */
    setupScene(): void;
    /** Restores scene state modified by setupScene. */
    restoreScene(): void;
    computeNodeBounds(meshInstances: any): BoundingBox;
    computeNodesBounds(nodes: any): void;
    computeBounds(meshInstances: any): BoundingBox;
    /** Saves node materials before baking (paired with restoreMaterials). */
    backupMaterials(meshInstances: any): void;
    /** Restores node materials saved by backupMaterials. */
    restoreMaterials(meshInstances: any): void;
    lightCameraPrepare(device: any, bakeLight: any): any;
    lightCameraPrepareAndCull(bakeLight: any, bakeNode: any, shadowCam: any, casterBounds: any): boolean;
    setupLightArray(lightArray: any, light: any): void;
    renderShadowMap(comp: any, shadowMapRendered: any, casters: any, bakeLight: any): boolean;
    /** Applies post-processing (dilate/denoise) to the baked textures. */
    postprocessTextures(device: any, bakeNodes: any, passCount: any): void;
    /** Core bake implementation invoked by {@link Lightmapper#bake}. */
    bakeInternal(passCount: any, bakeNodes: any, allNodes: any): void;
}

/**
 * Item to be stored in the {@link SceneRegistry}.
 *
 * @category Graphics
 */
declare class SceneRegistryItem {
    /**
     * Creates a new SceneRegistryItem instance.
     *
     * @param {string} name - The name of the scene.
     * @param {string} url - The url of the scene file.
     */
    constructor(name: string, url: string);
    /**
     * The name of the scene.
     *
     * @type {string}
     */
    name: string;
    /**
     * The url of the scene file.
     *
     * @type {string}
     */
    url: string;
    /** @ignore */
    data: any;
    /** Backing flag for {@link SceneRegistryItem#loading}. @private */
    private _loading;
    /** Callbacks invoked when the in-flight load completes. @private */
    private _onLoadedCallbacks;
    /**
     * Returns true if the scene data has loaded.
     *
     * @type {boolean}
     */
    get loaded(): boolean;
    /**
     * Returns true if the scene data is still being loaded.
     *
     * @type {boolean}
     */
    get loading(): boolean;
}

/**
 * Callback used by {@link SceneRegistry#loadSceneHierarchy}. Passed `(err, entity)` where `err`
 * is null if no errors occurred and `entity` is the loaded root entity.
 */
type LoadHierarchyCallback = (err: string | null, entity?: Entity) => void;
/**
 * Callback used by {@link SceneRegistry#loadSceneSettings}. Passed `(err)` where `err` is null
 * if no errors occurred.
 */
type LoadSettingsCallback = (err: string | null) => void;
/**
 * Callback used by {@link SceneRegistry#changeScene}. Passed `(err, entity)` where `err` is null
 * if no errors occurred and `entity` is the loaded root entity.
 */
type ChangeSceneCallback = (err: string | null, entity?: Entity) => void;
/**
 * Callback used by {@link SceneRegistry#loadScene}. Passed `(err, entity)` where `err` is null
 * if no errors occurred and `entity` is the loaded root entity.
 */
type LoadSceneCallback = (err: string | null, entity?: Entity) => void;
/**
 * Callback used by {@link SceneRegistry#loadSceneData}. Passed `(err, sceneItem)` where `err` is
 * null if no errors occurred and `sceneItem` is the loaded scene registry item.
 */
type LoadSceneDataCallback = (err: string | null, sceneItem?: SceneRegistryItem) => void;
/**
 * @import { AppBase } from './app-base.js'
 * @import { Entity } from './entity.js'
 */
/**
 * @callback LoadHierarchyCallback
 * Callback used by {@link SceneRegistry#loadSceneHierarchy}.
 * @param {string|null} err - The error message in the case where the loading or parsing fails.
 * @param {Entity} [entity] - The loaded root entity if no errors were encountered.
 * @returns {void}
 */
/**
 * @callback LoadSettingsCallback
 * Callback used by {@link SceneRegistry#loadSceneSettings}.
 * @param {string|null} err - The error message in the case where the loading or parsing fails.
 * @returns {void}
 */
/**
 * @callback ChangeSceneCallback
 * Callback used by {@link SceneRegistry#changeScene}.
 * @param {string|null} err - The error message in the case where the loading or parsing fails.
 * @param {Entity} [entity] - The loaded root entity if no errors were encountered.
 * @returns {void}
 */
/**
 * @callback LoadSceneCallback
 * Callback used by {@link SceneRegistry#loadScene}.
 * @param {string|null} err - The error message in the case where the loading or parsing fails.
 * @param {Entity} [entity] - The loaded root entity if no errors were encountered.
 * @returns {void}
 */
/**
 * @callback LoadSceneDataCallback
 * Callback used by {@link SceneRegistry#loadSceneData}.
 * @param {string|null} err - The error message in the case where the loading or parsing fails.
 * @param {SceneRegistryItem} [sceneItem] - The scene registry item if no errors were encountered.
 * @returns {void}
 */
/**
 * Container for storing and loading of scenes. An instance of the registry is created on the
 * {@link AppBase} object as {@link AppBase#scenes}.
 *
 * @category Graphics
 */
declare class SceneRegistry {
    /**
     * Create a new SceneRegistry instance.
     *
     * @param {AppBase} app - The application.
     */
    constructor(app: AppBase);
    /**
     * @type {AppBase}
     * @private
     */
    private _app;
    /**
     * @type {SceneRegistryItem[]}
     * @private
     */
    private _list;
    /** Lookup of scene name to list index. @private */
    private _index;
    /** Lookup of scene URL to list index. @private */
    private _urlIndex;
    /** @ignore */
    destroy(): void;
    /**
     * Return the list of scenes.
     *
     * @returns {SceneRegistryItem[]} All items in the registry.
     */
    list(): SceneRegistryItem[];
    /**
     * Add a new item to the scene registry.
     *
     * @param {string} name - The name of the scene.
     * @param {string} url - The url of the scene file.
     * @returns {boolean} Returns true if the scene was successfully added to the registry, false otherwise.
     */
    add(name: string, url: string): boolean;
    /**
     * Find a Scene by name and return the {@link SceneRegistryItem}.
     *
     * @param {string} name - The name of the scene.
     * @returns {SceneRegistryItem|null} The stored data about a scene or null if no scene with
     * that name exists.
     */
    find(name: string): SceneRegistryItem | null;
    /**
     * Find a scene by the URL and return the {@link SceneRegistryItem}.
     *
     * @param {string} url - The URL to search by.
     * @returns {SceneRegistryItem|null} The stored data about a scene or null if no scene with
     * that URL exists.
     */
    findByUrl(url: string): SceneRegistryItem | null;
    /**
     * Remove an item from the scene registry.
     *
     * @param {string} name - The name of the scene.
     */
    remove(name: string): void;
    /**
     * Private function to load scene data with the option to cache. This allows us to retain
     * expected behavior of loadSceneSettings and loadSceneHierarchy where they don't store loaded
     * data which may be undesired behavior with projects that have many scenes.
     *
     * @param {SceneRegistryItem | string} sceneItem - The scene item (which can be found with
     * {@link SceneRegistry#find}, URL of the scene file (e.g."scene_id.json") or name of the scene.
     * @param {boolean} storeInCache - Whether to store the loaded data in the scene item.
     * @param {LoadSceneDataCallback} callback - The function to call after loading,
     * passed (err, sceneItem) where err is null if no errors occurred.
     * @private
     */
    private _loadSceneData;
    /**
     * Loads and stores the scene data to reduce the number of the network requests when the same
     * scenes are loaded multiple times. Can also be used to load data before calling
     * {@link SceneRegistry#loadSceneHierarchy} and {@link SceneRegistry#loadSceneSettings} to make
     * scene loading quicker for the user.
     *
     * @param {SceneRegistryItem | string} sceneItem - The scene item (which can be found with
     * {@link SceneRegistry#find}, URL of the scene file (e.g."scene_id.json") or name of the scene.
     * @param {LoadSceneDataCallback} callback - The function to call after loading,
     * passed (err, sceneItem) where err is null if no errors occurred.
     * @example
     * const sceneItem = app.scenes.find("Scene Name");
     * app.scenes.loadSceneData(sceneItem, (err, sceneItem) => {
     *     if (err) {
     *         // error
     *     }
     * });
     */
    loadSceneData(sceneItem: SceneRegistryItem | string, callback: LoadSceneDataCallback): void;
    /**
     * Unloads scene data that has been loaded previously using {@link SceneRegistry#loadSceneData}.
     *
     * @param {SceneRegistryItem | string} sceneItem - The scene item (which can be found with
     * {@link SceneRegistry#find} or URL of the scene file. Usually this will be "scene_id.json".
     * @example
     * const sceneItem = app.scenes.find("Scene Name");
     * app.scenes.unloadSceneData(sceneItem);
     */
    unloadSceneData(sceneItem: SceneRegistryItem | string): void;
    /**
     * Internal implementation shared by {@link SceneRegistry#loadSceneHierarchy} and
     * {@link SceneRegistry#changeScene}. Loads the hierarchy for `sceneItem`, invoking
     * `onBeforeAddHierarchy` before it is added, then `callback` with `(err, entity)`.
     *
     * @ignore
     */
    _loadSceneHierarchy(sceneItem: any, onBeforeAddHierarchy: any, callback: any): void;
    /**
     * Load a scene file, create and initialize the Entity hierarchy and add the hierarchy to the
     * application root Entity.
     *
     * @param {SceneRegistryItem | string} sceneItem - The scene item (which can be found with
     * {@link SceneRegistry#find}, URL of the scene file (e.g."scene_id.json") or name of the scene.
     * @param {LoadHierarchyCallback} callback - The function to call after loading,
     * passed (err, entity) where err is null if no errors occurred.
     * @example
     * const sceneItem = app.scenes.find("Scene Name");
     * app.scenes.loadSceneHierarchy(sceneItem, (err, entity) => {
     *     if (!err) {
     *         const e = app.root.find("My New Entity");
     *     } else {
     *         // error
     *     }
     * });
     */
    loadSceneHierarchy(sceneItem: SceneRegistryItem | string, callback: LoadHierarchyCallback): void;
    /**
     * Load a scene file and apply the scene settings to the current scene.
     *
     * @param {SceneRegistryItem | string} sceneItem - The scene item (which can be found with
     * {@link SceneRegistry#find}, URL of the scene file (e.g."scene_id.json") or name of the scene.
     * @param {LoadSettingsCallback} callback - The function called after the settings
     * are applied. Passed (err) where err is null if no error occurred.
     * @example
     * const sceneItem = app.scenes.find("Scene Name");
     * app.scenes.loadSceneSettings(sceneItem, (err) => {
     *     if (!err) {
     *         // success
     *     } else {
     *         // error
     *     }
     * });
     */
    loadSceneSettings(sceneItem: SceneRegistryItem | string, callback: LoadSettingsCallback): void;
    /**
     * Change to a new scene. Calling this function will load the scene data, delete all
     * entities and graph nodes under `app.root` and load the scene settings and hierarchy.
     *
     * @param {SceneRegistryItem | string} sceneItem - The scene item (which can be found with
     * {@link SceneRegistry#find}, URL of the scene file (e.g."scene_id.json") or name of the scene.
     * @param {ChangeSceneCallback} [callback] - The function to call after loading,
     * passed (err, entity) where err is null if no errors occurred.
     * @example
     * app.scenes.changeScene("Scene Name", (err, entity) => {
     *     if (!err) {
     *         // success
     *     } else {
     *         // error
     *     }
     * });
     */
    changeScene(sceneItem: SceneRegistryItem | string, callback?: ChangeSceneCallback): void;
    /**
     * Load the scene hierarchy and scene settings. This is an internal method used by the
     * {@link AppBase}.
     *
     * @param {string} url - The URL of the scene file.
     * @param {LoadSceneCallback} callback - The function called after the settings are
     * applied. Passed (err, scene) where err is null if no error occurred and scene is the
     * {@link Scene}.
     */
    loadScene(url: string, callback: LoadSceneCallback): void;
}

/**
 * @import { AppBase } from '../app-base.js'
 * @import { AttributeSchema } from './script-attributes.js'
 * @import { ScriptType } from './script-type.js'
 */
/**
 * Container for all {@link ScriptType}s that are available to this application. Note that
 * PlayCanvas scripts can access the Script Registry from inside the application with
 * {@link AppBase#scripts}.
 *
 * @category Script
 */
declare class ScriptRegistry extends EventHandler {
    /**
     * Create a new ScriptRegistry instance.
     *
     * @param {AppBase} app - Application to attach registry to.
     */
    constructor(app: AppBase);
    /**
     * @type {Object<string, typeof ScriptType>}
     * @private
     */
    private _scripts;
    /**
     * @type {typeof ScriptType[]}
     * @private
     */
    private _list;
    /**
     * A Map of script names to attribute schemas.
     *
     * @type {Map<string, AttributeSchema>}
     * @private
     */
    private _scriptSchemas;
    /** The application this registry is attached to. */
    app: AppBase;
    destroy(): void;
    /**
     * Registers a schema against a script instance.
     *
     * @param {string} id - The key to use to store the schema.
     * @param {AttributeSchema} schema - A schema definition for the script.
     */
    addSchema(id: string, schema: AttributeSchema): void;
    /**
     * Returns a schema for a given script name.
     *
     * @param {string} id - The key the schema was stored under.
     * @returns {AttributeSchema | undefined} - The schema stored under the key
     */
    getSchema(id: string): AttributeSchema | undefined;
    /**
     * Add {@link ScriptType} to registry. Note: when {@link createScript} is called, it will add
     * the {@link ScriptType} to the registry automatically. If a script already exists in
     * registry, and the new script has a `swap` method defined, it will perform code hot swapping
     * automatically in async manner.
     *
     * @param {typeof ScriptType} script - Script Type that is created
     * using {@link createScript}.
     * @returns {boolean} True if added for the first time or false if script already exists.
     * @example
     * var PlayerController = pc.createScript('playerController');
     * // playerController Script Type will be added to pc.ScriptRegistry automatically
     * console.log(app.scripts.has('playerController')); // outputs true
     */
    add(script: typeof ScriptType): boolean;
    /**
     * Remove {@link ScriptType}.
     *
     * @param {string|typeof ScriptType} nameOrType - The name or type
     * of {@link ScriptType}.
     * @returns {boolean} True if removed or False if already not in registry.
     * @example
     * app.scripts.remove('playerController');
     */
    remove(nameOrType: string | typeof ScriptType): boolean;
    /**
     * Get {@link ScriptType} by name.
     *
     * @param {string} name - Name of a {@link ScriptType}.
     * @returns {typeof ScriptType} The Script Type if it exists in the
     * registry or null otherwise.
     * @example
     * var PlayerController = app.scripts.get('playerController');
     */
    get(name: string): typeof ScriptType;
    /**
     * Check if a {@link ScriptType} with the specified name is in the registry.
     *
     * @param {string|typeof ScriptType} nameOrType - The name or type
     * of {@link ScriptType}.
     * @returns {boolean} True if {@link ScriptType} is in registry.
     * @example
     * if (app.scripts.has('playerController')) {
     *     // playerController is in pc.ScriptRegistry
     * }
     */
    has(nameOrType: string | typeof ScriptType): boolean;
    /**
     * Get list of all {@link ScriptType}s from registry.
     *
     * @returns {Array<typeof ScriptType>} list of all {@link ScriptType}s
     * in registry.
     * @example
     * // logs array of all Script Type names available in registry
     * console.log(app.scripts.list().map(function (o) {
     *     return o.name;
     * }));
     */
    list(): Array<typeof ScriptType>;
}

/** Data storage class for {@link AnimComponentSystem}. @ignore */
declare class AnimComponentData {
    /** Whether the component is enabled. */
    enabled: boolean;
}

/**
 * The AnimComponentSystem manages creating and deleting AnimComponents.
 *
 * @category Animation
 */
declare class AnimComponentSystem extends ComponentSystem {
    /** The component system identifier. */
    id: string;
    /** The Component class managed by this system. */
    ComponentType: typeof AnimComponent;
    /** The data class used to store component data. */
    DataType: typeof AnimComponentData;
    /** Names of the properties managed for each component. */
    schema: string[];
    initializeComponentData(component: any, data: any, properties: any): void;
    /** Per-frame animation update. @param dt - Time delta since the last frame. */
    onAnimationUpdate(dt: any): void;
    /** Clones the component from `entity` onto `clone`. */
    cloneComponent(entity: any, clone: any): Component;
    /** Cleanup hook invoked before a component is removed from an entity. */
    onBeforeRemove(entity: any, component: any): void;
}

/** Data storage class for {@link AnimationComponentSystem}. @ignore */
declare class AnimationComponentData {
    /** Whether the component is enabled. */
    enabled: boolean;
}

/**
 * The AnimationComponentSystem manages creating and deleting AnimationComponents.
 *
 * @category Animation
 */
declare class AnimationComponentSystem extends ComponentSystem {
    /** The component system identifier. */
    id: string;
    /** The Component class managed by this system. */
    ComponentType: typeof AnimationComponent;
    /** The data class used to store component data. */
    DataType: typeof AnimationComponentData;
    /** Names of the properties managed for each component. */
    schema: string[];
    /**
     * Called during {@link ComponentSystem#addComponent} to initialize the component data in the
     * store. This can be overridden by derived Component Systems and either called by the derived
     * System or replaced entirely.
     *
     * @param {AnimationComponent} component - The component being initialized.
     * @param {object} data - The data block used to initialize the component.
     * @param {Array<string | {name: string, type: string}>} properties - The array of property descriptors for the component.
     * A descriptor can be either a plain property name, or an object specifying the name and type.
     * @ignore
     */
    initializeComponentData(component: AnimationComponent, data: object, properties: Array<string | {
        name: string;
        type: string;
    }>): void;
    /**
     * Create a clone of component. This creates a copy of all component data variables.
     *
     * @param {Entity} entity - The entity to clone the component from.
     * @param {Entity} clone - The entity to clone the component into.
     * @returns {AnimationComponent} The newly cloned component.
     * @ignore
     */
    cloneComponent(entity: Entity, clone: Entity): AnimationComponent;
    /**
     * @param {Entity} entity - The entity having its component removed.
     * @param {AnimationComponent} component - The component being removed.
     * @private
     */
    private onBeforeRemove;
    /**
     * @param {number} dt - The time delta since the last frame.
     * @private
     */
    private onUpdate;
}

/** Data storage class for {@link AudioListenerComponentSystem}. @ignore */
declare class AudioListenerComponentData {
    /** Whether the component is enabled. */
    enabled: boolean;
}

/**
 * Component System for adding and removing {@link AudioListenerComponent} objects to Entities.
 *
 * @category Sound
 */
declare class AudioListenerComponentSystem extends ComponentSystem {
    /** The component system identifier. */
    id: string;
    /** The Component class managed by this system. */
    ComponentType: typeof AudioListenerComponent;
    /** The data class used to store component data. */
    DataType: typeof AudioListenerComponentData;
    /** Names of the properties managed for each component. */
    schema: string[];
    /** The sound manager used by this system. */
    manager: SoundManager;
    /** NOTE(review): presumably the currently-active listener — confirm against implementation. */
    current: any;
    initializeComponentData(component: any, data: any, properties: any): void;
    /** Per-frame update. @param dt - Time delta since the last frame. */
    onUpdate(dt: any): void;
}

/**
 * The JointComponent adds a physics joint constraint linking two rigid bodies.
 *
 * @ignore
 */
declare class JointComponent extends Component {
    /**
     * Create a new JointComponent instance.
     *
     * @param {JointComponentSystem} system - The ComponentSystem that created this Component.
     * @param {Entity} entity - The Entity that this Component is attached to.
     */
    constructor(system: JointComponentSystem, entity: Entity);
    /** The underlying physics constraint (presumably created by _createConstraint — confirm). */
    _constraint: any;
    /** First entity linked by the joint. */
    _entityA: any;
    /** Second entity linked by the joint. */
    _entityB: any;
    /** Force threshold at which the joint breaks. */
    _breakForce: number;
    /** Whether the two linked bodies collide with each other. */
    _enableCollision: boolean;
    // Linear degree of freedom along X: motion mode, limits and spring parameters.
    _linearMotionX: string;
    _linearLimitsX: Vec2;
    _linearSpringX: boolean;
    _linearStiffnessX: number;
    _linearDampingX: number;
    _linearEquilibriumX: number;
    // Linear degree of freedom along Y: motion mode, limits and spring parameters.
    _linearMotionY: string;
    _linearLimitsY: Vec2;
    _linearSpringY: boolean;
    _linearStiffnessY: number;
    _linearDampingY: number;
    _linearEquilibriumY: number;
    // Linear degree of freedom along Z: motion mode, limits and spring parameters.
    _linearMotionZ: string;
    _linearLimitsZ: Vec2;
    _linearSpringZ: boolean;
    _linearStiffnessZ: number;
    _linearDampingZ: number;
    _linearEquilibriumZ: number;
    // Angular degree of freedom around X: motion mode, limits and spring parameters.
    _angularMotionX: string;
    _angularLimitsX: Vec2;
    _angularSpringX: boolean;
    _angularStiffnessX: number;
    _angularDampingX: number;
    _angularEquilibriumX: number;
    // Angular degree of freedom around Y: motion mode, limits and spring parameters.
    _angularMotionY: string;
    _angularLimitsY: Vec2;
    _angularSpringY: boolean;
    _angularStiffnessY: number;
    _angularDampingY: number;
    _angularEquilibriumY: number;
    // Angular degree of freedom around Z: motion mode, limits and spring parameters.
    _angularMotionZ: string;
    _angularLimitsZ: Vec2;
    _angularSpringZ: boolean;
    _angularEquilibriumZ: number;
    _angularDampingZ: number;
    _angularStiffnessZ: number;
    // Public accessors for the backing fields above.
    set entityA(body: any);
    get entityA(): any;
    set entityB(body: any);
    get entityB(): any;
    set breakForce(force: number);
    get breakForce(): number;
    set enableCollision(enableCollision: boolean);
    get enableCollision(): boolean;
    set angularLimitsX(limits: Vec2);
    get angularLimitsX(): Vec2;
    set angularMotionX(value: string);
    get angularMotionX(): string;
    set angularLimitsY(limits: Vec2);
    get angularLimitsY(): Vec2;
    set angularMotionY(value: string);
    get angularMotionY(): string;
    set angularLimitsZ(limits: Vec2);
    get angularLimitsZ(): Vec2;
    set angularMotionZ(value: string);
    get angularMotionZ(): string;
    set linearLimitsX(limits: Vec2);
    get linearLimitsX(): Vec2;
    set linearMotionX(value: string);
    get linearMotionX(): string;
    set linearLimitsY(limits: Vec2);
    get linearLimitsY(): Vec2;
    set linearMotionY(value: string);
    get linearMotionY(): string;
    set linearLimitsZ(limits: Vec2);
    get linearLimitsZ(): Vec2;
    set linearMotionZ(value: string);
    get linearMotionZ(): string;
    /** Converts a PlayCanvas transform into the physics engine's transform representation. */
    _convertTransform(pcTransform: any, ammoTransform: any): void;
    /** Pushes the current angular limit settings to the constraint. */
    _updateAngularLimits(): void;
    /** Pushes the current linear limit settings to the constraint. */
    _updateLinearLimits(): void;
    /** Creates the underlying physics constraint from the current settings. */
    _createConstraint(): void;
    /** Destroys the underlying physics constraint. */
    _destroyConstraint(): void;
    /** Initializes the component's fields from a data block. */
    initFromData(data: any): void;
    _onSetEnabled(prop: any, old: any, value: any): void;
    _onBeforeRemove(): void;
}

/** Data storage class for {@link JointComponentSystem}. @ignore */
declare class JointComponentData {
    /** Whether the component is enabled. */
    enabled: boolean;
}

/**
 * Creates and manages physics joint components.
 *
 * @ignore
 */
declare class JointComponentSystem extends ComponentSystem {
    /** The component system identifier. */
    id: string;
    /** The Component class managed by this system. */
    ComponentType: typeof JointComponent;
    /** The data class used to store component data. */
    DataType: typeof JointComponentData;
    /** Names of the properties managed for each component. */
    schema: string[];
    initializeComponentData(component: any, data: any, properties: any): void;
}

/** Data storage class for {@link LayoutChildComponentSystem}. @ignore */
declare class LayoutChildComponentData {
    /** Whether the component is enabled. */
    enabled: boolean;
}

/**
 * Manages creation of {@link LayoutChildComponent}s.
 *
 * @category User Interface
 */
declare class LayoutChildComponentSystem extends ComponentSystem {
    /** The component system identifier. */
    id: string;
    /** The Component class managed by this system. */
    ComponentType: typeof LayoutChildComponent;
    /** The data class used to store component data. */
    DataType: typeof LayoutChildComponentData;
    /** Names of the properties managed for each component. */
    schema: string[];
    initializeComponentData(component: any, data: any, properties: any): void;
    /** Clones the component from `entity` onto `clone`. */
    cloneComponent(entity: any, clone: any): Component;
}

/**
 * @import { AppBase } from '../../app-base.js'
 */
/**
 * A Light Component is used to dynamically light the scene.
 *
 * @category Graphics
 */
declare class LightComponentSystem extends ComponentSystem {
    /** The component system identifier. */
    id: string;
    /** The Component class managed by this system. */
    ComponentType: typeof LightComponent;
    /** The data class used to store component data. */
    DataType: typeof LightComponentData;
    initializeComponentData(component: any, _data: any): void;
    /** Cleanup hook invoked when a component is removed from an entity. */
    _onRemoveComponent(entity: any, component: any): void;
    /** Clones the component from `entity` onto `clone`. */
    cloneComponent(entity: any, clone: any): Component;
    /** Handles a change of the light's type (e.g. directional/omni/spot) on a component. */
    changeType(component: any, oldValue: any, newValue: any): void;
}

/** Data storage class for the rigid body component system. @ignore */
declare class RigidBodyComponentData {
    /** Whether the component is enabled. */
    enabled: boolean;
}

/**
 * A pool of reusable objects of the same type. Designed to promote reuse of objects to reduce
 * garbage collection.
 *
 * @template {new (...args: any[]) => any} T
 * @ignore
 */
declare class ObjectPool<T extends new (...args: any[]) => any> {
    /**
     * @param {T} constructorFunc - The constructor function for the
     * objects in the pool.
     * @param {number} size - The initial number of object instances to allocate.
     */
    constructor(constructorFunc: T, size: number);
    /**
     * The constructor function for the objects in the pool.
     *
     * @type {new (...args: any[]) => any}
     * @private
     */
    private _constructor;
    /**
     * Array of object instances.
     *
     * @type {InstanceType<T>[]}
     * @private
     */
    private _pool;
    /**
     * The number of object instances that are currently allocated.
     *
     * @type {number}
     * @private
     */
    private _count;
    /**
     * Grows the pool to hold the given number of instances.
     *
     * @param {number} size - The number of object instances to allocate.
     * @private
     */
    private _resize;
    /**
     * Returns an object instance from the pool. If no instances are available, the pool will be
     * doubled in size and a new instance will be returned.
     *
     * @returns {InstanceType<T>} An object instance from the pool.
     */
    allocate(): InstanceType<T>;
    /**
     * All object instances in the pool will be available again. The pool itself will not be
     * resized. Note that previously allocated instances are not reset — callers receive them
     * as-is on the next {@link ObjectPool#allocate}.
     */
    freeAll(): void;
}

/**
 * Represents a single point of contact between two colliding rigid bodies in the physics
 * simulation. Each contact point stores detailed spatial information about the collision,
 * including both local and world space coordinates of the exact contact points on both entities,
 * the contact normal direction, and the collision impulse force.
 *
 * Contact points are generated by the physics engine during collision detection and are typically
 * accessed through a {@link ContactResult} object, which can contain multiple contact points for a
 * single collision between two entities. Multiple contact points commonly occur when objects
 * collide along edges or faces rather than at a single point.
 *
 * The impulse property can be particularly useful for gameplay mechanics that need to respond
 * differently based on the force of impact, such as damage calculations or sound effect volume.
 *
 * @example
 * // Access contact points from a collision event
 * entity.collision.on('contact', (result) => {
 *     // Get the first contact point
 *     const contact = result.contacts[0];
 *
 *     // Get the contact position in world space
 *     const worldPos = contact.point;
 *
 *     // Check how hard the collision was
 *     if (contact.impulse > 10) {
 *         console.log("That was a hard impact!");
 *     }
 * });
 *
 * @category Physics
 */
declare class ContactPoint {
    /**
     * Create a new ContactPoint instance.
     *
     * @param {Vec3} [localPoint] - The point on the entity where the contact occurred, relative to
     * the entity.
     * @param {Vec3} [localPointOther] - The point on the other entity where the contact occurred,
     * relative to the other entity.
     * @param {Vec3} [point] - The point on the entity where the contact occurred, in world space.
     * @param {Vec3} [pointOther] - The point on the other entity where the contact occurred, in
     * world space.
     * @param {Vec3} [normal] - The normal vector of the contact on the other entity, in world
     * space.
     * @param {number} [impulse] - The total accumulated impulse applied by the constraint solver
     * during the last sub-step. Describes how hard two objects collide. Defaults to 0.
     * @ignore
     */
    constructor(localPoint?: Vec3, localPointOther?: Vec3, point?: Vec3, pointOther?: Vec3, normal?: Vec3, impulse?: number);
    /**
     * The point on the entity where the contact occurred, relative to the entity.
     *
     * @type {Vec3}
     */
    localPoint: Vec3;
    /**
     * The point on the other entity where the contact occurred, relative to the other entity.
     *
     * @type {Vec3}
     */
    localPointOther: Vec3;
    /**
     * The point on the entity where the contact occurred, in world space.
     *
     * @type {Vec3}
     */
    point: Vec3;
    /**
     * The point on the other entity where the contact occurred, in world space.
     *
     * @type {Vec3}
     */
    pointOther: Vec3;
    /**
     * The normal vector of the contact on the other entity, in world space. This vector points
     * away from the surface of the other entity at the point of contact.
     *
     * @type {Vec3}
     */
    normal: Vec3;
    /**
     * The total accumulated impulse applied by the constraint solver during the last sub-step.
     * This value represents how hard two objects collided. Higher values indicate stronger impacts.
     *
     * @type {number}
     */
    impulse: number;
}
/**
 * Represents a collection of contact points between two entities in a physics collision.
 * When rigid bodies collide, this object stores the entity involved in the collision and
 * an array of specific contact points where the collision occurred. This information is
 * used by the physics system to resolve collisions and notify components through events.
 *
 * Instances of this class are passed to event handlers for the `contact` and `collisionstart`
 * events on individual {@link RigidBodyComponent} and {@link CollisionComponent} instances.
 *
 * Unlike {@link SingleContactResult} which is used for global contact events, ContactResult
 * objects provide information about collision from the perspective of one entity, with
 * information about which other entity was involved and all points of contact.
 *
 * Please refer to the following event documentation for more information:
 *
 * - {@link CollisionComponent.EVENT_CONTACT}
 * - {@link CollisionComponent.EVENT_COLLISIONSTART}
 * - {@link RigidBodyComponent.EVENT_CONTACT}
 * - {@link RigidBodyComponent.EVENT_COLLISIONSTART}
 *
 * @category Physics
 */
declare class ContactResult {
    /**
     * Create a new ContactResult instance.
     *
     * @param {Entity} other - The entity that was involved in the contact with this entity.
     * @param {ContactPoint[]} contacts - An array of ContactPoints with the other entity.
     * @ignore
     */
    constructor(other: Entity, contacts: ContactPoint[]);
    /**
     * The entity that was involved in the contact with this entity.
     *
     * @type {Entity}
     */
    other: Entity;
    /**
     * An array of ContactPoints with the other entity. Each entry describes one point of
     * contact between the two entities.
     *
     * @type {ContactPoint[]}
     */
    contacts: ContactPoint[];
}
/**
 * Contains the result of a successful raycast intersection with a rigid body. When a ray
 * intersects with a rigid body in the physics simulation, this class stores the complete
 * information about that intersection including the entity, the exact point of impact, the normal
 * at the impact point, and the fractional distance along the ray where the intersection occurred.
 *
 * Instances of this class are created and returned by {@link RigidBodyComponentSystem#raycastFirst}
 * and {@link RigidBodyComponentSystem#raycastAll} methods when performing physics raycasts.
 *
 * @category Physics
 */
declare class RaycastResult {
    /**
     * Create a new RaycastResult instance.
     *
     * @param {Entity} entity - The entity that was hit.
     * @param {Vec3} point - The point at which the ray hit the entity in world space.
     * @param {Vec3} normal - The normal vector of the surface where the ray hit in world space.
     * @param {number} hitFraction - The normalized distance (between 0 and 1) at which the ray hit
     * occurred from the starting point.
     * @ignore
     */
    constructor(entity: Entity, point: Vec3, normal: Vec3, hitFraction: number);
    /**
     * The entity that was hit.
     *
     * @type {Entity}
     */
    entity: Entity;
    /**
     * The point at which the ray hit the entity in world space.
     *
     * @type {Vec3}
     */
    point: Vec3;
    /**
     * The normal vector of the surface where the ray hit in world space.
     *
     * @type {Vec3}
     */
    normal: Vec3;
    /**
     * The normalized distance (between 0 and 1) at which the ray hit occurred from the
     * starting point. A value of 0 corresponds to the ray's start point and 1 to its end point.
     *
     * @type {number}
     */
    hitFraction: number;
}
/**
 * The RigidBodyComponentSystem manages the physics simulation for all rigid body components
 * in the application. It creates and maintains the underlying Ammo.js physics world, handles
 * physics object creation and destruction, performs physics raycasting, detects and reports
 * collisions, and updates the transforms of entities with rigid bodies after each physics step.
 *
 * The system controls global physics settings like gravity and provides methods for raycasting
 * and collision detection.
 *
 * This system is only functional if your application has loaded the Ammo.js {@link WasmModule}.
 *
 * @category Physics
 */
declare class RigidBodyComponentSystem extends ComponentSystem {
    /**
     * Fired when a contact occurs between two rigid bodies. The handler is passed a
     * {@link SingleContactResult} object containing details of the contact between the two bodies.
     *
     * @event
     * @example
     * app.systems.rigidbody.on('contact', (result) => {
     *     console.log(`Contact between ${result.a.name} and ${result.b.name}`);
     * });
     */
    static EVENT_CONTACT: string;
    /**
     * Maximum number of physics sub-steps performed per frame.
     *
     * @type {number}
     * @ignore
     */
    maxSubSteps: number;
    /**
     * The fixed time step, in seconds, used by each physics sub-step.
     *
     * @type {number}
     * @ignore
     */
    fixedTimeStep: number;
    /**
     * The world space vector representing global gravity in the physics simulation. Defaults to
     * [0, -9.81, 0] which is an approximation of the gravitational force on Earth.
     *
     * @type {Vec3}
     * @example
     * // Set the gravity in the physics world to simulate a planet with low gravity
     * app.systems.rigidbody.gravity = new pc.Vec3(0, -3.7, 0);
     */
    gravity: Vec3;
    /**
     * Scratch storage for passing gravity to the native physics API.
     *
     * @type {Float32Array}
     * @private
     */
    private _gravityFloat32;
    /**
     * Tracked components with dynamic bodies.
     *
     * @type {RigidBodyComponent[]}
     * @private
     */
    private _dynamic;
    /**
     * Tracked components with kinematic bodies.
     *
     * @type {RigidBodyComponent[]}
     * @private
     */
    private _kinematic;
    /**
     * Tracked trigger volumes.
     *
     * @type {Trigger[]}
     * @private
     */
    private _triggers;
    /**
     * Tracked compound collision components.
     *
     * @type {CollisionComponent[]}
     * @private
     */
    private _compounds;
    /**
     * The string id of this component system.
     *
     * @type {string}
     * @ignore
     */
    id: string;
    /**
     * Frame statistics object this system contributes physics timings to (e.g. `physicsStart`,
     * `physicsTime`). NOTE(review): appears to be the shared application stats object - confirm
     * against AppBase.
     *
     * @ignore
     */
    _stats: {
        fps: number;
        ms: number;
        dt: number;
        updateStart: number;
        updateTime: number;
        renderStart: number;
        renderTime: number;
        physicsStart: number;
        physicsTime: number;
        scriptUpdateStart: number;
        scriptUpdate: number;
        scriptPostUpdateStart: number;
        scriptPostUpdate: number;
        animUpdateStart: number;
        animUpdate: number;
        cullTime: number;
        sortTime: number;
        skinTime: number;
        morphTime: number;
        instancingTime: number;
        triangles: number;
        gsplats: number;
        gsplatSort: number;
        gsplatBufferCopy: number;
        otherPrimitives: number;
        shaders: number;
        materials: number;
        cameras: number;
        shadowMapUpdates: number;
        shadowMapTime: number;
        depthMapTime: number;
        forwardTime: number;
        lightClustersTime: number;
        lightClusters: number;
        _timeToCountFrames: number;
        _fpsAccum: number;
    };
    /** The component class managed by this system. @ignore */
    ComponentType: typeof RigidBodyComponent;
    /** The component data class managed by this system. @ignore */
    DataType: typeof RigidBodyComponentData;
    /** Pool used to recycle {@link ContactPoint} instances between frames. @ignore */
    contactPointPool: ObjectPool<typeof ContactPoint>;
    /** Pool used to recycle {@link ContactResult} instances between frames. @ignore */
    contactResultPool: ObjectPool<typeof ContactResult>;
    /** Pool used to recycle {@link SingleContactResult} instances between frames. @ignore */
    singleContactResultPool: ObjectPool<typeof SingleContactResult>;
    /** Names of the component data properties managed by this system. @ignore */
    schema: string[];
    /** Map of currently active collisions - TODO confirm key/value shape. @ignore */
    collisions: {};
    /** Map of collisions detected during the current frame - TODO confirm shape. @ignore */
    frameCollisions: {};
    /**
     * Called once Ammo has been loaded. Responsible for creating the physics world.
     *
     * @ignore
     */
    onLibraryLoaded(): void;
    /** Ammo.js collision configuration created by onLibraryLoaded. @ignore */
    collisionConfiguration: any;
    /** Ammo.js collision dispatcher created by onLibraryLoaded. @ignore */
    dispatcher: any;
    /** Ammo.js broadphase pair cache created by onLibraryLoaded. @ignore */
    overlappingPairCache: any;
    /** Ammo.js constraint solver created by onLibraryLoaded. @ignore */
    solver: any;
    /** The Ammo.js dynamics world created by onLibraryLoaded. @ignore */
    dynamicsWorld: any;
    /** Initializes a newly created component from supplied data. @ignore */
    initializeComponentData(component: any, data: any, properties: any): void;
    /** Clones the rigid body component of `entity` onto `clone`. @ignore */
    cloneComponent(entity: any, clone: any): Component;
    /** Called before a component is removed from an entity. @ignore */
    onBeforeRemove(entity: any, component: any): void;
    /** Adds a native body to the dynamics world, with optional collision group/mask. @ignore */
    addBody(body: any, group: any, mask: any): void;
    /** Removes a native body from the dynamics world. @ignore */
    removeBody(body: any): void;
    /** Creates a native rigid body from mass, collision shape and transform. @ignore */
    createBody(mass: any, shape: any, transform: any): any;
    /** Destroys a native rigid body previously created by createBody. @ignore */
    destroyBody(body: any): void;
    /**
     * Raycast the world and return the first entity the ray hits. Fire a ray into the world from
     * start to end, if the ray hits an entity with a collision component, it returns a
     * {@link RaycastResult}, otherwise returns null.
     *
     * @param {Vec3} start - The world space point where the ray starts.
     * @param {Vec3} end - The world space point where the ray ends.
     * @param {object} [options] - The additional options for the raycasting.
     * @param {number} [options.filterCollisionGroup] - Collision group to apply to the raycast.
     * @param {number} [options.filterCollisionMask] - Collision mask to apply to the raycast.
     * @param {any[]} [options.filterTags] - Tags filters. Defined the same way as a {@link Tags#has}
     * query but within an array.
     * @param {Function} [options.filterCallback] - Custom function to use to filter entities.
     * Must return true to proceed with result. Takes one argument: the entity to evaluate.
     *
     * @returns {RaycastResult|null} The result of the raycasting or null if there was no hit.
     */
    raycastFirst(start: Vec3, end: Vec3, options?: {
        filterCollisionGroup?: number;
        filterCollisionMask?: number;
        filterTags?: any[];
        filterCallback?: Function;
    }): RaycastResult | null;
    /**
     * Raycast the world and return all entities the ray hits. It returns an array of
     * {@link RaycastResult}, one for each hit. If no hits are detected, the returned array will be
     * of length 0. Results are sorted by distance with closest first only when `options.sort` is
     * enabled.
     *
     * @param {Vec3} start - The world space point where the ray starts.
     * @param {Vec3} end - The world space point where the ray ends.
     * @param {object} [options] - The additional options for the raycasting.
     * @param {boolean} [options.sort] - Whether to sort raycast results based on distance with closest
     * first. Defaults to false.
     * @param {number} [options.filterCollisionGroup] - Collision group to apply to the raycast.
     * @param {number} [options.filterCollisionMask] - Collision mask to apply to the raycast.
     * @param {any[]} [options.filterTags] - Tags filters. Defined the same way as a {@link Tags#has}
     * query but within an array.
     * @param {Function} [options.filterCallback] - Custom function to use to filter entities.
     * Must return true to proceed with result. Takes the entity to evaluate as argument.
     *
     * @returns {RaycastResult[]} An array of raycast hit results (0 length if there were no hits).
     *
     * @example
     * // Return all results of a raycast between 0, 2, 2 and 0, -2, -2
     * const hits = this.app.systems.rigidbody.raycastAll(new Vec3(0, 2, 2), new Vec3(0, -2, -2));
     * @example
     * // Return all results of a raycast between 0, 2, 2 and 0, -2, -2
     * // where hit entity is tagged with `bird` OR `mammal`
     * const hits = this.app.systems.rigidbody.raycastAll(new Vec3(0, 2, 2), new Vec3(0, -2, -2), {
     *     filterTags: [ "bird", "mammal" ]
     * });
     * @example
     * // Return all results of a raycast between 0, 2, 2 and 0, -2, -2
     * // where hit entity has a `camera` component
     * const hits = this.app.systems.rigidbody.raycastAll(new Vec3(0, 2, 2), new Vec3(0, -2, -2), {
     *     filterCallback: (entity) => entity && entity.camera
     * });
     * @example
     * // Return all results of a raycast between 0, 2, 2 and 0, -2, -2
     * // where hit entity is tagged with (`carnivore` AND `mammal`) OR (`carnivore` AND `reptile`)
     * // and the entity has an `anim` component
     * const hits = this.app.systems.rigidbody.raycastAll(new Vec3(0, 2, 2), new Vec3(0, -2, -2), {
     *     filterTags: [
     *         [ "carnivore", "mammal" ],
     *         [ "carnivore", "reptile" ]
     *     ],
     *     filterCallback: (entity) => entity && entity.anim
     * });
     */
    raycastAll(start: Vec3, end: Vec3, options?: {
        sort?: boolean;
        filterCollisionGroup?: number;
        filterCollisionMask?: number;
        filterTags?: any[];
        filterCallback?: Function;
    }): RaycastResult[];
    /**
     * Stores a collision between the entity and other in the contacts map and returns true if it
     * is a new collision.
     *
     * @param {Entity} entity - The entity.
     * @param {Entity} other - The entity that collides with the first entity.
     * @returns {boolean} True if this is a new collision, false otherwise.
     * @private
     */
    private _storeCollision;
    /** Builds a pooled ContactPoint from a native Ammo contact point. @ignore */
    _createContactPointFromAmmo(contactPoint: any): ContactPoint;
    /** Builds a pooled ContactPoint with entity A/B roles swapped. @ignore */
    _createReverseContactPointFromAmmo(contactPoint: any): ContactPoint;
    /** Builds a pooled SingleContactResult for the global 'contact' event. @ignore */
    _createSingleContactResult(a: any, b: any, contactPoint: any): SingleContactResult;
    /** Builds a pooled ContactResult for per-entity contact events. @ignore */
    _createContactResult(other: any, contacts: any): ContactResult;
    /**
     * Removes collisions that no longer exist from the collisions list and fires collisionend
     * events to the related entities.
     *
     * @private
     */
    private _cleanOldCollisions;
    /**
     * Returns true if the entity has a contact event attached and false otherwise.
     *
     * @param {Entity} entity - Entity to test.
     * @returns {boolean} True if the entity has a contact and false otherwise.
     * @private
     */
    private _hasContactEvent;
    /**
     * Checks for collisions and fires collision events.
     *
     * @param {number} world - The pointer to the dynamics world that invoked this callback.
     * @param {number} timeStep - The amount of simulation time processed in the last simulation tick.
     * @private
     */
    private _checkForCollisions;
    /**
     * Steps the simulation each frame.
     *
     * @param {number} dt - The elapsed time since the last update, in seconds.
     * @ignore
     */
    onUpdate(dt: any): void;
}
/**
 * Represents the detailed data of a single contact point between two rigid bodies in the physics
 * simulation. This class provides comprehensive information about the contact, including the
 * entities involved, the exact contact points in both local and world space coordinates, the
 * contact normal, and the collision impulse force.
 *
 * Instances of this class are created by the physics engine when collision events occur and are
 * passed to event handlers only through the global `contact` event on the
 * {@link RigidBodyComponentSystem}. Individual rigid body components receive instances of
 * {@link ContactResult} instead.
 *
 * @example
 * app.systems.rigidbody.on('contact', (result) => {
 *     console.log(`Contact between ${result.a.name} and ${result.b.name}`);
 * });
 * @category Physics
 */
declare class SingleContactResult {
    /**
     * Create a new SingleContactResult instance.
     *
     * @param {Entity} a - The first entity involved in the contact.
     * @param {Entity} b - The second entity involved in the contact.
     * @param {ContactPoint} contactPoint - The contact point between the two entities.
     * @param {...*} args - Additional arguments. NOTE(review): apparently unused; presumably an
     * artifact of object-pool construction - confirm.
     * @ignore
     */
    constructor(a: Entity, b: Entity, contactPoint: ContactPoint, ...args: any[]);
    /**
     * The first entity involved in the contact.
     *
     * @type {Entity}
     */
    a: Entity;
    /**
     * The second entity involved in the contact.
     *
     * @type {Entity}
     */
    b: Entity;
    /**
     * The total accumulated impulse applied by the constraint solver during the last
     * sub-step. Describes how hard two bodies collided.
     *
     * @type {number}
     */
    impulse: number;
    /**
     * The point on Entity A where the contact occurred, relative to A.
     *
     * @type {Vec3}
     */
    localPointA: Vec3;
    /**
     * The point on Entity B where the contact occurred, relative to B.
     *
     * @type {Vec3}
     */
    localPointB: Vec3;
    /**
     * The point on Entity A where the contact occurred, in world space.
     *
     * @type {Vec3}
     */
    pointA: Vec3;
    /**
     * The point on Entity B where the contact occurred, in world space.
     *
     * @type {Vec3}
     */
    pointB: Vec3;
    /**
     * The normal vector of the contact on Entity B, in world space.
     *
     * @type {Vec3}
     */
    normal: Vec3;
}

/**
 * Data class for {@link SoundComponent}.
 *
 * @ignore
 */
declare class SoundComponentData {
    /** Whether the component is enabled. */
    enabled: boolean;
}

/**
 * Manages creation of {@link SoundComponent}s.
 *
 * @category Sound
 */
declare class SoundComponentSystem extends ComponentSystem {
    /** The string id of this component system. @ignore */
    id: string;
    /** The component class managed by this system. @ignore */
    ComponentType: typeof SoundComponent;
    /** The component data class managed by this system. @ignore */
    DataType: typeof SoundComponentData;
    /** Names of the component data properties managed by this system. @ignore */
    schema: string[];
    /**
     * Gets / sets the sound manager.
     *
     * @type {SoundManager}
     */
    manager: SoundManager;
    /**
     * Sets the volume for the entire Sound system. All sounds will have their volume multiplied by
     * this value. Valid range is between 0 and 1. Defaults to 1.
     *
     * @type {number}
     */
    set volume(volume: number);
    /**
     * Gets the volume for the entire Sound system.
     *
     * @type {number}
     */
    get volume(): number;
    /**
     * Gets the AudioContext currently used by the sound manager. Requires Web Audio API support.
     * Returns null if the device does not support the Web Audio API.
     *
     * @type {AudioContext|null}
     */
    get context(): AudioContext | null;
    /** Initializes a newly created component from supplied data. @ignore */
    initializeComponentData(component: any, data: any, properties: any): void;
    /** Clones the sound component of `entity` onto `clone`. @ignore */
    cloneComponent(entity: any, clone: any): Component;
    /** Per-frame update. @param {number} dt - Elapsed time in seconds. @ignore */
    onUpdate(dt: any): void;
    /** Called before a component is removed from an entity. @ignore */
    onBeforeRemove(entity: any, component: any): void;
}

/**
 * @import { Entity } from '../../entity.js'
 * @import { ZoneComponentSystem } from './system.js'
 */
/**
 * The ZoneComponent allows you to define an area in world space of certain size. This can be used
 * in various ways, such as affecting audio reverb when {@link AudioListenerComponent} is within
 * zone. Or create culling system with portals between zones to hide whole indoor sections for
 * performance reasons. And many other possible options. Zones are building blocks and meant to be
 * used in many different ways.
 *
 * @ignore
 */
declare class ZoneComponent extends Component {
    /**
     * Fired when the zone component is enabled. This event does not take into account the enabled
     * state of the entity or any of its ancestors.
     *
     * @event
     * @example
     * entity.zone.on('enable', () => {
     *     console.log(`Zone component of entity '${entity.name}' has been enabled`);
     * });
     */
    static EVENT_ENABLE: string;
    /**
     * Fired when the zone component is disabled. This event does not take into account the enabled
     * state of the entity or any of its ancestors.
     *
     * @event
     * @example
     * entity.zone.on('disable', () => {
     *     console.log(`Zone component of entity '${entity.name}' has been disabled`);
     * });
     */
    static EVENT_DISABLE: string;
    /**
     * Fired when the enabled state of the zone component changes. This event does not take into
     * account the enabled state of the entity or any of its ancestors.
     *
     * @event
     * @example
     * entity.zone.on('state', (enabled) => {
     *     console.log(`Zone component of entity '${entity.name}' has been ${enabled ? 'enabled' : 'disabled'}`);
     * });
     */
    static EVENT_STATE: string;
    /**
     * Fired when a zone component is removed from an entity.
     *
     * @event
     * @example
     * entity.zone.on('remove', () => {
     *     console.log(`Zone component removed from entity '${entity.name}'`);
     * });
     */
    static EVENT_REMOVE: string;
    /**
     * Create a new ZoneComponent instance.
     *
     * @param {ZoneComponentSystem} system - The ComponentSystem that created this Component.
     * @param {Entity} entity - The Entity that this Component is attached to.
     */
    constructor(system: ZoneComponentSystem, entity: Entity);
    /** Previously observed enabled state, used to detect state changes. @ignore */
    _oldState: boolean;
    /** Backing storage for the `size` property. @ignore */
    _size: Vec3;
    /**
     * The size of the axis-aligned box of this ZoneComponent.
     *
     * @type {Vec3}
     */
    set size(data: Vec3);
    get size(): Vec3;
    /** Handler invoked when the `enabled` property is set. @ignore */
    _onSetEnabled(prop: any, old: any, value: any): void;
    /** Compares current enabled state with `_oldState` and fires state events. @ignore */
    _checkState(): void;
    /** Cleanup performed before the component is removed from its entity. @ignore */
    _onBeforeRemove(): void;
}

/**
 * Data class for {@link ZoneComponent}.
 *
 * @ignore
 */
declare class ZoneComponentData {
    /** Whether the component is enabled. */
    enabled: boolean;
}

/**
 * Creates and manages {@link ZoneComponent} instances.
 *
 * @ignore
 */
declare class ZoneComponentSystem extends ComponentSystem {
    /** The string id of this component system. @ignore */
    id: string;
    /** The component class managed by this system. @ignore */
    ComponentType: typeof ZoneComponent;
    /** The component data class managed by this system. @ignore */
    DataType: typeof ZoneComponentData;
    /** Names of the component data properties managed by this system. @ignore */
    schema: string[];
    /** Initializes a newly created component from supplied data. @ignore */
    initializeComponentData(component: any, data: any, properties: any): void;
    /** Clones the zone component of `entity` onto `clone`. @ignore */
    cloneComponent(entity: any, clone: any): Component;
    /** Called before a component is removed from an entity. @ignore */
    _onBeforeRemove(entity: any, component: any): void;
}

/**
 * @import { AnimComponentSystem } from './anim/system.js'
 * @import { AnimationComponentSystem } from './animation/system.js'
 * @import { AudioListenerComponentSystem } from './audio-listener/system.js'
 * @import { ButtonComponentSystem } from './button/system.js'
 * @import { CameraComponentSystem } from './camera/system.js'
 * @import { CollisionComponentSystem } from './collision/system.js'
 * @import { ElementComponentSystem } from './element/system.js'
 * @import { GSplatComponentSystem } from './gsplat/system.js'
 * @import { JointComponentSystem } from './joint/system.js'
 * @import { LayoutChildComponentSystem } from './layout-child/system.js'
 * @import { LayoutGroupComponentSystem } from './layout-group/system.js'
 * @import { LightComponentSystem } from './light/system.js'
 * @import { ModelComponentSystem } from './model/system.js'
 * @import { ParticleSystemComponentSystem } from './particle-system/system.js'
 * @import { RenderComponentSystem } from './render/system.js'
 * @import { RigidBodyComponentSystem } from './rigid-body/system.js'
 * @import { ScreenComponentSystem } from './screen/system.js'
 * @import { ScriptComponentSystem } from './script/system.js'
 * @import { ScrollViewComponentSystem } from './scroll-view/system.js'
 * @import { ScrollbarComponentSystem } from './scrollbar/system.js'
 * @import { SoundComponentSystem } from './sound/system.js'
 * @import { SpriteComponentSystem } from './sprite/system.js'
 * @import { ZoneComponentSystem } from './zone/system.js'
 */
/**
 * The ComponentSystemRegistry manages the instances of an application's {@link ComponentSystem}s.
 * {@link AppBase} maintains a single instance of this class which can be accessed via
 * {@link AppBase#systems}.
 *
 * ```javascript
 * // Set the gravity to zero
 * app.systems.rigidbody.gravity = new pc.Vec3(0, 0, 0);
 *
 * // Set the volume to 50%
 * app.systems.sound.volume = 0.5;
 * ```
 */
declare class ComponentSystemRegistry extends EventHandler {
    /**
     * Gets the {@link AnimComponentSystem} from the registry.
     *
     * @type {AnimComponentSystem|undefined}
     * @readonly
     */
    readonly anim: AnimComponentSystem | undefined;
    /**
     * Gets the {@link AnimationComponentSystem} from the registry.
     *
     * @type {AnimationComponentSystem|undefined}
     * @readonly
     */
    readonly animation: AnimationComponentSystem | undefined;
    /**
     * Gets the {@link AudioListenerComponentSystem} from the registry.
     *
     * @type {AudioListenerComponentSystem|undefined}
     * @readonly
     */
    readonly audiolistener: AudioListenerComponentSystem | undefined;
    /**
     * Gets the {@link ButtonComponentSystem} from the registry.
     *
     * @type {ButtonComponentSystem|undefined}
     * @readonly
     */
    readonly button: ButtonComponentSystem | undefined;
    /**
     * Gets the {@link CameraComponentSystem} from the registry.
     *
     * @type {CameraComponentSystem|undefined}
     * @readonly
     */
    readonly camera: CameraComponentSystem | undefined;
    /**
     * Gets the {@link CollisionComponentSystem} from the registry.
     *
     * @type {CollisionComponentSystem|undefined}
     * @readonly
     */
    readonly collision: CollisionComponentSystem | undefined;
    /**
     * Gets the {@link ElementComponentSystem} from the registry.
     *
     * @type {ElementComponentSystem|undefined}
     * @readonly
     */
    readonly element: ElementComponentSystem | undefined;
    /**
     * Gets the {@link GSplatComponentSystem} from the registry.
     *
     * @type {GSplatComponentSystem|undefined}
     * @readonly
     */
    readonly gsplat: GSplatComponentSystem | undefined;
    /**
     * Gets the {@link JointComponentSystem} from the registry.
     *
     * @type {JointComponentSystem|undefined}
     * @readonly
     * @ignore
     */
    readonly joint: JointComponentSystem | undefined;
    /**
     * Gets the {@link LayoutChildComponentSystem} from the registry.
     *
     * @type {LayoutChildComponentSystem|undefined}
     * @readonly
     */
    readonly layoutchild: LayoutChildComponentSystem | undefined;
    /**
     * Gets the {@link LayoutGroupComponentSystem} from the registry.
     *
     * @type {LayoutGroupComponentSystem|undefined}
     * @readonly
     */
    readonly layoutgroup: LayoutGroupComponentSystem | undefined;
    /**
     * Gets the {@link LightComponentSystem} from the registry.
     *
     * @type {LightComponentSystem|undefined}
     * @readonly
     */
    readonly light: LightComponentSystem | undefined;
    /**
     * Gets the {@link ModelComponentSystem} from the registry.
     *
     * @type {ModelComponentSystem|undefined}
     * @readonly
     */
    readonly model: ModelComponentSystem | undefined;
    /**
     * Gets the {@link ParticleSystemComponentSystem} from the registry.
     *
     * @type {ParticleSystemComponentSystem|undefined}
     * @readonly
     */
    readonly particlesystem: ParticleSystemComponentSystem | undefined;
    /**
     * Gets the {@link RenderComponentSystem} from the registry.
     *
     * @type {RenderComponentSystem|undefined}
     * @readonly
     */
    readonly render: RenderComponentSystem | undefined;
    /**
     * Gets the {@link RigidBodyComponentSystem} from the registry.
     *
     * @type {RigidBodyComponentSystem|undefined}
     * @readonly
     */
    readonly rigidbody: RigidBodyComponentSystem | undefined;
    /**
     * Gets the {@link ScreenComponentSystem} from the registry.
     *
     * @type {ScreenComponentSystem|undefined}
     * @readonly
     */
    readonly screen: ScreenComponentSystem | undefined;
    /**
     * Gets the {@link ScriptComponentSystem} from the registry.
     *
     * @type {ScriptComponentSystem|undefined}
     * @readonly
     */
    readonly script: ScriptComponentSystem | undefined;
    /**
     * Gets the {@link ScrollbarComponentSystem} from the registry.
     *
     * @type {ScrollbarComponentSystem|undefined}
     * @readonly
     */
    readonly scrollbar: ScrollbarComponentSystem | undefined;
    /**
     * Gets the {@link ScrollViewComponentSystem} from the registry.
     *
     * @type {ScrollViewComponentSystem|undefined}
     * @readonly
     */
    readonly scrollview: ScrollViewComponentSystem | undefined;
    /**
     * Gets the {@link SoundComponentSystem} from the registry.
     *
     * @type {SoundComponentSystem|undefined}
     * @readonly
     */
    readonly sound: SoundComponentSystem | undefined;
    /**
     * Gets the {@link SpriteComponentSystem} from the registry.
     *
     * @type {SpriteComponentSystem|undefined}
     * @readonly
     */
    readonly sprite: SpriteComponentSystem | undefined;
    /**
     * Gets the {@link ZoneComponentSystem} from the registry.
     *
     * @type {ZoneComponentSystem|undefined}
     * @readonly
     * @ignore
     */
    readonly zone: ZoneComponentSystem | undefined;
    /**
     * The list of registered component systems, maintained by {@link ComponentSystemRegistry#add}
     * and {@link ComponentSystemRegistry#remove}.
     *
     * @ignore
     */
    list: any[];
    /**
     * Add a component system to the registry.
     *
     * @param {object} system - The {@link ComponentSystem} instance.
     * @ignore
     */
    add(system: object): void;
    /**
     * Remove a component system from the registry.
     *
     * @param {object} system - The {@link ComponentSystem} instance.
     * @ignore
     */
    remove(system: object): void;
    /** Destroys the registry and its registered systems. @ignore */
    destroy(): void;
}

/**
 * Parses raw localization data into the form consumed by {@link I18n}.
 * NOTE(review): inferred from its use as I18n's `_parser` - confirm against the i18n module.
 *
 * @ignore
 */
declare class I18nParser {
    /** Validates the structure of the supplied localization data. @ignore */
    _validate(data: any): void;
    /** Parses the supplied localization data and returns it. @ignore */
    parse(data: any): any;
}

/**
 * @import { AppBase } from '../app-base.js'
 */
/**
 * Handles localization. Responsible for loading localization assets and returning translations for
 * a certain key. Can also handle plural forms. To override its default behavior define a different
 * implementation for {@link I18n#getText} and {@link I18n#getPluralText}.
 */
declare class I18n extends EventHandler {
    /**
     * Fired when the locale is changed.
     *
     * @event
     * @example
     * app.i18n.on('change', (newLocale, oldLocale) => {
     *    console.log(`Locale changed from ${oldLocale} to ${newLocale}`);
     * });
     */
    static EVENT_CHANGE: string;
    /**
     * Returns the first available locale based on the desired locale specified. First tries to
     * find the desired locale and then tries to find an alternative locale based on the language.
     *
     * @param {string} desiredLocale - The desired locale e.g. en-US.
     * @param {object} availableLocales - A dictionary where each key is an available locale.
     * @returns {string} The locale found or if no locale is available returns the default en-US
     * locale.
     * @example
     * // With a defined dictionary of locales
     * const availableLocales = { en: 'en-US', fr: 'fr-FR' };
     * const locale = pc.I18n.findAvailableLocale('en-US', availableLocales);
     * // returns 'en'
     * @ignore
     */
    static findAvailableLocale(desiredLocale: string, availableLocales: object): string;
    /**
     * Create a new I18n instance.
     *
     * @param {AppBase} app - The application.
     */
    constructor(app: AppBase);
    /**
     * Sets the current locale. For example, "en-US". Changing the locale will raise an event which
     * will cause localized Text Elements to change language to the new locale.
     *
     * @type {string}
     */
    set locale(value: string);
    /**
     * Gets the current locale.
     *
     * @type {string}
     */
    get locale(): string;
    _translations: {};
    _availableLangs: {};
    _app: AppBase;
    _assets: any[];
    _parser: I18nParser;
    /**
     * Sets the array of asset ids or assets that contain localization data in the expected format.
     * I18n will automatically load translations from these assets as the assets are loaded and it
     * will also automatically unload translations if the assets get removed or unloaded at runtime.
     *
     * @type {number[]|Asset[]}
     */
    set assets(value: number[] | Asset[]);
    /**
     * Gets the array of asset ids that contain localization data in the expected format.
     *
     * @type {number[]|Asset[]}
     */
    get assets(): number[] | Asset[];
    _locale: any;
    _lang: any;
    _pluralFn: any;
    /**
     * Returns the first available locale based on the desired locale specified. First tries to
     * find the desired locale in the loaded translations and then tries to find an alternative
     * locale based on the language.
     *
     * @param {string} desiredLocale - The desired locale e.g. en-US.
     * @returns {string} The locale found or if no locale is available returns the default en-US
     * locale.
     * @example
     * const locale = this.app.i18n.findAvailableLocale('en-US');
     */
    findAvailableLocale(desiredLocale: string): string;
    /**
     * Returns the translation for the specified key and locale. If the locale is not specified it
     * will use the current locale.
     *
     * @param {string} key - The localization key.
     * @param {string} [locale] - The desired locale.
     * @returns {string} The translated text. If no translations are found at all for the locale
     * then it will return the en-US translation. If no translation exists for that key then it will
     * return the localization key.
     * @example
     * const localized = this.app.i18n.getText('localization-key');
     * const localizedFrench = this.app.i18n.getText('localization-key', 'fr-FR');
     */
    getText(key: string, locale?: string): string;
    /**
     * Returns the pluralized translation for the specified key, number n and locale. If the locale
     * is not specified it will use the current locale.
     *
     * @param {string} key - The localization key.
     * @param {number} n - The number used to determine which plural form to use. E.g. For the
     * phrase "5 Apples" n equals 5.
     * @param {string} [locale] - The desired locale.
     * @returns {string} The translated text. If no translations are found at all for the locale
     * then it will return the en-US translation. If no translation exists for that key then it
     * will return the localization key.
     * @example
     * // manually replace {number} in the resulting translation with our number
     * const localized = this.app.i18n.getPluralText('{number} apples', number).replace("{number}", number);
     */
    getPluralText(key: string, n: number, locale?: string): string;
    /**
     * Adds localization data. If the locale and key for a translation already exists it will be
     * overwritten.
     *
     * @param {object} data - The localization data. See example for the expected format of the
     * data.
     * @example
     * this.app.i18n.addData({
     *     header: {
     *         version: 1
     *     },
     *     data: [{
     *         info: {
     *             locale: 'en-US'
     *         },
     *         messages: {
     *             "key": "translation",
     *             // The number of plural forms depends on the locale. See the manual for more information.
     *             "plural_key": ["one item", "more than one item"]
     *         }
     *     }, {
     *         info: {
     *             locale: 'fr-FR'
     *         },
     *         messages: {
     *             // ...
     *         }
     *     }]
     * });
     */
    addData(data: object): void;
    /**
     * Removes localization data.
     *
     * @param {object} data - The localization data. The data is expected to be in the same format
     * as {@link I18n#addData}.
     */
    removeData(data: object): void;
    /**
     * Frees up memory.
     */
    destroy(): void;
    /**
     * Finds a fallback locale for the given locale and language (see
     * {@link I18n#findAvailableLocale}).
     *
     * @private
     */
    _findFallbackLocale(locale: any, lang: any): any;
    /** Handles a localization asset being added to the registry. @private */
    _onAssetAdd(asset: any): void;
    /** Handles a localization asset finishing loading. @private */
    _onAssetLoad(asset: any): void;
    /** Handles a localization asset's resource changing. @private */
    _onAssetChange(asset: any): void;
    /** Handles a localization asset being removed from the registry. @private */
    _onAssetRemove(asset: any): void;
    /** Handles a localization asset being unloaded. @private */
    _onAssetUnload(asset: any): void;
}

/**
 * Holds mesh batching settings and a unique id. Created via {@link BatchManager#addGroup}.
 *
 * @category Graphics
 */
declare class BatchGroup {
    /**
     * Type tag for batch groups that batch model components.
     *
     * @type {string}
     */
    static MODEL: string;
    /**
     * Type tag for batch groups that batch element components.
     *
     * @type {string}
     */
    static ELEMENT: string;
    /**
     * Type tag for batch groups that batch sprite components.
     *
     * @type {string}
     */
    static SPRITE: string;
    /**
     * Type tag for batch groups that batch render components.
     *
     * @type {string}
     */
    static RENDER: string;
    /**
     * Create a new BatchGroup instance.
     *
     * @param {number} id - Unique id. Can be assigned to model, render and element components.
     * @param {string} name - The name of the group.
     * @param {boolean} dynamic - Whether objects within this batch group should support
     * transforming at runtime.
     * @param {number} maxAabbSize - Maximum size of any dimension of a bounding box around batched
     * objects. {@link BatchManager#prepare} will split objects into local groups based on this
     * size.
     * @param {number[]} [layers] - Layer ID array. Default is [{@link LAYERID_WORLD}]. The whole
     * batch group will belong to these layers. Layers of source models will be ignored.
     */
    constructor(id: number, name: string, dynamic: boolean, maxAabbSize: number, layers?: number[]);
    /** @private */
    private _ui;
    /** @private */
    private _sprite;
    /** @private */
    private _obj;
    /**
     * Unique id. Can be assigned to model, render and element components.
     *
     * @type {number}
     */
    id: number;
    /**
     * Name of the group.
     *
     * @type {string}
     */
    name: string;
    /**
     * Whether objects within this batch group should support transforming at runtime.
     *
     * @type {boolean}
     */
    dynamic: boolean;
    /**
     * Maximum size of any dimension of a bounding box around batched objects.
     * {@link BatchManager#prepare} will split objects into local groups based on this size.
     *
     * @type {number}
     */
    maxAabbSize: number;
    /**
     * Layer ID array. Default is [{@link LAYERID_WORLD}]. The whole batch group will belong to
     * these layers. Layers of source models will be ignored.
     *
     * @type {number[]}
     */
    layers: number[];
}

/**
 * @import { MeshInstance } from '../mesh-instance.js'
 * @import { Scene } from '../scene.js'
 */
/**
 * Holds information about batched mesh instances. Created in {@link BatchManager#create}.
 *
 * @category Graphics
 */
declare class Batch {
    /**
     * Create a new Batch instance.
     *
     * @param {MeshInstance[]} meshInstances - The mesh instances to be batched.
     * @param {boolean} dynamic - Whether this batch is dynamic (supports transforming mesh
     * instances at runtime).
     * @param {number} batchGroupId - Link this batch to a specific batch group. This is done
     * automatically with default batches.
     */
    constructor(meshInstances: MeshInstance[], dynamic: boolean, batchGroupId: number);
    /** @private */
    private _aabb;
    /**
     * An array of original mesh instances, from which this batch was generated.
     *
     * @type {MeshInstance[]}
     */
    origMeshInstances: MeshInstance[];
    /**
     * A single combined mesh instance, the result of batching.
     *
     * @type {MeshInstance}
     */
    meshInstance: MeshInstance;
    /**
     * Whether this batch is dynamic (supports transforming mesh instances at runtime).
     *
     * @type {boolean}
     */
    dynamic: boolean;
    /**
     * Link this batch to a specific batch group. This is done automatically with default batches.
     *
     * @type {number}
     */
    batchGroupId: number;
    /**
     * Removes the batch from the layers and destroys it.
     *
     * @param {Scene} scene - The scene.
     * @param {number[]} layers - The layers to remove the batch from.
     */
    destroy(scene: Scene, layers: number[]): void;
    /**
     * Adds the batch to the specified layers.
     *
     * @param {Scene} scene - The scene owning the layers.
     * @param {number[]} layers - Ids of the layers to add the batch to.
     * @ignore
     */
    addToLayers(scene: any, layers: any): void;
    /**
     * Removes the batch from the specified layers. Counterpart to {@link Batch#addToLayers}.
     *
     * @param {Scene} scene - The scene owning the layers.
     * @param {number[]} layers - Ids of the layers to remove the batch from.
     * @ignore
     */
    removeFromLayers(scene: any, layers: any): void;
    /**
     * Recalculates the bounding box of the batch (see {@link BatchManager#updateAll}, which is
     * called automatically for dynamic batches).
     *
     * @ignore
     */
    updateBoundingBox(): void;
    /**
     * @deprecated
     * @ignore
     * @type {undefined}
     */
    get model(): undefined;
}

/**
 * Glues many mesh instances into a single one for better performance.
 *
 * @category Graphics
 */
declare class BatchManager {
    /**
     * Create a new BatchManager instance.
     *
     * @param {GraphicsDevice} device - The graphics device used by the batch manager.
     * @param {Entity} root - The entity under which batched models are added.
     * @param {Scene} scene - The scene that the batch manager affects.
     */
    constructor(device: GraphicsDevice, root: Entity, scene: Scene);
    /**
     * The graphics device used by the batch manager.
     *
     * @type {GraphicsDevice}
     */
    device: GraphicsDevice;
    /**
     * The entity under which batched models are added.
     *
     * @type {Entity}
     */
    rootNode: Entity;
    /**
     * The scene that the batch manager affects.
     *
     * @type {Scene}
     */
    scene: Scene;
    /** @private */
    _init: boolean;
    /**
     * Batch groups, keyed by group id (see {@link BatchManager#addGroup}).
     *
     * @private
     */
    _batchGroups: {};
    /**
     * Counter used to auto-generate unique batch group ids.
     *
     * @private
     */
    _batchGroupCounter: number;
    /**
     * All batches created by this manager.
     *
     * @private
     */
    _batchList: any[];
    /**
     * Ids of batch groups marked dirty; these are re-batched before the next frame is rendered
     * (see {@link BatchManager#markGroupDirty}).
     *
     * @private
     */
    _dirtyGroups: any[];
    /**
     * Timing statistics for batch creation and last-frame updates.
     *
     * @private
     */
    _stats: {
        createTime: number;
        updateLastFrameTime: number;
    };
    /**
     * Destroys the batch manager.
     */
    destroy(): void;
    /**
     * Adds new global batch group.
     *
     * @param {string} name - Custom name.
     * @param {boolean} dynamic - Is this batch group dynamic? Will these objects move/rotate/scale
     * after being batched?
     * @param {number} maxAabbSize - Maximum size of any dimension of a bounding box around batched
     * objects.
     * {@link BatchManager#prepare} will split objects into local groups based on this size.
     * @param {number} [id] - Optional custom unique id for the group (will be generated
     * automatically otherwise).
     * @param {number[]} [layers] - Optional layer ID array. Default is [{@link LAYERID_WORLD}].
     * The whole batch group will belong to these layers. Layers of source models will be ignored.
     * @returns {BatchGroup} Group object.
     */
    addGroup(name: string, dynamic: boolean, maxAabbSize: number, id?: number, layers?: number[]): BatchGroup;
    /**
     * Remove global batch group by id. Note, this traverses the entire scene graph and clears the
     * batch group id from all components.
     *
     * @param {number} id - Batch Group ID.
     */
    removeGroup(id: number): void;
    /**
     * Mark a specific batch group as dirty. Dirty groups are re-batched before the next frame is
     * rendered. Note, re-batching a group is a potentially expensive operation.
     *
     * @param {number} id - Batch Group ID to mark as dirty.
     */
    markGroupDirty(id: number): void;
    /**
     * Retrieves a {@link BatchGroup} object with a corresponding name, if it exists, or null
     * otherwise.
     *
     * @param {string} name - Name.
     * @returns {BatchGroup|null} The batch group matching the name or null if not found.
     */
    getGroupByName(name: string): BatchGroup | null;
    /**
     * Retrieves a {@link BatchGroup} object with a corresponding id, if it exists, or null
     * otherwise.
     *
     * @param {number} id - The batch group id.
     * @returns {BatchGroup|null} The batch group matching the id or null if not found.
     */
    getGroupById(id: number): BatchGroup | null;
    /**
     * Return a list of all {@link Batch} objects that belong to the Batch Group supplied.
     *
     * @param {number} batchGroupId - The id of the batch group.
     * @returns {Batch[]} A list of batches that are used to render the batch group.
     * @private
     */
    private getBatches;
    /**
     * Recursively clears the given batch group id from components under the specified node (see
     * {@link BatchManager#removeGroup}).
     *
     * @private
     */
    _removeModelsFromBatchGroup(node: any, id: any): void;
    /**
     * Adds a node's component to a batch group. The type is presumably one of the
     * {@link BatchGroup} static type tags (`MODEL`, `ELEMENT`, `SPRITE`, `RENDER`) — confirm
     * against callers.
     *
     * @ignore
     */
    insert(type: any, groupId: any, node: any): void;
    /**
     * Removes a node's component from a batch group. Counterpart to {@link BatchManager#insert}.
     *
     * @ignore
     */
    remove(type: any, groupId: any, node: any): void;
    /**
     * Filter out mesh instances that have skin or morph, as these are not supported by batching.
     * If any mesh instance has skin/morph, the entire set is excluded.
     *
     * @param {MeshInstance[]} meshInstances - The mesh instances to filter.
     * @param {string} nodeName - The node name for warning messages.
     * @returns {MeshInstance[]|null} The mesh instances if none have skin/morph, or null if any do.
     * @private
     */
    private _filterBatchableInstances;
    /**
     * Extracts mesh instances from a node's render component into the group's mesh instance list.
     *
     * @private
     */
    _extractRender(node: any, arr: any, group: any, groupMeshInstances: any): any;
    /**
     * Extracts mesh instances from a node's model component into the group's mesh instance list.
     *
     * @private
     */
    _extractModel(node: any, arr: any, group: any, groupMeshInstances: any): any;
    /**
     * Extracts mesh instances from a node's element component for batching.
     *
     * @private
     */
    _extractElement(node: any, arr: any, group: any): void;
    /**
     * Collects mesh instances from all nodes belonging to the given batch groups, removing them
     * from their original locations (original models are hidden — see
     * {@link BatchManager#generate}).
     *
     * @private
     */
    _collectAndRemoveMeshInstances(groupMeshInstances: any, groupIds: any): void;
    /**
     * Destroys all batches and creates new based on scene models. Hides original models. Called by
     * engine automatically on app start, and if batchGroupIds on models are changed.
     *
     * @param {number[]} [groupIds] - Optional array of batch group IDs to update. Otherwise all
     * groups are updated.
     */
    generate(groupIds?: number[]): void;
    /**
     * Takes a list of mesh instances to be batched and sorts them into lists one for each draw
     * call. The input list will be split, if:
     *
     * - Mesh instances use different materials.
     * - Mesh instances have different parameters (e.g. lightmaps or static lights).
     * - Mesh instances have different shader defines (shadow receiving, being aligned to screen
     * space, etc).
     * - Too many vertices for a single batch (65535 is maximum).
     * - Too many instances for a single batch (hardware-dependent, expect 128 on low-end and 1024
     * on high-end).
     * - Bounding box of a batch is larger than maxAabbSize in any dimension.
     * - Mesh instances differ in shadow casting ({@link MeshInstance#castShadow}) or directional
     * shadow cascade mask ({@link MeshInstance#shadowCascadeMask}).
     *
     * @param {MeshInstance[]} meshInstances - Input list of mesh instances
     * @param {boolean} dynamic - Are we preparing for a dynamic batch? Instance count will matter
     * then (otherwise not).
     * @param {number} maxAabbSize - Maximum size of any dimension of a bounding box around batched
     * objects. This is useful to keep a balance between the number of draw calls and the number of
     * drawn triangles, because smaller batches can be hidden when not visible in camera.
     * @param {boolean} translucent - Are we batching UI elements or sprites?
     * @returns {MeshInstance[][]} An array of arrays of mesh instances, each valid to pass to
     * {@link BatchManager#create}.
     */
    prepare(meshInstances: MeshInstance[], dynamic: boolean, maxAabbSize: number, translucent: boolean): MeshInstance[][];
    /**
     * Analyzes a list of mesh instances to be batched, computing the combined vertex and index
     * counts, the set of vertex streams and the material to use.
     *
     * @ignore
     */
    collectBatchedMeshData(meshInstances: any, dynamic: any): {
        streams: {};
        batchNumVerts: number;
        batchNumIndices: number;
        material: any;
    };
    /**
     * Takes a mesh instance list that has been prepared by {@link BatchManager#prepare}, and
     * returns a {@link Batch} object. This method assumes that all mesh instances provided can be
     * rendered in a single draw call.
     *
     * @param {MeshInstance[]} meshInstances - Input list of mesh instances.
     * @param {boolean} dynamic - Is it a static or dynamic batch? Will objects be transformed
     * after batching?
     * @param {number} [batchGroupId] - Link this batch to a specific batch group. This is done
     * automatically with default batches.
     * @returns {Batch} The resulting batch object.
     */
    create(meshInstances: MeshInstance[], dynamic: boolean, batchGroupId?: number): Batch;
    /**
     * Vertex formats used when building batches — presumably a cache keyed per format; confirm
     * against the implementation of {@link BatchManager#create}.
     *
     * @ignore
     */
    vertexFormats: {};
    /**
     * Updates bounding boxes for all dynamic batches. Called automatically.
     *
     * @ignore
     */
    updateAll(): void;
    /**
     * Clones a batch. This method doesn't rebuild batch geometry, but only creates a new model and
     * batch objects, linked to different source mesh instances.
     *
     * @param {Batch} batch - A batch object.
     * @param {MeshInstance[]} clonedMeshInstances - New mesh instances.
     * @returns {Batch} New batch object.
     */
    clone(batch: Batch, clonedMeshInstances: MeshInstance[]): Batch;
    /**
     * Removes the batch model from all layers and destroys it.
     *
     * @param {Batch} batch - A batch object.
     * @private
     */
    private destroyBatch;
}

/**
 * @import { BatchManager } from '../scene/batching/batch-manager.js'
 * @import { ComponentSystem } from './components/system.js'
 * @import { ElementInput } from './input/element-input.js'
 * @import { GamePads } from '../platform/input/game-pads.js'
 * @import { GraphicsDevice } from '../platform/graphics/graphics-device.js'
 * @import { Keyboard } from '../platform/input/keyboard.js'
 * @import { Lightmapper } from './lightmapper/lightmapper.js'
 * @import { Mouse } from '../platform/input/mouse.js'
 * @import { ResourceHandler } from './handlers/handler.js'
 * @import { SoundManager } from '../platform/sound/manager.js'
 * @import { TouchDevice } from '../platform/input/touch-device.js'
 * @import { XrManager } from './xr/xr-manager.js'
 */
/**
 * AppOptions holds configuration settings utilized in the creation of an {@link AppBase} instance.
 * It allows functionality to be included or excluded from the AppBase instance.
 */
declare class AppOptions {
    /**
     * Input handler for {@link ElementComponent}s.
     *
     * @type {ElementInput}
     */
    elementInput: ElementInput;
    /**
     * Keyboard handler for input.
     *
     * @type {Keyboard}
     */
    keyboard: Keyboard;
    /**
     * Mouse handler for input.
     *
     * @type {Mouse}
     */
    mouse: Mouse;
    /**
     * TouchDevice handler for input.
     *
     * @type {TouchDevice}
     */
    touch: TouchDevice;
    /**
     * Gamepad handler for input.
     *
     * @type {GamePads}
     */
    gamepads: GamePads;
    /**
     * Prefix to apply to script urls before loading.
     *
     * @type {string}
     */
    scriptPrefix: string;
    /**
     * Prefix to apply to asset urls before loading.
     *
     * @type {string}
     */
    assetPrefix: string;
    /**
     * Scripts in order of loading first.
     *
     * @type {string[]}
     */
    scriptsOrder: string[];
    /**
     * The sound manager.
     *
     * @type {SoundManager}
     */
    soundManager: SoundManager;
    /**
     * The graphics device.
     *
     * @type {GraphicsDevice}
     */
    graphicsDevice: GraphicsDevice;
    /**
     * The lightmapper.
     *
     * @type {typeof Lightmapper}
     */
    lightmapper: typeof Lightmapper;
    /**
     * The BatchManager.
     *
     * @type {typeof BatchManager}
     */
    batchManager: typeof BatchManager;
    /**
     * The XrManager.
     *
     * @type {typeof XrManager}
     */
    xr: typeof XrManager;
    /**
     * The component systems the app requires.
     *
     * @type {typeof ComponentSystem[]}
     */
    componentSystems: (typeof ComponentSystem)[];
    /**
     * The resource handlers the app requires.
     *
     * @type {typeof ResourceHandler[]}
     */
    resourceHandlers: (typeof ResourceHandler)[];
}

/**
 * Callback used by {@link AppBase#configure} when configuration file is loaded and parsed (or an
 * error occurs).
 *
 * @param err - The error message in the case where the loading or parsing fails.
 */
type ConfigureAppCallback = (err: string | null) => void;
/**
 * Callback used by {@link AppBase#preload} when all assets (marked as 'preload') are loaded.
 */
type PreloadAppCallback = () => void;
/**
 * Callback used by {@link AppBase#start} and itself to request the rendering of a new animation
 * frame.
 *
 * @param timestamp - The timestamp supplied by requestAnimationFrame.
 * @param frame - XRFrame from requestAnimationFrame callback.
 */
type MakeTickCallback = (timestamp?: number, frame?: XRFrame) => void;
/**
 * @import { AppOptions } from './app-options.js'
 * @import { BatchManager } from '../scene/batching/batch-manager.js'
 * @import { ElementInput } from './input/element-input.js'
 * @import { GamePads } from '../platform/input/game-pads.js'
 * @import { GraphicsDevice } from '../platform/graphics/graphics-device.js'
 * @import { Keyboard } from '../platform/input/keyboard.js'
 * @import { Lightmapper } from './lightmapper/lightmapper.js'
 * @import { Material } from '../scene/materials/material.js'
 * @import { MeshInstance } from '../scene/mesh-instance.js'
 * @import { Mesh } from '../scene/mesh.js'
 * @import { Mouse } from '../platform/input/mouse.js'
 * @import { SoundManager } from '../platform/sound/manager.js'
 * @import { Texture } from '../platform/graphics/texture.js'
 * @import { TouchDevice } from '../platform/input/touch-device.js'
 * @import { XrManager } from './xr/xr-manager.js'
 */
/**
 * @callback ConfigureAppCallback
 * Callback used by {@link AppBase#configure} when configuration file is loaded and parsed (or an
 * error occurs).
 * @param {string|null} err - The error message in the case where the loading or parsing fails.
 * @returns {void}
 */
/**
 * @callback PreloadAppCallback
 * Callback used by {@link AppBase#preload} when all assets (marked as 'preload') are loaded.
 * @returns {void}
 */
/**
 * @callback MakeTickCallback
 * Callback used by {@link AppBase#start} and itself to request the rendering of a new animation
 * frame.
 * @param {number} [timestamp] - The timestamp supplied by requestAnimationFrame.
 * @param {XRFrame} [frame] - XRFrame from requestAnimationFrame callback.
 * @returns {void}
 */
/**
 * Gets the current application, if any.
 *
 * @type {AppBase|null}
 * @ignore
 */
declare let app: AppBase | null;
/**
 * AppBase represents the base functionality for all PlayCanvas applications. It is responsible for
 * initializing and managing the application lifecycle. It coordinates core engine systems such
 * as:
 *
 * - The graphics device - see {@link GraphicsDevice}.
 * - The asset registry - see {@link AssetRegistry}.
 * - The component system registry - see {@link ComponentSystemRegistry}.
 * - The scene - see {@link Scene}.
 * - Input devices - see {@link Keyboard}, {@link Mouse}, {@link TouchDevice}, and {@link GamePads}.
 * - The main update/render loop.
 *
 * Using AppBase directly requires you to register {@link ComponentSystem}s and
 * {@link ResourceHandler}s yourself. This facilitates
 * [tree-shaking](https://developer.mozilla.org/en-US/docs/Glossary/Tree_shaking) when bundling
 * your application.
 */
declare class AppBase extends EventHandler {
    static _applications: {};
    /**
     * Get the current application. In the case where there are multiple running applications, the
     * function can get an application based on a supplied canvas id. This function is particularly
     * useful when the current Application is not readily available. For example, in the JavaScript
     * console of the browser's developer tools.
     *
     * @param {string} [id] - If defined, the returned application should use the canvas which has
     * this id. Otherwise current application will be returned.
     * @returns {AppBase|undefined} The running application, if any.
     * @example
     * const app = pc.AppBase.getApplication();
     */
    static getApplication(id?: string): AppBase | undefined;
    static cancelTick(app: any): void;
    /**
     * Create a new AppBase instance.
     *
     * @param {HTMLCanvasElement | OffscreenCanvas} canvas - The canvas element.
     * @example
     * const app = new pc.AppBase(canvas);
     *
     * const options = new AppOptions();
     * app.init(options);
     *
     * // Start the application's main loop
     * app.start();
     */
    constructor(canvas: HTMLCanvasElement | OffscreenCanvas);
    /**
     * The application's batch manager.
     *
     * @type {BatchManager|null}
     * @private
     */
    private _batcher;
    /** @private */
    private _destroyRequested;
    /** @private */
    private _inFrameUpdate;
    /** @private */
    private _librariesLoaded;
    /** @private */
    private _fillMode;
    /** @private */
    private _resolutionMode;
    /** @private */
    private _allowResize;
    /**
     * @type {Asset|null}
     * @private
     */
    private _skyboxAsset;
    /**
     * @type {SoundManager}
     * @private
     */
    private _soundManager;
    /** @private */
    private _visibilityChangeHandler;
    /**
     * Stores all entities that have been created for this app by guid.
     *
     * @type {Object<string, Entity>}
     * @ignore
     */
    _entityIndex: {
        [x: string]: Entity;
    };
    /**
     * @type {boolean}
     * @ignore
     */
    _inTools: boolean;
    /**
     * @type {string}
     * @ignore
     */
    _scriptPrefix: string;
    /** @ignore */
    _time: number;
    /**
     * Set this to false if you want to run without using bundles. We set it to true only if
     * TextDecoder is available because we currently rely on it for untarring.
     *
     * @type {boolean}
     * @ignore
     */
    enableBundles: boolean;
    /**
     * A request id returned by requestAnimationFrame, allowing us to cancel it.
     *
     * @ignore
     */
    frameRequestId: any;
    /**
     * Scales the global time delta. Defaults to 1.
     *
     * @type {number}
     * @example
     * // Set the app to run at half speed
     * this.app.timeScale = 0.5;
     */
    timeScale: number;
    /**
     * Clamps per-frame delta time to an upper bound. Useful since returning from a tab
     * deactivation can generate huge values for dt, which can adversely affect game state.
     * Defaults to 0.1 (seconds).
     *
     * @type {number}
     * @example
     * // Don't clamp inter-frame times of 200ms or less
     * this.app.maxDeltaTime = 0.2;
     */
    maxDeltaTime: number;
    /**
     * The total number of frames the application has updated since start() was called.
     *
     * @type {number}
     * @ignore
     */
    frame: number;
    /**
     * The frame graph.
     *
     * @type {FrameGraph}
     * @ignore
     */
    frameGraph: FrameGraph;
    /**
     * The forward renderer.
     *
     * @type {ForwardRenderer}
     * @ignore
     */
    renderer: ForwardRenderer;
    /**
     * Scripts in order of loading first.
     *
     * @type {string[]}
     */
    scriptsOrder: string[];
    /**
     * The application's performance stats.
     *
     * @type {ApplicationStats}
     * @ignore
     */
    stats: ApplicationStats;
    /**
     * When true, the application's render function is called every frame. Setting autoRender to
     * false is useful to applications where the rendered image may often be unchanged over time.
     * This can heavily reduce the application's load on the CPU and GPU. Defaults to true.
     *
     * @type {boolean}
     * @example
     * // Disable rendering every frame and only render on a keydown event
     * this.app.autoRender = false;
     * this.app.keyboard.on('keydown', (event) => {
     *     this.app.renderNextFrame = true;
     * });
     */
    autoRender: boolean;
    /**
     * Set to true to render the scene on the next iteration of the main loop. This only has an
     * effect if {@link autoRender} is set to false. The value of renderNextFrame is set back to
     * false again as soon as the scene has been rendered.
     *
     * @type {boolean}
     * @example
     * // Render the scene only while space key is pressed
     * if (this.app.keyboard.isPressed(pc.KEY_SPACE)) {
     *     this.app.renderNextFrame = true;
     * }
     */
    renderNextFrame: boolean;
    /**
     * The graphics device used by the application.
     *
     * @type {GraphicsDevice}
     */
    graphicsDevice: GraphicsDevice;
    /**
     * The root entity of the application.
     *
     * @type {Entity}
     * @example
     * // Return the first entity called 'Camera' in a depth-first search of the scene hierarchy
     * const camera = this.app.root.findByName('Camera');
     */
    root: Entity;
    /**
     * The scene managed by the application.
     *
     * @type {Scene}
     * @example
     * // Set the fog type property of the application's scene
     * this.app.scene.fog.type = pc.FOG_LINEAR;
     */
    scene: Scene;
    /**
     * The run-time lightmapper.
     *
     * @type {Lightmapper|null}
     */
    lightmapper: Lightmapper | null;
    /**
     * The resource loader.
     *
     * @type {ResourceLoader}
     */
    loader: ResourceLoader;
    /**
     * The asset registry managed by the application.
     *
     * @type {AssetRegistry}
     * @example
     * // Search the asset registry for all assets with the tag 'vehicle'
     * const vehicleAssets = this.app.assets.findByTag('vehicle');
     */
    assets: AssetRegistry;
    /**
     * The bundle registry managed by the application.
     *
     * @type {BundleRegistry}
     * @ignore
     */
    bundles: BundleRegistry;
    /**
     * The scene registry managed by the application.
     *
     * @type {SceneRegistry}
     * @example
     * // Search the scene registry for a item with the name 'racetrack1'
     * const sceneItem = this.app.scenes.find('racetrack1');
     *
     * // Load the scene using the item's url
     * this.app.scenes.loadScene(sceneItem.url);
     */
    scenes: SceneRegistry;
    /**
     * The application's script registry.
     *
     * @type {ScriptRegistry}
     */
    scripts: ScriptRegistry;
    /**
     * The application's component system registry.
     *
     * @type {ComponentSystemRegistry}
     * @example
     * // Set global gravity to zero
     * this.app.systems.rigidbody.gravity.set(0, 0, 0);
     * @example
     * // Set the global sound volume to 50%
     * this.app.systems.sound.volume = 0.5;
     */
    systems: ComponentSystemRegistry;
    /**
     * Handles localization.
     *
     * @type {I18n}
     */
    i18n: I18n;
    /**
     * The keyboard device.
     *
     * @type {Keyboard|null}
     */
    keyboard: Keyboard | null;
    /**
     * The mouse device.
     *
     * @type {Mouse|null}
     */
    mouse: Mouse | null;
    /**
     * Used to get touch events input.
     *
     * @type {TouchDevice|null}
     */
    touch: TouchDevice | null;
    /**
     * Used to access GamePad input.
     *
     * @type {GamePads|null}
     */
    gamepads: GamePads | null;
    /**
     * Used to handle input for {@link ElementComponent}s.
     *
     * @type {ElementInput|null}
     */
    elementInput: ElementInput | null;
    /**
     * The XR Manager that provides ability to start VR/AR sessions.
     *
     * @type {XrManager|null}
     * @example
     * // check if VR is available
     * if (app.xr.isAvailable(pc.XRTYPE_VR)) {
     *     // VR is available
     * }
     */
    xr: XrManager | null;
    /**
     * Initialize the app.
     *
     * @param {AppOptions} appOptions - Options specifying the init parameters for the app.
     */
    init(appOptions: AppOptions): void;
    defaultLayerWorld: Layer;
    defaultLayerDepth: Layer;
    defaultLayerSkybox: Layer;
    defaultLayerUi: Layer;
    defaultLayerImmediate: Layer;
    _hiddenAttr: string;
    tick: MakeTickCallback;
    /** @private */
    private _initDefaultMaterial;
    /** @private */
    private _initProgramLibrary;
    /**
     * @type {SoundManager}
     * @ignore
     */
    get soundManager(): SoundManager;
    /**
     * The application's batch manager. The batch manager is used to merge mesh instances in
     * the scene, which reduces the overall number of draw calls, thereby boosting performance.
     *
     * @type {BatchManager}
     */
    get batcher(): BatchManager;
    /**
     * The current fill mode of the canvas. Can be:
     *
     * - {@link FILLMODE_NONE}: the canvas will always match the size provided.
     * - {@link FILLMODE_FILL_WINDOW}: the canvas will simply fill the window, changing aspect ratio.
     * - {@link FILLMODE_KEEP_ASPECT}: the canvas will grow to fill the window as best it can while
     * maintaining the aspect ratio.
     *
     * @type {string}
     */
    get fillMode(): string;
    /**
     * The current resolution mode of the canvas. Can be:
     *
     * - {@link RESOLUTION_AUTO}: if width and height are not provided, canvas will be resized to
     * match canvas client size.
     * - {@link RESOLUTION_FIXED}: resolution of canvas will be fixed.
     *
     * @type {string}
     */
    get resolutionMode(): string;
    /**
     * Load the application configuration file and apply application properties and fill the asset
     * registry.
     *
     * @param {string} url - The URL of the configuration file to load.
     * @param {ConfigureAppCallback} callback - The Function called when the configuration file is
     * loaded and parsed (or an error occurs).
     */
    configure(url: string, callback: ConfigureAppCallback): void;
    /**
     * Load all assets in the asset registry that are marked as 'preload'.
     *
     * @param {PreloadAppCallback} callback - Function called when all assets are loaded.
     */
    preload(callback: PreloadAppCallback): void;
    _preloadScripts(sceneData: any, callback: any): void;
    _parseApplicationProperties(props: any, callback: any): void;
    _width: any;
    _height: any;
    /**
     * @param {string[]} urls - List of URLs to load.
     * @param {Function} callback - Callback function.
     * @private
     */
    private _loadLibraries;
    /**
     * Insert scene name/urls into the registry.
     *
     * @param {*} scenes - Scenes to add to the scene registry.
     * @private
     */
    private _parseScenes;
    /**
     * Insert assets into registry.
     *
     * @param {*} assets - Assets to insert.
     * @private
     */
    private _parseAssets;
    /**
     * Start the application. This function does the following:
     *
     * 1. Fires an event on the application named 'start'
     * 2. Calls initialize for all components on entities in the hierarchy
     * 3. Fires an event on the application named 'initialize'
     * 4. Calls postInitialize for all components on entities in the hierarchy
     * 5. Fires an event on the application named 'postinitialize'
     * 6. Starts executing the main loop of the application
     *
     * This function is called internally by PlayCanvas applications made in the Editor but you
     * will need to call start yourself if you are using the engine stand-alone.
     *
     * @example
     * app.start();
     */
    start(): void;
    _alreadyStarted: boolean;
    /**
     * Request the next animation frame tick.
     *
     * @ignore
     */
    requestAnimationFrame(): void;
    /**
     * Update all input devices managed by the application.
     *
     * @param {number} dt - The time in seconds since the last update.
     * @private
     */
    private inputUpdate;
    /**
     * Update the application. This function will call the update functions and then the postUpdate
     * functions of all enabled components. It will then update the current state of all connected
     * input devices. This function is called internally in the application's main loop and does
     * not need to be called explicitly.
     *
     * @param {number} dt - The time delta in seconds since the last frame.
     */
    update(dt: number): void;
    /**
     * Render the application's scene. More specifically, the scene's {@link LayerComposition} is
     * rendered. This function is called internally in the application's main loop and does not
     * need to be called explicitly.
     *
     * @ignore
     */
    render(): void;
    /**
     * Renders a single layer composition.
     *
     * @param {*} layerComposition - The layer composition to render. NOTE(review): presumably a
     * {@link LayerComposition} — the generated type is `any`; confirm against the implementation.
     * @ignore
     */
    renderComposition(layerComposition: any): void;
    /**
     * @param {number} now - The timestamp passed to the requestAnimationFrame callback.
     * @param {number} dt - The time delta in seconds since the last frame. This is subject to the
     * application's time scale and max delta values.
     * @param {number} ms - The time in milliseconds since the last frame.
     * @private
     */
    private _fillFrameStatsBasic;
    /** @private */
    private _fillFrameStats;
    /**
     * Controls how the canvas fills the window and resizes when the window changes.
     *
     * @param {string} mode - The mode to use when setting the size of the canvas. Can be:
     *
     * - {@link FILLMODE_NONE}: the canvas will always match the size provided.
     * - {@link FILLMODE_FILL_WINDOW}: the canvas will simply fill the window, changing aspect ratio.
     * - {@link FILLMODE_KEEP_ASPECT}: the canvas will grow to fill the window as best it can while
     * maintaining the aspect ratio.
     *
     * @param {number} [width] - The width of the canvas (only used when mode is {@link FILLMODE_NONE}).
     * @param {number} [height] - The height of the canvas (only used when mode is {@link FILLMODE_NONE}).
     */
    setCanvasFillMode(mode: string, width?: number, height?: number): void;
    /**
     * Change the resolution of the canvas, and set the way it behaves when the window is resized.
     *
     * @param {string} mode - The mode to use when setting the resolution. Can be:
     *
     * - {@link RESOLUTION_AUTO}: if width and height are not provided, canvas will be resized to
     * match canvas client size.
     * - {@link RESOLUTION_FIXED}: resolution of canvas will be fixed.
     *
     * @param {number} [width] - The horizontal resolution, optional in AUTO mode, if not provided
     * canvas clientWidth is used.
     * @param {number} [height] - The vertical resolution, optional in AUTO mode, if not provided
     * canvas clientHeight is used.
     */
    setCanvasResolution(mode: string, width?: number, height?: number): void;
    /**
     * Queries the visibility of the window or tab in which the application is running.
     *
     * @returns {boolean} True if the application is not visible and false otherwise.
     */
    isHidden(): boolean;
    /**
     * Called when the visibility state of the current tab/window changes.
     *
     * @private
     */
    private onVisibilityChange;
    /**
     * Resize the application's canvas element in line with the current fill mode.
     *
     * - In {@link FILLMODE_KEEP_ASPECT} mode, the canvas will grow to fill the window as best it
     * can while maintaining the aspect ratio.
     * - In {@link FILLMODE_FILL_WINDOW} mode, the canvas will simply fill the window, changing
     * aspect ratio.
     * - In {@link FILLMODE_NONE} mode, the canvas will always match the size provided.
     *
     * @param {number} [width] - The width of the canvas. Only used if current fill mode is {@link FILLMODE_NONE}.
     * @param {number} [height] - The height of the canvas. Only used if current fill mode is {@link FILLMODE_NONE}.
     * @returns {object} An object containing the values calculated to use as width and height.
     */
    resizeCanvas(width?: number, height?: number): object;
    /**
     * Updates the {@link GraphicsDevice} canvas size to match the canvas size on the document
     * page. It is recommended to call this function when the canvas size changes (e.g on window
     * resize and orientation change events) so that the canvas resolution is immediately updated.
     */
    updateCanvasSize(): void;
    /**
     * Event handler called when all code libraries have been loaded. Code libraries are passed
     * into the constructor of the Application and the application won't start running or load
     * packs until all libraries have been loaded.
     *
     * @private
     */
    private onLibrariesLoaded;
    /**
     * Apply scene settings to the current scene. Useful when your scene settings are parsed or
     * generated from a non-URL source.
     *
     * @param {object} settings - The scene settings to be applied.
     * @param {object} settings.physics - The physics settings to be applied.
     * @param {number[]} settings.physics.gravity - The world space vector representing global
     * gravity in the physics simulation. Must be a fixed size array with three number elements,
     * corresponding to each axis [ X, Y, Z ].
     * @param {object} settings.render - The rendering settings to be applied.
     * @param {number[]} settings.render.global_ambient - The color of the scene's ambient light.
     * Must be a fixed size array with three number elements, corresponding to each color channel
     * [ R, G, B ].
     * @param {string} settings.render.fog - The type of fog used by the scene. Can be:
     *
     * - {@link FOG_NONE}
     * - {@link FOG_LINEAR}
     * - {@link FOG_EXP}
     * - {@link FOG_EXP2}
     *
     * @param {number[]} settings.render.fog_color - The color of the fog (if enabled). Must be a
     * fixed size array with three number elements, corresponding to each color channel [ R, G, B ].
     * @param {number} settings.render.fog_density - The density of the fog (if enabled). This
     * property is only valid if the fog property is set to {@link FOG_EXP} or {@link FOG_EXP2}.
     * @param {number} settings.render.fog_start - The distance from the viewpoint where linear fog
     * begins. This property is only valid if the fog property is set to {@link FOG_LINEAR}.
     * @param {number} settings.render.fog_end - The distance from the viewpoint where linear fog
     * reaches its maximum. This property is only valid if the fog property is set to {@link FOG_LINEAR}.
     * @param {number} settings.render.gamma_correction - The gamma correction to apply when
     * rendering the scene. Can be:
     *
     * - {@link GAMMA_NONE}
     * - {@link GAMMA_SRGB}
     *
     * @param {number} settings.render.tonemapping - The tonemapping transform to apply when
     * writing fragments to the frame buffer. Can be:
     *
     * - {@link TONEMAP_LINEAR}
     * - {@link TONEMAP_FILMIC}
     * - {@link TONEMAP_HEJL}
     * - {@link TONEMAP_ACES}
     * - {@link TONEMAP_ACES2}
     * - {@link TONEMAP_NEUTRAL}
     *
     * @param {number} settings.render.exposure - The exposure value tweaks the overall brightness
     * of the scene.
     * @param {number|null} [settings.render.skybox] - The asset ID of the cube map texture to be
     * used as the scene's skybox. Defaults to null.
     * @param {number} [settings.render.skyboxIntensity] - Multiplier for skybox intensity. Defaults to 1.
     * @param {number} [settings.render.skyboxLuminance] - Lux (lm/m^2) value for skybox intensity when physical light units are enabled. Defaults to 20000.
     * @param {number} [settings.render.skyboxMip] - The mip level of the skybox to be displayed. Defaults to 0.
     * Only valid for prefiltered cubemap skyboxes.
     * @param {number[]} [settings.render.skyboxRotation] - Rotation of skybox. Defaults to [0, 0, 0].
     *
     * @param {string} [settings.render.skyType] - The type of the sky. One of the SKYTYPE_* constants. Defaults to {@link SKYTYPE_INFINITE}.
     * @param {number[]} [settings.render.skyMeshPosition] - The position of sky mesh. Ignored for {@link SKYTYPE_INFINITE}. Defaults to [0, 0, 0].
     * @param {number[]} [settings.render.skyMeshRotation] - The rotation of sky mesh. Ignored for {@link SKYTYPE_INFINITE}. Defaults to [0, 0, 0].
     * @param {number[]} [settings.render.skyMeshScale] - The scale of sky mesh. Ignored for {@link SKYTYPE_INFINITE}. Defaults to [1, 1, 1].
     * @param {number[]} [settings.render.skyCenter] - The center of the sky. Ignored for {@link SKYTYPE_INFINITE}. Defaults to [0, 1, 0].
     *
     * @param {number} settings.render.lightmapSizeMultiplier - The lightmap resolution multiplier.
     * @param {number} settings.render.lightmapMaxResolution - The maximum lightmap resolution.
     * @param {number} settings.render.lightmapMode - The lightmap baking mode. Can be:
     *
     * - {@link BAKE_COLOR}: single color lightmap
     * - {@link BAKE_COLORDIR}: single color lightmap + dominant light direction (used for bump/specular)
     *
     * @param {boolean} [settings.render.lightmapFilterEnabled] - Enables bilateral filter on runtime baked color lightmaps. Defaults to false.
     * @param {number} [settings.render.lightmapFilterRange] - Sets the range parameter of the bilateral filter. Defaults to 10.
     * @param {number} [settings.render.lightmapFilterSmoothness] - Sets the spatial parameter of the bilateral filter. Defaults to 0.2.
     *
     * @param {boolean} [settings.render.ambientBake] - Enable baking ambient light into lightmaps. Defaults to false.
     * @param {number} [settings.render.ambientBakeNumSamples] - Number of samples to use when baking ambient light. Defaults to 1.
     * @param {number} [settings.render.ambientBakeSpherePart] - How much of the sphere to include when baking ambient light. Defaults to 0.4.
     * @param {number} [settings.render.ambientBakeOcclusionBrightness] - Brightness of the baked ambient occlusion. Defaults to 0.
     * @param {number} [settings.render.ambientBakeOcclusionContrast] - Contrast of the baked ambient occlusion. Defaults to 0.
     * @param {number} settings.render.ambientLuminance - Lux (lm/m^2) value for ambient light intensity.
     *
     * @param {boolean} [settings.render.clusteredLightingEnabled] - Enable clustered lighting. Defaults to false.
     * @param {boolean} [settings.render.lightingShadowsEnabled] - If set to true, the clustered lighting will support shadows. Defaults to true.
     * @param {boolean} [settings.render.lightingCookiesEnabled] - If set to true, the clustered lighting will support cookie textures. Defaults to false.
     * @param {boolean} [settings.render.lightingAreaLightsEnabled] - If set to true, the clustered lighting will support area lights. Defaults to false.
     * @param {number} [settings.render.lightingShadowAtlasResolution] - Resolution of the atlas texture storing all non-directional shadow textures. Defaults to 2048.
     * @param {number} [settings.render.lightingCookieAtlasResolution] - Resolution of the atlas texture storing all non-directional cookie textures. Defaults to 2048.
     * @param {number} [settings.render.lightingMaxLightsPerCell] - Maximum number of lights a cell can store. Defaults to 255.
     * @param {number} [settings.render.lightingShadowType] - The type of shadow filtering used by all shadows. Can be:
     *
     * - {@link SHADOW_PCF1_32F}
     * - {@link SHADOW_PCF3_32F}
     * - {@link SHADOW_PCF5_32F}
     * - {@link SHADOW_PCF1_16F}
     * - {@link SHADOW_PCF3_16F}
     * - {@link SHADOW_PCF5_16F}
     *
     * Defaults to {@link SHADOW_PCF3_32F}.
     * @param {number[]} [settings.render.lightingCells] - Number of cells along each world space axis the space containing lights
     * is subdivided into. Defaults to [10, 3, 10].
     *
     * Only lights with bakeDir=true will be used for generating the dominant light direction.
     * @example
     *
     * const settings = {
     *     physics: {
     *         gravity: [0, -9.8, 0]
     *     },
     *     render: {
     *         fog_end: 1000,
     *         tonemapping: 0,
     *         skybox: null,
     *         fog_density: 0.01,
     *         gamma_correction: 1,
     *         exposure: 1,
     *         fog_start: 1,
     *         global_ambient: [0, 0, 0],
     *         skyboxIntensity: 1,
     *         skyboxRotation: [0, 0, 0],
     *         fog_color: [0, 0, 0],
     *         lightmapMode: 1,
     *         fog: 'none',
     *         lightmapMaxResolution: 2048,
     *         skyboxMip: 2,
     *         lightmapSizeMultiplier: 16
     *     }
     * };
     * app.applySceneSettings(settings);
     */
    applySceneSettings(settings: {
        physics: {
            gravity: number[];
        };
        render: {
            global_ambient: number[];
            fog: string;
            fog_color: number[];
            fog_density: number;
            fog_start: number;
            fog_end: number;
            gamma_correction: number;
            tonemapping: number;
            exposure: number;
            skybox?: number | null;
            skyboxIntensity?: number;
            skyboxLuminance?: number;
            skyboxMip?: number;
            skyboxRotation?: number[];
            skyType?: string;
            skyMeshPosition?: number[];
            skyMeshRotation?: number[];
            skyMeshScale?: number[];
            skyCenter?: number[];
            lightmapSizeMultiplier: number;
            lightmapMaxResolution: number;
            lightmapMode: number;
            lightmapFilterEnabled?: boolean;
            lightmapFilterRange?: number;
            lightmapFilterSmoothness?: number;
            ambientBake?: boolean;
            ambientBakeNumSamples?: number;
            ambientBakeSpherePart?: number;
            ambientBakeOcclusionBrightness?: number;
            ambientBakeOcclusionContrast?: number;
            ambientLuminance: number;
            clusteredLightingEnabled?: boolean;
            lightingShadowsEnabled?: boolean;
            lightingCookiesEnabled?: boolean;
            lightingAreaLightsEnabled?: boolean;
            lightingShadowAtlasResolution?: number;
            lightingCookieAtlasResolution?: number;
            lightingMaxLightsPerCell?: number;
            lightingShadowType?: number;
            lightingCells?: number[];
        };
    }): void;
    /**
     * Sets the area light LUT tables for this app.
     *
     * @param {number[]} ltcMat1 - LUT table of type `array` to be set.
     * @param {number[]} ltcMat2 - LUT table of type `array` to be set.
     */
    setAreaLightLuts(ltcMat1: number[], ltcMat2: number[]): void;
    /**
     * Sets the skybox asset to current scene, and subscribes to asset load/change events.
     *
     * @param {Asset} asset - Asset of type `skybox` to be set to, or null to remove skybox.
     */
    setSkybox(asset: Asset): void;
    /** @private */
    private _firstBake;
    /** @private */
    private _firstBatch;
    /**
     * Provide an opportunity to modify the timestamp supplied by requestAnimationFrame.
     *
     * @param {number} [timestamp] - The timestamp supplied by requestAnimationFrame.
     * @returns {number|undefined} The modified timestamp.
     * @ignore
     */
    _processTimestamp(timestamp?: number): number | undefined;
    /**
     * Draws a single line. Line start and end coordinates are specified in world space. The line
     * will be flat-shaded with the specified color.
     *
     * @param {Vec3} start - The start world space coordinate of the line.
     * @param {Vec3} end - The end world space coordinate of the line.
     * @param {Color} [color] - The color of the line. It defaults to white if not specified.
     * @param {boolean} [depthTest] - Specifies if the line is depth tested against the depth
     * buffer. Defaults to true.
     * @param {Layer} [layer] - The layer to render the line into. Defaults to {@link LAYERID_IMMEDIATE}.
     * @example
     * // Render a 1-unit long white line
     * const start = new pc.Vec3(0, 0, 0);
     * const end = new pc.Vec3(1, 0, 0);
     * app.drawLine(start, end);
     * @example
     * // Render a 1-unit long red line which is not depth tested and renders on top of other geometry
     * const start = new pc.Vec3(0, 0, 0);
     * const end = new pc.Vec3(1, 0, 0);
     * app.drawLine(start, end, pc.Color.RED, false);
     * @example
     * // Render a 1-unit long white line into the world layer
     * const start = new pc.Vec3(0, 0, 0);
     * const end = new pc.Vec3(1, 0, 0);
     * const worldLayer = app.scene.layers.getLayerById(pc.LAYERID_WORLD);
     * app.drawLine(start, end, pc.Color.WHITE, true, worldLayer);
     */
    drawLine(start: Vec3, end: Vec3, color?: Color, depthTest?: boolean, layer?: Layer): void;
    /**
     * Renders an arbitrary number of discrete line segments. The lines are not connected by each
     * subsequent point in the array. Instead, they are individual segments specified by two
     * points. Therefore, the lengths of the supplied position and color arrays must be the same
     * and also must be a multiple of 2. The colors of the ends of each line segment will be
     * interpolated along the length of each line.
     *
     * @param {Vec3[]} positions - An array of points to draw lines between. The length of the
     * array must be a multiple of 2.
     * @param {Color[] | Color} colors - An array of colors or a single color. If an array is
     * specified, this must be the same length as the position array. The length of the array
     * must also be a multiple of 2.
     * @param {boolean} [depthTest] - Specifies if the lines are depth tested against the depth
     * buffer. Defaults to true.
     * @param {Layer} [layer] - The layer to render the lines into. Defaults to {@link LAYERID_IMMEDIATE}.
     * @example
     * // Render a single line, with unique colors for each point
     * const start = new pc.Vec3(0, 0, 0);
     * const end = new pc.Vec3(1, 0, 0);
     * app.drawLines([start, end], [pc.Color.RED, pc.Color.WHITE]);
     * @example
     * // Render 2 discrete line segments
     * const points = [
     *     // Line 1
     *     new pc.Vec3(0, 0, 0),
     *     new pc.Vec3(1, 0, 0),
     *     // Line 2
     *     new pc.Vec3(1, 1, 0),
     *     new pc.Vec3(1, 1, 1)
     * ];
     * const colors = [
     *     // Line 1
     *     pc.Color.RED,
     *     pc.Color.YELLOW,
     *     // Line 2
     *     pc.Color.CYAN,
     *     pc.Color.BLUE
     * ];
     * app.drawLines(points, colors);
     */
    drawLines(positions: Vec3[], colors: Color[] | Color, depthTest?: boolean, layer?: Layer): void;
    /**
     * Renders an arbitrary number of discrete line segments. The lines are not connected by each
     * subsequent point in the array. Instead, they are individual segments specified by two
     * points.
     *
     * @param {number[]} positions - An array of points to draw lines between. Each point is
     * represented by 3 numbers - x, y and z coordinate.
     * @param {number[]|Color} colors - A single color for all lines, or an array of colors to color
     * the lines. If an array is specified, number of colors it stores must match the number of
     * positions provided.
     * @param {boolean} [depthTest] - Specifies if the lines are depth tested against the depth
     * buffer. Defaults to true.
     * @param {Layer} [layer] - The layer to render the lines into. Defaults to {@link LAYERID_IMMEDIATE}.
     * @example
     * // Render 2 discrete line segments
     * const points = [
     *     // Line 1
     *     0, 0, 0,
     *     1, 0, 0,
     *     // Line 2
     *     1, 1, 0,
     *     1, 1, 1
     * ];
     * const colors = [
     *     // Line 1
     *     1, 0, 0, 1,  // red
     *     0, 1, 0, 1,  // green
     *     // Line 2
     *     0, 0, 1, 1,  // blue
     *     1, 1, 1, 1   // white
     * ];
     * app.drawLineArrays(points, colors);
     */
    drawLineArrays(positions: number[], colors: number[] | Color, depthTest?: boolean, layer?: Layer): void;
    /**
     * Draws a wireframe sphere with center, radius and color.
     *
     * @param {Vec3} center - The center of the sphere.
     * @param {number} radius - The radius of the sphere.
     * @param {Color} [color] - The color of the sphere. It defaults to white if not specified.
     * @param {number} [segments] - Number of line segments used to render the circles forming the
     * sphere. Defaults to 20.
     * @param {boolean} [depthTest] - Specifies if the sphere lines are depth tested against the
     * depth buffer. Defaults to true.
     * @param {Layer} [layer] - The layer to render the sphere into. Defaults to {@link LAYERID_IMMEDIATE}.
     * @example
     * // Render a red wire sphere with radius of 1
     * const center = new pc.Vec3(0, 0, 0);
     * app.drawWireSphere(center, 1.0, pc.Color.RED);
     * @ignore
     */
    drawWireSphere(center: Vec3, radius: number, color?: Color, segments?: number, depthTest?: boolean, layer?: Layer): void;
    /**
     * Draws a wireframe axis aligned box specified by min and max points and color.
     *
     * @param {Vec3} minPoint - The min corner point of the box.
     * @param {Vec3} maxPoint - The max corner point of the box.
     * @param {Color} [color] - The color of the box. It defaults to white if not specified.
     * @param {boolean} [depthTest] - Specifies if the box lines are depth tested against the
     * depth buffer. Defaults to true.
     * @param {Layer} [layer] - The layer to render the box into. Defaults to {@link LAYERID_IMMEDIATE}.
     * @param {Mat4} [mat] - Matrix to transform the box before rendering.
     * @example
     * // Render a red wire aligned box
     * const min = new pc.Vec3(-1, -1, -1);
     * const max = new pc.Vec3(1, 1, 1);
     * app.drawWireAlignedBox(min, max, pc.Color.RED);
     * @ignore
     */
    drawWireAlignedBox(minPoint: Vec3, maxPoint: Vec3, color?: Color, depthTest?: boolean, layer?: Layer, mat?: Mat4): void;
    /**
     * Draws a mesh instance for the current frame only.
     *
     * @param {MeshInstance} meshInstance - The mesh instance to draw.
     * @param {Layer} [layer] - The layer to render the mesh instance into. Defaults to
     * {@link LAYERID_IMMEDIATE}.
     * @ignore
     */
    drawMeshInstance(meshInstance: MeshInstance, layer?: Layer): void;
    /**
     * Draw mesh at this frame.
     *
     * @param {Mesh} mesh - The mesh to draw.
     * @param {Material} material - The material to use to render the mesh.
     * @param {Mat4} matrix - The matrix to use to render the mesh.
     * @param {Layer} [layer] - The layer to render the mesh into. Defaults to {@link LAYERID_IMMEDIATE}.
     * @ignore
     */
    drawMesh(mesh: Mesh, material: Material, matrix: Mat4, layer?: Layer): void;
    /**
     * Draw quad of size [-0.5, 0.5] at this frame.
     *
     * @param {Mat4} matrix - The matrix to use to render the quad.
     * @param {Material} material - The material to use to render the quad.
     * @param {Layer} [layer] - The layer to render the quad into. Defaults to {@link LAYERID_IMMEDIATE}.
     * @ignore
     */
    drawQuad(matrix: Mat4, material: Material, layer?: Layer): void;
    /**
     * Draws a texture at [x, y] position on screen, with size [width, height]. The origin of the
     * screen is top-left [0, 0]. Coordinates and sizes are in projected space (-1 .. 1).
     *
     * @param {number} x - The x coordinate on the screen of the center of the texture.
     * Should be in the range [-1, 1].
     * @param {number} y - The y coordinate on the screen of the center of the texture.
     * Should be in the range [-1, 1].
     * @param {number} width - The width of the rectangle of the rendered texture. Should be in the
     * range [0, 2].
     * @param {number} height - The height of the rectangle of the rendered texture. Should be in
     * the range [0, 2].
     * @param {Texture} texture - The texture to render.
     * @param {Material} material - The material used when rendering the texture.
     * @param {Layer} [layer] - The layer to render the texture into. Defaults to {@link LAYERID_IMMEDIATE}.
     * @param {boolean} [filterable] - Indicate if the texture can be sampled using filtering.
     * Passing false uses unfiltered sampling, allowing a depth texture to be sampled on WebGPU.
     * Defaults to true.
     * @ignore
     */
    drawTexture(x: number, y: number, width: number, height: number, texture: Texture, material: Material, layer?: Layer, filterable?: boolean): void;
    /**
     * Draws a depth texture at [x, y] position on screen, with size [width, height]. The origin of
     * the screen is top-left [0, 0]. Coordinates and sizes are in projected space (-1 .. 1).
     *
     * @param {number} x - The x coordinate on the screen of the center of the texture.
     * Should be in the range [-1, 1].
     * @param {number} y - The y coordinate on the screen of the center of the texture.
     * Should be in the range [-1, 1].
     * @param {number} width - The width of the rectangle of the rendered texture. Should be in the
     * range [0, 2].
     * @param {number} height - The height of the rectangle of the rendered texture. Should be in
     * the range [0, 2].
     * @param {Layer} [layer] - The layer to render the texture into. Defaults to {@link LAYERID_IMMEDIATE}.
     * @ignore
     */
    drawDepthTexture(x: number, y: number, width: number, height: number, layer?: Layer): void;
    /**
     * Destroys application and removes all event listeners at the end of the current engine frame
     * update. However, if called outside of the engine frame update, calling destroy() will
     * destroy the application immediately.
     *
     * @example
     * app.destroy();
     */
    destroy(): void;
    // Handle for a subscription to a gsplat "sorted" event. NOTE(review): semantics inferred from
    // the name — confirm against the implementation.
    _gsplatSortedEvt: EventHandle;
    // Loosely typed members in the generated declarations. NOTE(review): `context` appears to be
    // a legacy alias for the application itself and `controller` an input controller — confirm;
    // both are emitted as `any`.
    controller: any;
    context: any;
    /**
     * Get entity from the index by guid.
     *
     * @param {string} guid - The GUID to search for.
     * @returns {Entity} The Entity with the GUID or null.
     * @ignore
     */
    getEntityFromIndex(guid: string): Entity;
    /**
     * @param {Scene} scene - The scene.
     * @private
     */
    private _registerSceneImmediate;
}

/**
 * Internal batched 2D quad renderer. NOTE(review): appears to back the MiniStats overlay
 * (graph/words textures in render()) — inferred from the surrounding MiniStats types; confirm.
 *
 * @ignore
 */
declare class Render2d {
    /**
     * @param {*} device - The graphics device used to create the GPU resources below.
     * @param {number} [maxQuads] - Capacity of the quad batch.
     */
    constructor(device: any, maxQuads?: number);
    device: any;
    maxQuads: number;
    // GPU geometry backing the quad batch.
    buffer: VertexBuffer;
    // CPU-side vertex data; presumably filled by quad() and uploaded before drawing — confirm.
    data: Float32Array<ArrayBuffer>;
    indexBuffer: IndexBuffer;
    // Primitive description for the draw call.
    prim: {
        type: number;
        indexed: boolean;
        base: number;
        baseVertex: number;
        count: number;
    };
    // Number of quads currently written into the batch.
    quads: number;
    mesh: Mesh;
    material: ShaderMaterial;
    meshInstance: MeshInstance;
    // Shader uniform storage (clr = color).
    uniforms: {
        clr: Float32Array<ArrayBuffer>;
    };
    // Size of the current render target. NOTE(review): usage inferred from the name — confirm.
    targetSize: {
        width: any;
        height: any;
    };
    /**
     * Appends a textured quad (position x/y, size w/h, UV rectangle u/v/uw/uh) to the batch.
     * NOTE(review): `wordFlag` presumably selects the words texture over the graph texture
     * (see render()) — confirm.
     */
    quad(x: any, y: any, w: any, h: any, u: any, v: any, uw: any, uh: any, texture: any, wordFlag?: number): void;
    /** Begins a new frame of quad batching. NOTE(review): inferred from the name — confirm. */
    startFrame(): void;
    /** Submits the batched quads for rendering into the given layer. */
    render(app: any, layer: any, graphTexture: any, wordsTexture: any, clr: any, height: any): void;
}

/**
 * A size preset for the MiniStats overlay: the area each graph is rendered into, the spacing
 * between graphs, and whether graphs are shown at this size.
 */
type MiniStatsSizeOptions = {
    /**
     * - Width of the graph area.
     */
    width: number;
    /**
     * - Height of the graph area.
     */
    height: number;
    /**
     * - Spacing between graphs.
     */
    spacing: number;
    /**
     * - Whether to show graphs.
     */
    graphs: boolean;
};
/**
 * Options for one of the built-in processor (CPU/GPU) graphs shown by MiniStats.
 */
type MiniStatsProcessorOptions = {
    /**
     * - Whether to show the graph.
     */
    enabled: boolean;
    /**
     * - Watermark - shown as a line on the graph, useful for displaying a
     * budget.
     */
    watermark: number;
};
/**
 * Options describing an additional user-defined MiniStats graph whose values are read from
 * Application.stats.
 */
type MiniStatsGraphOptions = {
    /**
     * - Display name.
     */
    name: string;
    /**
     * - Path to data inside Application.stats.
     */
    stats: string[];
    /**
     * - Number of decimal places (defaults to none).
     */
    decimalPlaces?: number;
    /**
     * - Units (defaults to "").
     */
    unitsName?: string;
    /**
     * - Watermark - shown as a line on the graph, useful for displaying
     * a budget.
     */
    watermark?: number;
};
/**
 * Top-level configuration object accepted by the MiniStats constructor.
 */
type MiniStatsOptions = {
    /**
     * - Sizes of area to render individual graphs in and
     * spacing between individual graphs.
     */
    sizes: MiniStatsSizeOptions[];
    /**
     * - Index into sizes array for initial setting.
     */
    startSizeIndex: number;
    /**
     * - Refresh rate of text stats in ms.
     */
    textRefreshRate: number;
    /**
     * - CPU graph options.
     */
    cpu: MiniStatsProcessorOptions;
    /**
     * - GPU graph options.
     */
    gpu: MiniStatsProcessorOptions;
    /**
     * - Array of options to render additional graphs based
     * on stats collected into Application.stats.
     */
    stats: MiniStatsGraphOptions[];
    /**
     * - Minimum size index at which to show GPU pass timing
     * graphs. Defaults to 1.
     */
    gpuTimingMinSize?: number;
    /**
     * - Minimum size index at which to show CPU sub-timing
     * graphs (script, anim, physics, render). Defaults to 1.
     */
    cpuTimingMinSize?: number;
    /**
     * - Minimum size index at which to show VRAM subcategory
     * graphs. Defaults to 1.
     */
    vramTimingMinSize?: number;
};
/**
 * @typedef {object} MiniStatsSizeOptions
 * @property {number} width - Width of the graph area.
 * @property {number} height - Height of the graph area.
 * @property {number} spacing - Spacing between graphs.
 * @property {boolean} graphs - Whether to show graphs.
 */
/**
 * @typedef {object} MiniStatsProcessorOptions
 * @property {boolean} enabled - Whether to show the graph.
 * @property {number} watermark - Watermark - shown as a line on the graph, useful for displaying a
 * budget.
 */
/**
 * @typedef {object} MiniStatsGraphOptions
 * @property {string} name - Display name.
 * @property {string[]} stats - Path to data inside Application.stats.
 * @property {number} [decimalPlaces] - Number of decimal places (defaults to none).
 * @property {string} [unitsName] - Units (defaults to "").
 * @property {number} [watermark] - Watermark - shown as a line on the graph, useful for displaying
 * a budget.
 */
/**
 * @typedef {object} MiniStatsOptions
 * @property {MiniStatsSizeOptions[]} sizes - Sizes of area to render individual graphs in and
 * spacing between individual graphs.
 * @property {number} startSizeIndex - Index into sizes array for initial setting.
 * @property {number} textRefreshRate - Refresh rate of text stats in ms.
 * @property {MiniStatsProcessorOptions} cpu - CPU graph options.
 * @property {MiniStatsProcessorOptions} gpu - GPU graph options.
 * @property {MiniStatsGraphOptions[]} stats - Array of options to render additional graphs based
 * on stats collected into Application.stats.
 * @property {number} [gpuTimingMinSize] - Minimum size index at which to show GPU pass timing
 * graphs. Defaults to 1.
 * @property {number} [cpuTimingMinSize] - Minimum size index at which to show CPU sub-timing
 * graphs (script, anim, physics, render). Defaults to 1.
 * @property {number} [vramTimingMinSize] - Minimum size index at which to show VRAM subcategory
 * graphs. Defaults to 1.
 */
/**
 * MiniStats is a small graphical overlay that displays realtime performance metrics. By default,
 * it shows CPU and GPU utilization, frame timings and draw call count. It can also be configured
 * to display additional graphs based on data collected into {@link AppBase#stats}.
 */
declare class MiniStats {
    /**
     * Predefined stat groups that can be included via {@link MiniStats.getDefaultOptions}. Each
     * key maps to an array of {@link MiniStatsGraphOptions} entries that are inserted after the
     * 'Frame' stat in the default options.
     *
     * @type {Object<string, MiniStatsGraphOptions[]>}
     * @ignore
     */
    static statPresets: {
        [x: string]: MiniStatsGraphOptions[];
    };
    /**
     * Returns the default options for MiniStats. The default options configure the overlay to
     * show the following graphs:
     *
     * - CPU utilization
     * - GPU utilization
     * - Overall frame time
     * - Draw call count
     * - Total VRAM usage
     *
     * @param {string[]} [extraStats] - Optional array of preset names from
     * {@link MiniStats.statPresets} to include. The preset stats are inserted after the 'Frame'
     * entry. Can be: 'gsplats', 'gsplatsCopy'.
     * @returns {object} The default options for MiniStats.
     * @example
     * // default options without extra stats
     * const options = pc.MiniStats.getDefaultOptions();
     * @example
     * // include gsplat stats
     * const options = pc.MiniStats.getDefaultOptions(['gsplats', 'gsplatsCopy']);
     */
    static getDefaultOptions(extraStats?: string[]): object;
    /**
     * Create a new MiniStats instance.
     *
     * @param {AppBase} app - The application.
     * @param {MiniStatsOptions} [options] - Options for the MiniStats instance.
     * @example
     * // create a new MiniStats instance using default options
     * const miniStats = new pc.MiniStats(app);
     */
    constructor(app: AppBase, options?: MiniStatsOptions);
    // Maps graphs to their allocated texture rows (see allocateRow/freeRow).
    graphRows: Map<any, any>;
    // Pool of freed texture rows available for reuse by allocateRow.
    freeRows: any[];
    // Next fresh row index to allocate when no free row exists — presumably; confirm.
    nextRowIndex: number;
    // Available overlay sizes; selected via activeSizeIndex.
    sizes: MiniStatsSizeOptions[];
    // Texture atlas containing the glyphs used to render stat text.
    wordAtlas: WordAtlas;
    // Backing field for the activeSizeIndex accessor.
    _activeSizeIndex: number;
    /**
     * Sets the opacity of the MiniStats overlay.
     *
     * @type {number}
     * @ignore
     */
    set opacity(value: number);
    /**
     * Gets the opacity of the MiniStats overlay.
     *
     * @type {number}
     * @ignore
     */
    get opacity(): number;
    /**
     * Sets the active size index. Setting the active size index will resize the overlay to the
     * size specified by the corresponding entry in the sizes array.
     *
     * @type {number}
     * @ignore
     */
    set activeSizeIndex(value: number);
    /**
     * Gets the active size index.
     *
     * @type {number}
     * @ignore
     */
    get activeSizeIndex(): number;
    app: AppBase;
    // Layer into which the overlay quads are drawn.
    drawLayer: Layer;
    device: GraphicsDevice;
    // Internal 2D quad renderer used to draw graphs and text.
    render2d: Render2d;
    // DOM element used for overlay placement (see updateDiv).
    div: HTMLDivElement;
    width: number;
    height: number;
    // Spacing between graphs for the current size.
    gspacing: number;
    // Overlay color — presumably RGBA components; confirm.
    clr: number[];
    // Backing field for the enabled accessor.
    _enabled: boolean;
    // Minimum size index at which GPU pass timing graphs are shown.
    gpuTimingMinSize: number;
    // Per-pass GPU timing sub-graphs (see updateSubStats).
    gpuPassGraphs: Map<any, any>;
    // Minimum size index at which CPU sub-timing graphs are shown.
    cpuTimingMinSize: number;
    // CPU timing sub-graphs (see updateSubStats).
    cpuGraphs: Map<any, any>;
    // Minimum size index at which VRAM subcategory graphs are shown.
    vramTimingMinSize: number;
    // VRAM subcategory sub-graphs.
    vramGraphs: Map<any, any>;
    // Frame counter — presumably used to throttle text refresh via textRefreshRate; confirm.
    frameIndex: number;
    // Refresh rate of text stats in ms.
    textRefreshRate: number;
    /**
     * Destroy the MiniStats instance.
     *
     * @example
     * miniStats.destroy();
     */
    destroy(): void;
    /**
     * Gets the overall height of the MiniStats overlay.
     *
     * @type {number}
     * @ignore
     */
    get overallHeight(): number;
    /**
     * Sets the enabled state of the MiniStats overlay.
     *
     * @type {boolean}
     */
    set enabled(value: boolean);
    /**
     * Gets the enabled state of the MiniStats overlay.
     *
     * @type {boolean}
     */
    get enabled(): boolean;
    /**
     * Create the graphs requested by the user and add them to the MiniStats instance.
     *
     * @param {AppBase} app - The application.
     * @param {GraphicsDevice} device - The graphics device.
     * @param {object} options - Options for the MiniStats instance.
     * @private
     */
    private initGraphs;
    // All graphs currently shown by the overlay.
    graphs: any[];
    // Texture holding graph data — one row per graph (see allocateRow/ensureTextureHeight).
    texture: Texture;
    /**
     * Render the MiniStats overlay. This is called automatically when the `postrender` event is
     * fired by the application.
     *
     * @private
     */
    private render;
    /**
     * Resize the MiniStats overlay.
     *
     * @param {number} width - The new width.
     * @param {number} height - The new height.
     * @param {boolean} showGraphs - Whether to show the graphs.
     * @private
     */
    private resize;
    /**
     * Update the size and position of the MiniStats overlay. This is called automatically when the
     * `resizecanvas` event is fired by the graphics device.
     *
     * @private
     */
    private updateDiv;
    /**
     * Called when the graphics device is lost.
     *
     * @private
     */
    private loseContext;
    /**
     * Update sub-stat graphs (GPU passes or CPU timings).
     * @param {Map} subGraphs - Map to store graph data (gpuPassGraphs or cpuGraphs)
     * @param {string} mainGraphName - Name of main graph ('GPU' or 'CPU')
     * @param {Map<string,number>|Object} stats - Stats data (Map for GPU, object for CPU)
     * @param {string} statPathPrefix - Prefix for stat path ('gpu' for GPU, 'frame' for CPU)
     * @param {number} removeAfterFrames - Frames of zero before removal
     * @private
     */
    private updateSubStats;
    /**
     * Allocates a texture row for a graph. Reuses free rows when available.
     *
     * @param {Graph} graph - The graph to allocate a row for.
     * @returns {number} The allocated row index.
     * @private
     */
    private allocateRow;
    /**
     * Frees a texture row when a graph is destroyed.
     *
     * @param {Graph} graph - The graph whose row to free.
     * @private
     */
    private freeRow;
    /**
     * Remove all sub-stat graphs from a tracking map when collapsing below a size threshold.
     *
     * @param {Map} subGraphs - The sub-graph map to clear.
     * @param {string} [mainGraphName] - If provided, reset the main graph's graphType.
     * @param {number} [graphType] - The graphType value to restore on the main graph.
     * @private
     */
    private clearSubGraphs;
    /**
     * Ensures the texture has enough rows. Only grows, never shrinks.
     *
     * @param {number} requiredRows - The minimum number of rows needed.
     * @private
     */
    private ensureTextureHeight;
    /**
     * Called when the `postrender` event is fired by the application.
     *
     * @private
     */
    private postRender;
}

/**
 * Class responsible for rendering color outlines around objects in the scene.
 *
 * @category Graphics
 */
declare class OutlineRenderer {
    /**
     * Create a new OutlineRenderer.
     *
     * @param {AppBase} app - The application.
     * @param {Layer} [renderingLayer] - A layer used internally to render the outlines. If not
     * provided, the renderer will use the 'Immediate' layer. This needs to be supplied only if the
     * 'Immediate' layer is not present in the scene.
     * @param {number} [priority] - The priority of the camera rendering the outlines. Should be
     * smaller value than the priority of the scene camera, to be updated first. Defaults to -1.
     */
    constructor(app: AppBase, renderingLayer?: Layer, priority?: number);
    app: AppBase;
    // Layer used internally to render the outlines (see constructor).
    renderingLayer: Layer;
    // Render target the outline camera renders into.
    rt: RenderTarget;
    // Camera entity used to render outlined objects (priority set via constructor).
    outlineCameraEntity: Entity;
    outlineShaderPass: number;
    // Callback invoked after the outline camera renders — presumably wired to camera events; confirm.
    postRender: (cameraComponent: any) => void;
    tempRt: RenderTarget;
    blendState: BlendState;
    // Shader used to extend (dilate) the rendered outlines — presumably; confirm.
    shaderExtend: Shader;
    // Shader used by blendOutlines to composite outlines into the scene.
    shaderBlend: Shader;
    quadRenderer: QuadRender;
    whiteTex: Texture;
    /**
     * Destroy the outline renderer and its resources.
     */
    destroy(): void;
    // Collects the MeshInstances of an entity (and its descendants when recursive). @ignore
    getMeshInstances(entity: any, recursive: any): any[];
    /**
     * Add an entity to the outline renderer.
     *
     * @param {Entity} entity - The entity to add. All MeshInstance of the entity and its
     * descendants will be added.
     * @param {Color} color - The color of the outline.
     * @param {boolean} [recursive] - Whether to add MeshInstances of the entity's descendants.
     * Defaults to true.
     */
    addEntity(entity: Entity, color: Color, recursive?: boolean): void;
    /**
     * Remove an entity from the outline renderer.
     *
     * @param {Entity} entity - The entity to remove.
     * @param {boolean} [recursive] - Whether to remove MeshInstances of the entity's descendants.
     * Defaults to true.
     */
    removeEntity(entity: Entity, recursive?: boolean): void;
    /** Removes all entities previously added to the outline renderer. */
    removeAllEntities(): void;
    // Composites the rendered outlines into the blend layer. @ignore
    blendOutlines(): void;
    // Internal post-render hook. @ignore
    onPostRender(): void;
    // Creates a render target of the given size. @ignore
    createRenderTarget(name: any, width: any, height: any, depth: any): RenderTarget;
    // Keeps the render target in sync with the scene camera — presumably its resolution; confirm. @ignore
    updateRenderTarget(sceneCamera: any): void;
    /**
     * Update the outline renderer. Should be called once per frame.
     *
     * @param {Entity} sceneCameraEntity - The camera used to render the scene, which is used to provide
     * the camera properties to the outline rendering camera.
     * @param {Layer} blendLayer - The layer in which the outlines should be rendered.
     * @param {boolean} blendLayerTransparent - Whether the blend layer is transparent.
     */
    frameUpdate(sceneCameraEntity: Entity, blendLayer: Layer, blendLayerTransparent: boolean): void;
}

/**
 * @import { Color } from '../../core/math/color.js'
 */
/**
 * The base class for the exporters, implementing shared functionality.
 *
 * @category Exporter
 * @ignore
 */
declare class CoreExporter {
    /**
     * Converts a texture to a canvas.
     *
     * @param {Texture} texture - The source texture to be converted.
     * @param {object} [options] - Object for passing optional arguments.
     * @param {Color} [options.color] - The tint color to modify the texture with.
     * @param {number} [options.maxTextureSize] - Maximum texture size. Texture is resized if over the size.
     * @returns {Promise<HTMLCanvasElement>|Promise<undefined>} - The canvas element containing the image.
     *
     * @ignore
     */
    textureToCanvas(texture: Texture, options?: {
        color?: Color;
        maxTextureSize?: number;
    }): Promise<HTMLCanvasElement> | Promise<undefined>;
    // Computes the output size for a texture — presumably clamped to maxTextureSize while
    // preserving aspect ratio; confirm against implementation. @ignore
    calcTextureSize(width: any, height: any, maxTextureSize: any): {
        width: any;
        height: any;
    };
}

/**
 * Implementation of the USDZ format exporter. Note that ASCII version of the format (USDA) is used.
 *
 * @category Exporter
 */
declare class UsdzExporter extends CoreExporter {
    /**
     * Maps a mesh to a reference (path) inside the usdz container
     *
     * @type {Map<Mesh, string>}
     * @ignore
     */
    meshMap: Map<Mesh, string>;
    /**
     * Maps a material to a reference (path) inside the usdz container
     *
     * @type {Map<Material, string>}
     * @ignore
     */
    materialMap: Map<Material, string>;
    /**
     * A list of generated material usda contents, which are processed at the end
     *
     * @ignore
     */
    materials: any;
    /**
     * A map of texture requests
     *
     * @type {Map<Texture, string>}
     * @ignore
     */
    textureMap: Map<Texture, string>;
    /**
     * A set of used node names. Used in order to keep them unique.
     *
     * @type {Set<string>}
     * @ignore
     */
    nodeNames: Set<string>;
    /**
     * An object, storing a mapping between the file name and its content. Used as input to fflate to
     * zip up the data.
     *
     * @type {object}
     * @ignore
     */
    files: object;
    // Initializes exporter state before a build — presumably resets the maps above; confirm. @ignore
    init(): void;
    // Finalizes exporter state after a build — presumably releases the maps above; confirm. @ignore
    done(): void;
    /**
     * Converts a hierarchy of entities to USDZ format.
     *
     * @param {Entity} entity - The root of the entity hierarchy to convert.
     * @param {object} [options] - Object for passing optional arguments.
     * @param {number} [options.maxTextureSize] - Maximum texture size. Texture is resized if over
     * the size.
     * @returns {Promise<ArrayBuffer>} - The USDZ file content.
     */
    build(entity: Entity, options?: {
        maxTextureSize?: number;
    }): Promise<ArrayBuffer>;
    // Internal helpers below generate usda content and manage file/reference naming. @ignore
    alignFiles(): void;
    getFileIds(category: any, name: any, ref: any, extension?: string): {
        name: any;
        fileName: string;
        refName: string;
    };
    getTextureFileIds(texture: any): {
        name: any;
        fileName: string;
        refName: string;
    };
    addFile(category: any, uniqueId: any, refName?: string, content?: string): string;
    getMaterialRef(material: any): string;
    getMeshRef(mesh: any): string;
    buildArray2(array: any): string;
    buildArray3(array: any): string;
    buildMat4(mat: any): string;
    buildMaterial(material: any): string;
    buildMesh(mesh: any): string;
    buildMeshInstance(meshInstance: any): string;
}

/**
 * Implementation of the GLTF 2.0 format exporter.
 *
 * @category Exporter
 */
declare class GltfExporter extends CoreExporter {
    // Writes a glTF bufferView entry for the given buffer into the json document. @ignore
    static writeBufferView(resources: any, json: any, buffer: any): void;
    // Creates a glTF primitive (attribute accessors) for a mesh. @ignore
    static createPrimitive(resources: any, json: any, mesh: any, options?: {}): {
        attributes: {};
    };
    /**
     * @ignore
     */
    collectResources(root: any): {
        buffers: any[];
        cameras: any[];
        entities: any[];
        materials: any[];
        skins: any[];
        textures: any[];
        entityMeshInstances: any[];
        bufferViewMap: Map<any, any>;
        compressableTexture: Set<any>;
    };
    // Internal helpers below write the individual glTF document sections. @ignore
    writeBufferViews(resources: any, json: any): void;
    writeCameras(resources: any, json: any): void;
    attachTexture(resources: any, material: any, destination: any, name: any, textureSemantic: any, json: any): void;
    addExtension(json: any, output: any, name: any, data?: {}): void;
    writeStandardMaterial(resources: any, mat: any, output: any, json: any): void;
    writeMaterials(resources: any, json: any): void;
    writeNodes(resources: any, json: any): void;
    writeMeshes(resources: any, json: any, options: any): void;
    writeSkins(resources: any, json: any): void;
    convertTextures(srcTextures: any, options: any): any[];
    writeTextures(resources: any, textureCanvases: any, json: any, options: any): Promise<any[]>;
    getBlob(canvas: any, mimeType: any): any;
    getPaddedArrayBuffer(arrayBuffer: any, paddingByte?: number): any;
    // Assembles the complete glTF json document from collected resources. @ignore
    buildJson(resources: any, options: any): Promise<{
        asset: {
            version: string;
            generator: string;
        };
        scenes: {
            nodes: number[];
        }[];
        images: any[];
        samplers: any[];
        textures: any[];
        scene: number;
    }>;
    /**
     * Converts a hierarchy of entities to GLB format.
     *
     * @param {Entity} entity - The root of the entity hierarchy to convert.
     * @param {object} [options] - Object for passing optional arguments.
     * @param {number} [options.maxTextureSize] - Maximum texture size. Texture is resized if over the size.
     * @param {boolean} [options.stripUnusedAttributes] - If true, removes unused vertex attributes:
     *
     * - Texture coordinates not referenced by materials
     * - Vertex colors if not used by materials
     * - Tangents if no normal maps are used
     * - Skinning data if no skinned meshes exist
     *
     * Defaults to false.
     * @returns {Promise<ArrayBuffer>} - The GLB file content.
     */
    build(entity: Entity, options?: {
        maxTextureSize?: number;
        stripUnusedAttributes?: boolean;
    }): Promise<ArrayBuffer>;
}

/**
 * @import { Shader } from '../../platform/graphics/shader.js'
 * @import { StencilParameters } from '../../platform/graphics/stencil-parameters.js'
 * @import { Vec4 } from '../../core/math/vec4.js'
 */
/**
 * A render pass that implements rendering a quad with a shader, and exposes controls over the
 * render state. This is typically used as a base class for render passes that render a quad with
 * a shader, but can be used directly as well by specifying a shader.
 *
 * @ignore
 */
declare class RenderPassShaderQuad extends RenderPass {
    /**
     * @type {Shader|null}
     */
    _shader: Shader | null;
    /**
     * @type {QuadRender|null}
     */
    quadRender: QuadRender | null;
    /**
     * The cull mode to use when rendering the quad. Defaults to {@link CULLFACE_NONE}.
     *
     * @type {number}
     */
    cullMode: number;
    /**
     * The front face to use when rendering the quad. Defaults to {@link FRONTFACE_CCW}.
     *
     * @type {number}
     */
    frontFace: number;
    /**
     * A blend state to use when rendering the quad. Defaults to {@link BlendState.NOBLEND}.
     *
     * @type {BlendState}
     */
    blendState: BlendState;
    /**
     * A depth state to use when rendering the quad. Defaults to {@link DepthState.NODEPTH}.
     *
     * @type {DepthState}
     */
    depthState: DepthState;
    /**
     * Stencil parameters for front faces to use when rendering the quad. Defaults to null.
     *
     * @type {StencilParameters|null}
     */
    stencilFront: StencilParameters | null;
    /**
     * Stencil parameters for back faces to use when rendering the quad. Defaults to null.
     *
     * @type {StencilParameters|null}
     */
    stencilBack: StencilParameters | null;
    /**
     * Optional viewport rectangle (x, y, width, height). If set, the quad renders only to this
     * region and the original viewport is restored after rendering.
     *
     * @type {Vec4|undefined}
     */
    viewport: Vec4 | undefined;
    /**
     * Optional scissor rectangle (x, y, width, height). If set, pixels outside this region are
     * discarded. Only used when viewport is also set. Defaults to the viewport if not specified.
     *
     * @type {Vec4|undefined}
     */
    scissor: Vec4 | undefined;
    /**
     * Sets the shader used to render the quad.
     *
     * @type {Shader}
     * @ignore
     */
    set shader(shader: Shader);
    get shader(): Shader;
}

/**
 * @import { Texture } from '../../platform/graphics/texture.js';
 */
/**
 * Render pass implementation of the final post-processing composition.
 *
 * @category Graphics
 * @ignore
 */
declare class RenderPassCompose extends RenderPassShaderQuad {
    constructor(graphicsDevice: any);
    /**
     * @type {Texture|null}
     */
    sceneTexture: Texture | null;
    bloomIntensity: number;
    // Backing fields for the accessor pairs declared at the bottom of this class.
    _bloomTexture: any;
    _cocTexture: any;
    blurTexture: any;
    blurTextureUpscale: boolean;
    _ssaoTexture: any;
    _toneMapping: number;
    _gradingEnabled: boolean;
    // Color grading parameters (used when gradingEnabled is true).
    gradingSaturation: number;
    gradingContrast: number;
    gradingBrightness: number;
    gradingTint: Color;
    // True when the compose shader needs rebuilding — presumably set by the setters below; confirm.
    _shaderDirty: boolean;
    _vignetteEnabled: boolean;
    // Vignette parameters (used when vignetteEnabled is true).
    vignetteInner: number;
    vignetteOuter: number;
    vignetteCurvature: number;
    vignetteIntensity: number;
    vignetteColor: Color;
    _fringingEnabled: boolean;
    fringingIntensity: number;
    _colorEnhanceEnabled: boolean;
    // Color enhancement parameters (used when colorEnhanceEnabled is true).
    colorEnhanceShadows: number;
    colorEnhanceHighlights: number;
    colorEnhanceVibrance: number;
    colorEnhanceDehaze: number;
    colorEnhanceMidtones: number;
    _taaEnabled: boolean;
    _hdrScene: boolean;
    _sharpness: number;
    _gammaCorrection: number;
    /**
     * @type {Texture|null}
     */
    _colorLUT: Texture | null;
    colorLUTIntensity: number;
    // Key describing the current feature combination — presumably used to detect shader changes; confirm.
    _key: string;
    _debug: any;
    // Custom shader chunk overrides for the compose shader, keyed by chunk name.
    _customComposeChunks: Map<string, string>;
    // Scope ids and uniform value arrays for the compose shader, resolved at construction.
    sceneTextureId: any;
    bloomTextureId: any;
    cocTextureId: any;
    ssaoTextureId: any;
    blurTextureId: any;
    bloomIntensityId: any;
    bcsId: any;
    tintId: any;
    vignetterParamsId: any;
    vignetteColorId: any;
    fringingIntensityId: any;
    sceneTextureInvResId: any;
    sceneTextureInvResValue: Float32Array<ArrayBuffer>;
    sharpnessId: any;
    colorLUTId: any;
    colorLUTParams: Float32Array<ArrayBuffer>;
    colorLUTParamsId: any;
    colorEnhanceParamsId: any;
    colorEnhanceMidtonesId: any;
    set debug(value: any);
    get debug(): any;
    set colorLUT(value: Texture);
    get colorLUT(): Texture;
    set bloomTexture(value: any);
    get bloomTexture(): any;
    set cocTexture(value: any);
    get cocTexture(): any;
    set ssaoTexture(value: any);
    get ssaoTexture(): any;
    set taaEnabled(value: boolean);
    get taaEnabled(): boolean;
    set gradingEnabled(value: boolean);
    get gradingEnabled(): boolean;
    set vignetteEnabled(value: boolean);
    get vignetteEnabled(): boolean;
    set fringingEnabled(value: boolean);
    get fringingEnabled(): boolean;
    set colorEnhanceEnabled(value: boolean);
    get colorEnhanceEnabled(): boolean;
    set toneMapping(value: number);
    get toneMapping(): number;
    set sharpness(value: number);
    get sharpness(): number;
    get isSharpnessEnabled(): boolean;
    set hdrScene(value: boolean);
    get hdrScene(): boolean;
}

/**
 * Render pass implementation of a depth-aware bilateral blur filter.
 *
 * @category Graphics
 * @ignore
 */
declare class RenderPassDepthAwareBlur extends RenderPassShaderQuad {
    // `horizontal` selects the blur direction of this pass — presumably; confirm.
    constructor(device: any, sourceTexture: any, cameraComponent: any, horizontal: any);
    // The texture being blurred.
    sourceTexture: any;
    // Scope ids and uniform value storage for the blur shader.
    sourceTextureId: ScopeId;
    sourceInvResolutionId: ScopeId;
    sourceInvResolutionValue: Float32Array<ArrayBuffer>;
    filterSizeId: ScopeId;
}

/**
 * Render pass implementation of a Circle of Confusion texture generation, used by Depth of Field.
 * This pass generates a CoC texture based on the scene's depth buffer, and focus range and distance
 * parameters. The CoC texture stores far and near CoC values in the red and green channels.
 *
 * @category Graphics
 * @ignore
 */
declare class RenderPassCoC extends RenderPassShaderQuad {
    constructor(device: any, cameraComponent: any, nearBlur: any);
    // Focus distance and range parameters driving the CoC computation (see class description).
    focusDistance: any;
    focusRange: any;
    cameraComponent: any;
    // Scope ids and uniform value storage for the CoC shader.
    paramsId: any;
    paramsValue: Float32Array<ArrayBuffer>;
    cameraParams: Float32Array<ArrayBuffer>;
    cameraParamsId: any;
}

/**
 * @import { GraphicsDevice } from '../../platform/graphics/graphics-device.js'
 * @import { Texture } from '../../platform/graphics/texture.js'
 */
/**
 * Render pass implementation of a down-sample filter.
 *
 * @category Graphics
 * @ignore
 */
declare class RenderPassDownsample extends RenderPassShaderQuad {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {Texture} sourceTexture - The source texture to downsample.
     * @param {object} [options] - The options for the render pass.
     * @param {boolean} [options.boxFilter] - Whether to use a box filter for downsampling.
     * @param {Texture|null} [options.premultiplyTexture] - The texture to premultiply the source texture
     * with. Only supported when boxFilter is true.
     * @param {string} [options.premultiplySrcChannel] - The source channel to premultiply.
     * @param {boolean} [options.removeInvalid] - Whether to remove invalid pixels from the output.
     */
    constructor(device: GraphicsDevice, sourceTexture: Texture, options?: {
        boxFilter?: boolean;
        premultiplyTexture?: Texture | null;
        premultiplySrcChannel?: string;
        removeInvalid?: boolean;
    });
    // The texture being downsampled (see setSourceTexture).
    sourceTexture: Texture;
    // Optional texture the source is premultiplied with (boxFilter mode only — see constructor).
    premultiplyTexture: Texture;
    // Scope ids and uniform value storage for the downsample shader.
    sourceTextureId: ScopeId;
    premultiplyTextureId: ScopeId;
    sourceInvResolutionId: ScopeId;
    sourceInvResolutionValue: Float32Array<ArrayBuffer>;
    // Replaces the source texture after construction.
    setSourceTexture(value: any): void;
    // Backing field written by setSourceTexture — presumably; confirm.
    _sourceTexture: any;
}

/**
 * @import { GraphicsDevice } from '../../platform/graphics/graphics-device.js'
 * @import { Texture } from '../../platform/graphics/texture.js'
 */
/**
 * Render pass implementation of a down-sample filter used by the Depth of Field pass. Based on
 * a texel of the CoC texture, it generates blurred version of the near or far texture.
 *
 * @category Graphics
 * @ignore
 */
declare class RenderPassDofBlur extends RenderPassShaderQuad {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {Texture|null} nearTexture - The near texture to blur. Skip near blur if the texture is null.
     * @param {Texture} farTexture - The far texture to blur.
     * @param {Texture} cocTexture - The CoC texture.
     */
    constructor(device: GraphicsDevice, nearTexture: Texture | null, farTexture: Texture, cocTexture: Texture);
    // Blur radii for the near and far fields.
    blurRadiusNear: number;
    blurRadiusFar: number;
    // Backing fields for the blurRings / blurRingPoints accessors below.
    _blurRings: number;
    _blurRingPoints: number;
    nearTexture: Texture;
    farTexture: Texture;
    cocTexture: Texture;
    // Scope ids for the blur shader uniforms.
    kernelId: ScopeId;
    kernelCountId: ScopeId;
    blurRadiusNearId: ScopeId;
    blurRadiusFarId: ScopeId;
    nearTextureId: ScopeId;
    farTextureId: ScopeId;
    cocTextureId: ScopeId;
    set blurRings(value: number);
    get blurRings(): number;
    set blurRingPoints(value: number);
    get blurRingPoints(): number;
    // (Re)builds the blur shader — presumably also regenerates `kernel` below; confirm.
    createShader(): void;
    // Sample kernel data uploaded via kernelId.
    kernel: Float32Array<ArrayBuffer>;
}

/**
 * @import { GraphicsDevice } from '../../platform/graphics/graphics-device.js'
 * @import { CameraComponent } from '../../framework/components/camera/component.js'
 */
/**
 * Render pass implementation of Depth of Field effect.
 *
 * @category Graphics
 * @ignore
 */
declare class FramePassDof extends FramePass {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {CameraComponent} cameraComponent - The camera component.
     * @param {Texture} sceneTexture - The full resolution texture.
     * @param {Texture} sceneTextureHalf - The half resolution texture.
     * @param {boolean} highQuality - Whether to use high quality setup.
     * @param {boolean} nearBlur - Whether to apply near blur.
     */
    constructor(device: GraphicsDevice, cameraComponent: CameraComponent, sceneTexture: Texture, sceneTextureHalf: Texture, highQuality: boolean, nearBlur: boolean);
    // DOF parameters forwarded to the internal CoC and blur passes — units unverified; confirm.
    focusDistance: number;
    focusRange: number;
    blurRadius: number;
    blurRings: number;
    blurRingPoints: number;
    highQuality: boolean;
    /** @type {Texture|null} */
    cocTexture: Texture | null;
    /** @type {Texture|null} */
    blurTexture: Texture | null;
    /** @type {RenderPassCoC|null} */
    cocPass: RenderPassCoC | null;
    /** @type {RenderPassDownsample|null} */
    farPass: RenderPassDownsample | null;
    /** @type {RenderPassDofBlur|null} */
    blurPass: RenderPassDofBlur | null;
    // Render targets backing the CoC, far and blur passes.
    cocRT: RenderTarget;
    farRt: RenderTarget;
    blurRt: RenderTarget;
    // Internal lifecycle/setup helpers for the nested passes and their render targets. @ignore
    destroyRenderPasses(): void;
    destroyRT(rt: any): void;
    setupCocPass(device: any, cameraComponent: any, sourceTexture: any, nearBlur: any): RenderPassCoC;
    setupFarPass(device: any, sourceTexture: any, scale: any): RenderPassDownsample;
    setupBlurPass(device: any, nearTexture: any, nearBlur: any, scale: any): RenderPassDofBlur;
    createTexture(name: any, format: any): Texture;
    createRenderTarget(name: any, format: any): RenderTarget;
}

/**
 * Render pass implementation of a up-sample filter.
 *
 * @category Graphics
 * @ignore
 */
declare class RenderPassUpsample extends RenderPassShaderQuad {
    constructor(device: any, sourceTexture: any);
    // The texture being upsampled.
    sourceTexture: any;
    // Scope ids and uniform value storage for the upsample shader.
    sourceTextureId: any;
    sourceInvResolutionId: any;
    sourceInvResolutionValue: Float32Array<ArrayBuffer>;
}

/**
 * @import { GraphicsDevice } from '../../platform/graphics/graphics-device.js'
 */
/**
 * Render pass implementation of HDR bloom effect.
 *
 * @category Graphics
 * @ignore
 */
declare class FramePassBloom extends FramePass {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {Texture} sourceTexture - The source texture, usually at half the resolution of the
     * render target getting blurred.
     * @param {number} format - The texture format.
     */
    constructor(device: GraphicsDevice, sourceTexture: Texture, format: number);
    // The resulting bloom texture.
    bloomTexture: Texture;
    // Blur level — presumably controls the number of down/up-sample passes; confirm.
    blurLevel: number;
    bloomRenderTarget: RenderTarget;
    // Format of the intermediate textures (see constructor).
    textureFormat: number;
    // Render targets for the mip chain of down/up-sample passes.
    renderTargets: any[];
    _sourceTexture: Texture;
    // Internal helpers managing the mip-chain render targets and passes. @ignore
    destroyRenderTargets(startIndex?: number): void;
    destroyRenderPasses(): void;
    createRenderTarget(index: any): RenderTarget;
    createRenderTargets(count: any): void;
    calcMipLevels(width: any, height: any, minSize: any): number;
    createRenderPasses(numPasses: any): void;
}

/**
 * A render pass which typically executes before the rendering of the main scene, and renders data
 * that is required for the main rendering pass (and also in following passes) into separate render
 * targets. This can include depth, normals, velocity, etc, used by TAA, motion blur, SSAO, etc.
 *
 * @category Graphics
 * @ignore
 */
declare class RenderPassPrepass extends RenderPass {
    constructor(device: any, scene: any, renderer: any, camera: any, options: any);
    /** @type {BindGroup[]} */
    viewBindGroups: BindGroup[];
    /** @type {Texture} */
    linearDepthTexture: Texture;
    /** @type {Color} */
    linearDepthClearValue: Color;
    scene: any;
    renderer: any;
    camera: any;
    // Creates/configures the pre-pass render target from the options. @ignore
    setupRenderTarget(options: any): void;
    // Format chosen for the linear depth texture — presumably set by setupRenderTarget; confirm.
    linearDepthFormat: number;
}

/**
 * Render pass implementation of Screen-Space Ambient Occlusion (SSAO) based on the non-linear depth
 * buffer.
 *
 * @category Graphics
 * @ignore
 */
declare class RenderPassSsao extends RenderPassShaderQuad {
    constructor(device: any, sourceTexture: any, cameraComponent: any, blurEnabled: any);
    /**
     * The filter radius.
     *
     * @type {number}
     */
    radius: number;
    /**
     * The intensity.
     *
     * @type {number}
     */
    intensity: number;
    /**
     * The power controlling the falloff curve.
     *
     * @type {number}
     */
    power: number;
    /**
     * The number of samples to take.
     *
     * @type {number}
     */
    sampleCount: number;
    /**
     * The minimum angle in degrees that creates an occlusion. Helps to reduce fake occlusions due
     * to low geometry tessellation.
     *
     * @type {number}
     */
    minAngle: number;
    /**
     * Enable randomization of the sample pattern. Useful when TAA is used to remove the noise,
     * instead of blurring.
     */
    randomize: boolean;
    /**
     * The texture containing the occlusion information in the red channel.
     *
     * @type {Texture}
     * @readonly
     */
    readonly ssaoTexture: Texture;
    /** @type {number} */
    _scale: number;
    // Blue-noise source — presumably used to randomize the sample pattern; confirm in implementation.
    _blueNoise: BlueNoise;
    // The source texture the occlusion is computed from.
    sourceTexture: any;
    // The camera component this pass renders for.
    cameraComponent: any;
    // Uniform handles for the SSAO texture and its inverse size — presumably ScopeIds; confirm.
    ssaoTextureId: any;
    ssaoTextureSizeInvId: any;
    /**
     * The scale multiplier for the render target size.
     *
     * @type {number}
     */
    set scale(value: number);
    get scale(): number;
    // Creates the render target storing the SSAO result.
    createRenderTarget(name: any): RenderTarget;
}

/**
 * A render pass implementation of Temporal Anti-Aliasing (TAA).
 *
 * @category Graphics
 * @ignore
 */
declare class RenderPassTAA extends RenderPassShaderQuad {
    constructor(device: any, sourceTexture: any, cameraComponent: any);
    /**
     * The index of the history texture to render to.
     *
     * @type {number}
     */
    historyIndex: number;
    /**
     * @type {Texture}
     */
    historyTexture: Texture;
    /**
     * @type {Texture[]}
     */
    historyTextures: Texture[];
    /**
     * @type {RenderTarget[]}
     */
    historyRenderTargets: RenderTarget[];
    // The texture being anti-aliased.
    sourceTexture: any;
    // The camera component this pass renders for.
    cameraComponent: any;
    // Uniform handles — presumably ScopeIds resolved from the device; confirm in implementation.
    sourceTextureId: any;
    textureSizeId: any;
    // Value uploaded to the texture-size uniform.
    textureSize: Float32Array<ArrayBuffer>;
    historyTextureId: any;
    viewProjPrevId: any;
    viewProjInvId: any;
    jittersId: any;
    // Value uploaded to the camera-params uniform.
    cameraParams: Float32Array<ArrayBuffer>;
    cameraParamsId: any;
    // Initializes the pass resources.
    setup(): void;
    // Per-frame update; returns the texture holding the anti-aliased result — confirm which
    // history texture is returned in the implementation.
    update(): Texture;
}

/**
 * @import { CameraComponent } from '../../framework/components/camera/component.js'
 * @import { LayerComposition } from '../composition/layer-composition.js'
 * @import { Layer } from '../layer.js'
 * @import { Renderer } from './renderer.js'
 * @import { Scene } from '../scene.js'
 */
/**
 * A render pass used to render a set of layers using a camera.
 *
 * @ignore
 */
declare class RenderPassForward extends RenderPass {
    constructor(device: any, layerComposition: any, scene: any, renderer: any);
    /**
     * @type {LayerComposition}
     */
    layerComposition: LayerComposition;
    /**
     * @type {Scene}
     */
    scene: Scene;
    /**
     * @type {Renderer}
     */
    renderer: Renderer;
    /**
     * @type {RenderAction[]}
     */
    renderActions: RenderAction[];
    /**
     * The gamma correction setting for the render pass. If not set, the setting from the camera
     * is used. This allows render passes to override the camera's gamma correction during the
     * render pass.
     *
     * For HDR pipelines, scene render passes typically set this to {@link GAMMA_NONE} to output
     * linear values to an HDR render target, while subsequent passes (like UI) leave it undefined
     * to use the camera's default {@link GAMMA_SRGB} for correct display output.
     *
     * Can be:
     * - {@link GAMMA_NONE}
     * - {@link GAMMA_SRGB}
     * - `undefined` (uses camera setting)
     *
     * @type {number|undefined}
     */
    gammaCorrection: number | undefined;
    /**
     * The tone mapping setting for the render pass. If not set, the setting from the camera is
     * used.
     *
     * @type {number|undefined}
     */
    toneMapping: number | undefined;
    /**
     * If true, do not clear the depth buffer before rendering, as it was already primed by a depth
     * pre-pass.
     *
     * @type {boolean}
     */
    noDepthClear: boolean;
    // True if this pass renders anything — derived from its render actions.
    get rendersAnything(): boolean;
    // Adds a render action to the list executed by this pass.
    addRenderAction(renderAction: any): void;
    /**
     * Adds a layer to be rendered by this render pass.
     *
     * @param {CameraComponent} cameraComponent - The camera component that is used to render the
     * layers.
     * @param {Layer} layer - The layer to be added.
     * @param {boolean} transparent - True if the layer is transparent.
     * @param {boolean} autoClears - True if the render target should be cleared based on the camera
     * and layer clear flags. Defaults to true.
     */
    addLayer(cameraComponent: CameraComponent, layer: Layer, transparent: boolean, autoClears?: boolean): void;
    /**
     * Adds layers to be rendered by this render pass, starting from the given index of the layer
     * in the layer composition, till the end of the layer list, or till the last layer with the
     * given id and transparency is reached (inclusive). Note that only layers that are rendered by
     * the specified camera are added.
     *
     * @param {LayerComposition} composition - The layer composition containing the layers to be
     * added, typically the scene layer composition.
     * @param {CameraComponent} cameraComponent - The camera component that is used to render the
     * layers.
     * @param {number} startIndex - The index of the first layer to be considered for adding.
     * @param {boolean} firstLayerClears - True if the first layer added should clear the render
     * target.
     * @param {number} [lastLayerId] - The id of the last layer to be added. If not specified, all
     * layers till the end of the layer list are added.
     * @param {boolean} [lastLayerIsTransparent] - True if the last layer to be added is transparent.
     * Defaults to true.
     * @returns {number} Returns the index of last layer added.
     */
    addLayers(composition: LayerComposition, cameraComponent: CameraComponent, startIndex: number, firstLayerClears: boolean, lastLayerId?: number, lastLayerIsTransparent?: boolean): number;
    // Camera update executed before the passes render — confirm details in implementation.
    updateCameraBeforePasses(): void;
    // Updates directional light shadows — confirm scope in implementation.
    updateDirectionalShadows(): void;
    // Updates the clear settings of this pass.
    updateClears(): void;
    /**
     * @param {RenderAction} renderAction - The render action.
     * @param {boolean} firstRenderAction - True if this is the first render action in the render pass.
     */
    renderRenderAction(renderAction: RenderAction, firstRenderAction: boolean): void;
    // Debug logging for this pass (device and pass index).
    log(device: any, index: any): void;
}

/**
 * Render pass implementation of a common camera frame rendering with integrated post-processing
 * effects.
 *
 * @category Graphics
 * @ignore
 */
declare class FramePassCameraFrame extends FramePass {
    constructor(app: any, cameraFrame: any, cameraComponent: any, options?: {});
    app: any;
    // Individual passes making up the camera frame; created by setupRenderPasses based on options.
    prePass: any;
    scenePass: any;
    composePass: any;
    bloomPass: any;
    ssaoPass: any;
    taaPass: any;
    scenePassHalf: any;
    dofPass: any;
    // Backing field for renderTargetScale.
    _renderTargetScale: number;
    /**
     * True if the render pass needs to be re-created because layers have been added or removed.
     *
     * @type {boolean}
     * @ignore
     */
    layersDirty: boolean;
    /**
     * The camera frame that this render pass belongs to.
     *
     * @type {CameraFrame}
     */
    cameraFrame: CameraFrame;
    /**
     * @type {RenderTarget|null}
     * @private
     */
    private rt;
    cameraComponent: any;
    // The sanitized options this pass chain was built with.
    options: any;
    // Releases created resources so the pass chain can be rebuilt.
    reset(): void;
    sceneTexture: Texture;
    sceneTextureHalf: Texture;
    rtHalf: RenderTarget;
    scenePassTransparent: RenderPassForward;
    colorGrabPass: FramePassColorGrab;
    afterPass: RenderPassForward;
    // Presumably validates the supplied options and fills in defaults — confirm in implementation.
    sanitizeOptions(options: any): any;
    set renderTargetScale(value: number);
    get renderTargetScale(): number;
    // Returns true when the supplied options require the pass chain to be re-created.
    needsReset(options: any): boolean;
    // Updates the pass chain based on the supplied options.
    update(options: any): void;
    // Creates a render target with the given properties.
    createRenderTarget(name: any, depth: any, stencil: any, samples: any, flipY: any): RenderTarget;
    // Creates all render passes based on the options.
    setupRenderPasses(options: any): void;
    // The HDR pixel format selected for scene rendering.
    hdrFormat: number;
    _bloomEnabled: boolean;
    _sceneHalfEnabled: any;
    // Options used for the scene render target (source to resize from, and scale factors).
    sceneOptions: {
        resizeSource: any;
        scaleX: number;
        scaleY: number;
    };
    /**
     * Scan all RenderPassForward instances in the pass chain and mark the first / last
     * render action per camera with firstCameraUse / lastCameraUse. This mirrors what
     * LayerComposition does for the non-CameraFrame path and ensures that beforePasses
     * collection and EVENT_PRERENDER / EVENT_POSTRENDER fire exactly once per camera.
     *
     * @private
     */
    private updateCameraUseFlags;
    // Collects the enabled passes into the final execution list.
    collectPasses(): any[];
    // Creates the individual passes via the setup* methods below.
    createPasses(options: any): void;
    setupScenePrepass(options: any): void;
    setupScenePassSettings(pass: any): void;
    setupScenePass(options: any): {
        lastAddedIndex: number;
        clearRenderTarget: boolean;
    };
    setupSsaoPass(options: any): void;
    setupSceneHalfPass(options: any, sourceTexture: any): void;
    setupBloomPass(options: any, inputTexture: any): void;
    setupDofPass(options: any, inputTexture: any, inputTextureHalf: any): void;
    setupTaaPass(options: any): Texture;
    setupComposePass(options: any): void;
    setupAfterPass(options: any, scenePassesInfo: any): void;
}
/**
 * @import { CameraFrame } from './camera-frame.js'
 */
/**
 * Options used to configure the FramePassCameraFrame. To modify these options, you must create
 * a new instance of the FramePassCameraFrame with the desired settings.
 *
 * @ignore
 */
declare class CameraFrameOptions {
    // Preferred frame-buffer formats, in order of preference (see Rendering.renderFormats).
    formats: any;
    // Whether the render target has a stencil buffer.
    stencil: boolean;
    // Number of MSAA samples for scene rendering.
    samples: number;
    // Whether a scene color map is generated.
    sceneColorMap: boolean;
    // Id and transparency of the last layer rendered before the color grab pass — confirm
    // exact semantics in the implementation.
    lastGrabLayerId: number;
    lastGrabLayerIsTransparent: boolean;
    // Id and transparency of the last layer considered part of the scene rendering.
    lastSceneLayerId: number;
    lastSceneLayerIsTransparent: boolean;
    // Whether TAA is enabled.
    taaEnabled: boolean;
    // Whether bloom is enabled.
    bloomEnabled: boolean;
    // The SSAO type (see the Ssao typedef: SSAOTYPE_NONE / SSAOTYPE_LIGHTING / SSAOTYPE_COMBINE).
    ssaoType: string;
    // Whether the SSAO result is blurred.
    ssaoBlurEnabled: boolean;
    // Whether the depth prepass is enabled.
    prepassEnabled: boolean;
    // Depth-of-field settings (see the Dof typedef).
    dofEnabled: boolean;
    dofNearBlur: boolean;
    dofHighQuality: boolean;
}

/**
 * Properties related to scene rendering, encompassing settings that control the rendering resolution,
 * pixel format, multi-sampling for anti-aliasing, tone-mapping and similar.
 */
type Rendering = {
    /**
     * - The preferred render formats of the frame buffer, in order of
     * preference. First format from this list that is supported by the hardware is used. When none of
     * the formats are supported, {@link PIXELFORMAT_RGBA8} is used, but this automatically disables
     * bloom effect, which requires HDR format. The list can contain the following formats:
     * {@link PIXELFORMAT_111110F}, {@link PIXELFORMAT_RGBA16F}, {@link PIXELFORMAT_RGBA32F} and
     * {@link PIXELFORMAT_RGBA8}. Typically the default option should be used, which prefers the
     * faster formats, but if higher dynamic range is needed, the list can be adjusted to prefer
     * higher precision formats.
     * Defaults to [{@link PIXELFORMAT_111110F}, {@link PIXELFORMAT_RGBA16F}, {@link PIXELFORMAT_RGBA32F}].
     */
    renderFormats: number[];
    /**
     * - Whether the render buffer has a stencil buffer. Defaults to false.
     */
    stencil: boolean;
    /**
     * - The scale of the render target, 0.1-1 range. This allows the
     * scene to be rendered to a lower resolution render target as an optimization. The post-processing
     * is also applied at this lower resolution. The image is then up-scaled to the full resolution and
     * any UI rendering that follows is applied at the full resolution. Defaults to 1 which represents
     * full resolution rendering.
     */
    renderTargetScale: number;
    /**
     * - The number of samples of the {@link RenderTarget} used for the scene
     * rendering, in 1-4 range. Value of 1 disables multisample anti-aliasing, other values enable
     * anti-aliasing. Typically set to 1 when TAA is used, even though both anti-aliasing options can be
     * used together at a higher cost. Defaults to 1.
     */
    samples: number;
    /**
     * - Whether rendering generates a scene color map. Defaults to false.
     */
    sceneColorMap: boolean;
    /**
     * - Whether rendering generates a scene depth map. Defaults to false.
     */
    sceneDepthMap: boolean;
    /**
     * - The tone mapping. Can be:
     *
     * - {@link TONEMAP_LINEAR}
     * - {@link TONEMAP_FILMIC}
     * - {@link TONEMAP_HEJL}
     * - {@link TONEMAP_ACES}
     * - {@link TONEMAP_ACES2}
     * - {@link TONEMAP_NEUTRAL}
     *
     * Defaults to {@link TONEMAP_LINEAR}.
     */
    toneMapping: number;
    /**
     * - The sharpening intensity, 0-1 range. This can be used to increase
     * the sharpness of the rendered image. Often used to counteract the blurriness of the TAA effect,
     * but also blurriness caused by rendering to a lower resolution render target by using
     * rendering.renderTargetScale property. Defaults to 0.
     */
    sharpness: number;
};
/**
 * Properties related to the Screen Space Ambient Occlusion (SSAO) effect, a postprocessing technique
 * that approximates ambient occlusion by calculating how exposed each point in the screen space is
 * to ambient light, enhancing depth perception and adding subtle shadowing in crevices and between
 * objects.
 */
type Ssao = {
    /**
     * - The type of the SSAO determines how it is applied in the rendering
     * process. Defaults to {@link SSAOTYPE_NONE}. Can be:
     *
     * - {@link SSAOTYPE_NONE}
     * - {@link SSAOTYPE_LIGHTING}
     * - {@link SSAOTYPE_COMBINE}
     */
    type: string;
    /**
     * - Whether the SSAO effect is blurred. Defaults to true.
     */
    blurEnabled: boolean;
    /**
     * - Whether the SSAO sampling is randomized. Useful when used together
     * with TAA instead of the blur effect. Defaults to false.
     */
    randomize: boolean;
    /**
     * - The intensity of the SSAO effect, 0-1 range. Defaults to 0.5.
     */
    intensity: number;
    /**
     * - The radius of the SSAO effect, 0-100 range. Defaults to 30.
     */
    radius: number;
    /**
     * - The number of samples of the SSAO effect, 1-64 range. Defaults to 12.
     */
    samples: number;
    /**
     * - The power of the SSAO effect, 0.1-10 range. Defaults to 6.
     */
    power: number;
    /**
     * - The minimum angle of the SSAO effect, 1-90 range. Defaults to 10.
     */
    minAngle: number;
    /**
     * - The scale of the SSAO effect, 0.5-1 range. Defaults to 1.
     */
    scale: number;
};
/**
 * Properties related to the HDR bloom effect, a postprocessing technique that simulates the natural
 * glow of bright light sources by spreading their intensity beyond their boundaries, creating a soft
 * and realistic blooming effect.
 */
type Bloom = {
    /**
     * - The intensity of the bloom effect, 0-0.1 range. Defaults to 0,
     * making it disabled.
     */
    intensity: number;
    /**
     * - The number of iterations for blurring the bloom effect, with each
     * level doubling the blur size. Once the blur size matches the dimensions of the render target,
     * further blur passes are skipped. Defaults to 16.
     */
    blurLevel: number;
};
/**
 * Properties related to the color grading effect, a postprocessing technique used to adjust the
 * visual tone of an image. This effect modifies brightness, contrast, saturation, and overall color
 * balance to achieve a specific aesthetic or mood.
 */
type Grading = {
    /**
     * - Whether grading is enabled. Defaults to false.
     */
    enabled: boolean;
    /**
     * - The brightness of the grading effect, 0-3 range. Defaults to 1.
     */
    brightness: number;
    /**
     * - The contrast of the grading effect, 0.5-1.5 range. Defaults to 1.
     */
    contrast: number;
    /**
     * - The saturation of the grading effect, 0-2 range. Defaults to 1.
     */
    saturation: number;
    /**
     * - The tint color of the grading effect. Defaults to white.
     */
    tint: Color;
};
/**
 * Properties related to the color lookup table (LUT) effect, a postprocessing technique used to
 * apply a color transformation to the image.
 */
type ColorLUT = {
    /**
     * - The LUT texture. This must be a 2D "horizontal strip" texture
     * representing an unwrapped 3D LUT (the same layout used by Unreal Engine). For an N×N×N 3D LUT,
     * the texture dimensions are N² × N pixels (width × height). For example, a 16×16×16 LUT uses a
     * 256×16 texture, and a 32×32×32 LUT uses a 1024×32 texture. The texture contains N horizontal
     * slices representing the blue channel, with each slice mapping red to the X-axis and green to
     * the Y-axis. Note that HALD LUTs (e.g. from ImageMagick) and Unity LUTs use different layouts
     * and are not compatible. Defaults to null.
     */
    texture: Texture | null;
    /**
     * - The intensity of the color LUT effect. Defaults to 1.
     */
    intensity: number;
};
/**
 * Properties related to the vignette effect, a postprocessing technique that darkens the image
 * edges, creating a gradual falloff in brightness from the center outward. The effect can be also
 * reversed, making the center of the image darker than the edges, by specifying the outer distance
 * smaller than the inner distance.
 */
type Vignette = {
    /**
     * - The intensity of the vignette effect, 0-1 range. Defaults to 0,
     * making it disabled.
     */
    intensity: number;
    /**
     * - The inner distance of the vignette effect measured from the center of
     * the screen, 0-3 range. This is where the vignette effect starts. Values larger than 1 represent
     * positions off screen, which allows more control. Defaults to 0.5, representing half the distance
     * from center.
     */
    inner: number;
    /**
     * - The outer distance of the vignette effect measured from the center of
     * the screen, 0-3 range. This is where the vignette reaches full intensity. Values larger than 1
     * represent positions off screen, which allows more control. Defaults to 1, representing the full
     * screen.
     */
    outer: number;
    /**
     * - The curvature of the vignette effect, 0.01-10 range. The vignette
     * is rendered using a rectangle with rounded corners, and this parameter controls the curvature of
     * the corners. Value of 1 represents a circle. Smaller values make the corners more square, while
     * larger values make them more rounded. Defaults to 0.5.
     */
    curvature: number;
    /**
     * - The color of the vignette effect. Defaults to black.
     */
    color: Color;
};
/**
 * Properties related to the fringing effect, a chromatic aberration phenomenon where the red, green,
 * and blue color channels diverge increasingly with greater distance from the center of the screen.
 */
type Fringing = {
    /**
     * - The intensity of the fringing effect, in the 0-100 range. Defaults
     * to 0, making it disabled.
     */
    intensity: number;
};
/**
 * Properties related to the color enhancement effect, a postprocessing technique that provides
 * HDR-aware adjustments for shadows, highlights, vibrance, and dehaze. Shadows and highlights allow
 * selective adjustment of dark and bright areas of the image; vibrance is a smart saturation control
 * that boosts less-saturated colors more than already-saturated ones; and dehaze removes atmospheric
 * haze to increase clarity and contrast.
 */
type ColorEnhance = {
    /**
     * - Whether color enhancement is enabled. Defaults to false.
     */
    enabled: boolean;
    /**
     * - The shadow adjustment, -3 to 3 range. Uses an exponential curve where
     * -3 gives 0.125x, 0 gives 1x, and +3 gives 8x brightness on dark areas. Defaults to 0.
     */
    shadows: number;
    /**
     * - The highlight adjustment, -3 to 3 range. Uses an exponential curve
     * where -3 gives 0.125x, 0 gives 1x, and +3 gives 8x brightness on bright areas. Defaults to 0.
     */
    highlights: number;
    /**
     * - The vibrance (smart saturation), -1 to 1 range. Positive values boost
     * saturation of less-saturated colors more than already-saturated ones. Negative values desaturate.
     * Defaults to 0.
     */
    vibrance: number;
    /**
     * - The midtone adjustment, -1 to 1 range. Positive values brighten
     * midtones, negative values darken midtones, with shadows and highlights more strongly preserved
     * than by a linear exposure change. Defaults to 0.
     */
    midtones: number;
    /**
     * - The dehaze adjustment, -1 to 1 range. Positive values remove atmospheric
     * haze, increasing clarity and contrast. Negative values add a haze effect. Defaults to 0.
     */
    dehaze: number;
};
/**
 * Properties related to temporal anti-aliasing (TAA), which is a technique used to reduce aliasing
 * in the rendered image by blending multiple frames together over time.
 */
type Taa = {
    /**
     * - Whether TAA is enabled. Defaults to false.
     */
    enabled: boolean;
    /**
     * - The intensity of the camera jitter, 0-1 range. The larger the value,
     * the more jitter is applied to the camera, making the anti-aliasing effect more pronounced. This
     * also makes the image more blurry, and the rendering.sharpness parameter can be used to
     * counteract it. Defaults to 1.
     */
    jitter: number;
};
/**
 * Properties related to Depth of Field (DoF), a technique used to simulate the optical effect where
 * objects at certain distances appear sharp while others are blurred, enhancing the perception of
 * focus and depth in the rendered scene.
 */
type Dof = {
    /**
     * - Whether DoF is enabled. Defaults to false.
     */
    enabled: boolean;
    /**
     * - Whether the near blur is enabled. Defaults to false.
     */
    nearBlur: boolean;
    /**
     * - The distance at which the focus is set. Defaults to 100.
     */
    focusDistance: number;
    /**
     * - The range around the focus distance where the focus is sharp.
     * Defaults to 10.
     */
    focusRange: number;
    /**
     * - The radius of the blur effect, typically 2-10 range. Defaults to 3.
     */
    blurRadius: number;
    /**
     * - The number of rings in the blur effect, typically 3-8 range. Defaults
     * to 4.
     */
    blurRings: number;
    /**
     * - The number of points in each ring of the blur effect, typically
     * 3-8 range. Defaults to 5.
     */
    blurRingPoints: number;
    /**
     * - Whether the high quality implementation is used. This will have
     * a higher performance cost, but will produce better quality results. Defaults to true.
     */
    highQuality: boolean;
};
/**
 * @import { AppBase } from '../../framework/app-base.js'
 * @import { CameraComponent } from '../../framework/components/camera/component.js'
 * @import { Texture } from '../../platform/graphics/texture.js'
 */
/**
 * @typedef {Object} Rendering
 * Properties related to scene rendering, encompassing settings that control the rendering resolution,
 * pixel format, multi-sampling for anti-aliasing, tone-mapping and similar.
 * @property {number[]} renderFormats - The preferred render formats of the frame buffer, in order of
 * preference. First format from this list that is supported by the hardware is used. When none of
 * the formats are supported, {@link PIXELFORMAT_RGBA8} is used, but this automatically disables
 * bloom effect, which requires HDR format. The list can contain the following formats:
 * {@link PIXELFORMAT_111110F}, {@link PIXELFORMAT_RGBA16F}, {@link PIXELFORMAT_RGBA32F} and {@link
 * PIXELFORMAT_RGBA8}. Typically the default option should be used, which prefers the faster formats,
 * but if higher dynamic range is needed, the list can be adjusted to prefer higher precision formats.
 * Defaults to [{@link PIXELFORMAT_111110F}, {@link PIXELFORMAT_RGBA16F}, {@link PIXELFORMAT_RGBA32F}].
 * @property {boolean} stencil - Whether the render buffer has a stencil buffer. Defaults to false.
 * @property {number} renderTargetScale - The scale of the render target, 0.1-1 range. This allows the
 * scene to be rendered to a lower resolution render target as an optimization. The post-processing
 * is also applied at this lower resolution. The image is then up-scaled to the full resolution and
 * any UI rendering that follows is applied at the full resolution. Defaults to 1 which represents
 * full resolution rendering.
 * @property {number} samples - The number of samples of the {@link RenderTarget} used for the scene
 * rendering, in 1-4 range. Value of 1 disables multisample anti-aliasing, other values enable
 * anti-aliasing. Typically set to 1 when TAA is used, even though both anti-aliasing options can be
 * used together at a higher cost. Defaults to 1.
 * @property {boolean} sceneColorMap - Whether rendering generates a scene color map. Defaults to false.
 * @property {boolean} sceneDepthMap - Whether rendering generates a scene depth map. Defaults to false.
 * @property {number} toneMapping - The tone mapping. Can be:
 *
 * - {@link TONEMAP_LINEAR}
 * - {@link TONEMAP_FILMIC}
 * - {@link TONEMAP_HEJL}
 * - {@link TONEMAP_ACES}
 * - {@link TONEMAP_ACES2}
 * - {@link TONEMAP_NEUTRAL}
 *
 * Defaults to {@link TONEMAP_LINEAR}.
 * @property {number} sharpness - The sharpening intensity, 0-1 range. This can be used to increase
 * the sharpness of the rendered image. Often used to counteract the blurriness of the TAA effect,
 * but also blurriness caused by rendering to a lower resolution render target by using
 * rendering.renderTargetScale property. Defaults to 0.
 */
/**
 * @typedef {Object} Ssao
 * Properties related to the Screen Space Ambient Occlusion (SSAO) effect, a postprocessing technique
 * that approximates ambient occlusion by calculating how exposed each point in the screen space is
 * to ambient light, enhancing depth perception and adding subtle shadowing in crevices and between
 * objects.
 * @property {string} type - The type of the SSAO determines how it is applied in the rendering
 * process. Defaults to {@link SSAOTYPE_NONE}. Can be:
 *
 * - {@link SSAOTYPE_NONE}
 * - {@link SSAOTYPE_LIGHTING}
 * - {@link SSAOTYPE_COMBINE}
 *
 * @property {boolean} blurEnabled - Whether the SSAO effect is blurred. Defaults to true.
 * @property {boolean} randomize - Whether the SSAO sampling is randomized. Useful when used instead
 * of blur effect together with TAA. Defaults to false.
 * @property {number} intensity - The intensity of the SSAO effect, 0-1 range. Defaults to 0.5.
 * @property {number} radius - The radius of the SSAO effect, 0-100 range. Defaults to 30.
 * @property {number} samples - The number of samples of the SSAO effect, 1-64 range. Defaults to 12.
 * @property {number} power - The power of the SSAO effect, 0.1-10 range. Defaults to 6.
 * @property {number} minAngle - The minimum angle of the SSAO effect, 1-90 range. Defaults to 10.
 * @property {number} scale - The scale of the SSAO effect, 0.5-1 range. Defaults to 1.
 */
/**
 * @typedef {Object} Bloom
 * Properties related to the HDR bloom effect, a postprocessing technique that simulates the natural
 * glow of bright light sources by spreading their intensity beyond their boundaries, creating a soft
 * and realistic blooming effect.
 * @property {number} intensity - The intensity of the bloom effect, 0-0.1 range. Defaults to 0,
 * making it disabled.
 * @property {number} blurLevel - The number of iterations for blurring the bloom effect, with each
 * level doubling the blur size. Once the blur size matches the dimensions of the render target,
 * further blur passes are skipped. The default value is 16.
 */
/**
 * @typedef {Object} Grading
 * Properties related to the color grading effect, a postprocessing technique used to adjust the
 * visual tone of an image. This effect modifies brightness, contrast, saturation, and overall color
 * balance to achieve a specific aesthetic or mood.
 * @property {boolean} enabled - Whether grading is enabled. Defaults to false.
 * @property {number} brightness - The brightness of the grading effect, 0-3 range. Defaults to 1.
 * @property {number} contrast - The contrast of the grading effect, 0.5-1.5 range. Defaults to 1.
 * @property {number} saturation - The saturation of the grading effect, 0-2 range. Defaults to 1.
 * @property {Color} tint - The tint color of the grading effect. Defaults to white.
 */
/**
 * @typedef {Object} ColorLUT
 * Properties related to the color lookup table (LUT) effect, a postprocessing technique used to
 * apply a color transformation to the image.
 * @property {Texture|null} texture - The LUT texture. This must be a 2D "horizontal strip" texture
 * representing an unwrapped 3D LUT (the same format used by Unreal Engine). For an N×N×N 3D LUT,
 * the texture dimensions are N² × N pixels (width × height). For example, a 16×16×16 LUT uses a
 * 256×16 texture, and a 32×32×32 LUT uses a 1024×32 texture. The texture contains N horizontal
 * slices representing the blue channel, with each slice mapping red to the X-axis and green to
 * the Y-axis. Note that HALD LUTs (e.g. from ImageMagick) and Unity LUTs use different layouts
 * and are not compatible. Defaults to null.
 * @property {number} intensity - The intensity of the color LUT effect. Defaults to 1.
 */
/**
 * @typedef {Object} Vignette
 * Properties related to the vignette effect, a postprocessing technique that darkens the image
 * edges, creating a gradual falloff in brightness from the center outward. The effect can also be
 * reversed, making the center of the image darker than the edges, by specifying the outer distance
 * smaller than the inner distance.
 * @property {number} intensity - The intensity of the vignette effect, 0-1 range. Defaults to 0,
 * making it disabled.
 * @property {number} inner - The inner distance of the vignette effect measured from the center of
 * the screen, 0-3 range. This is where the vignette effect starts. Value larger than 1 represents
 * the value off screen, which allows more control. Defaults to 0.5, representing half the distance
 * from center.
 * @property {number} outer - The outer distance of the vignette effect measured from the center of
 * the screen, 0-3 range. This is where the vignette reaches full intensity. Value larger than 1
 * represents the value off screen, which allows more control. Defaults to 1, representing the full
 * screen.
 * @property {number} curvature - The curvature of the vignette effect, 0.01-10 range. The vignette
 * is rendered using a rectangle with rounded corners, and this parameter controls the curvature of
 * the corners. Value of 1 represents a circle. Smaller values make the corners more square, while
 * larger values make them more rounded. Defaults to 0.5.
 * @property {Color} color - The color of the vignette effect. Defaults to black.
 */
/**
 * @typedef {Object} Fringing
 * Properties related to the fringing effect, a chromatic aberration phenomenon where the red, green,
 * and blue color channels diverge increasingly with greater distance from the center of the screen.
 * @property {number} intensity - The intensity of the fringing effect, 0-100 range. Defaults to 0,
 * making it disabled.
 */
/**
 * @typedef {Object} ColorEnhance
 * Properties related to the color enhancement effect, a postprocessing technique that provides
 * HDR-aware adjustments for shadows, highlights, vibrance, and dehaze. Shadows and highlights allow
 * selective adjustment of dark and bright areas of the image, vibrance is a smart saturation
 * that boosts less-saturated colors more than already-saturated ones, and dehaze removes atmospheric
 * haze to increase clarity and contrast.
 * @property {boolean} enabled - Whether color enhancement is enabled. Defaults to false.
 * @property {number} shadows - The shadow adjustment, -3 to 3 range. Uses an exponential curve where
 * -3 gives 0.125x, 0 gives 1x, and +3 gives 8x brightness on dark areas. Defaults to 0.
 * @property {number} highlights - The highlight adjustment, -3 to 3 range. Uses an exponential curve
 * where -3 gives 0.125x, 0 gives 1x, and +3 gives 8x brightness on bright areas. Defaults to 0.
 * @property {number} vibrance - The vibrance (smart saturation), -1 to 1 range. Positive values boost
 * saturation of less-saturated colors more than already-saturated ones. Negative values desaturate.
 * Defaults to 0.
 * @property {number} midtones - The midtone adjustment, -1 to 1 range. Positive values brighten
 * midtones, negative values darken midtones, with shadows and highlights more strongly preserved
 * than by a linear exposure change. Defaults to 0.
 * @property {number} dehaze - The dehaze adjustment, -1 to 1 range. Positive values remove atmospheric
 * haze, increasing clarity and contrast. Negative values add a haze effect. Defaults to 0.
 */
/**
 * @typedef {Object} Taa
 * Properties related to temporal anti-aliasing (TAA), which is a technique used to reduce aliasing
 * in the rendered image by blending multiple frames together over time.
 * @property {boolean} enabled - Whether TAA is enabled. Defaults to false.
 * @property {number} jitter - The intensity of the camera jitter, 0-1 range. The larger the value,
 * the more jitter is applied to the camera, making the anti-aliasing effect more pronounced. This
 * also makes the image more blurry, and rendering.sharpness parameter can be used to counteract.
 * Defaults to 1.
 */
/**
 * @typedef {Object} Dof
 * Properties related to Depth of Field (DOF), a technique used to simulate the optical effect where
 * objects at certain distances appear sharp while others are blurred, enhancing the perception of
 * focus and depth in the rendered scene.
 * @property {boolean} enabled - Whether DoF is enabled. Defaults to false.
 * @property {boolean} nearBlur - Whether the near blur is enabled. Defaults to false.
 * @property {number} focusDistance - The distance at which the focus is set. Defaults to 100.
 * @property {number} focusRange - The range around the focus distance where the focus is sharp.
 * Defaults to 10.
 * @property {number} blurRadius - The radius of the blur effect, typically 2-10 range. Defaults to 3.
 * @property {number} blurRings - The number of rings in the blur effect, typically 3-8 range. Defaults
 * to 4.
 * @property {number} blurRingPoints - The number of points in each ring of the blur effect, typically
 * 3-8 range. Defaults to 5.
 * @property {boolean} highQuality - Whether the high quality implementation is used. This will have
 * a higher performance cost, but will produce better quality results. Defaults to true.
 */
/**
 * Implementation of a simple to use camera rendering pass, which supports SSAO, Bloom and
 * other rendering effects.
 *
 * Overriding compose shader chunks:
 * The final compose pass registers its shader chunks in a way that does not override any chunks
 * that were already provided. To customize the compose pass output, set your shader chunks on the
 * {@link ShaderChunks} map before creating the `CameraFrame`. Those chunks will be picked up by
 * the compose pass and preserved.
 *
 * Example (GLSL):
 *
 * @example
 * // Provide custom compose chunk(s) before constructing CameraFrame
 * ShaderChunks.get(graphicsDevice, SHADERLANGUAGE_GLSL).set('composeVignettePS', `
 *     #ifdef VIGNETTE
 *         vec3 applyVignette(vec3 color, vec2 uv) {
 *             return color * uv.x;
 *         }
 *     #endif
 * `);
 *
 * // For WebGPU, use SHADERLANGUAGE_WGSL instead.
 *
 * @category Graphics
 */
declare class CameraFrame {
    /**
     * Creates a new CameraFrame instance.
     *
     * @param {AppBase} app - The application.
     * @param {CameraComponent} cameraComponent - The camera component.
     */
    constructor(app: AppBase, cameraComponent: CameraComponent);
    /** @private */
    private _enabled;
    /**
     * Rendering settings.
     *
     * @type {Rendering}
     */
    rendering: Rendering;
    /**
     * SSAO settings.
     *
     * @type {Ssao}
     */
    ssao: Ssao;
    /**
     * Bloom settings.
     *
     * @type {Bloom}
     */
    bloom: Bloom;
    /**
     * Grading settings.
     *
     * @type {Grading}
     */
    grading: Grading;
    /**
     * Color LUT settings.
     *
     * @type {ColorLUT}
     */
    colorLUT: ColorLUT;
    /**
     * Vignette settings.
     *
     * @type {Vignette}
     */
    vignette: Vignette;
    /**
     * Taa settings.
     *
     * @type {Taa}
     */
    taa: Taa;
    /**
     * Fringing settings.
     *
     * @type {Fringing}
     */
    fringing: Fringing;
    /**
     * Color enhancement settings.
     *
     * @type {ColorEnhance}
     */
    colorEnhance: ColorEnhance;
    /**
     * DoF settings.
     *
     * @type {Dof}
     */
    dof: Dof;
    /**
     * Debug rendering. Set to null to disable.
     *
     * @type {null|'scene'|'ssao'|'bloom'|'vignette'|'dofcoc'|'dofblur'}
     */
    debug: null | "scene" | "ssao" | "bloom" | "vignette" | "dofcoc" | "dofblur";
    /**
     * The options used to configure the camera render pass.
     *
     * @type {CameraFrameOptions}
     */
    options: CameraFrameOptions;
    /**
     * @type {FramePassCameraFrame|null}
     * @private
     */
    private renderPassCamera;
    /**
     * The application this camera frame belongs to.
     *
     * @type {AppBase}
     */
    app: AppBase;
    /**
     * The camera component this frame renders with.
     *
     * @type {CameraComponent}
     */
    cameraComponent: CameraComponent;
    /**
     * Handle for the subscription to the camera's layers-changed event — presumably used to
     * rebuild render passes when the layer composition changes; confirm against the
     * implementation.
     *
     * @type {EventHandle}
     */
    cameraLayersChanged: EventHandle;
    /**
     * Destroys the camera frame, removing all render passes.
     */
    destroy(): void;
    /**
     * Enables the camera frame rendering. Prefer setting {@link enabled}, which presumably
     * calls this — confirm.
     */
    enable(): void;
    /**
     * Disables the camera frame rendering. Prefer setting {@link enabled} to false, which
     * releases associated resources.
     */
    disable(): void;
    /**
     * Creates a frame pass for the camera frame. Override this method to utilize a custom frame
     * pass, typically one that extends {@link FramePassCameraFrame}.
     *
     * @returns {FramePassCameraFrame} - The frame pass.
     */
    createRenderPass(): FramePassCameraFrame;
    /**
     * Sets the enabled state of the camera frame. Passing false will release associated resources.
     *
     * @type {boolean}
     */
    set enabled(value: boolean);
    /**
     * Gets the enabled state of the camera frame.
     *
     * @type {boolean}
     */
    get enabled(): boolean;
    /**
     * Rebuilds the render pass options from the current property values — presumably invoked
     * from {@link update}; confirm against the implementation.
     */
    updateOptions(): void;
    /**
     * Applies any changes made to the properties of this instance.
     */
    update(): void;
}

/**
 * Represents a pose in 3D space, including position and rotation.
 *
 * @category Input
 * @alpha
 */
declare class Pose {
    /**
     * Creates a new Pose instance.
     *
     * @param {Vec3} [position] - The position of the pose.
     * @param {Vec3} [angles] - The angles of the pose in degrees.
     * @param {number} [distance] - The focus distance from the position to the pose.
     */
    constructor(position?: Vec3, angles?: Vec3, distance?: number);
    /**
     * The position of the pose.
     *
     * @type {Vec3}
     */
    position: Vec3;
    /**
     * The angles of the pose in degrees calculated from the forward vector.
     *
     * @type {Vec3}
     */
    angles: Vec3;
    /**
     * The focus distance from the position to the pose.
     *
     * @type {number}
     */
    distance: number;
    /**
     * The pitch limits in degrees — presumably [min, max] used to clamp rotation; confirm.
     *
     * @type {Vec2}
     */
    pitchRange: Vec2;
    /**
     * The yaw limits in degrees — presumably [min, max] used to clamp rotation; confirm.
     *
     * @type {Vec2}
     */
    yawRange: Vec2;
    /**
     * The position limits on the X axis — presumably [min, max]; confirm.
     *
     * @type {Vec2}
     */
    xRange: Vec2;
    /**
     * The position limits on the Y axis — presumably [min, max]; confirm.
     *
     * @type {Vec2}
     */
    yRange: Vec2;
    /**
     * The position limits on the Z axis — presumably [min, max]; confirm.
     *
     * @type {Vec2}
     */
    zRange: Vec2;
    /**
     * Copies the position and rotation from another pose.
     *
     * @param {Pose} other - The pose to copy from.
     * @returns {Pose} The updated Pose instance.
     */
    copy(other: Pose): Pose;
    /**
     * Creates a clone of this pose.
     *
     * @returns {Pose} A new Pose instance with the same position, angles, and distance.
     */
    clone(): Pose;
    /**
     * Checks if this pose is approximately equal to another pose within a given epsilon.
     *
     * @param {Pose} other - The pose to compare with.
     * @param {number} [epsilon] - The tolerance for comparison.
     * @returns {boolean} True if the poses are approximately equal, false otherwise.
     */
    equalsApprox(other: Pose, epsilon?: number): boolean;
    /**
     * Lerps between two poses based on the given alpha values.
     *
     * @param {Pose} lhs - The left-hand side pose.
     * @param {Pose} rhs - The right-hand side pose.
     * @param {number} alpha1 - The alpha value for position interpolation.
     * @param {number} [alpha2] - The alpha value for angles interpolation.
     * @param {number} [alpha3] - The alpha value for distance interpolation.
     * @returns {Pose} The updated Pose instance.
     */
    lerp(lhs: Pose, rhs: Pose, alpha1: number, alpha2?: number, alpha3?: number): Pose;
    /**
     * Moves the pose by the given vector.
     *
     * @param {Vec3} offset - The vector to move by.
     * @returns {Pose} The updated Pose instance.
     */
    move(offset: Vec3): Pose;
    /**
     * Rotates the pose by the given angles in degrees.
     *
     * @param {Vec3} euler - The angles to rotate by.
     * @returns {Pose} The updated Pose instance.
     */
    rotate(euler: Vec3): Pose;
    /**
     * Sets the position and rotation of the pose.
     *
     * @param {Vec3} position - The new position.
     * @param {Vec3} angles - The new angles in degrees.
     * @param {number} distance - The new focus distance.
     * @returns {Pose} The updated Pose instance.
     */
    set(position: Vec3, angles: Vec3, distance: number): Pose;
    /**
     * Sets the pose to look in the direction of the given vector.
     *
     * @param {Vec3} from - The point from which to look.
     * @param {Vec3} to - The point to look at.
     * @returns {Pose} The updated Pose instance.
     */
    look(from: Vec3, to: Vec3): Pose;
    /**
     * Gets the focus point of the pose, which is the position plus the forward vector scaled by the distance.
     *
     * @param {Vec3} [out] - The output vector to store the focus point.
     * @returns {Vec3} The focus point of the pose.
     */
    getFocus(out?: Vec3): Vec3;
}

/** @import { HandleEventCallback } from '../../core/event-handler.js' */
/**
 * Represents an input delta. Values accumulate via {@link add} and {@link append}, and are
 * consumed via {@link read}, which resets them to zero.
 *
 * @category Input Source
 * @alpha
 */
declare class InputDelta {
    /**
     * @param {number | number[]} arg - The size of the delta or an array of initial values.
     */
    constructor(arg: number | number[]);
    /**
     * The accumulated delta values.
     *
     * @type {number[]}
     * @private
     */
    private _value;
    /**
     * Adds another InputDelta instance to this one.
     *
     * @param {InputDelta} other - The other InputDelta instance to add.
     * @returns {InputDelta} Self for chaining.
     */
    add(other: InputDelta): InputDelta;
    /**
     * Appends offsets to the current delta values.
     *
     * @param {number[]} offsets - The offsets.
     * @returns {InputDelta} Self for chaining.
     */
    append(offsets: number[]): InputDelta;
    /**
     * Copies the values from another InputDelta instance to this one.
     *
     * @param {InputDelta} other - The other InputDelta instance to copy from.
     * @returns {InputDelta} Self for chaining.
     */
    copy(other: InputDelta): InputDelta;
    /**
     * The magnitude of the delta, calculated as the square root of the sum of squares
     * of the values.
     *
     * @returns {number} - The magnitude of the delta.
     */
    length(): number;
    /**
     * Returns the current value of the delta and resets it to zero.
     *
     * @returns {number[]} - The current value of the delta.
     */
    read(): number[];
}
/**
 * Represents an input frame, which contains a map of input deltas.
 *
 * @category Input Source
 * @alpha
 *
 * @template {Record<string, number[]>} T - The shape of the input frame.
 */
declare class InputFrame<T extends Record<string, number[]>> {
    /**
     * @param {T} data - The input frame data, where each key corresponds to an input delta.
     */
    constructor(data: T);
    /**
     * The input deltas, one {@link InputDelta} per key of the frame data.
     *
     * @type {{ [K in keyof T]: InputDelta }}
     */
    deltas: { [K in keyof T]: InputDelta; };
    /**
     * Returns the current frame state and resets the deltas to zero.
     *
     * @returns {{ [K in keyof T]: number[] }} - The flushed input frame with current deltas.
     */
    read(): { [K in keyof T]: number[]; };
}
/**
 * The base class for all input devices.
 *
 * @category Input Source
 * @alpha
 *
 * @template {Record<string, number[]>} T - The shape of the input source.
 * @augments {InputFrame<T>}
 */
declare class InputSource<T extends Record<string, number[]>> extends InputFrame<T> {
    /**
     * The DOM element the source is attached to, or null when detached.
     *
     * @type {HTMLElement | null}
     * @protected
     */
    protected _element: HTMLElement | null;
    /**
     * @type {EventHandler}
     * @private
     */
    private _events;
    /**
     * Adds an event listener for the specified event.
     *
     * @param {string} event - The event name to listen for.
     * @param {HandleEventCallback} callback - The callback function to execute when the event is
     * triggered.
     */
    on(event: string, callback: HandleEventCallback): void;
    /**
     * Removes an event listener for the specified event.
     *
     * @param {string} event - The event name to stop listening for.
     * @param {HandleEventCallback} callback - The callback function to remove.
     */
    off(event: string, callback: HandleEventCallback): void;
    /**
     * Fires an event with the given name and arguments.
     *
     * @param {string} event - The event name to fire.
     * @param {...any} args - The arguments to pass to the event listeners.
     */
    fire(event: string, ...args: any[]): void;
    /**
     * Attaches the input source to a DOM element — presumably registering its DOM event
     * listeners on it; confirm against the implementation.
     *
     * @param {HTMLElement} element - The element.
     */
    attach(element: HTMLElement): void;
    /**
     * Detaches the input source from its current element, if any.
     */
    detach(): void;
    /**
     * Destroys the input source — presumably detaching it first; confirm.
     */
    destroy(): void;
}
/**
 * The base class for all input consumers, which are used to process input frames. Subclasses
 * (e.g. {@link InputController}) override {@link update}.
 *
 * @category Input Consumer
 * @alpha
 */
declare class InputConsumer {
    /**
     * Processes an input frame.
     *
     * @param {InputFrame} frame - The input frame.
     * @param {number} dt - The delta time.
     * @returns {any} - The result of the update (e.g. a controller pose).
     */
    update(frame: InputFrame<any>, dt: number): any;
}
/**
 * The base class for all input controllers.
 *
 * @category Input Consumer
 * @alpha
 */
declare class InputController extends InputConsumer {
    /**
     * The current pose of the controller.
     *
     * @type {Pose}
     * @protected
     */
    protected _pose: Pose;
    /**
     * Attaches the controller, setting its initial pose.
     *
     * @param {Pose} pose - The initial pose of the controller.
     * @param {boolean} [smooth] - Whether to smooth the transition.
     */
    attach(pose: Pose, smooth?: boolean): void;
    /**
     * Detaches the controller.
     */
    detach(): void;
    /**
     * Processes an input frame and returns the updated controller pose.
     *
     * @param {InputFrame} frame - The input frame.
     * @param {number} dt - The delta time.
     * @returns {Pose} - The controller pose.
     * @override
     */
    override update(frame: InputFrame<any>, dt: number): Pose;
    /**
     * Destroys the controller — presumably detaching it; confirm.
     */
    destroy(): void;
}

/**
 * A virtual joystick that tracks a base and stick position, exposing a vector value
 * normalized to the -1 to 1 range.
 */
declare class VirtualJoystick {
    /**
     * @param {object} options - The options.
     * @param {number} [options.range] - The inner max distance of the joystick.
     */
    constructor({ range }?: {
        range?: number;
    });
    /**
     * The inner max distance of the joystick.
     *
     * @type {number}
     * @private
     */
    private _range;
    /**
     * @type {Vec2}
     * @private
     */
    private _position;
    /**
     * @type {Vec2}
     * @private
     */
    private _value;
    /**
     * The vector value of the joystick, normalized to the range of -1 to 1.
     *
     * @type {Vec2}
     */
    get value(): Vec2;
    /**
     * Handles a press at the given position — presumably placing the joystick base there;
     * confirm against the implementation.
     *
     * @param {number} x - The x position.
     * @param {number} y - The y position.
     * @returns {number[]} - An array containing the base and stick positions.
     */
    down(x: number, y: number): number[];
    /**
     * Handles movement of the stick to the given position.
     *
     * @param {number} x - The x position of the stick
     * @param {number} y - The y position of the stick
     * @returns {number[]} - An array containing the base and stick positions.
     */
    move(x: number, y: number): number[];
    /**
     * Resets the joystick to its initial state.
     *
     * @returns {number[]} - An array containing the base and stick positions, both set to -1.
     */
    up(): number[];
}

/**
 * The input delta shape produced by {@link SingleGestureSource}.
 */
type SingleGestureSourceDeltas = {
    /**
     * - The input deltas, represented as an array of [x, y] coordinates.
     */
    input: number[];
    /**
     * - The double tap delta.
     */
    doubleTap: number[];
};
/**
 * Single gesture input source.
 *
 * @category Input Source
 * @alpha
 *
 * @typedef {object} SingleGestureSourceDeltas
 * @property {number[]} input - The input deltas, represented as an array of [x, y] coordinates.
 * @property {number[]} doubleTap - The double tap delta.
 * @augments {InputSource<SingleGestureSourceDeltas>}
 */
declare class SingleGestureSource extends InputSource<SingleGestureSourceDeltas> {
    constructor();
    /**
     * @type {ReturnType<typeof movementState>}
     * @private
     */
    private _movementState;
    /**
     * @type {'joystick' | 'touch'}
     * @private
     */
    private _layout;
    /**
     * @type {Map<number, { x: number, y: number }>}
     * @private
     */
    private _pointerData;
    /**
     * @type {{ x: number, y: number, time: number }}
     * @private
     */
    private _lastPointer;
    /**
     * @type {VirtualJoystick}
     * @private
     */
    private _joystick;
    /**
     * @private
     * @param {PointerEvent} event - The pointer event.
     */
    private _onPointerDown;
    /**
     * @param {PointerEvent} event - The pointer event.
     * @private
     */
    private _onPointerMove;
    /**
     * @param {PointerEvent} event - The pointer event.
     * @private
     */
    private _onPointerUp;
    /**
     * The layout of the single touch input source. The layout can be one of the following:
     *
     * - `joystick`: A virtual joystick.
     * - `touch`: A touch.
     *
     * Default is `joystick`.
     *
     * @type {'joystick' | 'touch'}
     */
    set layout(value: "touch" | "joystick");
    get layout(): "touch" | "joystick";
    /**
     * The virtual joystick instance, used when {@link layout} is `joystick`.
     *
     * @type {VirtualJoystick}
     */
    get joystick(): VirtualJoystick;
}

/**
 * The input delta shape produced by {@link DualGestureSource}.
 */
type DualGestureSourceDeltas = {
    /**
     * - The left input deltas.
     */
    leftInput: number[];
    /**
     * - The right input deltas.
     */
    rightInput: number[];
    /**
     * - The double tap delta.
     */
    doubleTap: number[];
};
/**
 * Dual gesture input source.
 *
 * @category Input Source
 * @alpha
 *
 * @typedef {object} DualGestureSourceDeltas
 * @property {number[]} leftInput - The left input deltas.
 * @property {number[]} rightInput - The right input deltas.
 * @property {number[]} doubleTap - The double tap delta.
 * @augments {InputSource<DualGestureSourceDeltas>}
 */
declare class DualGestureSource extends InputSource<DualGestureSourceDeltas> {
    /**
     * @param {`${'joystick' | 'touch'}-${'joystick' | 'touch'}`} [layout] - The layout of the dual
     * gesture source.
     */
    constructor(layout?: `${"joystick" | "touch"}-${"joystick" | "touch"}`);
    /**
     * @type {ReturnType<typeof movementState>}
     * @private
     */
    private _movementState;
    /**
     * @type {`${'joystick' | 'touch'}-${'joystick' | 'touch'}`}
     * @private
     */
    private _layout;
    /**
     * @type {Map<number, { x: number, y: number, left: boolean }>}
     * @private
     */
    private _pointerData;
    /**
     * @type {{ x: number, y: number, time: number }}
     * @private
     */
    private _lastPointer;
    /**
     * @type {VirtualJoystick}
     * @private
     */
    private _leftJoystick;
    /**
     * @type {VirtualJoystick}
     * @private
     */
    private _rightJoystick;
    /**
     * The layout of the dual gesture source, combining a left and a right half, each of which
     * is either `joystick` or `touch`.
     *
     * @type {`${'joystick' | 'touch'}-${'joystick' | 'touch'}`}
     */
    set layout(value: "touch-touch" | "touch-joystick" | "joystick-touch" | "joystick-joystick");
    get layout(): "touch-touch" | "touch-joystick" | "joystick-touch" | "joystick-joystick";
    /**
     * @private
     * @param {PointerEvent} event - The pointer event.
     */
    private _onPointerDown;
    /**
     * @param {PointerEvent} event - The pointer event.
     * @private
     */
    private _onPointerMove;
    /**
     * @param {PointerEvent} event - The pointer event.
     * @private
     */
    private _onPointerUp;
    /**
     * The virtual joystick instance for the left half.
     *
     * @type {VirtualJoystick}
     */
    get leftJoystick(): VirtualJoystick;
    /**
     * The virtual joystick instance for the right half.
     *
     * @type {VirtualJoystick}
     */
    get rightJoystick(): VirtualJoystick;
}

/**
 * The input delta shape produced by {@link MultiTouchSource}.
 */
type MultiTouchSourceDeltas = {
    /**
     * - The touch deltas, represented as an array of [x, y] coordinates.
     */
    touch: number[];
    /**
     * - The count deltas, represented as an array of integers.
     */
    count: number[];
    /**
     * - The pinch deltas, represented as an array of integers.
     */
    pinch: number[];
};
/**
 * Multi-touch input source class
 *
 * @category Input Source
 * @alpha
 *
 * @typedef {object} MultiTouchSourceDeltas
 * @property {number[]} touch - The touch deltas, represented as an array of [x, y] coordinates.
 * @property {number[]} count - The count deltas, represented as an array of integers.
 * @property {number[]} pinch - The pinch deltas, represented as an array of integers.
 * @augments {InputSource<MultiTouchSourceDeltas>}
 */
declare class MultiTouchSource extends InputSource<MultiTouchSourceDeltas> {
    constructor();
    /**
     * @type {ReturnType<typeof movementState>}
     * @private
     */
    private _movementState;
    /**
     * Active pointers, keyed by pointer id.
     *
     * @type {Map<number, PointerEvent>}
     * @private
     */
    private _pointerEvents;
    /**
     * @type {Vec2}
     * @private
     */
    private _pointerPos;
    /**
     * Last measured distance between two pointers — presumably used to derive pinch deltas
     * via {@link _getPinchDist}; confirm.
     *
     * @type {number}
     * @private
     */
    private _pinchDist;
    /**
     * @param {PointerEvent} event - The pointer event.
     * @private
     */
    private _onPointerDown;
    /**
     * @param {PointerEvent} event - The pointer event.
     * @private
     */
    private _onPointerMove;
    /**
     * @param {PointerEvent} event - The pointer event.
     * @private
     */
    private _onPointerUp;
    /**
     * @param {MouseEvent} event - The mouse event.
     * @private
     */
    private _onContextMenu;
    /**
     * @param {Vec2} out - The output vector.
     * @returns {Vec2} The mid point.
     * @private
     */
    private _getMidPoint;
    /**
     * @returns {number} The pinch distance.
     * @private
     */
    private _getPinchDist;
}

/**
 * The input delta shape produced by {@link KeyboardMouseSource}.
 */
type KeyboardMouseSourceDeltas = {
    /**
     * - The key deltas.
     */
    key: number[];
    /**
     * - The button deltas.
     */
    button: number[];
    /**
     * - The mouse deltas.
     */
    mouse: number[];
    /**
     * - The wheel deltas.
     */
    wheel: number[];
};
/**
 * Keyboard and mouse input source class.
 *
 * @category Input Source
 * @alpha
 *
 * @typedef {object} KeyboardMouseSourceDeltas
 * @property {number[]} key - The key deltas.
 * @property {number[]} button - The button deltas.
 * @property {number[]} mouse - The mouse deltas.
 * @property {number[]} wheel - The wheel deltas.
 * @augments {InputSource<KeyboardMouseSourceDeltas>}
 */
declare class KeyboardMouseSource extends InputSource<KeyboardMouseSourceDeltas> {
    /**
     * The key codes for the keyboard keys.
     *
     * @readonly
     */
    static readonly keyCode: {
        readonly A: 0;
        readonly B: 1;
        readonly C: 2;
        readonly D: 3;
        readonly E: 4;
        readonly F: 5;
        readonly G: 6;
        readonly H: 7;
        readonly I: 8;
        readonly J: 9;
        readonly K: 10;
        readonly L: 11;
        readonly M: 12;
        readonly N: 13;
        readonly O: 14;
        readonly P: 15;
        readonly Q: 16;
        readonly R: 17;
        readonly S: 18;
        readonly T: 19;
        readonly U: 20;
        readonly V: 21;
        readonly W: 22;
        readonly X: 23;
        readonly Y: 24;
        readonly Z: 25;
        readonly '0': 26;
        readonly '1': 27;
        readonly '2': 28;
        readonly '3': 29;
        readonly '4': 30;
        readonly '5': 31;
        readonly '6': 32;
        readonly '7': 33;
        readonly '8': 34;
        readonly '9': 35;
        readonly UP: 36;
        readonly DOWN: 37;
        readonly LEFT: 38;
        readonly RIGHT: 39;
        readonly SPACE: 40;
        readonly SHIFT: 41;
        readonly CTRL: 42;
    };
    /**
     * @param {object} [options] - The options.
     * @param {boolean} [options.pointerLock] - Whether to enable pointer lock.
     */
    constructor({ pointerLock }?: {
        pointerLock?: boolean;
    });
    /**
     * @type {ReturnType<typeof movementState>}
     * @private
     */
    private _movementState;
    /**
     * The tracked pointer id.
     *
     * @type {number}
     * @private
     */
    private _pointerId;
    /**
     * Whether pointer lock is enabled.
     *
     * @type {boolean}
     * @private
     */
    private _pointerLock;
    /**
     * Maps key codes (strings) to key indices - NOTE(review): inferred from _setKey signature; confirm.
     *
     * @type {Map<string, number>}
     * @private
     */
    private _keyMap;
    /**
     * The key values from the previous frame.
     *
     * @type {number[]}
     * @private
     */
    private _keyPrev;
    /**
     * The current key values.
     *
     * @type {number[]}
     * @private
     */
    private _keyNow;
    /**
     * The mouse button states - NOTE(review): inferred from name; confirm.
     *
     * @type {number[]}
     */
    _button: number[];
    /**
     * Handles wheel events.
     *
     * @param {WheelEvent} event - The wheel event.
     * @private
     */
    private _onWheel;
    /**
     * Handles pointer down events.
     *
     * @param {PointerEvent} event - The pointer event.
     * @private
     */
    private _onPointerDown;
    /**
     * Handles pointer move events.
     *
     * @param {PointerEvent} event - The pointer event.
     * @private
     */
    private _onPointerMove;
    /**
     * Handles pointer up events.
     *
     * @param {PointerEvent} event - The pointer event.
     * @private
     */
    private _onPointerUp;
    /**
     * Handles context menu events.
     *
     * @param {MouseEvent} event - The mouse event.
     * @private
     */
    private _onContextMenu;
    /**
     * Handles key down events.
     *
     * @param {KeyboardEvent} event - The keyboard event.
     * @private
     */
    private _onKeyDown;
    /**
     * Handles key up events.
     *
     * @param {KeyboardEvent} event - The keyboard event.
     * @private
     */
    private _onKeyUp;
    /**
     * Clears the button states.
     *
     * @private
     */
    private _clearButtons;
    /**
     * Sets the value for a key.
     *
     * @param {string} code - The code.
     * @param {number} value - The value.
     * @private
     */
    private _setKey;
}

/**
 * The deltas produced by the game pad input source.
 */
type GamepadSourceDeltas = {
    /**
     * - The button deltas, represented as an array of button states (0 or 1).
     */
    buttons: number[];
    /**
     * - The left stick deltas, represented as an array of [x, y] coordinates.
     */
    leftStick: number[];
    /**
     * - The right stick deltas, represented as an array of [x, y] coordinates.
     */
    rightStick: number[];
};
/**
 * Game pad input source class.
 *
 * @category Input Source
 * @alpha
 *
 * @typedef {object} GamepadSourceDeltas
 * @property {number[]} buttons - The button deltas, represented as an array of button states (0 or 1).
 * @property {number[]} leftStick - The left stick deltas, represented as an array of [x, y] coordinates.
 * @property {number[]} rightStick - The right stick deltas, represented as an array of [x, y] coordinates.
 * @augments {InputSource<GamepadSourceDeltas>}
 */
declare class GamepadSource extends InputSource<GamepadSourceDeltas> {
    /**
     * The button codes (based on Xbox controller layout).
     *
     * @readonly
     */
    static readonly buttonCode: {
        readonly A: 0;
        readonly B: 1;
        readonly X: 2;
        readonly Y: 3;
        readonly LB: 4;
        readonly RB: 5;
        readonly LT: 6;
        readonly RT: 7;
        readonly SELECT: 8;
        readonly START: 9;
        readonly LEFT_STICK: 10;
        readonly RIGHT_STICK: 11;
    };
    constructor();
    /**
     * The button values from the previous frame - NOTE(review): inferred from name; confirm.
     *
     * @type {number[]}
     * @private
     */
    private _buttonPrev;
}

/**
 * The fly controller.
 *
 * @category Input Controller
 * @alpha
 */
declare class FlyController extends InputController {
    /**
     * The target pose the controller interpolates towards.
     *
     * @type {Pose}
     * @private
     */
    private _targetPose;
    /**
     * The rotation damping. In the range 0 to 1, where a value of 0 means no damping and 1 means
     * full damping. Default is 0.98.
     *
     * @type {number}
     */
    rotateDamping: number;
    /**
     * The movement damping. In the range 0 to 1, where a value of 0 means no damping and 1 means
     * full damping. Default is 0.98.
     *
     * @type {number}
     */
    moveDamping: number;
    /**
     * Sets the pitch range - NOTE(review): presumably [min, max] angles; confirm units.
     *
     * @type {Vec2}
     */
    set pitchRange(value: Vec2);
    /**
     * Gets the pitch range.
     *
     * @type {Vec2}
     */
    get pitchRange(): Vec2;
    /**
     * Sets the yaw range - NOTE(review): presumably [min, max] angles; confirm units.
     *
     * @type {Vec2}
     */
    set yawRange(value: Vec2);
    /**
     * Gets the yaw range.
     *
     * @type {Vec2}
     */
    get yawRange(): Vec2;
    /**
     * @param {InputFrame<{ move: number[], rotate: number[] }>} frame - The input frame.
     * @param {number} dt - The delta time.
     * @returns {Pose} - The controller pose.
     */
    update(frame: InputFrame<{
        move: number[];
        rotate: number[];
    }>, dt: number): Pose;
}

/**
 * The orbit controller.
 *
 * @category Orbit Controller
 * @category Input Controller
 * @alpha
 */
declare class OrbitController extends InputController {
    /**
     * The target pose of the root.
     *
     * @type {Pose}
     * @private
     */
    private _targetRootPose;
    /**
     * The current pose of the root.
     *
     * @type {Pose}
     * @private
     */
    private _rootPose;
    /**
     * The target pose of the child.
     *
     * @type {Pose}
     * @private
     */
    private _targetChildPose;
    /**
     * The current pose of the child.
     *
     * @type {Pose}
     * @private
     */
    private _childPose;
    /**
     * The rotation damping. In the range 0 to 1, where a value of 0 means no damping and 1 means
     * full damping. Default is 0.98.
     *
     * @type {number}
     */
    rotateDamping: number;
    /**
     * The movement damping. In the range 0 to 1, where a value of 0 means no damping and 1 means
     * full damping. Default is 0.98.
     *
     * @type {number}
     */
    moveDamping: number;
    /**
     * The zoom damping. A higher value means more damping. A value of 0 means no damping.
     *
     * @type {number}
     */
    zoomDamping: number;
    /**
     * Sets the pitch range - NOTE(review): presumably [min, max] angles; confirm units.
     *
     * @type {Vec2}
     */
    set pitchRange(range: Vec2);
    /**
     * Gets the pitch range.
     *
     * @type {Vec2}
     */
    get pitchRange(): Vec2;
    /**
     * Sets the yaw range - NOTE(review): presumably [min, max] angles; confirm units.
     *
     * @type {Vec2}
     */
    set yawRange(range: Vec2);
    /**
     * Gets the yaw range.
     *
     * @type {Vec2}
     */
    get yawRange(): Vec2;
    /**
     * Sets the zoom range - NOTE(review): presumably [min, max] distances; confirm units.
     *
     * @type {Vec2}
     */
    set zoomRange(range: Vec2);
    /**
     * Gets the zoom range.
     *
     * @type {Vec2}
     */
    get zoomRange(): Vec2;
    /**
     * @param {InputFrame<{ move: number[], rotate: number[] }>} frame - The input frame.
     * @param {number} dt - The delta time.
     * @returns {Pose} - The controller pose.
     */
    update(frame: InputFrame<{
        move: number[];
        rotate: number[];
    }>, dt: number): Pose;
}

/**
 * The focus controller.
 *
 * @category Input Controller
 * @alpha
 */
declare class FocusController extends InputController {
    /**
     * The target pose of the root.
     *
     * @type {Pose}
     * @private
     */
    private _targetRootPose;
    /**
     * The current pose of the root.
     *
     * @type {Pose}
     * @private
     */
    private _rootPose;
    /**
     * The target pose of the child.
     *
     * @type {Pose}
     * @private
     */
    private _targetChildPose;
    /**
     * The current pose of the child.
     *
     * @type {Pose}
     * @private
     */
    private _childPose;
    /**
     * The focus damping. In the range 0 to 1, where a value of 0 means no damping and 1 means
     * full damping. Default is 0.98.
     *
     * @type {number}
     */
    focusDamping: number;
    /**
     * @returns {boolean} Whether the focus transition has completed - NOTE(review): inferred from name; confirm.
     */
    complete(): boolean;
    /**
     * @param {InputFrame<{ move: number[], rotate: number[] }>} frame - The input frame.
     * @param {number} dt - The delta time.
     * @returns {Pose} - The controller pose.
     */
    update(frame: InputFrame<{
        move: number[];
        rotate: number[];
    }>, dt: number): Pose;
}

/**
 * The gizmo space defines the coordinate system in which the gizmo operates. This can be one of the
 * following:
 *
 * - 'local': The local coordinate space
 * - 'world': The world coordinate space
 *
 * @typedef {'local' | 'world'} GizmoSpace
 */
/**
 * The gizmo axis defines the direction in which the gizmo operates. This can be one of the
 * following:
 *
 * - 'x': The X axis
 * - 'y': The Y axis
 * - 'z': The Z axis
 * - 'yz': The YZ plane
 * - 'xz': The XZ plane
 * - 'xy': The XY plane
 * - 'xyz': The XYZ space
 * - 'f': The axis facing the camera
 *
 * @typedef {'x' | 'y' | 'z' | 'yz' | 'xz' | 'xy' | 'xyz' | 'f'} GizmoAxis
 */
/**
 * The gizmo drag mode defines how the gizmo is rendered while being dragged. This can be one of the
 * following:
 *
 * - 'show': always show the shapes
 * - 'hide': hide the shapes when dragging
 * - 'selected': show only the axis shapes for the affected axes
 *
 * @typedef {'show' | 'hide' | 'selected'} GizmoDragMode
 */
/**
 * Local coordinate space.
 *
 * @category Gizmo
 * @deprecated Use the literal 'local' instead - {@link GizmoSpace}
 * @ignore
 */
declare const GIZMOSPACE_LOCAL: "local";
/**
 * World coordinate space.
 *
 * @category Gizmo
 * @deprecated Use the literal 'world' instead - {@link GizmoSpace}
 * @ignore
 */
declare const GIZMOSPACE_WORLD: "world";
/**
 * Gizmo axis for the line X.
 *
 * @category Gizmo
 * @deprecated Use the literal 'x' instead - {@link GizmoAxis}.
 * @ignore
 */
declare const GIZMOAXIS_X: "x";
/**
 * Gizmo axis for the line Y.
 *
 * @category Gizmo
 * @deprecated Use the literal 'y' instead - {@link GizmoAxis}.
 * @ignore
 */
declare const GIZMOAXIS_Y: "y";
/**
 * Gizmo axis for the line Z.
 *
 * @category Gizmo
 * @deprecated Use the literal 'z' instead - {@link GizmoAxis}.
 * @ignore
 */
declare const GIZMOAXIS_Z: "z";
/**
 * Gizmo axis for the plane YZ.
 *
 * @category Gizmo
 * @deprecated Use the literal 'yz' instead - {@link GizmoAxis}.
 * @ignore
 */
declare const GIZMOAXIS_YZ: "yz";
/**
 * Gizmo axis for the plane XZ.
 *
 * @category Gizmo
 * @deprecated Use the literal 'xz' instead - {@link GizmoAxis}.
 * @ignore
 */
declare const GIZMOAXIS_XZ: "xz";
/**
 * Gizmo axis for the plane XY.
 *
 * @category Gizmo
 * @deprecated Use the literal 'xy' instead - {@link GizmoAxis}.
 * @ignore
 */
declare const GIZMOAXIS_XY: "xy";
/**
 * Gizmo axis for all directions XYZ.
 *
 * @category Gizmo
 * @deprecated Use the literal 'xyz' instead - {@link GizmoAxis}.
 * @ignore
 */
declare const GIZMOAXIS_XYZ: "xyz";
/**
 * Gizmo axis for the direction facing the camera.
 *
 * NOTE(review): this constant's value is 'face', which is not a member of the GizmoAxis union
 * (which uses 'f') - confirm the intended mapping between the two.
 *
 * @category Gizmo
 * @deprecated Use the literal 'f' instead - {@link GizmoAxis}.
 * @ignore
 */
declare const GIZMOAXIS_FACE: "face";
/**
 * The gizmo space defines the coordinate system in which the gizmo operates. This can be one of the
 * following:
 *
 * - 'local': The local coordinate space
 * - 'world': The world coordinate space
 */
type GizmoSpace = "local" | "world";
/**
 * The gizmo axis defines the direction in which the gizmo operates. This can be one of the
 * following:
 *
 * - 'x': The X axis
 * - 'y': The Y axis
 * - 'z': The Z axis
 * - 'yz': The YZ plane
 * - 'xz': The XZ plane
 * - 'xy': The XY plane
 * - 'xyz': The XYZ space
 * - 'f': The axis facing the camera
 */
type GizmoAxis = "x" | "y" | "z" | "yz" | "xz" | "xy" | "xyz" | "f";
/**
 * The gizmo drag mode defines how the gizmo is rendered while being dragged. This can be one of the
 * following:
 *
 * - 'show': always show the shapes
 * - 'hide': hide the shapes when dragging
 * - 'selected': show only the axis shapes for the affected axes
 */
type GizmoDragMode = "show" | "hide" | "selected";

/**
 * A triangle defined by three {@link Vec3} vectors. The vector references `v0`, `v1` and `v2`
 * are readonly (they cannot be reassigned), but their components may be modified via {@link Tri#set}.
 *
 * @category Math
 */
declare class Tri {
    /**
     * Creates a new Tri object.
     *
     * @param {Vec3} [v0] - The first 3-dimensional vector.
     * @param {Vec3} [v1] - The second 3-dimensional vector.
     * @param {Vec3} [v2] - The third 3-dimensional vector.
     * @example
     * const v0 = new pc.Vec3(1, 0, 0);
     * const v1 = new pc.Vec3(0, 1, 0);
     * const v2 = new pc.Vec3(2, 2, 1);
     * const t = new pc.Tri(v0, v1, v2);
     */
    constructor(v0?: Vec3, v1?: Vec3, v2?: Vec3);
    /**
     * The first 3-dimensional vector of the triangle.
     *
     * @type {Vec3}
     * @readonly
     */
    readonly v0: Vec3;
    /**
     * The second 3-dimensional vector of the triangle.
     *
     * @type {Vec3}
     * @readonly
     */
    readonly v1: Vec3;
    /**
     * The third 3-dimensional vector of the triangle.
     *
     * @type {Vec3}
     * @readonly
     */
    readonly v2: Vec3;
    /**
     * Sets the specified triangle to the supplied 3-dimensional vectors.
     *
     * @param {Vec3} v0 - The value set on the first 3-dimensional vector of the triangle.
     * @param {Vec3} v1 - The value set on the second 3-dimensional vector of the triangle.
     * @param {Vec3} v2 - The value set on the third 3-dimensional vector of the triangle.
     * @returns {Tri} Self for chaining.
     * @example
     * const t = new pc.Tri(pc.Vec3.UP, pc.Vec3.RIGHT, pc.Vec3.BACK);
     * const v0 = new pc.Vec3(1, 0, 0);
     * const v1 = new pc.Vec3(0, 1, 0);
     * const v2 = new pc.Vec3(2, 2, 1);
     * t.set(v0, v1, v2);
     *
     * // Outputs [[1, 0, 0], [0, 1, 0], [2, 2, 1]]
     * console.log("The result of the triangle set is: " + t.toString());
     */
    set(v0: Vec3, v1: Vec3, v2: Vec3): Tri;
    /**
     * Test if a ray intersects with the triangle.
     *
     * @param {Ray} ray - Ray to test against (direction must be normalized).
     * @param {Vec3} [point] - If there is an intersection, the intersection point will be copied
     * into here.
     * @returns {boolean} True if there is an intersection.
     */
    intersectsRay(ray: Ray, point?: Vec3): boolean;
    /**
     * Converts the specified triangle to string form.
     *
     * @returns {string} The triangle in string form.
     * @example
     * const t = new pc.Tri(pc.Vec3.UP, pc.Vec3.RIGHT, pc.Vec3.BACK);
     * // Outputs [[0, 1, 0], [1, 0, 0], [0, 0, 1]]
     * console.log(t.toString());
     */
    toString(): string;
}

/**
 * The class for holding triangle data.
 *
 * @ignore
 */
declare class TriData {
    /**
     * @param {Geometry} geometry - The geometry to create the triangle data from.
     * @param {number} [priority] - The priority of the triangle data.
     */
    constructor(geometry: Geometry, priority?: number);
    /**
     * The priority of the triangle data (Used for intersection ordering):
     *   - priority = 0 - no priority
     *   - priority > 0 - higher value represents a higher priority
     * defaults to 0.
     *
     * @type {number}
     */
    _priority: number;
    /**
     * The transform of the triangles.
     *
     * @type {Mat4}
     */
    _transform: Mat4;
    /**
     * The array of triangles for the geometry.
     *
     * @type {Tri[]}
     */
    tris: Tri[];
    /**
     * Gets the transform of the triangles.
     */
    get transform(): Mat4;
    /**
     * Gets the priority of the triangle data.
     */
    get priority(): number;
    /**
     * Sets the transform of the triangle data.
     *
     * @param {Vec3} [pos] - The position of the transform.
     * @param {Quat} [rot] - The rotation of the transform.
     * @param {Vec3} [scale] - The scale of the transform.
     */
    setTransform(pos?: Vec3, rot?: Quat, scale?: Vec3): void;
    /**
     * Populates the triangle data from the supplied geometry.
     *
     * @param {Geometry} geometry - The geometry to create the triangle data from.
     */
    fromGeometry(geometry: Geometry): void;
}

/**
 * The arguments for creating a {@link Shape}.
 */
type ShapeArgs = {
    /**
     * - The axis of the shape (e.g., 'x', 'y', 'z').
     */
    axis?: string;
    /**
     * - The position of the shape.
     */
    position?: Vec3;
    /**
     * - The rotation of the shape.
     */
    rotation?: Vec3;
    /**
     * - The scale of the shape.
     */
    scale?: Vec3;
    /**
     * - Whether the shape is disabled.
     */
    disabled?: boolean;
    /**
     * - Whether the shape is visible.
     */
    visible?: boolean;
    /**
     * - The layers the shape belongs to.
     */
    layers?: number[];
    /**
     * - The default color of the shape.
     */
    defaultColor?: Color;
    /**
     * - The hover color of the shape.
     */
    hoverColor?: Color;
    /**
     * - The disabled color of the shape.
     */
    disabledColor?: Color;
    /**
     * - The culling mode of the shape.
     */
    cull?: number;
    /**
     * - The depth of the shape. -1 = interpolated depth.
     */
    depth?: number;
};
/**
 * @typedef {object} ShapeArgs
 * @property {string} [axis] - The axis of the shape (e.g., 'x', 'y', 'z').
 * @property {Vec3} [position] - The position of the shape.
 * @property {Vec3} [rotation] - The rotation of the shape.
 * @property {Vec3} [scale] - The scale of the shape.
 * @property {boolean} [disabled] - Whether the shape is disabled.
 * @property {boolean} [visible] - Whether the shape is visible.
 * @property {number[]} [layers] - The layers the shape belongs to.
 * @property {Color} [defaultColor] - The default color of the shape.
 * @property {Color} [hoverColor] - The hover color of the shape.
 * @property {Color} [disabledColor] - The disabled color of the shape.
 * @property {number} [cull] - The culling mode of the shape.
 * @property {number} [depth] - The depth of the shape. -1 = interpolated depth.
 */
/**
 * The base class for gizmo shapes (used for intersection testing and rendering - NOTE(review):
 * inferred from use in {@link Gizmo#intersectShapes}; confirm).
 *
 * @ignore
 */
declare class Shape {
    /**
     * Create a shape.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {string} name - The name of the shape.
     * @param {ShapeArgs} args - The options for the shape.
     */
    constructor(device: GraphicsDevice, name: string, args: ShapeArgs);
    /**
     * The internal position of the shape.
     *
     * @type {Vec3}
     * @protected
     */
    protected _position: Vec3;
    /**
     * The internal rotation of the shape.
     *
     * @type {Vec3}
     * @protected
     */
    protected _rotation: Vec3;
    /**
     * The internal scale of the shape.
     *
     * @type {Vec3}
     * @protected
     */
    protected _scale: Vec3;
    /**
     * The internal render component layers of the shape.
     *
     * @type {number[]}
     * @protected
     */
    protected _layers: number[];
    /**
     * The internal material state of the shape.
     *
     * @type {ShaderMaterial}
     * @protected
     */
    protected _material: ShaderMaterial;
    /**
     * The internal disabled state of the shape.
     *
     * @type {boolean}
     * @protected
     */
    protected _disabled: boolean;
    /**
     * The internal visibility state of the shape.
     *
     * @type {boolean}
     * @protected
     */
    protected _visible: boolean;
    /**
     * The internal default color of the shape.
     *
     * @type {Color}
     * @protected
     */
    protected _defaultColor: Color;
    /**
     * The internal hover color of the shape.
     *
     * @type {Color}
     * @protected
     */
    protected _hoverColor: Color;
    /**
     * The internal disabled color of the shape.
     *
     * @type {Color}
     * @protected
     */
    protected _disabledColor: Color;
    /**
     * The internal culling state of the shape.
     *
     * @type {number}
     * @protected
     */
    protected _cull: number;
    /**
     * The internal depth state of the shape. -1 = interpolated depth.
     *
     * @type {number}
     * @protected
     */
    protected _depth: number;
    /**
     * The graphics device.
     *
     * @type {GraphicsDevice}
     */
    device: GraphicsDevice;
    /**
     * The axis of the shape.
     *
     * @type {string}
     */
    axis: string;
    /**
     * The entity of the shape.
     *
     * @type {Entity}
     */
    entity: Entity;
    /**
     * The triangle data of the shape.
     *
     * @type {TriData[]}
     */
    triData: TriData[];
    /**
     * The mesh instances of the shape.
     *
     * @type {MeshInstance[]}
     */
    meshInstances: MeshInstance[];
    /**
     * Set the disabled state of the shape.
     *
     * @type {boolean}
     */
    set disabled(value: boolean);
    /**
     * Get the disabled state of the shape.
     *
     * @type {boolean}
     */
    get disabled(): boolean;
    /**
     * Set the visibility state of the shape.
     *
     * @type {boolean}
     */
    set visible(value: boolean);
    /**
     * Get the visibility state of the shape.
     *
     * @type {boolean}
     */
    get visible(): boolean;
    /**
     * Create a render component for an entity.
     *
     * @param {Entity} entity - The entity to create the render component for.
     * @param {Mesh[]} meshes - The meshes to create the render component with.
     * @protected
     */
    protected _createRenderComponent(entity: Entity, meshes: Mesh[]): void;
    /**
     * Update the shape's transform.
     *
     * @protected
     */
    protected _update(): void;
    /**
     * Sets the hover state of the shape.
     *
     * @param {boolean} state - Whether the shape is hovered.
     * @returns {void}
     */
    hover(state: boolean): void;
    /**
     * Destroys the shape and its entity.
     *
     * @returns {void}
     */
    destroy(): void;
}

/**
 * The base class for all gizmos.
 *
 * @category Gizmo
 */
declare class Gizmo extends EventHandler {
    /**
     * Fired when the pointer is down on the gizmo.
     *
     * @event
     * @example
     * const gizmo = new pc.Gizmo(camera, layer);
     * gizmo.on('pointer:down', (x, y, meshInstance) => {
     *     console.log(`Pointer was down on ${meshInstance.node.name} at ${x}, ${y}`);
     * });
     */
    static EVENT_POINTERDOWN: string;
    /**
     * Fired when the pointer is moving over the gizmo.
     *
     * @event
     * @example
     * const gizmo = new pc.Gizmo(camera, layer);
     * gizmo.on('pointer:move', (x, y, meshInstance) => {
     *     console.log(`Pointer was moving on ${meshInstance.node.name} at ${x}, ${y}`);
     * });
     */
    static EVENT_POINTERMOVE: string;
    /**
     * Fired when the pointer is up off the gizmo.
     *
     * @event
     * @example
     * const gizmo = new pc.Gizmo(camera, layer);
     * gizmo.on('pointer:up', (x, y, meshInstance) => {
     *     console.log(`Pointer was up on ${meshInstance.node.name} at ${x}, ${y}`);
     * })
     */
    static EVENT_POINTERUP: string;
    /**
     * Fired when the gizmo's position is updated.
     *
     * @event
     * @example
     * const gizmo = new pc.Gizmo(camera, layer);
     * gizmo.on('position:update', (position) => {
     *     console.log(`The gizmo's position was updated to ${position}`);
     * })
     */
    static EVENT_POSITIONUPDATE: string;
    /**
     * Fired when the gizmo's rotation is updated.
     *
     * @event
     * @example
     * const gizmo = new pc.Gizmo(camera, layer);
     * gizmo.on('rotation:update', (rotation) => {
     *     console.log(`The gizmo's rotation was updated to ${rotation}`);
     * });
     */
    static EVENT_ROTATIONUPDATE: string;
    /**
     * Fired when the gizmo's scale is updated.
     *
     * @event
     * @example
     * const gizmo = new pc.Gizmo(camera, layer);
     * gizmo.on('scale:update', (scale) => {
     *     console.log(`The gizmo's scale was updated to ${scale}`);
     * });
     */
    static EVENT_SCALEUPDATE: string;
    /**
     * Fired when graph nodes are attached.
     *
     * @event
     * @example
     * const gizmo = new pc.Gizmo(camera, layer);
     * gizmo.on('nodes:attach', () => {
     *     console.log('Graph nodes attached');
     * });
     */
    static EVENT_NODESATTACH: string;
    /**
     * Fired when graph nodes are detached.
     *
     * @event
     * @example
     * const gizmo = new pc.Gizmo(camera, layer);
     * gizmo.on('nodes:detach', () => {
     *     console.log('Graph nodes detached');
     * });
     */
    static EVENT_NODESDETACH: string;
    /**
     * Fired when the gizmo render has updated.
     *
     * @event
     * @example
     * const gizmo = new pc.TransformGizmo(camera, layer);
     * gizmo.on('render:update', () => {
     *     console.log('Gizmo render has been updated');
     * });
     */
    static EVENT_RENDERUPDATE: string;
    /**
     * Creates a new gizmo layer and adds it to the scene.
     *
     * @param {AppBase} app - The app.
     * @param {string} [layerName] - The layer name. Defaults to 'Gizmo'.
     * @param {number} [layerIndex] - The layer index. Defaults to the end of the layer list.
     * @returns {Layer} The new layer.
     */
    static createLayer(app: AppBase, layerName?: string, layerIndex?: number): Layer;
    /**
     * Creates a new Gizmo object.
     *
     * @param {CameraComponent} camera - The camera component.
     * @param {Layer} layer - The render layer. This can be provided by the user or will be created
     * and added to the scene and camera if not provided. Successive gizmos will share the same layer
     * and will be removed from the camera and scene when the last gizmo is destroyed.
     * @param {string} [name] - The name of the gizmo. Defaults to 'gizmo'.
     * @example
     * const gizmo = new pc.Gizmo(camera, layer);
     */
    constructor(camera: CameraComponent, layer: Layer, name?: string);
    /**
     * Internal version of the gizmo size. Defaults to 1.
     *
     * @type {number}
     * @private
     */
    private _size;
    /**
     * Internal version of the gizmo scale. Defaults to 1.
     *
     * @type {number}
     * @protected
     */
    protected _scale: number;
    /**
     * Internal version of coordinate space. Defaults to 'world'.
     *
     * @type {GizmoSpace}
     * @protected
     */
    protected _coordSpace: GizmoSpace;
    /**
     * Internal reference to the app containing the gizmo.
     *
     * @type {AppBase}
     * @protected
     */
    protected _app: AppBase;
    /**
     * Internal reference to the graphics device of the app.
     *
     * @type {GraphicsDevice}
     * @protected
     */
    protected _device: GraphicsDevice;
    /**
     * Internal list of app event handles for the gizmo.
     *
     * @type {EventHandle[]}
     * @protected
     */
    protected _handles: EventHandle[];
    /**
     * Internal array of mouse buttons that can interact with the gizmo.
     *
     * @type {[boolean, boolean, boolean]}
     * @protected
     */
    protected _mouseButtons: [boolean, boolean, boolean];
    /**
     * Internal reference to camera component to view the gizmo.
     *
     * @type {CameraComponent}
     * @protected
     */
    protected _camera: CameraComponent;
    /**
     * Internal reference to the layer used to render the gizmo.
     *
     * @type {Layer}
     * @protected
     */
    protected _layer: Layer;
    /**
     * Internal flag to track if a render update is required.
     *
     * @type {boolean}
     * @protected
     */
    protected _renderUpdate: boolean;
    /**
     * The graph nodes attached to the gizmo.
     *
     * @type {GraphNode[]}
     */
    nodes: GraphNode[];
    /**
     * The root gizmo entity.
     *
     * @type {Entity}
     */
    root: Entity;
    /**
     * The intersection shapes for the gizmo.
     *
     * @type {Shape[]}
     */
    intersectShapes: Shape[];
    /**
     * Flag to indicate whether to call `preventDefault` on pointer events.
     *
     * @type {boolean}
     */
    preventDefault: boolean;
    /**
     * Handles pointer down events.
     *
     * @param {PointerEvent} e - The pointer event.
     * @private
     */
    private _onPointerDown;
    /**
     * Handles pointer move events.
     *
     * @param {PointerEvent} e - The pointer event.
     * @private
     */
    private _onPointerMove;
    /**
     * Handles pointer up events.
     *
     * @param {PointerEvent} e - The pointer event.
     * @private
     */
    private _onPointerUp;
    /**
     * Sets the gizmo enabled state.
     *
     * @type {boolean}
     */
    set enabled(state: boolean);
    /**
     * Gets the gizmo enabled state.
     *
     * @type {boolean}
     */
    get enabled(): boolean;
    /**
     * Array of mouse buttons that can interact with the gizmo. The button indices are defined as:
     *
     *  - 0: Left button
     *  - 1: Middle button
     *  - 2: Right button
     *
     * The full list of button indices can be found here:
     * {@link https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent/button}
     *
     * @type {[boolean, boolean, boolean]}
     */
    get mouseButtons(): [boolean, boolean, boolean];
    /**
     * Sets the gizmo render layer.
     *
     * @type {Layer}
     */
    set layer(layer: Layer);
    /**
     * Gets the gizmo render layer.
     *
     * @type {Layer}
     */
    get layer(): Layer;
    /**
     * Sets the camera component to view the gizmo.
     *
     * @type {CameraComponent}
     */
    set camera(camera: CameraComponent);
    /**
     * Gets the camera component to view the gizmo.
     *
     * @type {CameraComponent}
     */
    get camera(): CameraComponent;
    /**
     * Sets the gizmo coordinate space. Defaults to 'world'.
     *
     * @type {GizmoSpace}
     */
    set coordSpace(value: GizmoSpace);
    /**
     * Gets the gizmo coordinate space.
     *
     * @type {GizmoSpace}
     */
    get coordSpace(): GizmoSpace;
    /**
     * Sets the gizmo size. Defaults to 1.
     *
     * @type {number}
     */
    set size(value: number);
    /**
     * Gets the gizmo size.
     *
     * @type {number}
     */
    get size(): number;
    /**
     * The facing direction - NOTE(review): presumably from the gizmo towards the camera; confirm.
     *
     * @type {Vec3}
     * @protected
     */
    protected get facingDir(): Vec3;
    /**
     * The camera direction - NOTE(review): presumably the camera's view direction; confirm.
     *
     * @type {Vec3}
     * @protected
     */
    protected get cameraDir(): Vec3;
    /**
     * Updates the gizmo position.
     *
     * @protected
     */
    protected _updatePosition(): void;
    /**
     * Updates the gizmo rotation.
     *
     * @protected
     */
    protected _updateRotation(): void;
    /**
     * Updates the gizmo scale.
     *
     * @protected
     */
    protected _updateScale(): void;
    /**
     * Returns the mesh instances selected at the given screen coordinates.
     *
     * @param {number} x - The x coordinate.
     * @param {number} y - The y coordinate.
     * @returns {MeshInstance[]} - The mesh instances.
     * @private
     */
    private _getSelection;
    /**
     * Attach an array of graph nodes to the gizmo.
     *
     * @param {GraphNode[] | GraphNode} [nodes] - The graph nodes. Defaults to [].
     * @example
     * const gizmo = new pc.Gizmo(camera, layer);
     * gizmo.attach([boxA, boxB]);
     */
    attach(nodes?: GraphNode[] | GraphNode): void;
    /**
     * Detaches all graph nodes from the gizmo.
     *
     * @example
     * const gizmo = new pc.Gizmo(camera, layer);
     * gizmo.attach([boxA, boxB]);
     * gizmo.detach();
     */
    detach(): void;
    /**
     * Pre-render method. This is called before the gizmo is rendered.
     *
     * @example
     * const gizmo = new pc.Gizmo(camera, layer);
     * gizmo.attach([boxA, boxB]);
     * gizmo.prerender();
     */
    prerender(): void;
    /**
     * Updates the gizmo position, rotation, and scale.
     *
     * @example
     * const gizmo = new pc.Gizmo(camera, layer);
     * gizmo.attach([boxA, boxB]);
     * gizmo.update();
     */
    update(): void;
    /**
     * Detaches all graph nodes and destroys the gizmo instance.
     *
     * @example
     * const gizmo = new pc.Gizmo(camera, layer);
     * gizmo.attach([boxA, boxB]);
     * gizmo.destroy();
     */
    destroy(): void;
}

/**
 * The color theme used when rendering a transform gizmo.
 */
type GizmoTheme = {
    /**
     * - The axis colors.
     */
    shapeBase: { [K in "x" | "y" | "z" | "f" | "xyz"]: Color; };
    /**
     * - The hover colors.
     */
    shapeHover: { [K in "x" | "y" | "z" | "f" | "xyz"]: Color; };
    /**
     * - The guide line colors.
     */
    guideBase: { [K in "x" | "y" | "z"]: Color; };
    /**
     * - The guide occlusion value. Defaults to 0.8.
     */
    guideOcclusion: number;
    /**
     * - The disabled color.
     */
    disabled: Color;
};
/**
 * The base class for all transform gizmos.
 *
 * @category Gizmo
 */
declare class TransformGizmo extends Gizmo {
    /**
     * Fired when the transformation has started.
     *
     * @event
     * @example
     * const gizmo = new pc.TransformGizmo(camera, layer);
     * gizmo.on('transform:start', () => {
     *     console.log('Transformation started');
     * });
     */
    static EVENT_TRANSFORMSTART: string;
    /**
     * Fired during the transformation.
     *
     * @event
     * @example
     * const gizmo = new pc.TransformGizmo(camera, layer);
     * gizmo.on('transform:move', (pointDelta, angleDelta) => {
     *     console.log(`Transformation moved by ${pointDelta} (angle: ${angleDelta})`);
     * });
     */
    static EVENT_TRANSFORMMOVE: string;
    /**
     * Fired when the transformation has ended.
     *
     * @event
     * @example
     * const gizmo = new pc.TransformGizmo(camera, layer);
     * gizmo.on('transform:end', () => {
     *     console.log('Transformation ended');
     * });
     */
    static EVENT_TRANSFORMEND: string;
    /**
     * Internal theme.
     *
     * @type {GizmoTheme}
     * @protected
     */
    protected _theme: GizmoTheme;
    /**
     * Internal gizmo starting position in world space.
     *
     * @type {Vec3}
     * @protected
     */
    protected _rootStartPos: Vec3;
    /**
     * Internal gizmo starting rotation in world space.
     *
     * @type {Quat}
     * @protected
     */
    protected _rootStartRot: Quat;
    /**
     * Internal object containing the gizmo shapes to render.
     *
     * @type {{ [key in GizmoAxis]?: Shape }}
     * @protected
     */
    protected _shapes: { [key in GizmoAxis]?: Shape; };
    /**
     * Internal mapping of mesh instances to gizmo shapes.
     *
     * @type {Map<MeshInstance, Shape>}
     * @private
     */
    private _shapeMap;
    /**
     * Internal currently hovered axes.
     *
     * @type {Set<GizmoAxis>}
     * @private
     */
    private _hovering;
    /**
     * Internal currently hovered axis.
     *
     * @type {GizmoAxis | ''}
     * @private
     */
    private _hoverAxis;
    /**
     * Internal state of whether the currently hovered shape is a plane.
     *
     * @type {boolean}
     * @private
     */
    private _hoverIsPlane;
    /**
     * Internal currently selected axis.
     *
     * @type {GizmoAxis | ''}
     * @protected
     */
    protected _selectedAxis: GizmoAxis | "";
    /**
     * Internal state of whether the currently selected shape is a plane.
     *
     * @type {boolean}
     * @protected
     */
    protected _selectedIsPlane: boolean;
    /**
     * Internal selection starting coordinates in world space.
     *
     * @type {Vec3}
     * @protected
     */
    protected _selectionStartPoint: Vec3;
    /**
     * Whether snapping is enabled. Defaults to false.
     *
     * @type {boolean}
     */
    snap: boolean;
    /**
     * Snapping increment. Defaults to 1.
     *
     * @type {number}
     */
    snapIncrement: number;
    /**
     * Whether to hide the shapes when dragging. Defaults to 'selected'.
     *
     * @type {GizmoDragMode}
     */
    dragMode: GizmoDragMode;
    /**
     * Gets the current theme for the gizmo.
     *
     * @type {GizmoTheme}
     */
    get theme(): GizmoTheme;
    /**
     * @type {Color}
     * @deprecated Use {@link TransformGizmo#setTheme} instead.
     * @ignore
     */
    set xAxisColor(value: Color);
    /**
     * @type {Color}
     * @deprecated Use {@link TransformGizmo#theme} instead.
     * @ignore
     */
    get xAxisColor(): Color;
    /**
     * @type {Color}
     * @deprecated Use {@link TransformGizmo#setTheme} instead.
     * @ignore
     */
    set yAxisColor(value: Color);
    /**
     * @type {Color}
     * @deprecated Use {@link TransformGizmo#theme} instead.
     * @ignore
     */
    get yAxisColor(): Color;
    /**
     * @type {Color}
     * @deprecated Use {@link TransformGizmo#setTheme} instead.
     * @ignore
     */
    set zAxisColor(value: Color);
    /**
     * @type {Color}
     * @deprecated Use {@link TransformGizmo#theme} instead.
     * @ignore
     */
    get zAxisColor(): Color;
    /**
     * @type {number}
     * @deprecated Use {@link TransformGizmo#setTheme} instead.
     * @ignore
     */
    set colorAlpha(value: number);
    /**
     * @type {number}
     * @deprecated Use {@link TransformGizmo#theme} instead.
     * @ignore
     */
    get colorAlpha(): number;
    /**
     * @type {boolean}
     * @protected
     */
    protected get _dragging(): boolean;
    /**
     * @param {MeshInstance} [meshInstance] - The mesh instance.
     * @returns {GizmoAxis | ''} - The axis.
     * @private
     */
    private _getAxis;
    /**
     * @param {MeshInstance} [meshInstance] - The mesh instance.
     * @returns {boolean} - Whether the mesh instance is a plane.
     * @private
     */
    private _getIsPlane;
    /**
     * @param {MeshInstance} [meshInstance] - The mesh instance.
     * @private
     */
    private _hover;
    /**
     * @param {Vec3} mouseWPos - The mouse world position.
     * @returns {Ray} - The ray.
     * @protected
     */
    protected _createRay(mouseWPos: Vec3): Ray;
    /**
     * @param {string} axis - The axis to create the plane for.
     * @param {boolean} isFacing - Whether the axis is facing the camera.
     * @param {boolean} isLine - Whether the axis is a line.
     * @returns {Plane} - The plane.
     * @protected
     */
    protected _createPlane(axis: string, isFacing: boolean, isLine: boolean): Plane;
    /**
     * @param {string} axis - The axis.
     * @param {Vec3} dir - The direction.
     * @returns {Vec3} - The direction.
     * @protected
     */
    protected _dirFromAxis(axis: string, dir: Vec3): Vec3;
    /**
     * @param {Vec3} point - The point to project.
     * @param {string} axis - The axis to project to.
     * @protected
     */
    protected _projectToAxis(point: Vec3, axis: string): void;
    /**
     * @param {number} x - The x coordinate.
     * @param {number} y - The y coordinate.
     * @param {boolean} isFacing - Whether the axis is facing the camera.
     * @param {boolean} isLine - Whether the axis is a line.
     * @returns {Vec3} The point (space is {@link TransformGizmo#coordSpace}).
     * @protected
     */
    protected _screenToPoint(x: number, y: number, isFacing?: boolean, isLine?: boolean): Vec3;
    /**
     * @param {Vec3} pos - The position.
     * @param {Quat} rot - The rotation.
     * @param {GizmoAxis | ''} activeAxis - The active axis.
     * @param {boolean} activeIsPlane - Whether the active axis is a plane.
     * @protected
     */
    protected _drawGuideLines(pos: Vec3, rot: Quat, activeAxis: GizmoAxis | "", activeIsPlane: boolean): void;
    /**
     * @param {Vec3} pos - The position.
     * @param {Quat} rot - The rotation.
     * @param {'x' | 'y' | 'z'} axis - The axis.
     * @protected
     */
    protected _drawSpanLine(pos: Vec3, rot: Quat, axis: "x" | "y" | "z"): void;
    /**
     * @protected
     */
    protected _createTransform(): void;
    /**
     * Set the shape to be enabled or disabled.
     *
     * @param {GizmoAxis | 'face'} shapeAxis - The shape axis.
     * @param {boolean} enabled - The enabled state of shape.
     */
    enableShape(shapeAxis: GizmoAxis | "face", enabled: boolean): void;
    /**
     * Get the enabled state of the shape.
     *
     * @param {GizmoAxis | 'face'} shapeAxis - The shape axis.
     * @returns {boolean} - The enabled state of the shape.
     */
    isShapeEnabled(shapeAxis: GizmoAxis | "face"): boolean;
    /**
     * Sets the theme or partial theme for the gizmo.
     *
     * @param {{ [K in keyof GizmoTheme]?: Partial<GizmoTheme[K]> }} partial - The partial theme to set.
     */
    setTheme(partial: { [K in keyof GizmoTheme]?: Partial<GizmoTheme[K]>; }): void;
}

type SphereShapeArgs = {
    /**
     * - The radius of the sphere.
     */
    radius?: number;
};
/** @import { ShapeArgs } from './shape.js' */
/** @import { GraphicsDevice } from '../../../platform/graphics/graphics-device.js' */
/**
 * @typedef {object} SphereShapeArgs
 * @property {number} [radius] - The radius of the sphere.
 */
/**
 * @ignore
 */
declare class SphereShape extends Shape {
    /**
     * Create a new SphereShape.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {ShapeArgs & SphereShapeArgs} args - The shape options.
     */
    constructor(device: GraphicsDevice, args?: ShapeArgs & SphereShapeArgs);
    /**
     * The internal radius of the sphere.
     *
     * @type {number}
     * @private
     */
    private _radius;
    /**
     * Set the rendered radius of the sphere.
     *
     * @param {number} value - The new radius of the sphere.
     */
    set radius(value: number);
    /**
     * Get the rendered radius of the sphere.
     *
     * @returns {number} The radius of the sphere.
     */
    get radius(): number;
}

type PlaneShapeArgs = {
    /**
     * - The size of the plane.
     */
    size?: number;
    /**
     * - The gap between the plane and the center.
     */
    gap?: number;
};
/**
 * @typedef {object} PlaneShapeArgs
 * @property {number} [size] - The size of the plane.
 * @property {number} [gap] - The gap between the plane and the center.
 */
/**
 * @ignore
 */
declare class PlaneShape extends Shape {
    /**
     * Create a new PlaneShape.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {ShapeArgs & PlaneShapeArgs} args - The shape options.
     */
    constructor(device: GraphicsDevice, args?: ShapeArgs & PlaneShapeArgs);
    /**
     * The size of the plane.
     *
     * @type {number}
     * @private
     */
    private _size;
    /**
     * The gap between the plane and the center.
     *
     * @type {number}
     * @private
     */
    private _gap;
    /**
     * The internal flipped state of the plane.
     *
     * @type {Vec3}
     * @private
     */
    private _flipped;
    /**
     * Set the size of the plane.
     *
     * @type {number}
     */
    set size(value: number);
    /**
     * Get the size of the plane.
     *
     * @type {number}
     */
    get size(): number;
    /**
     * Set the gap between the plane and the center.
     *
     * @type {number}
     */
    set gap(value: number);
    /**
     * Get the gap between the plane and the center.
     *
     * @type {number}
     */
    get gap(): number;
    /**
     * Set the flipped state of the plane.
     *
     * @type {Vec3}
     */
    set flipped(value: Vec3);
    /**
     * Get the flipped state of the plane.
     *
     * @type {Vec3}
     */
    get flipped(): Vec3;
}

type ArrowShapeArgs = {
    /**
     * - The gap between the arrow base and the center.
     */
    gap?: number;
    /**
     * - The thickness of the line.
     */
    lineThickness?: number;
    /**
     * - The length of the line.
     */
    lineLength?: number;
    /**
     * - The thickness of the arrow head.
     */
    arrowThickness?: number;
    /**
     * - The length of the arrow head.
     */
    arrowLength?: number;
    /**
     * - The tolerance for intersection tests.
     */
    tolerance?: number;
};
/**
 * @typedef {object} ArrowShapeArgs
 * @property {number} [gap] - The gap between the arrow base and the center.
 * @property {number} [lineThickness] - The thickness of the line.
 * @property {number} [lineLength] - The length of the line.
 * @property {number} [arrowThickness] - The thickness of the arrow head.
 * @property {number} [arrowLength] - The length of the arrow head.
 * @property {number} [tolerance] - The tolerance for intersection tests.
 */
/**
 * @ignore
 */
declare class ArrowShape extends Shape {
    /**
     * Create a new ArrowShape.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {ShapeArgs & ArrowShapeArgs} args - The shape options.
     */
    constructor(device: GraphicsDevice, args?: ShapeArgs & ArrowShapeArgs);
    /**
     * The internal gap between the arrow base and the center.
     *
     * @type {number}
     * @private
     */
    private _gap;
    /**
     * The internal line thickness of the arrow.
     *
     * @type {number}
     * @private
     */
    private _lineThickness;
    /**
     * The internal line length of the arrow.
     *
     * @type {number}
     * @private
     */
    private _lineLength;
    /**
     * The internal arrow thickness of the arrow.
     *
     * @type {number}
     * @private
     */
    private _arrowThickness;
    /**
     * The internal arrow length of the arrow.
     *
     * @type {number}
     * @private
     */
    private _arrowLength;
    /**
     * The internal tolerance of the arrow.
     *
     * @type {number}
     * @private
     */
    private _tolerance;
    /**
     * The internal head entity of the arrow.
     *
     * @type {Entity}
     * @private
     */
    private _head;
    /**
     * The internal line entity of the arrow.
     *
     * @type {Entity}
     * @private
     */
    private _line;
    /**
     * Set the gap between the arrow base and the center.
     *
     * @type {number}
     */
    set gap(value: number);
    /**
     * Get the gap between the arrow base and the center.
     *
     * @type {number}
     */
    get gap(): number;
    /**
     * Set the line thickness of the arrow.
     *
     * @type {number}
     */
    set lineThickness(value: number);
    /**
     * Get the line thickness of the arrow.
     *
     * @type {number}
     */
    get lineThickness(): number;
    /**
     * Set the line length of the arrow.
     *
     * @type {number}
     */
    set lineLength(value: number);
    /**
     * Get the line length of the arrow.
     *
     * @type {number}
     */
    get lineLength(): number;
    /**
     * Set the arrow thickness of the arrow.
     *
     * @type {number}
     */
    set arrowThickness(value: number);
    /**
     * Get the arrow thickness of the arrow.
     *
     * @type {number}
     */
    get arrowThickness(): number;
    /**
     * Set the arrow length of the arrow.
     *
     * @type {number}
     */
    set arrowLength(value: number);
    /**
     * Get the arrow length of the arrow.
     *
     * @type {number}
     */
    get arrowLength(): number;
    /**
     * Set the tolerance of the arrow.
     *
     * @type {number}
     */
    set tolerance(value: number);
    /**
     * Get the tolerance of the arrow.
     *
     * @type {number}
     */
    get tolerance(): number;
}

/**
 * The TranslateGizmo provides interactive 3D manipulation handles for translating/moving
 * {@link Entity}s in a {@link Scene}. It creates a visual widget with arrows along the X, Y
 * and Z axes, planes at their intersections, and a center sphere, allowing precise control over
 * object positioning through direct manipulation. The gizmo's visual appearance can be customized
 * away from the defaults as required.
 *
 * Note that the gizmo can be driven by both mouse+keyboard and touch input.
 *
 * ```javascript
 * // Create a layer for rendering all gizmos
 * const gizmoLayer = pc.Gizmo.createLayer(app);
 *
 * // Create a translate gizmo
 * const gizmo = new pc.TranslateGizmo(cameraComponent, gizmoLayer);
 *
 * // Create an entity to attach the gizmo to
 * const entity = new pc.Entity();
 * entity.addComponent('render', {
 *     type: 'box'
 * });
 * app.root.addChild(entity);
 *
 * // Attach the gizmo to the entity
 * gizmo.attach([entity]);
 * ```
 *
 * Relevant Engine API examples:
 *
 * - [Translate Gizmo](https://playcanvas.github.io/#/gizmos/transform-translate)
 * - [Editor](https://playcanvas.github.io/#/misc/editor)
 *
 * @category Gizmo
 */
declare class TranslateGizmo extends TransformGizmo {
    /**
     * Creates a new TranslateGizmo object. Use {@link Gizmo.createLayer} to create the layer
     * required to display the gizmo.
     *
     * @param {CameraComponent} camera - The camera component.
     * @param {Layer} layer - The layer responsible for rendering the gizmo.
     * @example
     * const gizmo = new pc.TranslateGizmo(camera, layer);
     */
    constructor(camera: CameraComponent, layer: Layer);
    /**
     * The shapes used by the translate gizmo: a center sphere, a plane per axis pair
     * and an arrow per axis.
     */
    _shapes: {
        xyz: SphereShape;
        yz: PlaneShape;
        xz: PlaneShape;
        xy: PlaneShape;
        x: ArrowShape;
        y: ArrowShape;
        z: ArrowShape;
    };
    /**
     * Internal mapping from each attached node to their starting position in local space.
     *
     * @type {Map<GraphNode, Vec3>}
     * @private
     */
    private _nodeLocalPositions;
    /**
     * Internal mapping from each attached node to their starting position in world space.
     *
     * @type {Map<GraphNode, Vec3>}
     * @private
     */
    private _nodePositions;
    /**
     * Flips the planes to face the camera.
     *
     * @type {boolean}
     */
    flipPlanes: boolean;
    /**
     * Sets the axis gap.
     *
     * @type {number}
     */
    set axisGap(value: number);
    /**
     * Gets the axis gap.
     *
     * @type {number}
     */
    get axisGap(): number;
    /**
     * Sets the axis line thickness.
     *
     * @type {number}
     */
    set axisLineThickness(value: number);
    /**
     * Gets the axis line thickness.
     *
     * @type {number}
     */
    get axisLineThickness(): number;
    /**
     * Sets the axis line length.
     *
     * @type {number}
     */
    set axisLineLength(value: number);
    /**
     * Gets the axis line length.
     *
     * @type {number}
     */
    get axisLineLength(): number;
    /**
     * Sets the axis line tolerance.
     *
     * @type {number}
     */
    set axisLineTolerance(value: number);
    /**
     * Gets the axis line tolerance.
     *
     * @type {number}
     */
    get axisLineTolerance(): number;
    /**
     * Sets the arrow thickness.
     *
     * @type {number}
     */
    set axisArrowThickness(value: number);
    /**
     * Gets the arrow thickness.
     *
     * @type {number}
     */
    get axisArrowThickness(): number;
    /**
     * Sets the arrow length.
     *
     * @type {number}
     */
    set axisArrowLength(value: number);
    /**
     * Gets the arrow length.
     *
     * @type {number}
     */
    get axisArrowLength(): number;
    /**
     * Sets the plane size.
     *
     * @type {number}
     */
    set axisPlaneSize(value: number);
    /**
     * Gets the plane size.
     *
     * @type {number}
     */
    get axisPlaneSize(): number;
    /**
     * Sets the plane gap.
     *
     * @type {number}
     */
    set axisPlaneGap(value: number);
    /**
     * Gets the plane gap.
     *
     * @type {number}
     */
    get axisPlaneGap(): number;
    /**
     * Sets the axis center size.
     *
     * @type {number}
     */
    set axisCenterSize(value: number);
    /**
     * Gets the axis center size.
     *
     * @type {number}
     */
    get axisCenterSize(): number;
    /**
     * @type {boolean}
     * @deprecated Use {@link TranslateGizmo#flipPlanes} instead.
     * @ignore
     */
    set flipShapes(value: boolean);
    /**
     * @type {boolean}
     * @deprecated Use {@link TranslateGizmo#flipPlanes} instead.
     * @ignore
     */
    get flipShapes(): boolean;
    /**
     * @param {string} prop - The property to set.
     * @param {any} value - The value to set.
     * @private
     */
    private _setArrowProp;
    /**
     * @param {string} prop - The property to set.
     * @param {any} value - The value to set.
     * @private
     */
    private _setPlaneProp;
    /**
     * @private
     */
    private _shapesLookAtCamera;
    /**
     * @param {boolean} state - The state.
     * @private
     */
    private _drag;
    /**
     * @private
     */
    private _storeNodePositions;
    /**
     * @param {Vec3} translateDelta - The delta to apply to the node positions.
     * @private
     */
    private _setNodePositions;
    /**
     * @param {number} x - The x coordinate.
     * @param {number} y - The y coordinate.
     * @returns {Vec3} The point (space is {@link TransformGizmo#coordSpace}).
     * @protected
     */
    protected _screenToPoint(x: number, y: number): Vec3;
    /**
     * @param {Vec3} pos - The position.
     * @param {Quat} rot - The rotation.
     * @param {GizmoAxis} activeAxis - The active axis.
     * @param {boolean} activeIsPlane - Whether the active axis is a plane.
     * @override
     */
    override _drawGuideLines(pos: Vec3, rot: Quat, activeAxis: GizmoAxis, activeIsPlane: boolean): void;
}

type ArcShapeArgs = {
    /**
     * - The tube radius.
     */
    tubeRadius?: number;
    /**
     * - The ring radius.
     */
    ringRadius?: number;
    /**
     * - The sector angle.
     */
    sectorAngle?: number;
};
/**
 * @typedef {object} ArcShapeArgs
 * @property {number} [tubeRadius] - The tube radius.
 * @property {number} [ringRadius] - The ring radius.
 * @property {number} [sectorAngle] - The sector angle.
 */
/**
 * @ignore
 */
declare class ArcShape extends Shape {
    /**
     * Create a new ArcShape.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {ShapeArgs & ArcShapeArgs} args - The shape options.
     */
    constructor(device: GraphicsDevice, args?: ShapeArgs & ArcShapeArgs);
    /**
     * The internal tube radius of the arc.
     *
     * @type {number}
     * @private
     */
    private _tubeRadius;
    /**
     * The internal ring radius of the arc.
     *
     * @type {number}
     * @private
     */
    private _ringRadius;
    /**
     * The internal sector angle of the arc.
     *
     * @type {number}
     * @private
     */
    private _sectorAngle;
    /**
     * The internal intersection tolerance of the arc.
     *
     * @type {number}
     * @private
     */
    private _tolerance;
    /**
     * The internal cache for triangle data.
     *
     * @type {[TriData, TriData]}
     * @private
     */
    private _triDataCache;
    /**
     * Create the torus geometry.
     *
     * @param {number} sectorAngle - The sector angle.
     * @returns {TorusGeometry} The torus geometry.
     * @private
     */
    private _createTorusGeometry;
    /**
     * Create the torus mesh.
     *
     * @param {number} sectorAngle - The sector angle.
     * @returns {Mesh} The torus mesh.
     * @private
     */
    private _createTorusMesh;
    /**
     * Set the tube radius.
     *
     * @type {number}
     */
    set tubeRadius(value: number);
    /**
     * Get the tube radius.
     *
     * @type {number}
     */
    get tubeRadius(): number;
    /**
     * Set the ring radius.
     *
     * @type {number}
     */
    set ringRadius(value: number);
    /**
     * Get the ring radius.
     *
     * @type {number}
     */
    get ringRadius(): number;
    /**
     * Set the intersection tolerance.
     *
     * @type {number}
     */
    set tolerance(value: number);
    /**
     * Get the intersection tolerance.
     *
     * @type {number}
     */
    get tolerance(): number;
    /**
     * Set which part of the arc is visible.
     *
     * @param {'sector' | 'ring' | 'none'} state - The visibility state.
     */
    show(state: "sector" | "ring" | "none"): void;
}

/**
 * The RotateGizmo provides interactive 3D manipulation handles for rotating/reorienting
 * {@link Entity}s in a {@link Scene}. It creates a visual widget with a draggable ring for each
 * axis of rotation, plus a fourth ring for rotation in the camera's view plane, allowing precise
 * control over object orientation through direct manipulation. The gizmo's visual appearance can
 * be customized away from the defaults as required.
 *
 * Note that the gizmo can be driven by both mouse+keyboard and touch input.
 *
 * ```javascript
 * // Create a layer for rendering all gizmos
 * const gizmoLayer = pc.Gizmo.createLayer(app);
 *
 * // Create a rotate gizmo
 * const gizmo = new pc.RotateGizmo(cameraComponent, gizmoLayer);
 *
 * // Create an entity to attach the gizmo to
 * const entity = new pc.Entity();
 * entity.addComponent('render', {
 *     type: 'box'
 * });
 * app.root.addChild(entity);
 *
 * // Attach the gizmo to the entity
 * gizmo.attach([entity]);
 * ```
 *
 * Relevant Engine API examples:
 *
 * - [Rotate Gizmo](https://playcanvas.github.io/#/gizmos/transform-rotate)
 * - [Editor](https://playcanvas.github.io/#/misc/editor)
 *
 * @category Gizmo
 */
declare class RotateGizmo extends TransformGizmo {
    /**
     * Creates a new RotateGizmo object. Use {@link Gizmo.createLayer} to create the layer
     * required to display the gizmo.
     *
     * @param {CameraComponent} camera - The camera component.
     * @param {Layer} layer - The layer responsible for rendering the gizmo.
     * @example
     * const gizmo = new pc.RotateGizmo(camera, layer);
     */
    constructor(camera: CameraComponent, layer: Layer);
    /**
     * The shapes used by the rotate gizmo: an arc per axis, a facing arc and a
     * center sphere.
     */
    _shapes: {
        z: ArcShape;
        x: ArcShape;
        y: ArcShape;
        f: ArcShape;
        xyz: SphereShape;
    };
    /**
     * Internal selection starting angle in world space.
     *
     * @type {number}
     * @private
     */
    private _selectionStartAngle;
    /**
     * Internal mapping from each attached node to their starting rotation in local space.
     *
     * @type {Map<GraphNode, Quat>}
     * @private
     */
    private _nodeLocalRotations;
    /**
     * Internal mapping from each attached node to their starting rotation in world space.
     *
     * @type {Map<GraphNode, Quat>}
     * @private
     */
    private _nodeRotations;
    /**
     * Internal mapping from each attached node to their offset position from the gizmo.
     *
     * @type {Map<GraphNode, Vec3>}
     * @private
     */
    private _nodeOffsets;
    /**
     * Internal vector for storing the mouse position in screen space.
     *
     * @type {Vec2}
     * @private
     */
    private _screenPos;
    /**
     * Internal vector for storing the mouse start position in screen space.
     *
     * @type {Vec2}
     * @private
     */
    private _screenStartPos;
    /**
     * Internal vector for the start point of the guide line angle.
     *
     * @type {Vec3}
     * @private
     */
    private _guideAngleStart;
    /**
     * Internal vector for the end point of the guide line angle.
     *
     * @type {Vec3}
     * @private
     */
    private _guideAngleEnd;
    /**
     * Internal mesh lines for guide angles.
     *
     * @type {[MeshLine, MeshLine]}
     * @private
     */
    private _guideAngleLines;
    /**
     * Internal copy of facing direction to avoid unnecessary updates.
     *
     * @type {Vec3}
     * @private
     */
    private _facingDir;
    /**
     * The rotation mode of the gizmo. This can be either:
     *
     * - 'absolute': The rotation is calculated based on the mouse displacement relative to the
     * initial click point.
     * - 'orbit': The rotation is calculated based on the gizmo's position around the center of
     * rotation.
     *
     * @type {'absolute' | 'orbit'}
     */
    rotationMode: "absolute" | "orbit";
    /**
     * Sets the XYZ tube radius.
     *
     * @type {number}
     */
    set xyzTubeRadius(value: number);
    /**
     * Gets the XYZ tube radius.
     *
     * @type {number}
     */
    get xyzTubeRadius(): number;
    /**
     * Sets the XYZ ring radius.
     *
     * @type {number}
     */
    set xyzRingRadius(value: number);
    /**
     * Gets the XYZ ring radius.
     *
     * @type {number}
     */
    get xyzRingRadius(): number;
    /**
     * Sets the face tube radius.
     *
     * @type {number}
     */
    set faceTubeRadius(value: number);
    /**
     * Gets the face tube radius.
     *
     * @type {number}
     */
    get faceTubeRadius(): number;
    /**
     * Sets the face ring radius.
     *
     * @type {number}
     */
    set faceRingRadius(value: number);
    /**
     * Gets the face ring radius.
     *
     * @type {number}
     */
    get faceRingRadius(): number;
    /**
     * Sets the center radius.
     *
     * @type {number}
     */
    set centerRadius(value: number);
    /**
     * Gets the center radius.
     *
     * @type {number}
     */
    get centerRadius(): number;
    /**
     * Sets the ring tolerance.
     *
     * @type {number}
     */
    set ringTolerance(value: number);
    /**
     * Gets the ring tolerance.
     *
     * @type {number}
     */
    get ringTolerance(): number;
    /**
     * Sets the angle guide line thickness.
     *
     * @type {number}
     */
    set angleGuideThickness(value: number);
    /**
     * Gets the angle guide line thickness.
     *
     * @type {number}
     */
    get angleGuideThickness(): number;
    /**
     * @type {boolean}
     * @deprecated Use {@link RotateGizmo#rotationMode} instead.
     * @ignore
     */
    set orbitRotation(value: boolean);
    /**
     * @type {boolean}
     * @deprecated Use {@link RotateGizmo#rotationMode} instead.
     * @ignore
     */
    get orbitRotation(): boolean;
    /**
     * @param {string} prop - The property.
     * @param {any} value - The value.
     * @private
     */
    private _setDiskProp;
    /**
     * @private
     */
    private _storeGuidePoints;
    /**
     * @param {number} angleDelta - The angle delta.
     * @private
     */
    private _updateGuidePoints;
    /**
     * @param {boolean} state - The state.
     * @private
     */
    private _angleGuide;
    /**
     * @private
     */
    private _shapesLookAtCamera;
    /**
     * @param {boolean} state - The state.
     * @private
     */
    private _drag;
    /**
     * @private
     */
    private _storeNodeRotations;
    /**
     * @param {GizmoAxis} axis - The axis.
     * @param {Vec3} angleAxis - The angle axis.
     * @param {number} angleDelta - The angle delta.
     * @private
     */
    private _setNodeRotations;
    /**
     * @param {number} x - The x coordinate.
     * @param {number} y - The y coordinate.
     * @returns {Vec3} The point (space is {@link TransformGizmo#coordSpace}).
     * @protected
     */
    protected _screenToPoint(x: number, y: number): Vec3;
    /**
     * @param {Vec3} point - The point.
     * @param {number} x - The x coordinate.
     * @param {number} y - The y coordinate.
     * @returns {number} The angle.
     * @protected
     */
    protected _calculateArcAngle(point: Vec3, x: number, y: number): number;
    /**
     * @param {Vec3} pos - The position.
     * @param {Quat} rot - The rotation.
     * @param {GizmoAxis} activeAxis - The active axis.
     * @param {boolean} activeIsPlane - Whether the active axis is a plane.
     * @override
     */
    override _drawGuideLines(pos: Vec3, rot: Quat, activeAxis: GizmoAxis, activeIsPlane: boolean): void;
}

type BoxShapeArgs = {
    /**
     * - The size of the box.
     */
    size?: number;
};
/** @import { ShapeArgs } from './shape.js' */
/** @import { GraphicsDevice } from '../../../platform/graphics/graphics-device.js' */
/**
 * @typedef {object} BoxShapeArgs
 * @property {number} [size] - The size of the box.
 */
/**
 * A shape rendered as a box.
 *
 * @ignore
 */
declare class BoxShape extends Shape {
    /**
     * Create a new BoxShape.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {ShapeArgs & BoxShapeArgs} args - The shape options.
     */
    constructor(device: GraphicsDevice, args?: ShapeArgs & BoxShapeArgs);
    /**
     * The internal size of the box.
     *
     * @type {number}
     * @private
     */
    private _size;
    /**
     * Set the rendered size of the box.
     *
     * @param {number} value - The new size of the box.
     */
    set size(value: number);
    /**
     * Get the rendered size of the box.
     *
     * @returns {number} The size of the box.
     */
    get size(): number;
}

/**
 * Arguments for creating a {@link BoxLineShape}, combined with the base shape arguments.
 */
type BoxLineShapeArgs = {
    /**
     * - The gap between the box and the line
     */
    gap?: number;
    /**
     * - The thickness of the line
     */
    lineThickness?: number;
    /**
     * - The length of the line
     */
    lineLength?: number;
    /**
     * - The size of the box
     */
    boxSize?: number;
    /**
     * - The tolerance for intersection tests
     */
    tolerance?: number;
};
/**
 * @typedef {object} BoxLineShapeArgs
 * @property {number} [gap] - The gap between the box and the line
 * @property {number} [lineThickness] - The thickness of the line
 * @property {number} [lineLength] - The length of the line
 * @property {number} [boxSize] - The size of the box
 * @property {number} [tolerance] - The tolerance for intersection tests
 */
/**
 * A gizmo handle made of a line with a box at its tip (used, for example, for the per-axis
 * handles of the scale gizmo). The box and line are separate internal entities whose
 * dimensions, gap and intersection tolerance are individually configurable, and the shape
 * can be flipped to face the other direction.
 *
 * @ignore
 */
declare class BoxLineShape extends Shape {
    /**
     * Create a new BoxLineShape.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {ShapeArgs & BoxLineShapeArgs} args - The shape options.
     */
    constructor(device: GraphicsDevice, args?: ShapeArgs & BoxLineShapeArgs);
    /**
     * The internal gap between the box and the line.
     *
     * @type {number}
     * @private
     */
    private _gap;
    /**
     * The internal line thickness of the box line.
     *
     * @type {number}
     * @private
     */
    private _lineThickness;
    /**
     * The internal line length of the box line.
     *
     * @type {number}
     * @private
     */
    private _lineLength;
    /**
     * The internal box size of the box line.
     *
     * @type {number}
     * @private
     */
    private _boxSize;
    /**
     * The internal tolerance of the box line.
     *
     * @type {number}
     * @private
     */
    private _tolerance;
    /**
     * The internal box entity of the box line.
     *
     * @type {Entity}
     * @private
     */
    private _box;
    /**
     * The internal line entity of the box line.
     *
     * @type {Entity}
     * @private
     */
    private _line;
    /**
     * The internal flipped state of the box line.
     *
     * @type {boolean}
     * @private
     */
    private _flipped;
    /**
     * Set the gap between the box and the line.
     *
     * @type {number}
     */
    set gap(value: number);
    /**
     * Get the gap between the box and the line.
     *
     * @type {number}
     */
    get gap(): number;
    /**
     * Set the line thickness of the box line.
     *
     * @type {number}
     */
    set lineThickness(value: number);
    /**
     * Get the line thickness of the box line.
     *
     * @type {number}
     */
    get lineThickness(): number;
    /**
     * Set the line length of the box line.
     *
     * @type {number}
     */
    set lineLength(value: number);
    /**
     * Get the line length of the box line.
     *
     * @type {number}
     */
    get lineLength(): number;
    /**
     * Set the box size of the box line.
     *
     * @type {number}
     */
    set boxSize(value: number);
    /**
     * Get the box size of the box line.
     *
     * @type {number}
     */
    get boxSize(): number;
    /**
     * Set the tolerance of the box line.
     *
     * @type {number}
     */
    set tolerance(value: number);
    /**
     * Get the tolerance of the box line.
     *
     * @type {number}
     */
    get tolerance(): number;
    /**
     * Set the flipped state of the box line.
     *
     * @type {boolean}
     */
    set flipped(value: boolean);
    /**
     * Get the flipped state of the box line.
     *
     * @type {boolean}
     */
    get flipped(): boolean;
}

/**
 * The ScaleGizmo provides interactive 3D manipulation handles for scaling/resizing
 * {@link Entity}s in a {@link Scene}. It creates a visual widget with box-tipped lines along the
 * X, Y and Z axes, planes at their intersections, and a center box, allowing precise control over
 * object scaling through direct manipulation. The gizmo's visual appearance can be customized
 * away from the defaults as required.
 *
 * Note that the gizmo can be driven by both mouse+keyboard and touch input.
 *
 * ```javascript
 * // Create a layer for rendering all gizmos
 * const gizmoLayer = pc.Gizmo.createLayer(app);
 *
 * // Create a scale gizmo
 * const gizmo = new pc.ScaleGizmo(cameraComponent, gizmoLayer);
 *
 * // Create an entity to attach the gizmo to
 * const entity = new pc.Entity();
 * entity.addComponent('render', {
 *     type: 'box'
 * });
 * app.root.addChild(entity);
 *
 * // Attach the gizmo to the entity
 * gizmo.attach([entity]);
 * ```
 *
 * Relevant Engine API examples:
 *
 * - [Scale Gizmo](https://playcanvas.github.io/#/gizmos/transform-scale)
 * - [Editor](https://playcanvas.github.io/#/misc/editor)
 *
 * @category Gizmo
 */
declare class ScaleGizmo extends TransformGizmo {
    /**
     * Creates a new ScaleGizmo object. Use {@link Gizmo.createLayer} to create the layer
     * required to display the gizmo.
     *
     * @param {CameraComponent} camera - The camera component.
     * @param {Layer} layer - The layer responsible for rendering the gizmo.
     * @example
     * const gizmo = new pc.ScaleGizmo(camera, layer);
     */
    constructor(camera: CameraComponent, layer: Layer);
    /**
     * The handle shapes making up the gizmo, keyed by axis/plane name: a center box (`xyz`),
     * a plane per axis pair (`yz`, `xz`, `xy`) and a box-tipped line per axis (`x`, `y`, `z`).
     */
    _shapes: {
        xyz: BoxShape;
        yz: PlaneShape;
        xz: PlaneShape;
        xy: PlaneShape;
        x: BoxLineShape;
        y: BoxLineShape;
        z: BoxLineShape;
    };
    /**
     * Internal mapping from each attached node to their starting scale.
     *
     * @type {Map<GraphNode, Vec3>}
     * @private
     */
    private _nodeScales;
    /**
     * Internal state if transform should use uniform scaling.
     *
     * @type {boolean}
     * @protected
     */
    protected _uniform: boolean;
    /**
     * Flips the planes to face the camera.
     *
     * @type {boolean}
     */
    flipPlanes: boolean;
    /**
     * The lower bound for scaling.
     *
     * @type {Vec3}
     */
    lowerBoundScale: Vec3;
    /**
     * Sets the uniform scaling state for planes.
     *
     * @type {boolean}
     */
    set uniform(value: boolean);
    /**
     * Gets the uniform scaling state for planes.
     *
     * @type {boolean}
     */
    get uniform(): boolean;
    /**
     * Sets the axis gap.
     *
     * @type {number}
     */
    set axisGap(value: number);
    /**
     * Gets the axis gap.
     *
     * @type {number}
     */
    get axisGap(): number;
    /**
     * Sets the axis line thickness.
     *
     * @type {number}
     */
    set axisLineThickness(value: number);
    /**
     * Gets the axis line thickness.
     *
     * @type {number}
     */
    get axisLineThickness(): number;
    /**
     * Sets the axis line length.
     *
     * @type {number}
     */
    set axisLineLength(value: number);
    /**
     * Gets the axis line length.
     *
     * @type {number}
     */
    get axisLineLength(): number;
    /**
     * Sets the axis line tolerance.
     *
     * @type {number}
     */
    set axisLineTolerance(value: number);
    /**
     * Gets the axis line tolerance.
     *
     * @type {number}
     */
    get axisLineTolerance(): number;
    /**
     * Sets the axis box size.
     *
     * @type {number}
     */
    set axisBoxSize(value: number);
    /**
     * Gets the axis box size.
     *
     * @type {number}
     */
    get axisBoxSize(): number;
    /**
     * Sets the plane size.
     *
     * @type {number}
     */
    set axisPlaneSize(value: number);
    /**
     * Gets the plane size.
     *
     * @type {number}
     */
    get axisPlaneSize(): number;
    /**
     * Sets the plane gap.
     *
     * @type {number}
     */
    set axisPlaneGap(value: number);
    /**
     * Gets the plane gap.
     *
     * @type {number}
     */
    get axisPlaneGap(): number;
    /**
     * Sets the axis center size.
     *
     * @type {number}
     */
    set axisCenterSize(value: number);
    /**
     * Gets the axis center size.
     *
     * @type {number}
     */
    get axisCenterSize(): number;
    /**
     * @type {boolean}
     * @deprecated Use {@link ScaleGizmo#flipPlanes} instead.
     * @ignore
     */
    set flipShapes(value: boolean);
    /**
     * @type {boolean}
     * @deprecated Use {@link ScaleGizmo#flipPlanes} instead.
     * @ignore
     */
    get flipShapes(): boolean;
    /**
     * @param {string} prop - The property name.
     * @param {any} value - The property value.
     * @private
     */
    private _setArrowProp;
    /**
     * @param {string} prop - The property name.
     * @param {any} value - The property value.
     * @private
     */
    private _setPlaneProp;
    /**
     * @private
     */
    private _shapesLookAtCamera;
    /**
     * @param {boolean} state - The state.
     * @private
     */
    private _drag;
    /**
     * @private
     */
    private _storeNodeScales;
    /**
     * @param {Vec3} scaleDelta - The point delta.
     * @private
     */
    private _setNodeScales;
    /**
     * @param {number} x - The x coordinate.
     * @param {number} y - The y coordinate.
     * @returns {Vec3} The point (space is {@link TransformGizmo#coordSpace}).
     * @protected
     */
    protected _screenToPoint(x: number, y: number): Vec3;
}

/**
 * An SVG-based view cube widget that visualizes the camera axes and lets the user click a
 * face to align the camera with it (see {@link ViewCube.EVENT_CAMERAALIGN}). Attach the
 * root element ({@link ViewCube#dom}) to the page and keep the widget in sync by calling
 * {@link ViewCube#update} with the camera matrix.
 */
declare class ViewCube extends EventHandler {
    /**
     * Fired when the user clicks on a face of the view cube.
     *
     * @event
     * @example
     * const viewCube = new ViewCube();
     * viewCube.on(ViewCube.EVENT_CAMERAALIGN, function (face) {
     *    console.log('Camera aligned to face: ' + face);
     * });
     */
    static EVENT_CAMERAALIGN: string;
    /**
     * @param {Vec4} [anchor] - The anchor.
     */
    constructor(anchor?: Vec4);
    /**
     * @type {number}
     * @private
     */
    private _size;
    /**
     * @type {SVGSVGElement}
     * @private
     */
    private _svg;
    /**
     * @type {Element}
     * @private
     */
    private _group;
    /**
     * @type {Vec4}
     * @private
     */
    private _anchor;
    /**
     * @type {Color}
     * @private
     */
    private _colorX;
    /**
     * @type {Color}
     * @private
     */
    private _colorY;
    /**
     * @type {Color}
     * @private
     */
    private _colorZ;
    /**
     * @type {Color}
     * @private
     */
    private _colorNeg;
    /**
     * @type {number}
     * @private
     */
    private _radius;
    /**
     * @type {number}
     * @private
     */
    private _textSize;
    /**
     * @type {number}
     * @private
     */
    private _lineThickness;
    /**
     * @type {number}
     * @private
     */
    private _lineLength;
    /**
     * The SVG elements making up the cube: one circle anchor per axis direction
     * (`px`/`nx`/`py`/`ny`/`pz`/`nz`) and one line per positive axis.
     *
     * @type {{
     *     nx: SVGAElement,
     *     ny: SVGAElement,
     *     nz: SVGAElement,
     *     px: SVGAElement,
     *     py: SVGAElement,
     *     pz: SVGAElement,
     *     xaxis: SVGLineElement,
     *     yaxis: SVGLineElement,
     *     zaxis: SVGLineElement
     * }}
     */
    _shapes: {
        nx: SVGAElement;
        ny: SVGAElement;
        nz: SVGAElement;
        px: SVGAElement;
        py: SVGAElement;
        pz: SVGAElement;
        xaxis: SVGLineElement;
        yaxis: SVGLineElement;
        zaxis: SVGLineElement;
    };
    /**
     * The root DOM element of the widget; append this to the document to display the cube.
     *
     * @type {HTMLDivElement}
     */
    dom: HTMLDivElement;
    set anchor(value: Vec4);
    get anchor(): Vec4;
    /**
     * @type {Color}
     */
    set colorX(value: Color);
    get colorX(): Color;
    /**
     * @type {Color}
     */
    set colorY(value: Color);
    get colorY(): Color;
    /**
     * @type {Color}
     */
    set colorZ(value: Color);
    get colorZ(): Color;
    /**
     * @type {Color}
     */
    set colorNeg(value: Color);
    get colorNeg(): Color;
    /**
     * @type {number}
     */
    set radius(value: number);
    get radius(): number;
    /**
     * @type {number}
     */
    set textSize(value: number);
    get textSize(): number;
    /**
     * @type {number}
     */
    set lineThickness(value: number);
    get lineThickness(): number;
    /**
     * @type {number}
     */
    set lineLength(value: number);
    get lineLength(): number;
    /**
     * @private
     */
    private _resize;
    /**
     * @private
     * @param {SVGAElement} group - The group.
     * @param {number} x - The x.
     * @param {number} y - The y.
     */
    private _transform;
    /**
     * @private
     * @param {SVGLineElement} line - The line.
     * @param {number} x - The x.
     * @param {number} y - The y.
     */
    private _x2y2;
    /**
     * @private
     * @param {string} color - The color.
     * @returns {SVGLineElement} - The line.
     */
    private _line;
    /**
     * @private
     * @param {string} color - The color.
     * @param {boolean} [fill] - The fill.
     * @param {string} [text] - The text.
     * @returns {SVGAElement} - The circle.
     */
    private _circle;
    /**
     * @param {Mat4} cameraMatrix - The camera matrix.
     */
    update(cameraMatrix: Mat4): void;
    destroy(): void;
}

/**
 * SSAO is disabled. No ambient occlusion is computed or applied. Use
 * {@link SSAOTYPE_LIGHTING} or {@link SSAOTYPE_COMBINE} to enable the effect.
 *
 * @category Graphics
 */
declare const SSAOTYPE_NONE: "none";
/**
 * SSAO is applied during the lighting calculation stage, allowing it to blend seamlessly with scene
 * lighting. This results in ambient occlusion being more pronounced in areas where direct light is
 * obstructed, enhancing realism.
 *
 * @category Graphics
 */
declare const SSAOTYPE_LIGHTING: "lighting";
/**
 * SSAO is applied as a standalone effect after the scene is rendered. This method uniformly
 * overlays ambient occlusion across the image, disregarding direct lighting interactions. While
 * this may sacrifice some realism, it can be advantageous for achieving specific artistic styles.
 *
 * @category Graphics
 */
declare const SSAOTYPE_COMBINE: "combine";

/**
 * Utility namespace for generating globally unique identifiers.
 */
declare namespace guid {
    /**
     * Create an RFC4122 version 4 compliant GUID.
     *
     * @returns {string} A new GUID.
     * @example
     * const id = pc.guid.create();
     */
    function create(): string;
}

/**
 * Utility namespace for manipulating file and resource paths.
 */
declare namespace path {
    /**
     * The character used to separate path segments (presumably '/' — confirm against the
     * implementation).
     */
    let delimiter: string;
    /**
     * Join two or more sections of file path together, inserting a delimiter if needed.
     *
     * @param {...string} sections - Sections of the path to join.
     * @returns {string} The joined file path.
     * @example
     * const path = pc.path.join('foo', 'bar');
     * console.log(path); // Prints 'foo/bar'
     * @example
     * const path = pc.path.join('alpha', 'beta', 'gamma');
     * console.log(path); // Prints 'alpha/beta/gamma'
     */
    function join(...sections: string[]): string;
    /**
     * Normalize the path by removing '.' and '..' instances.
     *
     * @param {string} pathname - The path to normalize.
     * @returns {string} The normalized path.
     */
    function normalize(pathname: string): string;
    /**
     * Split the pathname path into a pair [head, tail] where tail is the final part of the path
     * after the last delimiter and head is everything leading up to that. tail will never contain
     * a slash.
     *
     * @param {string} pathname - The path to split.
     * @returns {string[]} The split path which is an array of two strings, the path and the
     * filename.
     */
    function split(pathname: string): string[];
    /**
     * Return the basename of the path. That is the second element of the pair returned by passing
     * path into {@link path.split}.
     *
     * @param {string} pathname - The path to process.
     * @returns {string} The basename.
     * @example
     * pc.path.getBasename("/path/to/file.txt"); // returns "file.txt"
     * pc.path.getBasename("/path/to/dir"); // returns "dir"
     */
    function getBasename(pathname: string): string;
    /**
     * Get the directory name from the path. This is everything up to the final instance of
     * {@link path.delimiter}.
     *
     * @param {string} pathname - The path to get the directory from.
     * @returns {string} The directory part of the path.
     */
    function getDirectory(pathname: string): string;
    /**
     * Return the extension of the path, including the leading period. Any query string (the
     * part after a '?') is ignored.
     *
     * @param {string} pathname - The path to process.
     * @returns {string} The extension.
     * @example
     * pc.path.getExtension("/path/to/file.txt"); // returns ".txt"
     * pc.path.getExtension("/path/to/file.jpg"); // returns ".jpg"
     * pc.path.getExtension("/path/to/file.txt?function=getExtension"); // returns ".txt"
     */
    function getExtension(pathname: string): string;
    /**
     * Check if a string s is relative path.
     *
     * @param {string} pathname - The path to process.
     * @returns {boolean} True if s doesn't start with slash and doesn't include colon and double
     * slash.
     *
     * @example
     * pc.path.isRelativePath("file.txt"); // returns true
     * pc.path.isRelativePath("path/to/file.txt"); // returns true
     * pc.path.isRelativePath("./path/to/file.txt"); // returns true
     * pc.path.isRelativePath("../path/to/file.jpg"); // returns true
     * pc.path.isRelativePath("/path/to/file.jpg"); // returns false
     * pc.path.isRelativePath("http://path/to/file.jpg"); // returns false
     */
    function isRelativePath(pathname: string): boolean;
    /**
     * Return the path without file name. If path is relative path, start with period.
     *
     * @param {string} pathname - The full path to process.
     * @returns {string} The path without a last element from list split by slash.
     * @example
     * pc.path.extractPath("path/to/file.txt");    // returns "./path/to"
     * pc.path.extractPath("./path/to/file.txt");  // returns "./path/to"
     * pc.path.extractPath("../path/to/file.txt"); // returns "../path/to"
     * pc.path.extractPath("/path/to/file.txt");   // returns "/path/to"
     */
    function extractPath(pathname: string): string;
}

/**
 * Static namespace exposing runtime environment and platform detection flags.
 */
declare namespace platform {
    export { platformName as name };
    export { environment };
    export let global: object;
    export let browser: boolean;
    export let worker: boolean;
    export let desktop: boolean;
    export let mobile: boolean;
    export let ios: boolean;
    export let android: boolean;
    export { xbox };
    export { gamepads };
    export { touch };
    export { workers };
    export { passiveEvents };
    export { browserName };
}
/** Backing value for {@link platform.name}: the detected operating system. */
declare const platformName: "android" | "ios" | "windows" | "osx" | "linux" | "cros";
/** Backing value for {@link platform.environment}: the detected JavaScript environment. */
declare const environment: "worker" | "browser" | "node";
/** Backing value for {@link platform.xbox}. */
declare const xbox: boolean;
/** Backing value for {@link platform.gamepads}. */
declare const gamepads: boolean;
/** Backing value for {@link platform.touch}. */
declare const touch: boolean;
/** Backing value for {@link platform.workers}. */
declare const workers: boolean;
/** Backing value for {@link platform.passiveEvents}. */
declare const passiveEvents: boolean;
/** Backing value for {@link platform.browserName}: the detected browser. */
declare const browserName: "other" | "chrome" | "safari" | "firefox";

/**
 * Static namespace of helper functions for working with strings and Unicode code points.
 */
declare namespace string {
    export { ASCII_LOWERCASE };
    export { ASCII_UPPERCASE };
    export { ASCII_LETTERS };
    /**
     * Return a string with \{n\} replaced with the n-th argument.
     *
     * @param {string} s - The string to format.
     * @param {...*} args - All other arguments are substituted into the string.
     * @returns {string} The formatted string.
     * @example
     * const s = pc.string.format("Hello {0}", "world");
     * console.log(s); // Prints "Hello world"
     */
    export function format(s: string, ...args: any[]): string;
    /**
     * Get the code point number for a character in a string. Polyfill for
     * [`codePointAt`]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/codePointAt}.
     *
     * @param {string} string - The string to get the code point from.
     * @param {number} [i] - The index in the string.
     * @returns {number} The code point value for the character in the string.
     */
    export function getCodePoint(string: string, i?: number): number;
    /**
     * Gets an array of all code points in a string.
     *
     * @param {string} string - The string to get code points from.
     * @returns {number[]} The code points in the string.
     */
    export function getCodePoints(string: string): number[];
    /**
     * Gets an array of all grapheme clusters (visible symbols) in a string. This is needed because
     * some symbols (such as emoji or accented characters) are actually made up of multiple
     * character codes. See {@link https://mathiasbynens.be/notes/javascript-unicode here} for more
     * info.
     *
     * @param {string} string - The string to break into symbols.
     * @returns {string[]} The symbols in the string.
     */
    export function getSymbols(string: string): string[];
    /**
     * Get the string for a given code point or set of code points. Polyfill for
     * [`fromCodePoint`]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/fromCodePoint}.
     *
     * @param {...number} args - The code points to convert to a string.
     * @returns {string} The converted string.
     * @ignore
     */
    export function fromCodePoint(...args: number[]): string;
}
/** Backing value for {@link string.ASCII_LOWERCASE}: the lowercase ASCII alphabet. */
declare const ASCII_LOWERCASE: "abcdefghijklmnopqrstuvwxyz";
/** Backing value for {@link string.ASCII_UPPERCASE}: the uppercase ASCII alphabet. */
declare const ASCII_UPPERCASE: "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
/**
 * Backing value for {@link string.ASCII_LETTERS} (presumably the lower- and uppercase
 * alphabets combined — confirm against the implementation).
 */
declare const ASCII_LETTERS: string;

/**
 * Callback used by {@link WasmModule.setConfig} to report a module load failure.
 */
type ModuleErrorCallback = (error: string) => void;
/**
 * Callback used by {@link WasmModule.getInstance} to deliver the loaded module instance.
 */
type ModuleInstanceCallback = (moduleInstance: any) => void;
/**
 * @callback ModuleErrorCallback
 * Callback used by {@link WasmModule.setConfig}.
 * @param {string} error - If the instance fails to load this will contain a description of the error.
 * @returns {void}
 */
/**
 * @callback ModuleInstanceCallback
 * Callback used by {@link WasmModule.getInstance}.
 * @param {any} moduleInstance - The module instance.
 * @returns {void}
 */
/**
 * A pure static utility class which supports immediate and lazy loading of
 * [WebAssembly](https://developer.mozilla.org/en-US/docs/WebAssembly) modules. Note that you can
 * load WebAssembly modules even before instantiating your {@link AppBase} instance.
 *
 * This class is generally only needed if you are developing against the Engine directly. Editor
 * projects automatically load WebAssembly modules included in the project's assets.
 *
 * Do not use this class to load the Basis WebAssembly module. Instead, please refer to
 * {@link basisInitialize}.
 *
 * @example
 * // Load the Ammo.js physics engine
 * pc.WasmModule.setConfig('Ammo', {
 *     glueUrl: `ammo.wasm.js`,
 *     wasmUrl: `ammo.wasm.wasm`,
 *     fallbackUrl: `ammo.js`
 * });
 * await new Promise((resolve) => {
 *     pc.WasmModule.getInstance('Ammo', () => resolve());
 * });
 */
declare class WasmModule {
    /**
     * Set a wasm module's configuration.
     *
     * @param {string} moduleName - Name of the module.
     * @param {object} [config] - The configuration object.
     * @param {string} [config.glueUrl] - URL of glue script.
     * @param {string} [config.wasmUrl] - URL of the wasm script.
     * @param {string} [config.fallbackUrl] - URL of the fallback script to use when wasm modules
     * aren't supported.
     * @param {number} [config.numWorkers] - For modules running on worker threads, the number of
     * threads to use. Default value is based on module implementation.
     * @param {ModuleErrorCallback} [config.errorHandler] - Function to be called if the module fails
     * to download.
     */
    static setConfig(moduleName: string, config?: {
        glueUrl?: string;
        wasmUrl?: string;
        fallbackUrl?: string;
        numWorkers?: number;
        errorHandler?: ModuleErrorCallback;
    }): void;
    /**
     * Get a wasm module's configuration.
     *
     * @param {string} moduleName - Name of the module.
     * @returns {object | undefined} The previously set configuration, or `undefined` if
     * {@link WasmModule.setConfig} was never called for this module.
     */
    static getConfig(moduleName: string): object | undefined;
    /**
     * Get a wasm module instance. The instance will be created if necessary and returned
     * in the second parameter to callback.
     *
     * @param {string} moduleName - Name of the module.
     * @param {ModuleInstanceCallback} callback - The function called when the instance is
     * available.
     */
    static getInstance(moduleName: string, callback: ModuleInstanceCallback): void;
}

/**
 * Helper class for organized reading of memory.
 *
 * NOTE(review): the multi-byte reads (readU16/readU32/readU64) are presumably little-endian,
 * given the separate big-endian readU32be — confirm against the implementation.
 *
 * @ignore
 */
declare class ReadStream {
    /**
     * @param {ArrayBuffer} arraybuffer - The buffer to read from.
     */
    constructor(arraybuffer: ArrayBuffer);
    /** @type {ArrayBuffer} */
    arraybuffer: ArrayBuffer;
    /** @type {DataView} */
    dataView: DataView;
    /** @type {number} */
    offset: number;
    /**
     * The number of bytes remaining to be read.
     *
     * @type {number}
     */
    get remainingBytes(): number;
    /**
     * Resets the offset to a given value. If no value is given, the offset is reset to 0.
     *
     * @param {number} offset - The new offset.
     */
    reset(offset?: number): void;
    /**
     * Skips a number of bytes.
     *
     * @param {number} bytes - The number of bytes to skip.
     */
    skip(bytes: number): void;
    /**
     * Aligns the offset to a multiple of a number of bytes.
     *
     * @param {number} bytes - The number of bytes to align to.
     */
    align(bytes: number): void;
    /**
     * Increments the offset by the specified number of bytes and returns the previous offset.
     *
     * @param {number} amount - The number of bytes to increment by.
     * @returns {number} The previous offset.
     * @private
     */
    private _inc;
    /**
     * Reads a single character.
     *
     * @returns {string} The character.
     */
    readChar(): string;
    /**
     * Reads a string of a given length.
     *
     * @param {number} numChars - The number of characters to read.
     * @returns {string} The string.
     */
    readChars(numChars: number): string;
    /**
     * Read an unsigned 8-bit integer.
     *
     * @returns {number} The integer.
     */
    readU8(): number;
    /**
     * Read an unsigned 16-bit integer.
     *
     * @returns {number} The integer.
     */
    readU16(): number;
    /**
     * Read an unsigned 32-bit integer.
     *
     * @returns {number} The integer.
     */
    readU32(): number;
    /**
     * Read an unsigned 64-bit integer.
     *
     * NOTE: the result is returned as a JS number, so values above 2^53 - 1 lose precision.
     *
     * @returns {number} The integer.
     */
    readU64(): number;
    /**
     * Read a big endian unsigned 32-bit integer.
     *
     * @returns {number} The integer.
     */
    readU32be(): number;
    /**
     * Read unsigned 8-bit integers into an array.
     *
     * @param {number[]} result - The array to read into.
     */
    readArray(result: number[]): void;
    /**
     * Read a line of text from the stream.
     *
     * @returns {string} The line of text.
     */
    readLine(): string;
}

/**
 * Get current time in milliseconds. Use it to measure time difference. Reference time may differ
 * on different platforms.
 *
 * Typed as a zero-argument function returning a number (rather than `any`), matching the
 * documented contract — it is a timer function in the style of `performance.now()` /
 * `Date.now()`.
 *
 * @returns {number} The time in milliseconds.
 * @ignore
 */
declare const now: () => number;

/**
 * Log tracing functionality, allowing for tracing of the internal functionality of the engine.
 * Note that the trace logging only takes place in the debug build of the engine and is stripped
 * out in other builds.
 *
 * @category Debug
 */
declare class Tracing {
    /**
     * Set storing the names of enabled trace channels.
     *
     * @type {Set<string>}
     * @private
     */
    private static _traceChannels;
    /**
     * Enable call stack logging for trace calls. Defaults to false.
     *
     * @type {boolean}
     */
    static stack: boolean;
    /**
     * Enable or disable a trace channel.
     *
     * @param {string} channel - Name of the trace channel. Can be:
     *
     * - {@link TRACEID_RENDER_FRAME}
     * - {@link TRACEID_RENDER_FRAME_TIME}
     * - {@link TRACEID_RENDER_PASS}
     * - {@link TRACEID_RENDER_PASS_DETAIL}
     * - {@link TRACEID_RENDER_ACTION}
     * - {@link TRACEID_RENDER_TARGET_ALLOC}
     * - {@link TRACEID_TEXTURE_ALLOC}
     * - {@link TRACEID_SHADER_ALLOC}
     * - {@link TRACEID_SHADER_COMPILE}
     * - {@link TRACEID_VRAM_TEXTURE}
     * - {@link TRACEID_VRAM_VB}
     * - {@link TRACEID_VRAM_IB}
     * - {@link TRACEID_VRAM_SB}
     * - {@link TRACEID_BINDGROUP_ALLOC}
     * - {@link TRACEID_BINDGROUPFORMAT_ALLOC}
     * - {@link TRACEID_RENDERPIPELINE_ALLOC}
     * - {@link TRACEID_COMPUTEPIPELINE_ALLOC}
     * - {@link TRACEID_PIPELINELAYOUT_ALLOC}
     * - {@link TRACEID_TEXTURES}
     * - {@link TRACEID_ASSETS}
     * - {@link TRACEID_GPU_TIMINGS}
     *
     * @param {boolean} enabled - New enabled state for the channel.
     */
    static set(channel: string, enabled?: boolean): void;
    /**
     * Test if the trace channel is enabled.
     *
     * @param {string} channel - Name of the trace channel.
     * @returns {boolean} - True if the trace channel is enabled.
     */
    static get(channel: string): boolean;
}

declare namespace math {
    let DEG_TO_RAD: number;
    let RAD_TO_DEG: number;
    /**
     * Clamp a number between min and max inclusive.
     *
     * @param {number} value - Number to clamp.
     * @param {number} min - Min value.
     * @param {number} max - Max value.
     * @returns {number} The clamped value.
     * @example
     * pc.math.clamp(5, 0, 10);  // returns 5
     * pc.math.clamp(-5, 0, 10); // returns 0
     * pc.math.clamp(15, 0, 10); // returns 10
     */
    function clamp(value: number, min: number, max: number): number;
    /**
     * Convert an 24 bit integer into an array of 3 bytes.
     *
     * @param {number} i - Number holding an integer value.
     * @returns {number[]} An array of 3 bytes.
     * @example
     * // Set bytes to [0x11, 0x22, 0x33]
     * const bytes = pc.math.intToBytes24(0x112233);
     */
    function intToBytes24(i: number): number[];
    /**
     * Convert an 32 bit integer into an array of 4 bytes.
     *
     * @param {number} i - Number holding an integer value.
     * @returns {number[]} An array of 4 bytes.
     * @example
     * // Set bytes to [0x11, 0x22, 0x33, 0x44]
     * const bytes = pc.math.intToBytes32(0x11223344);
     */
    function intToBytes32(i: number): number[];
    /**
     * Convert 3 8 bit Numbers into a single unsigned 24 bit Number.
     *
     * @param {number} r - A single byte (0-255).
     * @param {number} g - A single byte (0-255).
     * @param {number} b - A single byte (0-255).
     * @returns {number} A single unsigned 24 bit Number.
     * @example
     * // Set result1 to 0x112233 from an array of 3 values
     * const result1 = pc.math.bytesToInt24([0x11, 0x22, 0x33]);
     *
     * // Set result2 to 0x112233 from 3 discrete values
     * const result2 = pc.math.bytesToInt24(0x11, 0x22, 0x33);
     */
    function bytesToInt24(r: number, g: number, b: number): number;
    /**
     * Convert 4 1-byte Numbers into a single unsigned 32bit Number.
     *
     * @param {number} r - A single byte (0-255).
     * @param {number} g - A single byte (0-255).
     * @param {number} b - A single byte (0-255).
     * @param {number} a - A single byte (0-255).
     * @returns {number} A single unsigned 32bit Number.
     * @example
     * // Set result1 to 0x11223344 from an array of 4 values
     * const result1 = pc.math.bytesToInt32([0x11, 0x22, 0x33, 0x44]);
     *
     * // Set result2 to 0x11223344 from 4 discrete values
     * const result2 = pc.math.bytesToInt32(0x11, 0x22, 0x33, 0x44);
     */
    function bytesToInt32(r: number, g: number, b: number, a: number): number;
    /**
     * Calculates the linear interpolation of two numbers.
     *
     * @param {number} a - Number to linearly interpolate from.
     * @param {number} b - Number to linearly interpolate to.
     * @param {number} alpha - The value controlling the result of interpolation. When alpha is 0,
     * a is returned. When alpha is 1, b is returned. Between 0 and 1, a linear interpolation
     * between a and b is returned. alpha is clamped between 0 and 1.
     * @returns {number} The linear interpolation of the two supplied numbers.
     * @example
     * pc.math.lerp(0, 10, 0);   // returns 0
     * pc.math.lerp(0, 10, 0.5); // returns 5
     * pc.math.lerp(0, 10, 1);   // returns 10
     */
    function lerp(a: number, b: number, alpha: number): number;
    /**
     * Calculates the linear interpolation of two angles ensuring that interpolation is correctly
     * performed across the 360 to 0 degree boundary. Angles are supplied in degrees.
     *
     * @param {number} a - Angle (in degrees) to linearly interpolate from.
     * @param {number} b - Angle (in degrees) to linearly interpolate to.
     * @param {number} alpha - The value controlling the result of interpolation. When alpha is 0,
     * a is returned. When alpha is 1, b is returned. Between 0 and 1, a linear interpolation
     * between a and b is returned. alpha is clamped between 0 and 1.
     * @returns {number} The linear interpolation of the two supplied angles, in degrees.
     * @example
     * pc.math.lerpAngle(350, 10, 0.5); // returns 0 (shortest path crosses 360/0 boundary)
     * pc.math.lerpAngle(0, 90, 0.5);   // returns 45
     */
    function lerpAngle(a: number, b: number, alpha: number): number;
    /**
     * Returns true if the argument is a power-of-two and false otherwise.
     *
     * @param {number} x - Number to check for power-of-two property.
     * @returns {boolean} true if power-of-two and false otherwise.
     * @example
     * pc.math.powerOfTwo(32); // returns true
     * pc.math.powerOfTwo(17); // returns false
     */
    function powerOfTwo(x: number): boolean;
    /**
     * Returns the smallest power of 2 that is greater than or equal to the specified value.
     *
     * @param {number} val - The value for which to calculate the next power of 2.
     * @returns {number} The next power of 2.
     * @example
     * pc.math.nextPowerOfTwo(17); // returns 32
     * pc.math.nextPowerOfTwo(32); // returns 32
     */
    function nextPowerOfTwo(val: number): number;
    /**
     * Returns the nearest (smaller or larger) power of 2 for the specified value.
     *
     * @param {number} val - The value for which to calculate the nearest power of 2.
     * @returns {number} The power of 2 nearest to the specified value.
     * @example
     * pc.math.nearestPowerOfTwo(17); // returns 16
     * pc.math.nearestPowerOfTwo(24); // returns 32
     */
    function nearestPowerOfTwo(val: number): number;
    /**
     * Return a pseudo-random number between min and max. The number generated is in the range
     * [min, max), that is inclusive of the minimum but exclusive of the maximum.
     *
     * @param {number} min - Lower bound for range.
     * @param {number} max - Upper bound for range.
     * @returns {number} Pseudo-random number between the supplied range.
     * @example
     * pc.math.random(0, 10); // returns a random number in the range [0, 10)
     */
    function random(min: number, max: number): number;
    /**
     * Interpolates smoothly between two input values based on a third one that should be between
     * the first two. The returned value is clamped between 0 and 1.
     *
     * The slope (i.e. derivative) of the smoothstep function starts at 0 and ends at 0. This makes
     * it easy to create a sequence of transitions using smoothstep to interpolate each segment
     * rather than using a more sophisticated or expensive interpolation technique.
     *
     * See https://en.wikipedia.org/wiki/Smoothstep for more details.
     *
     * @param {number} min - The lower bound of the interpolation range.
     * @param {number} max - The upper bound of the interpolation range.
     * @param {number} x - The value to interpolate.
     * @returns {number} The smoothly interpolated value clamped between zero and one.
     * @example
     * pc.math.smoothstep(0, 10, 5); // returns 0.5
     */
    function smoothstep(min: number, max: number, x: number): number;
    /**
     * An improved version of the {@link math.smoothstep} function with zero 1st and 2nd order
     * derivatives at t=0 and t=1.
     *
     * See https://en.wikipedia.org/wiki/Smoothstep#Variations for more details.
     *
     * @param {number} min - The lower bound of the interpolation range.
     * @param {number} max - The upper bound of the interpolation range.
     * @param {number} x - The value to interpolate.
     * @returns {number} The smoothly interpolated value clamped between zero and one.
     * @example
     * pc.math.smootherstep(0, 10, 5); // returns 0.5
     */
    function smootherstep(min: number, max: number, x: number): number;
    /**
     * Rounds a number up to the nearest multiple of another number.
     *
     * @param {number} numToRound - The number to round up.
     * @param {number} multiple - The multiple to round up to.
     * @returns {number} A number rounded up to the nearest multiple.
     * @example
     * pc.math.roundUp(17, 4); // returns 20
     * pc.math.roundUp(16, 4); // returns 16
     */
    function roundUp(numToRound: number, multiple: number): number;
    /**
     * Checks whether a given number resides between two other given numbers. The two thresholds
     * may be supplied in either order.
     *
     * @param {number} num - The number to check the position of.
     * @param {number} a - The first upper or lower threshold to check between.
     * @param {number} b - The second upper or lower threshold to check between.
     * @param {boolean} inclusive - If true, a num param which is equal to a or b will return true.
     * @returns {boolean} true if between or false otherwise.
     * @ignore
     */
    function between(num: number, a: number, b: number, inclusive: boolean): boolean;
}

/**
 * Utility static class providing functionality to pack float values into various storage
 * representations.
 *
 * @category Math
 */
declare class FloatPacking {
    /**
     * Packs a float to a 16-bit half-float representation used by the GPU.
     *
     * @param {number} value - The float value to pack.
     * @returns {number} The 16-bit half-float representation as an integer.
     * @example
     * const half = pc.FloatPacking.float2Half(1.5);
     */
    static float2Half(value: number): number;
    /**
     * Converts bits of a 32-bit float into RGBA8 format and stores the result in a provided color.
     * The float can be reconstructed in shader using the uintBitsToFloat instruction.
     *
     * @param {number} value - The float value to convert.
     * @param {Color} data - The color to store the RGBA8 packed value in.
     *
     * @ignore
     */
    static float2RGBA8(value: number, data: Color): void;
}

/**
 * Static helpers for generating sampling kernels.
 *
 * @category Math
 */
declare class Kernel {
    /**
     * Generate a set of points distributed in a series of concentric rings around the origin. The
     * spacing between points is determined by the number of points in the first ring, and subsequent
     * rings maintain this spacing by adjusting their number of points accordingly.
     *
     * @param {number} numRings - The number of concentric rings to generate.
     * @param {number} numPoints - The number of points in the first ring.
     * @returns {number[]} An array where each point is represented by two consecutive numbers (x, y).
     * @example
     * // Generate a kernel with 3 rings and 8 points in the first ring
     * const kernel = pc.Kernel.concentric(3, 8);
     * // kernel is a flat array: [x0, y0, x1, y1, x2, y2, ...]
     */
    static concentric(numRings: number, numPoints: number): number[];
}

/**
 * An oriented bounding box is a box that can be rotated and translated in 3D space. It is defined
 * by a world transform and half extents. Unlike an axis-aligned bounding box, an OBB can be
 * oriented arbitrarily.
 *
 * @category Math
 */
declare class OrientedBox {
    /**
     * Create a new OrientedBox instance.
     *
     * @param {Mat4} [worldTransform] - Transform that has the orientation and position of the box.
     * Scale is assumed to be one. Defaults to identity matrix.
     * @param {Vec3} [halfExtents] - Half the distance across the box in each local axis. Defaults
     * to (0.5, 0.5, 0.5).
     */
    constructor(worldTransform?: Mat4, halfExtents?: Vec3);
    /**
     * @type {Vec3}
     * @private
     */
    private halfExtents;
    /**
     * @type {Mat4}
     * @private
     */
    private _modelTransform;
    /**
     * @type {Mat4}
     * @private
     */
    private _worldTransform;
    /**
     * @type {BoundingBox}
     * @private
     */
    private _aabb;
    /**
     * Sets the world transform of the OBB.
     *
     * @type {Mat4}
     */
    set worldTransform(value: Mat4);
    /**
     * Gets the world transform of the OBB.
     *
     * @type {Mat4}
     */
    get worldTransform(): Mat4;
    /**
     * Test if a ray intersects with the OBB.
     *
     * @param {Ray} ray - Ray to test against (direction must be normalized).
     * @param {Vec3} [point] - If there is an intersection, the intersection point will be copied
     * into here.
     * @returns {boolean} True if there is an intersection.
     */
    intersectsRay(ray: Ray, point?: Vec3): boolean;
    /**
     * Test if a point is inside the OBB.
     *
     * @param {Vec3} point - Point to test.
     * @returns {boolean} True if the point is inside the OBB and false otherwise.
     */
    containsPoint(point: Vec3): boolean;
    /**
     * Test if a Bounding Sphere is overlapping, enveloping, or inside this OBB.
     *
     * @param {BoundingSphere} sphere - Bounding Sphere to test.
     * @returns {boolean} True if the Bounding Sphere is overlapping, enveloping or inside this OBB
     * and false otherwise.
     */
    intersectsBoundingSphere(sphere: BoundingSphere): boolean;
}

/**
 * Creates a graphics device.
 *
 * @param {HTMLCanvasElement} canvas - The canvas element.
 * @param {object} options - Graphics device options.
 * @param {string[]} [options.deviceTypes] - An array of DEVICETYPE_*** constants, defining the
 * order in which the devices are attempted to get created. Defaults to an empty array. If the
 * specified array does not contain {@link DEVICETYPE_WEBGL2}, it is internally added to its end.
 * Typically, you'd only specify {@link DEVICETYPE_WEBGPU}, or leave it empty. Use
 * {@link DEVICETYPE_WEBGPU_BARE} to create a WebGPU device without optional features and with
 * default spec limits, useful for testing on constrained devices.
 * @param {boolean} [options.antialias] - Boolean that indicates whether or not to perform
 * anti-aliasing if possible. Defaults to true.
 * @param {string} [options.displayFormat] - The display format of the canvas. Defaults to
 * {@link DISPLAYFORMAT_LDR}. Can be:
 *
 * - {@link DISPLAYFORMAT_LDR}
 * - {@link DISPLAYFORMAT_LDR_SRGB}
 * - {@link DISPLAYFORMAT_HDR}
 *
 * @param {boolean} [options.depth] - Boolean that indicates that the drawing buffer is
 * requested to have a depth buffer of at least 16 bits. Defaults to true.
 * @param {boolean} [options.stencil] - Boolean that indicates that the drawing buffer is
 * requested to have a stencil buffer of at least 8 bits. Defaults to true.
 * @param {string} [options.glslangUrl] - The URL to the glslang script. Required only if
 * user-defined shaders or shader chunk overrides are specified in GLSL and need to be transpiled to
 * WGSL for use with the {@link DEVICETYPE_WEBGPU} device type. This is not required if only the
 * engine's built-in shaders are used, as those are provided directly in WGSL. Not used for
 * {@link DEVICETYPE_WEBGL2} device type creation.
 * @param {string} [options.twgslUrl] - The URL to the twgsl script. Required if glslangUrl was
 * specified.
 * @param {boolean} [options.xrCompatible] - Boolean that hints to the user agent to use a
 * compatible graphics adapter for an immersive XR device.
 * @param {'default'|'high-performance'|'low-power'} [options.powerPreference] - A hint indicating
 * what configuration of GPU would be selected. Possible values are:
 *
 * - 'default': Let the user agent decide which GPU configuration is most suitable. This is the
 * default value.
 * - 'high-performance': Prioritizes rendering performance over power consumption.
 * - 'low-power': Prioritizes power saving over rendering performance.
 *
 * Defaults to 'default'.
 * @returns {Promise} - Promise object representing the created graphics device.
 * @category Graphics
 */
declare function createGraphicsDevice(canvas: HTMLCanvasElement, options?: {
    deviceTypes?: string[];
    antialias?: boolean;
    displayFormat?: string;
    depth?: boolean;
    stencil?: boolean;
    glslangUrl?: string;
    twgslUrl?: string;
    xrCompatible?: boolean;
    powerPreference?: "default" | "high-performance" | "low-power";
}): Promise<any>;

/**
 * @import { Vec2 } from '../../core/math/vec2.js'
 */
/**
 * A class providing utility functions for textures.
 *
 * @ignore
 */
declare class TextureUtils {
    /**
     * Calculate the dimension of a texture at a specific mip level.
     *
     * @param {number} dimension - Texture dimension at level 0.
     * @param {number} mipLevel - Mip level.
     * @returns {number} The dimension of the texture at the specified mip level.
     */
    static calcLevelDimension(dimension: number, mipLevel: number): number;
    /**
     * Calculate the number of mip levels for a texture with the specified dimensions.
     *
     * @param {number} width - Texture's width.
     * @param {number} height - Texture's height.
     * @param {number} [depth] - Texture's depth. Defaults to 1.
     * @returns {number} The number of mip levels required for the texture.
     */
    static calcMipLevelsCount(width: number, height: number, depth?: number): number;
    /**
     * Calculate the size in bytes of the texture level given its format and dimensions.
     *
     * @param {number} width - Texture's width.
     * @param {number} height - Texture's height.
     * @param {number} depth - Texture's depth.
     * @param {number} format - Texture's pixel format PIXELFORMAT_***.
     * @returns {number} The number of bytes of GPU memory required for the texture.
     */
    static calcLevelGpuSize(width: number, height: number, depth: number, format: number): number;
    /**
     * Calculate the GPU memory required for a texture.
     *
     * @param {number} width - Texture's width.
     * @param {number} height - Texture's height.
     * @param {number} depth - Texture's depth.
     * @param {number} format - Texture's pixel format PIXELFORMAT_***.
     * @param {boolean} mipmaps - True if the texture includes mipmaps, false otherwise.
     * @param {boolean} cubemap - True if the texture is a cubemap, false otherwise.
     * @returns {number} The number of bytes of GPU memory required for the texture.
     */
    static calcGpuSize(width: number, height: number, depth: number, format: number, mipmaps: boolean, cubemap: boolean): number;
    /**
     * Calculate roughly square texture dimensions that can hold the given number of texels.
     *
     * @param {number} count - The number of texels to fit.
     * @param {Vec2} result - Output vector to receive width (x) and height (y).
     * @param {number} [widthMultiple] - If greater than 1, the width is rounded up to the
     * nearest multiple of this value. Useful for ensuring rows align to a specific stride (e.g.
     * 4 texels per matrix row, or N lights per cell).
     * @returns {Vec2} The result vector with dimensions set.
     */
    static calcTextureSize(count: number, result: Vec2, widthMultiple?: number): Vec2;
}

/**
 * @import { GraphicsDevice } from './graphics-device.js'
 */
/**
 * This object allows you to configure and use the transform feedback feature (WebGL2 only). How to
 * use:
 *
 * 1. First, check that you're on WebGL2, by looking at the `app.graphicsDevice.isWebGL2` value.
 * 2. Define the outputs in your vertex shader. The syntax is `out vec3 out_vertex_position`,
 * note that the `out_` prefix in the name is required. You can then simply assign values to these
 * outputs in VS. The order and size of shader outputs must match the output buffer layout.
 * 3. Create the shader using `TransformFeedback.createShader(device, vsCode, yourShaderName)`.
 * 4. Create/acquire the input vertex buffer. Can be any VertexBuffer, either manually created, or
 * from a Mesh.
 * 5. Create the TransformFeedback object: `const tf = new TransformFeedback(inputBuffer)`. This
 * object will internally create an output buffer.
 * 6. Run the shader: `tf.process(shader)`. Shader will take the input buffer, process it and write
 * to the output buffer, then the input/output buffers will be automatically swapped, so you'll
 * immediately see the result.
 *
 * ```javascript
 * // *** shader asset ***
 * attribute vec3 vertex_position;
 * attribute vec3 vertex_normal;
 * attribute vec2 vertex_texCoord0;
 * out vec3 out_vertex_position;
 * out vec3 out_vertex_normal;
 * out vec2 out_vertex_texCoord0;
 * void main(void) {
 *     // read position and normal, write new position (push away)
 *     out_vertex_position = vertex_position + vertex_normal * 0.01;
 *     // pass other attributes unchanged
 *     out_vertex_normal = vertex_normal;
 *     out_vertex_texCoord0 = vertex_texCoord0;
 * }
 * ```
 *
 * ```javascript
 * // *** script asset ***
 * var TransformExample = pc.createScript('transformExample');
 *
 * // attribute that references shader asset and material
 * TransformExample.attributes.add('shaderCode', { type: 'asset', assetType: 'shader' });
 * TransformExample.attributes.add('material', { type: 'asset', assetType: 'material' });
 *
 * TransformExample.prototype.initialize = function() {
 *     const device = this.app.graphicsDevice;
 *     const mesh = pc.Mesh.fromGeometry(app.graphicsDevice, new pc.TorusGeometry({ tubeRadius: 0.01, ringRadius: 3 }));
 *     const meshInstance = new pc.MeshInstance(mesh, this.material.resource);
 *     const entity = new pc.Entity();
 *     entity.addComponent('render', {
 *         type: 'asset',
 *         meshInstances: [meshInstance]
 *     });
 *     app.root.addChild(entity);
 *
 *     // if webgl2 is not supported, transform-feedback is not available
 *     if (!device.isWebGL2) return;
 *     const inputBuffer = mesh.vertexBuffer;
 *     this.tf = new pc.TransformFeedback(inputBuffer);
 *     this.shader = pc.TransformFeedback.createShader(device, this.shaderCode.resource, "tfMoveUp");
 * };
 *
 * TransformExample.prototype.update = function(dt) {
 *     if (!this.app.graphicsDevice.isWebGL2) return;
 *     this.tf.process(this.shader);
 * };
 * ```
 *
 * @category Graphics
 */
declare class TransformFeedback {
    /**
     * Creates a transform feedback ready vertex shader from code.
     *
     * @param {GraphicsDevice} graphicsDevice - The graphics device used by the renderer.
     * @param {string} vertexCode - Vertex shader code. Should contain output variables starting with "out_" or feedbackVaryings.
     * @param {string} name - Unique name for caching the shader.
     * @param {string[]} [feedbackVaryings] - A list of shader output variable names that will be captured.
     * @returns {Shader} A shader to use in the process() function.
     */
    static createShader(graphicsDevice: GraphicsDevice, vertexCode: string, name: string, feedbackVaryings?: string[]): Shader;
    /**
     * Create a new TransformFeedback instance.
     *
     * @param {VertexBuffer} inputBuffer - The input vertex buffer.
     * @param {VertexBuffer} [outputBuffer] - The optional output buffer.
     * If not specified, a buffer with parameters matching the input buffer will be created.
     * @param {number} [usage] - The optional usage type of the output vertex buffer. Can be:
     *
     * - {@link BUFFER_STATIC}
     * - {@link BUFFER_DYNAMIC}
     * - {@link BUFFER_STREAM}
     * - {@link BUFFER_GPUDYNAMIC}
     *
     * Defaults to {@link BUFFER_GPUDYNAMIC} (which is recommended for continuous update).
     */
    constructor(inputBuffer: VertexBuffer, outputBuffer?: VertexBuffer, usage?: number);
    device: GraphicsDevice;
    _inputBuffer: VertexBuffer;
    _destroyOutputBuffer: boolean;
    _outputBuffer: VertexBuffer;
    /**
     * Destroys the transform feedback helper object.
     */
    destroy(): void;
    /**
     * Runs the specified shader on the input buffer, writes results into the new buffer, then
     * optionally swaps input/output.
     *
     * @param {Shader} shader - A vertex shader to run. Should be created with
     * {@link TransformFeedback.createShader}.
     * @param {boolean} [swap] - Swap input/output buffer data. Useful for continuous buffer
     * processing. Default is true.
     */
    process(shader: Shader, swap?: boolean): void;
    /**
     * The current input buffer.
     *
     * @type {VertexBuffer}
     */
    get inputBuffer(): VertexBuffer;
    /**
     * The current output buffer.
     *
     * @type {VertexBuffer}
     */
    get outputBuffer(): VertexBuffer;
}

/**
 * A vertex iterator simplifies the process of writing vertex data to a vertex buffer.
 *
 * @category Graphics
 */
declare class VertexIterator {
    /**
     * Create a new VertexIterator instance.
     *
     * @param {VertexBuffer} vertexBuffer - The vertex buffer to be iterated.
     */
    constructor(vertexBuffer: VertexBuffer);
    vertexBuffer: VertexBuffer;
    vertexFormatSize: number;
    buffer: ArrayBuffer;
    accessors: VertexIteratorAccessor[];
    /**
     * The vertex buffer elements.
     *
     * @type {Object<string, VertexIteratorAccessor>}
     */
    element: {
        [x: string]: VertexIteratorAccessor;
    };
    /**
     * Moves the vertex iterator on to the next vertex.
     *
     * @param {number} [count] - Number of steps to move on when calling next. Defaults to 1.
     * @example
     * const iterator = new pc.VertexIterator(vertexBuffer);
     * iterator.element[pc.SEMANTIC_POSITION].set(-0.9, -0.9, 0.0);
     * iterator.element[pc.SEMANTIC_COLOR].set(255, 0, 0, 255);
     * iterator.next();
     * iterator.element[pc.SEMANTIC_POSITION].set(0.9, -0.9, 0.0);
     * iterator.element[pc.SEMANTIC_COLOR].set(0, 255, 0, 255);
     * iterator.next();
     * iterator.element[pc.SEMANTIC_POSITION].set(0.0, 0.9, 0.0);
     * iterator.element[pc.SEMANTIC_COLOR].set(0, 0, 255, 255);
     * iterator.end();
     */
    next(count?: number): void;
    /**
     * Notifies the vertex buffer being iterated that writes are complete. Internally the vertex
     * buffer is unlocked and vertex data is uploaded to video memory.
     *
     * @example
     * const iterator = new pc.VertexIterator(vertexBuffer);
     * iterator.element[pc.SEMANTIC_POSITION].set(-0.9, -0.9, 0.0);
     * iterator.element[pc.SEMANTIC_COLOR].set(255, 0, 0, 255);
     * iterator.next();
     * iterator.element[pc.SEMANTIC_POSITION].set(0.9, -0.9, 0.0);
     * iterator.element[pc.SEMANTIC_COLOR].set(0, 255, 0, 255);
     * iterator.next();
     * iterator.element[pc.SEMANTIC_POSITION].set(0.0, 0.9, 0.0);
     * iterator.element[pc.SEMANTIC_COLOR].set(0, 0, 255, 255);
     * iterator.end();
     */
    end(): void;
    /**
     * Copies data for specified semantic into vertex buffer. Works with both interleaved (slower)
     * and non-interleaved (fast) vertex buffers.
     *
     * @param {string} semantic - The semantic of the vertex element to set.
     * @param {number[]|ArrayBufferView} data - The data to set.
     * @param {number} numVertices - The number of vertices to write.
     * @ignore
     */
    writeData(semantic: string, data: number[] | ArrayBufferView, numVertices: number): void;
    /**
     * Extracts elements of a specified semantic from the vertex buffer into a flat array (data).
     * Works with both interleaved (slower) and non-interleaved (fast) vertex buffers.
     * Returns number of vertices. Note: when data is a typed array and is smaller than needed,
     * only part of the data gets copied out (typed arrays ignore read/write out of range).
     *
     * @param {string} semantic - The semantic of the vertex element to read.
     * @param {number[]|ArrayBufferView} data - The array to receive the data.
     * @returns {number} The number of vertices read.
     * @ignore
     */
    readData(semantic: string, data: number[] | ArrayBufferView): number;
}

/**
 * Helps with accessing a specific vertex attribute.
 *
 * @category Graphics
 * @ignore
 */
declare class VertexIteratorAccessor {
    /**
     * Create a new VertexIteratorAccessor instance.
     *
     * @param {ArrayBuffer} buffer - The vertex buffer containing the attribute to be accessed.
     * @param {object} vertexElement - The vertex attribute to be accessed.
     * @param {string} vertexElement.name - The meaning of the vertex element. This is used to link
     * the vertex data to a shader input. Can be:
     *
     * - {@link SEMANTIC_POSITION}
     * - {@link SEMANTIC_NORMAL}
     * - {@link SEMANTIC_TANGENT}
     * - {@link SEMANTIC_BLENDWEIGHT}
     * - {@link SEMANTIC_BLENDINDICES}
     * - {@link SEMANTIC_COLOR}
     * - {@link SEMANTIC_TEXCOORD0}
     * - {@link SEMANTIC_TEXCOORD1}
     * - {@link SEMANTIC_TEXCOORD2}
     * - {@link SEMANTIC_TEXCOORD3}
     * - {@link SEMANTIC_TEXCOORD4}
     * - {@link SEMANTIC_TEXCOORD5}
     * - {@link SEMANTIC_TEXCOORD6}
     * - {@link SEMANTIC_TEXCOORD7}
     *
     * If vertex data has a meaning other than one of those listed above, use the user-defined
     * semantics: {@link SEMANTIC_ATTR0} to {@link SEMANTIC_ATTR15}.
     * @param {number} vertexElement.numComponents - The number of components of the vertex
     * attribute. Can be 1, 2, 3 or 4.
     * @param {number} vertexElement.dataType - The data type of the attribute. Can be:
     *
     * - {@link TYPE_INT8}
     * - {@link TYPE_UINT8}
     * - {@link TYPE_INT16}
     * - {@link TYPE_UINT16}
     * - {@link TYPE_INT32}
     * - {@link TYPE_UINT32}
     * - {@link TYPE_FLOAT32}
     * @param {boolean} vertexElement.normalize - If true, vertex attribute data will be mapped
     * from a 0 to 255 range down to 0 to 1 when fed to a shader. If false, vertex attribute data
     * is left unchanged. If this property is unspecified, false is assumed.
     * @param {number} vertexElement.offset - The number of initial bytes at the start of a vertex
     * that are not relevant to this attribute.
     * @param {number} vertexElement.stride - The number of total bytes that are between the start
     * of one vertex, and the start of the next.
     * @param {ScopeId} vertexElement.scopeId - The shader input variable corresponding to the
     * attribute.
     * @param {number} vertexElement.size - The size of the attribute in bytes.
     * @param {VertexFormat} vertexFormat - A vertex format that defines the layout of vertex data
     * inside the buffer.
     */
    constructor(buffer: ArrayBuffer, vertexElement: {
        name: string;
        numComponents: number;
        dataType: number;
        normalize: boolean;
        offset: number;
        stride: number;
        scopeId: ScopeId;
        size: number;
    }, vertexFormat: VertexFormat);
    index: number;
    numComponents: number;
    array: Int8Array<ArrayBuffer> | Uint8Array<ArrayBuffer> | Int16Array<ArrayBuffer> | Uint16Array<ArrayBuffer> | Int32Array<ArrayBuffer> | Uint32Array<ArrayBuffer> | Float32Array<ArrayBuffer>;
    stride: number;
    /**
     * Set all the attribute components at the iterator's current index.
     *
     * @param {number} a - The first component value.
     * @param {number} [b] - The second component value (if applicable).
     * @param {number} [c] - The third component value (if applicable).
     * @param {number} [d] - The fourth component value (if applicable).
     */
    set(a: number, b?: number, c?: number, d?: number): void;
    /**
     * Read attribute components to an output array.
     *
     * @param {number} offset - The component offset at which to read data from the buffer. Will be
     * used instead of the iterator's current index.
     * @param {number[]|ArrayBufferView} outputArray - The output array to write data into.
     * @param {number} outputIndex - The output index at which to write into the output array.
     */
    getToArray(offset: number, outputArray: number[] | ArrayBufferView, outputIndex: number): void;
    /**
     * Write attribute components from an input array.
     *
     * @param {number} index - The starting index at which to write data into the buffer. Will be
     * used instead of the iterator's current index.
     * @param {number[]|ArrayBufferView} inputArray - The input array to read data from.
     * @param {number} inputIndex - The input index at which to read from the input array.
     */
    setFromArray(index: number, inputArray: number[] | ArrayBufferView, inputIndex: number): void;
    /**
     * Get an attribute component at the iterator's current index.
     *
     * @param {number} offset - The component offset. Should be either 0, 1, 2, or 3.
     * @returns {number} The value of an attribute component.
     */
    get(offset: number): number;
}

/**
 * Base class for render and compute pipelines.
 *
 * @ignore
 */
declare class WebgpuPipeline {
    /**
     * Create a new WebgpuPipeline instance.
     *
     * @param {WebgpuGraphicsDevice} device - The graphics device.
     */
    constructor(device: any);
    /** @type {WebgpuGraphicsDevice} */
    device: WebgpuGraphicsDevice;
    /**
     * @param {BindGroupFormat[]} bindGroupFormats - An array of bind group formats.
     * @returns {any} Returns the pipeline layout.
     */
    getPipelineLayout(bindGroupFormats: BindGroupFormat[]): any;
}

/**
 * Builds and caches WebGPU vertex buffer layouts derived from engine vertex formats.
 *
 * @ignore
 */
declare class WebgpuVertexBufferLayout {
    /**
     * @type {Map<string, GPUVertexBufferLayout[]>}
     * @private
     */
    private cache;
    /**
     * Obtain a vertex layout of one or two vertex formats.
     *
     * @param {VertexFormat} vertexFormat0 - The first vertex format.
     * @param {VertexFormat} [vertexFormat1] - The second vertex format.
     * @returns {any[]} - The vertex layout.
     */
    get(vertexFormat0: VertexFormat, vertexFormat1?: VertexFormat): any[];
    getKey(vertexFormat0: any, vertexFormat1?: any): string;
    /**
     * @param {VertexFormat} vertexFormat0 - The first vertex format.
     * @param {VertexFormat} vertexFormat1 - The second vertex format.
     * @returns {any[]} - The vertex buffer layout.
     */
    create(vertexFormat0: VertexFormat, vertexFormat1: VertexFormat): any[];
}

/**
 * A pipeline object that creates and caches WebGPU render pipelines.
 *
 * @ignore
 */
declare class WebgpuRenderPipeline extends WebgpuPipeline {
    lookupHashes: Uint32Array<ArrayBuffer>;
    /**
     * The cache of vertex buffer layouts
     *
     * @type {WebgpuVertexBufferLayout}
     */
    vertexBufferLayout: WebgpuVertexBufferLayout;
    /**
     * The cache of render pipelines
     *
     * @type {Map<number, CacheEntry[]>}
     */
    cache: Map<number, CacheEntry$1[]>;
    /**
     * @param {object} primitive - The primitive.
     * @param {VertexFormat} vertexFormat0 - The first vertex format.
     * @param {VertexFormat} vertexFormat1 - The second vertex format.
     * @param {number|undefined} ibFormat - The index buffer format.
     * @param {Shader} shader - The shader.
     * @param {RenderTarget} renderTarget - The render target.
     * @param {BindGroupFormat[]} bindGroupFormats - An array of bind group formats.
     * @param {BlendState} blendState - The blend state.
     * @param {DepthState} depthState - The depth state.
     * @param {number} cullMode - The cull mode.
     * @param {boolean} stencilEnabled - Whether stencil is enabled.
     * @param {StencilParameters} stencilFront - The stencil state for front faces.
     * @param {StencilParameters} stencilBack - The stencil state for back faces.
     * @param {number} frontFace - The front face.
     * @returns {GPURenderPipeline} Returns the render pipeline.
     * @private
     */
    private get;
    getBlend(blendState: any): {
        color: {
            operation: string;
            srcFactor: string;
            dstFactor: string;
        };
        alpha: {
            operation: string;
            srcFactor: string;
            dstFactor: string;
        };
    };
    /**
     * @param {DepthState} depthState - The depth state.
     * @param {RenderTarget} renderTarget - The render target.
     * @param {boolean} stencilEnabled - Whether stencil is enabled.
     * @param {StencilParameters} stencilFront - The stencil state for front faces.
     * @param {StencilParameters} stencilBack - The stencil state for back faces.
     * @param {string} primitiveTopology - The primitive topology.
     * @returns {object} Returns the depth stencil state.
     * @private
     */
    private getDepthStencil;
    create(primitiveTopology: any, ibFormat: any, shader: any, renderTarget: any, pipelineLayout: any, blendState: any, depthState: any, vertexBufferLayout: any, cullMode: any, stencilEnabled: any, stencilFront: any, stencilBack: any, frontFace: any): any;
}

/**
 * An entry in the render pipeline cache.
 *
 * @ignore
 */
declare class CacheEntry$1 {
    /**
     * Render pipeline
     *
     * @type {GPURenderPipeline}
     * @private
     */
    private pipeline;
    /**
     * The full array of hashes used to lookup the pipeline, used in case of hash collision.
     *
     * @type {Uint32Array}
     */
    hashes: Uint32Array;
}

/**
 * A WebGPU implementation of compute pipeline creation and caching.
 *
 * @ignore
 */
declare class WebgpuComputePipeline extends WebgpuPipeline {
    /** Scratch array of hashes combined into the cache lookup key. */
    lookupHashes: Uint32Array<ArrayBuffer>;
    /**
     * The cache of compute pipelines. Values are arrays of entries to handle hash collisions
     * (see CacheEntry.hashes).
     *
     * @type {Map<number, CacheEntry[]>}
     */
    cache: Map<number, CacheEntry[]>;
    /** Returns a (cached) compute pipeline for the shader / bind group format combination. */
    get(shader: any, bindGroupFormat: any): any;
    /** Creates a new GPUComputePipeline (cache miss path). */
    create(shader: any, pipelineLayout: any): any;
}

/**
 * An entry in the compute pipeline cache.
 *
 * @ignore
 */
declare class CacheEntry {
    /**
     * Compute pipeline
     *
     * @type {GPUComputePipeline|null}
     * @private
     */
    private pipeline;
    /**
     * The full array of hashes used to lookup the pipeline, used in case of hash collision.
     *
     * @type {Uint32Array|null}
     */
    hashes: Uint32Array | null;
}

/**
 * A WebGPU helper class implementing a viewport clear operation. When rendering to a texture,
 * the whole surface can be cleared using loadOp, but if only a viewport needs to be cleared, or if
 * it needs to be cleared later during the rendering, this needs to be achieved by rendering a quad.
 * This class renders a full-screen quad, and expects the viewport / scissor to be set up to clip
 * it to only required area.
 *
 * @ignore
 */
declare class WebgpuClearRenderer {
    constructor(device: any);
    /** Shader used to render the clear quad. */
    shader: Shader;
    /** Uniform buffer supplying clear parameters to the shader. */
    uniformBuffer: UniformBuffer;
    dynamicBindGroup: DynamicBindGroup;
    /** CPU-side float storage for the clear color data. */
    colorData: Float32Array<ArrayBuffer>;
    destroy(): void;
    /** Clears the currently set viewport of the render target using the supplied options. */
    clear(device: any, renderTarget: any, options: any, defaultOptions: any): void;
}

/**
 * A WebGPU implementation of the Texture.
 *
 * @ignore
 */
declare class WebgpuTexture {
    constructor(texture: any);
    /**
     * @type {GPUTexture}
     * @private
     */
    private gpuTexture;
    /**
     * The default view of the texture.
     *
     * @type {GPUTextureView}
     * @private
     */
    private view;
    /**
     * An array of samplers, addressed by SAMPLETYPE_*** constant, allowing texture to be sampled
     * using different samplers. Most textures are sampled as interpolated floats, but some can
     * additionally be sampled using non-interpolated floats (raw data) or compare sampling
     * (shadow maps).
     *
     * @type {GPUSampler[]}
     * @private
     */
    private samplers;
    /**
     * @type {GPUTextureDescriptor}
     * @private
     */
    private desc;
    /**
     * @type {GPUTextureFormat}
     * @private
     */
    private format;
    /**
     * A cache of texture views keyed by TextureView.key, used for storage texture bindings.
     *
     * @type {Map<number, GPUTextureView>}
     * @private
     */
    private viewCache;
    /** @type {Texture} */
    texture: Texture;
    /** Creates the underlying GPU resources for the texture. */
    create(device: any): void;
    /** Releases the GPU resources owned by this texture. */
    destroy(device: any): void;
    /** Called when a property of the owning Texture changes; flag identifies the property. */
    propertyChanged(flag: any): void;
    /**
     * Returns a texture view. If a TextureView is provided, returns a cached view for those
     * specific parameters (creating it if needed). Otherwise returns the default view.
     *
     * @param {WebgpuGraphicsDevice} device - The graphics device.
     * @param {TextureView} [textureView] - Optional TextureView specifying view parameters.
     * @returns {GPUTextureView} - Returns the view.
     * @private
     */
    private getView;
    /** Creates a GPUTextureView using the supplied view descriptor. */
    createView(viewDescr: any): any;
    /**
     * @param {any} device - The Graphics Device.
     * @param {number} [sampleType] - A sample type for the sampler, SAMPLETYPE_*** constant. If not
     * specified, the sampler type is based on the texture format / texture sampling type.
     * @returns {any} - Returns the sampler.
     */
    getSampler(device: any, sampleType?: number): any;
    loseContext(): void;
    /**
     * @param {WebgpuGraphicsDevice} device - The graphics device.
     * @param {Texture} texture - The texture.
     */
    uploadImmediate(device: WebgpuGraphicsDevice, texture: Texture): void;
    /**
     * @param {WebgpuGraphicsDevice} device - The graphics device.
     */
    uploadData(device: WebgpuGraphicsDevice): void;
    /** Returns true if the given image is handled by the external-image upload path. */
    isExternalImage(image: any): boolean;
    uploadExternalImage(device: any, image: any, mipLevel: any, index: any): void;
    uploadTypedArrayData(device: any, data: any, mipLevel: any, index: any): void;
    /** Asynchronously reads back a rectangular region of the texture. */
    read(x: any, y: any, width: any, height: any, options: any): Promise<any>;
}

/**
 * @import { WebgpuGraphicsDevice } from './webgpu-graphics-device.js'
 * @import { WebgpuShader } from './webgpu-shader.js'
 * @import { WebgpuTexture } from './webgpu-texture.js'
 */
/**
 * A WebGPU helper class implementing texture mipmap generation.
 *
 * @ignore
 */
declare class WebgpuMipmapRenderer {
    constructor(device: any);
    /** @type {WebgpuGraphicsDevice} */
    device: WebgpuGraphicsDevice;
    /**
     * Cache of render pipelines keyed by texture format.
     *
     * @type {Map<string, GPURenderPipeline>}
     * @private
     */
    private pipelineCache;
    /** Shader used for mipmap generation. */
    shader: Shader;
    /** Sampler used during mipmap generation. */
    minSampler: any;
    destroy(): void;
    /**
     * Generates mipmaps for the specified WebGPU texture.
     *
     * @param {WebgpuTexture} webgpuTexture - The texture to generate mipmaps for.
     */
    generate(webgpuTexture: WebgpuTexture): void;
}

/**
 * A WebGPU implementation of the BindGroupFormat, which is a wrapper over GPUBindGroupLayout.
 *
 * @ignore
 */
declare class WebgpuBindGroupFormat {
    /**
     * @param {BindGroupFormat} bindGroupFormat - Bind group format.
     */
    constructor(bindGroupFormat: BindGroupFormat);
    /**
     * Unique key, used for caching
     *
     * @type {number}
     */
    key: number;
    /** Descriptor used to create the bind group layout (see createDescriptor). */
    desc: any;
    /**
     * @type {GPUBindGroupLayout}
     * @private
     */
    private bindGroupLayout;
    destroy(): void;
    loseContext(): void;
    /**
     * @param {any} bindGroupFormat - The format of the bind group.
     * @returns {any} Returns the bind group descriptor.
     */
    createDescriptor(bindGroupFormat: any): any;
}

/**
 * @import { WebgpuGraphicsDevice } from './webgpu-graphics-device.js'
 * @import { WebgpuShader } from './webgpu-shader.js'
 */
/**
 * A WebGPU helper class implementing custom resolve of multi-sampled textures.
 *
 * @ignore
 */
declare class WebgpuResolver {
    constructor(device: any);
    /** @type {WebgpuGraphicsDevice} */
    device: WebgpuGraphicsDevice;
    /**
     * Cache of render pipelines for each texture format, to avoid their per frame creation.
     *
     * @type {Map<GPUTextureFormat, GPURenderPipeline>}
     * @private
     */
    private pipelineCache;
    /** Shader used for the resolve operation. */
    shader: Shader;
    destroy(): void;
    /**
     * @param {GPUTextureFormat} format - Texture format.
     * @returns {GPURenderPipeline} Pipeline for the given format.
     * @private
     */
    private getPipeline;
    /**
     * @param {GPUTextureFormat} format - Texture format.
     * @returns {GPURenderPipeline} Pipeline for the given format.
     * @private
     */
    private createPipeline;
    /**
     * @param {GPUCommandEncoder} commandEncoder - Command encoder to use for the resolve.
     * @param {GPUTexture} sourceTexture - Source multi-sampled depth texture to resolve.
     * @param {GPUTexture} destinationTexture - Destination depth texture to resolve to.
     * @private
     */
    private resolveDepth;
}

/**
 * @import { WebgpuGraphicsDevice } from './webgpu-graphics-device.js'
 */
/**
 * A WebGPU implementation of the Buffer.
 *
 * @ignore
 */
declare class WebgpuBuffer {
    constructor(usageFlags?: number);
    /**
     * @type {GPUBuffer|null}
     * @private
     */
    private buffer;
    /** Usage flags used when allocating the underlying GPU buffer. */
    usageFlags: number;
    destroy(device: any): void;
    /** True if the underlying GPU buffer has been allocated. */
    get initialized(): boolean;
    loseContext(): void;
    /** Allocates the underlying GPU buffer of the given byte size. */
    allocate(device: any, size: any): void;
    /**
     * @param {WebgpuGraphicsDevice} device - Graphics device.
     * @param {*} storage - The source data to upload to the buffer.
     */
    unlock(device: WebgpuGraphicsDevice, storage: any): void;
    /** Reads back buffer contents; see WebgpuGraphicsDevice.readStorageBuffer for semantics. */
    read(device: any, offset: any, size: any, data: any, immediate: any): any;
    /** Writes data into the buffer; see WebgpuGraphicsDevice.writeStorageBuffer for semantics. */
    write(device: any, bufferOffset: any, data: any, dataOffset: any, size: any): void;
    /** Clears a range of the buffer to 0. */
    clear(device: any, offset: any, size: any): void;
}

/**
 * A WebGPU implementation of the UniformBuffer.
 *
 * @ignore
 */
declare class WebgpuUniformBuffer extends WebgpuBuffer {
    /**
     * @param {UniformBuffer} uniformBuffer - The uniform buffer owning this implementation.
     */
    constructor(uniformBuffer: any);
    /** Uploads the uniform buffer's data to the GPU. */
    unlock(uniformBuffer: any): void;
}

/**
 * A WebGPU implementation of the VertexBuffer.
 *
 * @ignore
 */
declare class WebgpuVertexBuffer extends WebgpuBuffer {
    /**
     * @param {VertexBuffer} vertexBuffer - The vertex buffer owning this implementation.
     * @param {VertexFormat} format - The vertex format.
     * @param {object} options - Creation options.
     */
    constructor(vertexBuffer: any, format: any, options: any);
    /** Uploads the vertex buffer's data to the GPU. */
    unlock(vertexBuffer: any): void;
}

/**
 * A WebGPU implementation of the IndexBuffer.
 *
 * @ignore
 */
declare class WebgpuIndexBuffer extends WebgpuBuffer {
    /**
     * @param {IndexBuffer} indexBuffer - The index buffer owning this implementation.
     * @param {object} options - Creation options.
     */
    constructor(indexBuffer: any, options: any);
    /** The WebGPU index format (GPUIndexFormat) of the buffer. */
    format: any;
    /** Uploads the index buffer's data to the GPU. */
    unlock(indexBuffer: any): void;
}

/**
 * A WebGPU implementation of the Shader.
 *
 * @ignore
 */
declare class WebgpuShader {
    /**
     * @param {Shader} shader - The shader.
     */
    constructor(shader: Shader);
    /**
     * Transpiled vertex shader code.
     *
     * @type {string|null}
     */
    _vertexCode: string | null;
    /**
     * Transpiled fragment shader code.
     *
     * @type {string|null}
     */
    _fragmentCode: string | null;
    /**
     * Compute shader code.
     *
     * @type {string|null}
     */
    _computeCode: string | null;
    /**
     * Cached content-based key for compute shader.
     *
     * @type {number|undefined}
     * @private
     */
    private _computeKey;
    /**
     * Name of the vertex entry point function.
     */
    vertexEntryPoint: string;
    /**
     * Name of the fragment entry point function.
     */
    fragmentEntryPoint: string;
    /**
     * Name of the compute entry point function.
     */
    computeEntryPoint: string;
    /** @type {Shader} */
    shader: Shader;
    computeUniformBufferFormats: any;
    computeBindGroupFormat: any;
    /**
     * Free the WebGPU resources associated with a shader.
     *
     * @param {Shader} shader - The shader to free.
     */
    destroy(shader: Shader): void;
    /** Creates a shader module from the given code for the given shader type. */
    createShaderModule(code: any, shaderType: any): any;
    getVertexShaderModule(): any;
    getFragmentShaderModule(): any;
    getComputeShaderModule(): any;
    /** Processes a GLSL-authored shader (see also transpile). */
    processGLSL(): void;
    processed: any;
    /** Processes a WGSL-authored shader. */
    processWGSL(): void;
    /** Transpiles the given source for the given shader type. */
    transpile(src: any, shaderType: any, originalSrc: any): any;
    get vertexCode(): string;
    get fragmentCode(): string;
    /**
     * Content-based key for compute shader caching. Returns the same key for identical
     * shader code and entry point combinations, regardless of how many Shader instances exist.
     *
     * @type {number}
     * @ignore
     */
    get computeKey(): number;
    /**
     * Dispose the shader when the context has been lost.
     */
    loseContext(): void;
    /**
     * Restore shader after the context has been obtained.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {Shader} shader - The shader to restore.
     */
    restoreContext(device: GraphicsDevice, shader: Shader): void;
}

/**
 * @import { GraphicsDevice } from '../graphics-device.js'
 */
/**
 * WebGPU implementation of DrawCommands.
 *
 * @ignore
 */
declare class WebgpuDrawCommands {
    /**
     * @param {GraphicsDevice} device - Graphics device.
     */
    constructor(device: GraphicsDevice);
    /** @type {GraphicsDevice} */
    device: GraphicsDevice;
    /**
     * Unsigned view of the indirect draw data (AoS layout).
     *
     * @type {Uint32Array|null}
     */
    gpuIndirect: Uint32Array | null;
    /**
     * Signed (Int32) view of the indirect draw data, used for signed fields such as baseVertex.
     *
     * @type {Int32Array|null}
     */
    gpuIndirectSigned: Int32Array | null;
    /**
     * Storage buffer backing the indirect draw data on the GPU.
     *
     * @type {StorageBuffer|null}
     */
    storage: StorageBuffer | null;
    /**
     * Allocate AoS buffer and backing storage buffer.
     * @param {number} maxCount - Number of sub-draws.
     */
    allocate(maxCount: number): void;
    /**
     * Write a single draw entry.
     * @param {number} i - Draw index.
     * @param {number} indexOrVertexCount - Count of indices/vertices.
     * @param {number} instanceCount - Instance count.
     * @param {number} firstIndexOrVertex - First index/vertex.
     * @param {number} baseVertex - Base vertex (signed).
     * @param {number} firstInstance - First instance.
     */
    add(i: number, indexOrVertexCount: number, instanceCount: number, firstIndexOrVertex: number, baseVertex?: number, firstInstance?: number): void;
    /**
     * Upload AoS data to storage buffer.
     * @param {number} count - Number of active draws.
     * @returns {number} Total primitive count.
     */
    update(count: number): number;
    /** Releases resources owned by this instance. */
    destroy(): void;
}

/**
 * A WebGPU implementation of the RenderTarget.
 *
 * @ignore
 */
declare class WebgpuRenderTarget {
    /**
     * @param {RenderTarget} renderTarget - The render target owning this implementation.
     */
    constructor(renderTarget: RenderTarget);
    /** @type {boolean} */
    initialized: boolean;
    /**
     * Unique key used by render pipeline creation
     *
     * @type {number}
     */
    key: number;
    /** @type {ColorAttachment[]} */
    colorAttachments: ColorAttachment[];
    /** @type {DepthAttachment|null} */
    depthAttachment: DepthAttachment | null;
    /**
     * Texture assigned each frame, and not owned by this render target. This is used on the
     * framebuffer to assign per frame texture obtained from the context.
     *
     * @type {GPUTexture}
     * @private
     */
    private assignedColorTexture;
    /**
     * Render pass descriptor used when starting a render pass for this render target.
     *
     * @type {GPURenderPassDescriptor}
     * @private
     */
    private renderPassDescriptor;
    /**
     * True if this is the backbuffer of the device.
     *
     * @type {boolean}
     */
    isBackbuffer: boolean;
    /** The RenderTarget owning this implementation. */
    renderTarget: RenderTarget;
    /**
     * Release associated resources. Note that this needs to leave this instance in a state where
     * it can be re-initialized again, which is used by render target resizing.
     *
     * @param {WebgpuGraphicsDevice} device - The graphics device.
     */
    destroy(device: WebgpuGraphicsDevice): void;
    /** Updates the unique key used by render pipeline creation. */
    updateKey(): void;
    /**
     * Assign a color buffer. This allows the color buffer of the main framebuffer
     * to be swapped each frame to a buffer provided by the context.
     *
     * @param {WebgpuGraphicsDevice} device - The WebGPU graphics device.
     * @param {any} gpuTexture - The color buffer.
     */
    assignColorTexture(device: WebgpuGraphicsDevice, gpuTexture: any): void;
    /** Stores the multisampled buffer and format for the color attachment at the given index. */
    setColorAttachment(index: any, multisampledBuffer: any, format: any): void;
    /**
     * Initialize render target for rendering one time.
     *
     * @param {WebgpuGraphicsDevice} device - The graphics device.
     * @param {RenderTarget} renderTarget - The render target.
     */
    init(device: WebgpuGraphicsDevice, renderTarget: RenderTarget): void;
    /** Initializes the depth-stencil attachment of the render target. */
    initDepthStencil(device: any, wgpu: any, renderTarget: any): void;
    /**
     * @param {WebgpuGraphicsDevice} device - The graphics device.
     * @param {GPUDevice} wgpu - The WebGPU device.
     * @param {RenderTarget} renderTarget - The render target.
     * @param {number} index - The color buffer index.
     * @returns {GPURenderPassColorAttachment} The color attachment.
     * @private
     */
    private initColor;
    /**
     * Update WebGPU render pass descriptor by RenderPass settings.
     *
     * @param {RenderPass} renderPass - The render pass to start.
     * @param {RenderTarget} renderTarget - The render target to render to.
     */
    setupForRenderPass(renderPass: RenderPass, renderTarget: RenderTarget): void;
    loseContext(): void;
    /** Resolves color and/or depth of this render target into the given target. */
    resolve(device: any, target: any, color: any, depth: any): void;
}
/**
 * Private class storing info about color buffer.
 *
 * @private
 */
declare class ColorAttachment {
    /**
     * @type {GPUTextureFormat}
     * @private
     */
    private format;
    /**
     * @type {GPUTexture}
     * @private
     */
    private multisampledBuffer;
    /** Releases GPU resources held by this attachment. */
    destroy(): void;
}
/**
 * Private class storing info about depth-stencil buffer.
 *
 * @private
 */
declare class DepthAttachment {
    /**
     * @param {string} gpuFormat - The WebGPU format (GPUTextureFormat).
     */
    constructor(gpuFormat: string);
    /**
     * @type {GPUTextureFormat}
     * @private
     */
    private format;
    /** @type {boolean} */
    hasStencil: boolean;
    /**
     * @type {GPUTexture|null}
     * @private
     */
    private depthTexture;
    /**
     * True if the depthTexture is internally allocated / owned
     *
     * @type {boolean}
     */
    depthTextureInternal: boolean;
    /**
     * Multi-sampled depth buffer allocated over the user provided depth buffer.
     *
     * @type {GPUTexture|null}
     * @private
     */
    private multisampledDepthBuffer;
    /**
     * Key used to store multisampledDepthBuffer in the cache.
     */
    multisampledDepthBufferKey: any;
    /** Releases GPU resources held by this attachment. */
    destroy(device: any): void;
}

/**
 * WebGPU implementation of UploadStream.
 * Can use either simple direct writes or optimized staging buffer strategy.
 *
 * @ignore
 */
declare class WebgpuUploadStream {
    /**
     * @param {UploadStream} uploadStream - The upload stream.
     */
    constructor(uploadStream: UploadStream);
    /**
     * Available staging buffers ready for immediate use.
     *
     * @type {GPUBuffer[]}
     * @private
     */
    private availableStagingBuffers;
    /**
     * Staging buffers currently in use by the GPU.
     *
     * @type {GPUBuffer[]}
     * @private
     */
    private pendingStagingBuffers;
    /** True after destroy() has been called. */
    _destroyed: boolean;
    /**
     * The device's _submitVersion at the time the last staging copy was recorded.
     * Used to detect whether the copy has been submitted before the next upload.
     *
     * @type {number}
     * @private
     */
    private _lastUploadSubmitVersion;
    /** The UploadStream owning this implementation. */
    uploadStream: UploadStream;
    /** Selects between the two upload strategies described on the class. */
    useSingleBuffer: boolean;
    /**
     * Handles device lost event.
     * TODO: Implement proper WebGPU device lost handling if needed.
     *
     * @protected
     */
    protected _onDeviceLost(): void;
    destroy(): void;
    /**
     * Update staging buffers: recycle completed ones and remove undersized buffers.
     *
     * @param {number} minByteSize - Minimum size for buffers to keep. Smaller buffers are destroyed.
     */
    update(minByteSize: number): void;
    /**
     * Upload data to a storage buffer using staging buffers (optimized) or direct write (simple).
     *
     * @param {Uint8Array|Uint32Array|Float32Array} data - The data to upload.
     * @param {import('../storage-buffer.js').StorageBuffer} target - The target storage buffer.
     * @param {number} offset - The element offset in the target. Byte offset must be a multiple of 4.
     * @param {number} size - The number of elements to upload. Byte size must be a multiple of 4.
     */
    upload(data: Uint8Array | Uint32Array | Float32Array, target: StorageBuffer, offset: number, size: number): void;
    /**
     * Direct storage buffer write (simple, blocking).
     *
     * @param {Uint8Array|Uint32Array|Float32Array} data - The data to upload.
     * @param {import('../storage-buffer.js').StorageBuffer} target - The target storage buffer.
     * @param {number} offset - The element offset in the target.
     * @param {number} size - The number of elements to upload.
     * @private
     */
    private uploadDirect;
    /**
     * Staging buffer-based upload.
     *
     * @param {Uint8Array|Uint32Array|Float32Array} data - The data to upload.
     * @param {import('../storage-buffer.js').StorageBuffer} target - The target storage buffer.
     * @param {number} offset - The element offset in the target.
     * @param {number} size - The number of elements to upload.
     * @private
     */
    private uploadStaging;
}

/**
 * @import { BindGroup } from '../bind-group.js'
 * @import { WebgpuGraphicsDevice } from './webgpu-graphics-device.js'
 * @import { WebgpuTexture } from './webgpu-texture.js'
 */
/**
 * A WebGPU implementation of the BindGroup, which is a wrapper over GPUBindGroup.
 *
 * @ignore
 */
declare class WebgpuBindGroup {
    /**
     * @type {GPUBindGroup}
     * @private
     */
    private bindGroup;
    /** Updates the underlying GPUBindGroup from the current state of the bind group. */
    update(bindGroup: any): void;
    destroy(): void;
    /**
     * Creates a bind group descriptor in WebGPU format
     *
     * @param {WebgpuGraphicsDevice} device - Graphics device.
     * @param {BindGroup} bindGroup - Bind group to create the
     * descriptor for.
     * @returns {object} - Returns the generated descriptor of type GPUBindGroupDescriptor, which
     * can be used to create a GPUBindGroup
     */
    createDescriptor(device: WebgpuGraphicsDevice, bindGroup: BindGroup): object;
    /** String description of the bind group format, used for debugging. */
    debugFormat: string;
}

/**
 * A WebGPU implementation of the Compute.
 *
 * @ignore
 */
declare class WebgpuCompute {
    constructor(compute: any);
    /** @type {UniformBuffer[]} */
    uniformBuffers: UniformBuffer[];
    /** @type {BindGroup} */
    bindGroup: BindGroup;
    /** The Compute instance owning this implementation. */
    compute: any;
    /** The compute pipeline used by dispatch. */
    pipeline: any;
    destroy(): void;
    /** Updates the bind group before dispatch. */
    updateBindGroup(): void;
    /** Dispatches the compute work with the given x, y, z dimensions. */
    dispatch(x: any, y: any, z: any): void;
}

declare class WebgpuGraphicsDevice extends GraphicsDevice {
    constructor(canvas: any, options?: {});
    /**
     * Array of GPU resources pending destruction. Resources are destroyed after the current
     * command buffers are submitted to ensure they're not in use.
     *
     * @type {Array<GPUTexture|GPUBuffer|GPUQuerySet>}
     * @private
     */
    private _deferredDestroys;
    /**
     * Object responsible for caching and creation of render pipelines.
     */
    renderPipeline: WebgpuRenderPipeline;
    /**
     * Object responsible for caching and creation of compute pipelines.
     */
    computePipeline: WebgpuComputePipeline;
    /**
     * Buffer used to store arguments for indirect draw calls.
     *
     * @type {StorageBuffer|null}
     * @private
     */
    private _indirectDrawBuffer;
    /**
     * Number of indirect draw slots allocated.
     *
     * @type {number}
     * @private
     */
    private _indirectDrawBufferCount;
    /**
     * Next unused index in indirectDrawBuffer.
     *
     * @type {number}
     * @private
     */
    private _indirectDrawNextIndex;
    /**
     * Buffer used to store arguments for indirect dispatch calls.
     *
     * @type {StorageBuffer|null}
     * @private
     */
    private _indirectDispatchBuffer;
    /**
     * Number of indirect dispatch slots allocated.
     *
     * @type {number}
     * @private
     */
    private _indirectDispatchBufferCount;
    /**
     * Next unused index in indirectDispatchBuffer.
     *
     * @type {number}
     * @private
     */
    private _indirectDispatchNextIndex;
    /**
     * Object responsible for clearing the rendering surface by rendering a quad.
     *
     * @type { WebgpuClearRenderer }
     */
    clearRenderer: WebgpuClearRenderer;
    /**
     * Object responsible for mipmap generation.
     *
     * @type { WebgpuMipmapRenderer }
     */
    mipmapRenderer: WebgpuMipmapRenderer;
    /**
     * Render pipeline currently set on the device.
     *
     * @type {GPURenderPipeline|null}
     * @private
     */
    private pipeline;
    /**
     * An array of bind group formats, based on currently assigned bind groups
     *
     * @type {WebgpuBindGroupFormat[]}
     */
    bindGroupFormats: WebgpuBindGroupFormat[];
    /**
     * An empty bind group, used when the draw call is using a typical bind group layout based on
     * BINDGROUP_*** constants but some bind groups are not needed, for example clear renderer.
     *
     * @type {BindGroup}
     */
    emptyBindGroup: BindGroup;
    /**
     * Monotonically increasing counter incremented each time queue.submit() is called.
     *
     * @type {number}
     * @ignore
     */
    submitVersion: number;
    /**
     * Current command buffer encoder.
     *
     * @type {GPUCommandEncoder|null}
     * @private
     */
    private commandEncoder;
    /**
     * Command buffers scheduled for execution on the GPU.
     *
     * @type {GPUCommandBuffer[]}
     * @private
     */
    private commandBuffers;
    /**
     * @type {GPUSupportedLimits}
     * @private
     */
    private limits;
    /** GLSL to SPIR-V transpiler */
    glslang: any;
    /** SPIR-V to WGSL transpiler */
    twgsl: any;
    backBufferAntialias: any;
    isWebGPU: boolean;
    _deviceType: string;
    featureLevel: any;
    resolver: WebgpuResolver;
    initDeviceCaps(): void;
    maxPrecision: string;
    maxTextures: number;
    fragmentUniformsCount: number;
    vertexUniformsCount: number;
    supportsAreaLights: boolean;
    supportsGpuParticles: boolean;
    supportsImageBitmap: boolean;
    initWebGpu(glslangUrl: any, twgslUrl: any): Promise<this>;
    createDevice(): Promise<this>;
    /**
     * @type {GPUAdapter}
     * @private
     */
    private gpuAdapter;
    textureFloatBlendable: any;
    extCompressedTextureS3TC: any;
    extCompressedTextureS3TCSliced3D: any;
    extCompressedTextureETC: any;
    extCompressedTextureASTC: any;
    extCompressedTextureASTCSliced3D: any;
    supportsTimestampQuery: any;
    supportsDepthClip: any;
    supportsDepth32Stencil: any;
    supportsIndirectFirstInstance: any;
    supportsStorageRGBA8: any;
    /**
     * @type {GPUDevice}
     * @private
     */
    private wgpu;
    /**
     * Compute performance index measured at startup (milliseconds for a fixed benchmark
     * workload). Used by GSplat auto-selection to choose between compute and V/F renderers.
     * -1 if timestamp queries are unavailable.
     *
     * @type {number}
     * @ignore
     */
    computePerfIndex: number;
    gpuContext: RenderingContext;
    backBufferViewFormat: any;
    /**
     * Configuration of the main colorframebuffer we obtain using getCurrentTexture
     *
     * @type {GPUCanvasConfiguration}
     * @private
     */
    private canvasConfig;
    handleDeviceLost(info: any): Promise<void>;
    createBackbuffer(): void;
    createBufferImpl(usageFlags: any): WebgpuBuffer;
    createUniformBufferImpl(uniformBuffer: any): WebgpuUniformBuffer;
    createVertexBufferImpl(vertexBuffer: any, format: any, options: any): WebgpuVertexBuffer;
    createIndexBufferImpl(indexBuffer: any, options: any): WebgpuIndexBuffer;
    createShaderImpl(shader: any): WebgpuShader;
    createDrawCommandImpl(drawCommands: any): WebgpuDrawCommands;
    createTextureImpl(texture: any): WebgpuTexture;
    createRenderTargetImpl(renderTarget: any): WebgpuRenderTarget;
    createUploadStreamImpl(uploadStream: any): WebgpuUploadStream;
    createBindGroupFormatImpl(bindGroupFormat: any): WebgpuBindGroupFormat;
    createBindGroupImpl(bindGroup: any): WebgpuBindGroup;
    createComputeImpl(compute: any): WebgpuCompute;
    allocateIndirectDrawBuffer(): void;
    allocateIndirectDispatchBuffer(): void;
    /**
     * @param {number} index - Index of the bind group slot
     * @param {BindGroup} bindGroup - Bind group to attach
     * @param {number[]} [offsets] - Byte offsets for all uniform buffers in the bind group.
     */
    setBindGroup(index: number, bindGroup: BindGroup, offsets?: number[]): void;
    submitVertexBuffer(vertexBuffer: any, slot: any): any;
    validateVBLocations(vb0: any, vb1: any): void;
    draw(primitive: any, indexBuffer: any, numInstances: number, drawCommands: any, first?: boolean, last?: boolean): void;
    setShader(shader: any, asyncCompile?: boolean): void;
    setBlendState(blendState: any): void;
    setDepthState(depthState: any): void;
    setStencilState(stencilFront: any, stencilBack: any): void;
    stencilRef: any;
    setBlendColor(r: any, g: any, b: any, a: any): void;
    setCullMode(cullMode: any): void;
    setFrontFace(frontFace: any): void;
    setAlphaToCoverage(state: any): void;
    /**
     * Set up default values for the render pass encoder.
     */
    setupPassEncoderDefaults(): void;
    _uploadDirtyTextures(): void;
    setupTimeStampWrites(passDesc: any, name: any): any;
    /**
     * Start a render pass.
     *
     * @param {RenderPass} renderPass - The render pass to start.
     * @ignore
     */
    startRenderPass(renderPass: RenderPass): void;
    passEncoder: any;
    /**
     * End a render pass.
     *
     * @param {RenderPass} renderPass - The render pass to end.
     * @ignore
     */
    endRenderPass(renderPass: RenderPass): void;
    computeDispatch(computes: any, name?: string): void;
    getCommandEncoder(): any;
    endCommandEncoder(): void;
    addCommandBuffer(commandBuffer: any, front?: boolean): void;
    submit(): void;
    /**
     * Defer destruction of a GPU resource until after the current command buffers are submitted.
     * This ensures the resource is not destroyed while still referenced by pending GPU commands.
     *
     * @param {GPUTexture|GPUBuffer|GPUQuerySet} gpuResource - The GPU resource to destroy.
     * @private
     */
    private deferDestroy;
    clear(options: any): void;
    setViewport(x: any, y: any, w: any, h: any): void;
    setScissor(x: any, y: any, w: any, h: any): void;
    /**
     * Clear the content of a storage buffer to 0.
     *
     * @param {WebgpuBuffer} storageBuffer - The storage buffer.
     * @param {number} [offset] - The offset of data to clear. Defaults to 0.
     * @param {number} [size] - The size of data to clear. Defaults to the full size of the buffer.
     * @ignore
     */
    clearStorageBuffer(storageBuffer: WebgpuBuffer, offset?: number, size?: number): void;
    /**
     * Read a content of a storage buffer.
     *
     * @param {WebgpuBuffer} storageBuffer - The storage buffer.
     * @param {number} [offset] - The byte offset of data to read. Defaults to 0.
     * @param {number} [size] - The byte size of data to read. Defaults to the full size of the
     * buffer minus the offset.
     * @param {ArrayBufferView} [data] - Typed array to populate with the data read from the storage
     * buffer. When typed array is supplied, enough space needs to be reserved, otherwise only
     * partial data is copied. If not specified, the data is returned in an Uint8Array. Defaults to
     * null.
     * @param {boolean} [immediate] - If true, the read operation will be executed as soon as
     * possible. This has a performance impact, so it should be used only when necessary. Defaults
     * to false.
     * @returns {Promise<ArrayBufferView>} A promise that resolves with the data read from the storage
     * buffer.
     * @ignore
     */
    readStorageBuffer(storageBuffer: WebgpuBuffer, offset?: number, size?: number, data?: ArrayBufferView, immediate?: boolean): Promise<ArrayBufferView>;
    readBuffer(stagingBuffer: any, size: any, data?: any, immediate?: boolean): Promise<any>;
    /**
     * Issues a write operation of the provided data into a storage buffer. Writing starts at
     * `bufferOffset` bytes into the buffer.
     *
     * @param {WebgpuBuffer} storageBuffer - The storage buffer.
     * @param {number} bufferOffset - The offset in bytes to start writing to the storage buffer.
     * @param {ArrayBufferView} data - The data to write to the storage buffer.
     * @param {number} dataOffset - Offset in data to begin writing from. Given in elements if data
     * is a TypedArray and bytes otherwise.
     * @param {number} size - Size of content to write from data to buffer. Given in elements if
     * data is a TypedArray and bytes otherwise.
     */
    writeStorageBuffer(storageBuffer: WebgpuBuffer, bufferOffset: number, data: ArrayBufferView, dataOffset: number, size: number): void;
    /**
     * Copies source render target into destination render target. Mostly used by post-effects.
     *
     * @param {RenderTarget} [source] - The source render target. Defaults to frame buffer.
     * @param {RenderTarget} [dest] - The destination render target. Defaults to frame buffer.
     * @param {boolean} [color] - If true, will copy the color buffer. Defaults to false.
     * @param {boolean} [depth] - If true, will copy the depth buffer. Defaults to false.
     * @returns {boolean} True if the copy was successful, false otherwise — NOTE(review): the
     * failure conditions are not visible in this declaration; confirm in the implementation.
     */
    copyRenderTarget(source?: RenderTarget, dest?: RenderTarget, color?: boolean, depth?: boolean): boolean;
    /**
     * True if the device supports shader transpilation — NOTE(review): inferred from the name;
     * confirm against the implementation.
     */
    get hasTranspilers(): any;
    /**
     * Pushes a named debug marker onto the device, for use by GPU debugging/profiling tools.
     *
     * @param {any} name - The marker name.
     */
    pushMarker(name: any): void;
    /**
     * Pops the most recently pushed debug marker.
     */
    popMarker(): void;
}

/**
 * A Null implementation of the VertexBuffer.
 *
 * @ignore
 */
declare class NullVertexBuffer {
    /**
     * Backend hook invoked when the owning vertex buffer is destroyed.
     *
     * @param {any} device - The graphics device.
     */
    destroy(device: any): void;
    /**
     * Backend hook invoked when the owning vertex buffer's data is unlocked (updated).
     *
     * @param {any} vertexBuffer - The vertex buffer that was unlocked.
     */
    unlock(vertexBuffer: any): void;
}

/**
 * A Null implementation of the IndexBuffer.
 *
 * @ignore
 */
declare class NullIndexBuffer {
    /**
     * Backend hook invoked when the owning index buffer's data is unlocked (updated).
     *
     * @param {any} indexBuffer - The index buffer that was unlocked.
     */
    unlock(indexBuffer: any): void;
}

/**
 * A Null implementation of the Shader.
 *
 * @ignore
 */
declare class NullShader {
    /**
     * Backend hook invoked when the owning shader is destroyed.
     *
     * @param {any} shader - The shader being destroyed.
     */
    destroy(shader: any): void;
    /**
     * Backend hook invoked when the graphics context is lost.
     */
    loseContext(): void;
    /**
     * Backend hook invoked when the graphics context is restored.
     *
     * @param {any} device - The graphics device.
     * @param {any} shader - The shader to restore.
     */
    restoreContext(device: any, shader: any): void;
}

/**
 * A Null implementation of the Texture.
 *
 * @ignore
 */
declare class NullTexture {
    /**
     * Backend hook invoked when the owning texture is destroyed.
     *
     * @param {any} device - The graphics device.
     */
    destroy(device: any): void;
    /**
     * Backend hook invoked when a texture property changes.
     *
     * @param {any} flag - Identifies the property that changed — NOTE(review): flag semantics
     * inferred from the name; confirm against the implementation.
     */
    propertyChanged(flag: any): void;
    /**
     * Backend hook invoked when the graphics context is lost.
     */
    loseContext(): void;
}

/**
 * A Null implementation of the RenderTarget.
 *
 * @ignore
 */
declare class NullRenderTarget {
    /**
     * Backend hook invoked when the owning render target is destroyed.
     *
     * @param {any} device - The graphics device.
     */
    destroy(device: any): void;
    /**
     * Backend hook that initializes the render target on the device.
     *
     * @param {any} device - The graphics device.
     * @param {any} renderTarget - The render target to initialize.
     */
    init(device: any, renderTarget: any): void;
    /**
     * Backend hook invoked when the graphics context is lost.
     */
    loseContext(): void;
    /**
     * Backend hook for resolving buffers into the target.
     *
     * @param {any} device - The graphics device.
     * @param {any} target - The render target to resolve.
     * @param {any} color - Whether to resolve the color buffer.
     * @param {any} depth - Whether to resolve the depth buffer.
     */
    resolve(device: any, target: any, color: any, depth: any): void;
}

/**
 * Null implementation of DrawCommands.
 *
 * @ignore
 */
declare class NullDrawCommands {
    /**
     * Records a draw command at the given index.
     *
     * @param {any} i - The index of the draw command.
     * @param {any} indexOrVertexCount - Number of indices (indexed draw) or vertices to render.
     * @param {any} instanceCount - Number of instances to render.
     * @param {any} firstIndexOrVertex - First index (indexed draw) or first vertex to render from.
     */
    add(i: any, indexOrVertexCount: any, instanceCount: any, firstIndexOrVertex: any): void;
}

/**
 * A GraphicsDevice implementation that performs no actual rendering, backed by the Null*
 * resource implementations above. NOTE(review): purpose inferred from the null backend
 * classes — useful where no graphics output is required; confirm against the implementation.
 */
declare class NullGraphicsDevice extends GraphicsDevice {
    constructor(canvas: any, options?: {});
    /** Identifies this device as the null backend. */
    isNull: boolean;
    _deviceType: string;
    samples: number;
    initDeviceCaps(): void;
    /* Capability values below are presumably assigned by initDeviceCaps() — confirm. */
    disableParticleSystem: boolean;
    maxPrecision: string;
    maxTextures: number;
    supportsAreaLights: boolean;
    supportsGpuParticles: boolean;
    supportsImageBitmap: boolean;
    updateBegin(): void;
    updateEnd(): void;
    readPixels(x: any, y: any, w: any, h: any, pixels: any): void;
    /* Factory methods returning the Null* backend implementations declared above. */
    createVertexBufferImpl(vertexBuffer: any, format: any): NullVertexBuffer;
    createIndexBufferImpl(indexBuffer: any): NullIndexBuffer;
    createShaderImpl(shader: any): NullShader;
    createTextureImpl(texture: any): NullTexture;
    createRenderTargetImpl(renderTarget: any): NullRenderTarget;
    createDrawCommandImpl(drawCommands: any): NullDrawCommands;
    createUploadStreamImpl(uploadStream: any): any;
    draw(primitive: any, indexBuffer: any, numInstances: any, drawCommands: any, first?: boolean, last?: boolean): void;
    setShader(shader: any, asyncCompile?: boolean): void;
    setBlendState(blendState: any): void;
    setDepthState(depthState: any): void;
    setStencilState(stencilFront: any, stencilBack: any): void;
    setBlendColor(r: any, g: any, b: any, a: any): void;
    setCullMode(cullMode: any): void;
    setFrontFace(frontFace: any): void;
    setAlphaToCoverage(state: any): void;
    clear(options: any): void;
    setViewport(x: any, y: any, w: any, h: any): void;
    setScissor(x: any, y: any, w: any, h: any): void;
    copyRenderTarget(source: any, dest: any, color: any, depth: any): boolean;
    pushMarker(name: any): void;
    popMarker(): void;
}

/**
 * @import { GamePads } from './game-pads.js'
 */
/**
 * A general input handler which handles both mouse and keyboard input assigned to named actions.
 * This allows you to define input handlers separately to defining keyboard/mouse configurations.
 *
 * @category Input
 */
declare class Controller {
    /**
     * Create a new instance of a Controller.
     *
     * @param {Element} [element] - Element to attach Controller to.
     * @param {object} [options] - Optional arguments.
     * @param {Keyboard} [options.keyboard] - A Keyboard object to use.
     * @param {Mouse} [options.mouse] - A Mouse object to use.
     * @param {GamePads} [options.gamepads] - A Gamepads object to use.
     * @example
     * const c = new pc.Controller(document);
     *
     * // Register the "fire" action and assign it to both the Enter key and the space bar.
     * c.registerKeys("fire", [pc.KEY_ENTER, pc.KEY_SPACE]);
     */
    constructor(element?: Element, options?: {
        keyboard?: Keyboard;
        mouse?: Mouse;
        gamepads?: GamePads;
    });
    /**
     * @type {Keyboard|null}
     * @private
     */
    private _keyboard;
    /**
     * @type {Mouse|null}
     * @private
     */
    private _mouse;
    /**
     * @type {GamePads|null}
     * @private
     */
    private _gamepads;
    /**
     * @type {Element|null}
     * @private
     */
    private _element;
    /** @private */
    private _actions;
    /** @private */
    private _axes;
    /** @private */
    private _axesValues;
    /**
     * Attach Controller to an Element. This is required before you can monitor for key/mouse
     * inputs.
     *
     * @param {Element} element - The element to attach mouse and keyboard event handlers to.
     */
    attach(element: Element): void;
    /**
     * Detach Controller from an Element. This should be done before the Controller is destroyed.
     */
    detach(): void;
    /**
     * Disable the context menu usually activated with the right mouse button.
     */
    disableContextMenu(): void;
    /**
     * Enable the context menu usually activated with the right mouse button. This is enabled by
     * default.
     */
    enableContextMenu(): void;
    /**
     * Update the Keyboard and Mouse handlers.
     *
     * @param {object} dt - The time since the last frame. NOTE(review): declared as object but
     * likely the elapsed time in seconds (a number) — confirm against the implementation.
     */
    update(dt: object): void;
    /**
     * Helper function to append an action.
     *
     * @param {string} action_name - The name of the action.
     * @param {object} action - An action object to add.
     * @param {ACTION_KEYBOARD | ACTION_MOUSE | ACTION_GAMEPAD} action.type - The type of the action.
     * @param {number[]} [action.keys] - Keyboard: A list of keycodes e.g. `[pc.KEY_A, pc.KEY_ENTER]`.
     * @param {number} [action.button] - Mouse: e.g. `pc.MOUSEBUTTON_LEFT` - Gamepad: e.g. `pc.PAD_FACE_1`
     * @param {number} [action.pad] - Gamepad: An index of the pad to register (use {@link PAD_1}, etc).
     */
    appendAction(action_name: string, action: {
        type: "keyboard" | "mouse" | "gamepad";
        keys?: number[];
        button?: number;
        pad?: number;
    }): void;
    /**
     * Create or update an action which is enabled when the supplied keys are pressed.
     *
     * @param {string} action - The name of the action.
     * @param {number[]} keys - A list of keycodes.
     */
    registerKeys(action: string, keys: number[]): void;
    /**
     * Create or update an action which is enabled when the supplied mouse button is pressed.
     *
     * @param {string} action - The name of the action.
     * @param {number} button - The mouse button.
     */
    registerMouse(action: string, button: number): void;
    /**
     * Create or update an action which is enabled when the gamepad button is pressed.
     *
     * @param {string} action - The name of the action.
     * @param {number} pad - The index of the pad to register (use {@link PAD_1}, etc).
     * @param {number} button - The pad button.
     */
    registerPadButton(action: string, pad: number, button: number): void;
    /**
     * Register an action against a controller axis.
     *
     * @param {object} [options] - Optional options object.
     * @param {number} [options.pad] - The index of the game pad to register for (use {@link PAD_1}, etc).
     */
    registerAxis(options?: {
        pad?: number;
    }): void;
    /**
     * Returns true if the current action is enabled.
     *
     * @param {string} actionName - The name of the action.
     * @returns {boolean} True if the action is enabled.
     */
    isPressed(actionName: string): boolean;
    /**
     * Returns true if the action was enabled since the last update.
     *
     * @param {string} actionName - The name of the action.
     * @returns {boolean} True if the action was enabled since the last update.
     */
    wasPressed(actionName: string): boolean;
    /**
     * Returns the current value of a named axis.
     *
     * @param {string} name - The name of the axis.
     * @returns {number} The current axis value.
     */
    getAxis(name: any): number;
    /** @private */
    _enableMouse(): void;
    /** @private */
    _enableKeyboard(): void;
}

/**
 * @import { Keyboard } from './keyboard.js'
 */
/**
 * The KeyboardEvent is passed into all event handlers registered on the {@link Keyboard}. The
 * events are:
 *
 * - {@link Keyboard.EVENT_KEYDOWN}
 * - {@link Keyboard.EVENT_KEYUP}
 *
 * @category Input
 */
declare class KeyboardEvent {
    /**
     * Create a new KeyboardEvent.
     *
     * @param {Keyboard} keyboard - The keyboard object which is firing the event.
     * @param {globalThis.KeyboardEvent} event - The original browser event that was fired.
     * @example
     * const onKeyDown = function (e) {
     *     if (e.key === pc.KEY_SPACE) {
     *         // space key pressed
     *     }
     *     e.event.preventDefault(); // Use original browser event to prevent browser action.
     * };
     * app.keyboard.on("keydown", onKeyDown, this);
     */
    constructor(keyboard: Keyboard, event: globalThis.KeyboardEvent);
    /**
     * The keyCode of the key that has changed. See the KEY_* constants. NOTE(review): may be
     * null — the conditions are not visible in this declaration; confirm in the implementation.
     *
     * @type {number|null}
     */
    key: number | null;
    /**
     * The element that fired the keyboard event.
     *
     * @type {Element|null}
     */
    element: Element | null;
    /**
     * The original browser event which was fired.
     *
     * @type {globalThis.KeyboardEvent|null}
     */
    event: globalThis.KeyboardEvent | null;
}

/**
 * A SoundInstance3d plays a {@link Sound} in 3D.
 *
 * @category Sound
 */
declare class SoundInstance3d extends SoundInstance {
    /**
     * Create a new SoundInstance3d instance.
     *
     * @param {SoundManager} manager - The sound manager.
     * @param {Sound} sound - The sound to play.
     * @param {object} options - Options for the instance.
     * @param {number} [options.volume] - The playback volume, between 0 and 1. Defaults to 1.
     * @param {number} [options.pitch] - The relative pitch. Defaults to 1 (plays at normal pitch).
     * @param {boolean} [options.loop] - Whether the sound should loop when it reaches the end or
     * not. Defaults to false.
     * @param {number} [options.startTime] - The time from which the playback will start. Default
     * is 0 to start at the beginning.
     * @param {number} [options.duration] - The total time after the startTime when playback will
     * stop or restart if loop is true.
     * @param {Vec3} [options.position] - The position of the sound in 3D space.
     * @param {string} [options.distanceModel] - Determines which algorithm to use to reduce the
     * volume of the audio as it moves away from the listener. Can be:
     *
     * - {@link DISTANCE_LINEAR}
     * - {@link DISTANCE_INVERSE}
     * - {@link DISTANCE_EXPONENTIAL}
     *
     * Defaults to {@link DISTANCE_LINEAR}.
     * @param {number} [options.refDistance] - The reference distance for reducing volume as the
     * sound source moves further from the listener. Defaults to 1.
     * @param {number} [options.maxDistance] - The maximum distance from the listener at which
     * audio falloff stops. Note the volume of the audio is not 0 after this distance, but just
     * doesn't fall off anymore. Defaults to 10000.
     * @param {number} [options.rollOffFactor] - The factor used in the falloff equation. Defaults
     * to 1.
     */
    constructor(manager: SoundManager, sound: Sound, options?: {
        volume?: number;
        pitch?: number;
        loop?: boolean;
        startTime?: number;
        duration?: number;
        position?: Vec3;
        distanceModel?: string;
        refDistance?: number;
        maxDistance?: number;
        rollOffFactor?: number;
    });
    /**
     * @type {Vec3}
     * @private
     */
    private _position;
    /**
     * @type {Vec3}
     * @private
     */
    private _velocity;
    /**
     * Sets the position of the sound in 3D space.
     *
     * @type {Vec3}
     */
    set position(value: Vec3);
    /**
     * Gets the position of the sound in 3D space.
     *
     * @type {Vec3}
     */
    get position(): Vec3;
    /**
     * Sets the maximum distance from the listener at which audio falloff stops. Note that the
     * volume of the audio is not 0 after this distance, but just doesn't fall off anymore.
     *
     * @type {number}
     */
    set maxDistance(value: number);
    /**
     * Gets the maximum distance from the listener at which audio falloff stops.
     *
     * @type {number}
     */
    get maxDistance(): number;
    /**
     * Sets the reference distance for reducing volume as the sound source moves further from the
     * listener.
     *
     * @type {number}
     */
    set refDistance(value: number);
    /**
     * Gets the reference distance for reducing volume as the sound source moves further from the
     * listener.
     *
     * @type {number}
     */
    get refDistance(): number;
    /**
     * Sets the factor used in the falloff equation.
     *
     * @type {number}
     */
    set rollOffFactor(value: number);
    /**
     * Gets the factor used in the falloff equation.
     *
     * @type {number}
     */
    get rollOffFactor(): number;
    /**
     * Sets which algorithm to use to reduce the volume of the audio as it moves away from
     * the listener. Can be:
     *
     * - {@link DISTANCE_LINEAR}
     * - {@link DISTANCE_INVERSE}
     * - {@link DISTANCE_EXPONENTIAL}
     *
     * Default is {@link DISTANCE_LINEAR}.
     *
     * @type {string}
     */
    set distanceModel(value: string);
    /**
     * Gets which algorithm to use to reduce the volume of the audio as it moves away from
     * the listener.
     *
     * @type {string}
     */
    get distanceModel(): string;
    /**
     * The Web Audio API PannerNode used to spatialize this sound instance.
     *
     * @type {PannerNode}
     */
    panner: PannerNode;
    /**
     * Sets the velocity of the sound. NOTE(review): modern Web Audio no longer applies doppler,
     * so this may have no audible effect — confirm against the implementation.
     *
     * @type {Vec3}
     */
    set velocity(velocity: Vec3);
    /**
     * Gets the velocity of the sound.
     *
     * @type {Vec3}
     */
    get velocity(): Vec3;
}

/**
 * Draws a screen-space quad using a specific shader.
 *
 * @param {GraphicsDevice} device - The graphics device used to draw the quad.
 * @param {RenderTarget|null} target - The destination render target. If undefined, target is the
 * frame buffer.
 * @param {Shader} shader - The shader used for rendering the quad. Vertex shader should contain
 * `attribute vec2 vertex_position`.
 * @param {Vec4} [rect] - The viewport rectangle of the quad, in pixels. Defaults to fullscreen:
 * `[0, 0, target.width, target.height]`.
 * @param {Vec4} [scissorRect] - The scissor rectangle of the quad, in pixels. Defaults to fullscreen:
 * `[0, 0, target.width, target.height]`.
 * @param {...*} args - Additional arguments — NOTE(review): the rest parameter appears only in
 * this generated declaration; confirm its meaning against the implementation.
 * @category Graphics
 */
declare function drawQuadWithShader(device: GraphicsDevice, target: RenderTarget | null, shader: Shader, rect?: Vec4, scissorRect?: Vec4, ...args: any[]): void;

/**
 * A skin instance used for batched rendering, where the bones are driven directly by graph
 * nodes of the batched mesh instances — NOTE(review): behavior inferred from the constructor
 * signature and name; confirm against the implementation.
 */
declare class SkinBatchInstance extends SkinInstance {
    /**
     * Create a new SkinBatchInstance.
     *
     * @param {any} device - The graphics device.
     * @param {any} nodes - The graph nodes acting as bones.
     * @param {any} rootNode - The root node of the skin.
     */
    constructor(device: any, nodes: any, rootNode: any);
    device: any;
    rootNode: any;
    bones: any;
}

/**
 * Helper functions to support prefiltering lighting data.
 *
 * @ignore
 */
declare class EnvLighting {
    /**
     * Generate a skybox cubemap in the correct pixel format from the source texture.
     *
     * @param {Texture} source - The source texture. This is either a 2d texture in equirect format
     * or a cubemap.
     * @param {number} [size] - Size of the resulting texture. Otherwise use automatic sizing.
     * @returns {Texture} The resulting cubemap.
     */
    static generateSkyboxCubemap(source: Texture, size?: number): Texture;
    /**
     * Create a texture in the format needed to precalculate lighting data.
     *
     * @param {Texture} source - The source texture. This is either a 2d texture in equirect format
     * or a cubemap.
     * @param {object} [options] - Specify generation options.
     * @param {Texture} [options.target] - The target texture. If one is not provided then a
     * new texture will be created and returned.
     * @param {number} [options.size] - Size of the lighting source cubemap texture. Only used
     * if target isn't specified. Defaults to 128.
     * @returns {Texture} The resulting cubemap.
     */
    static generateLightingSource(source: Texture, options?: {
        target?: Texture;
        size?: number;
    }): Texture;
    /**
     * Generate the environment lighting atlas containing prefiltered reflections and ambient.
     *
     * @param {Texture} source - The source lighting texture, generated by generateLightingSource.
     * @param {object} [options] - Specify prefilter options.
     * @param {Texture} [options.target] - The target texture. If one is not provided then a
     * new texture will be created and returned.
     * @param {number} [options.size] - Size of the target texture to create. Only used if
     * target isn't specified. Defaults to 512.
     * @param {number} [options.numReflectionSamples] - Number of samples to use when generating
     * rough reflections. Defaults to 1024.
     * @param {number} [options.numAmbientSamples] - Number of samples to use when generating ambient
     * lighting. Defaults to 2048.
     * @returns {Texture} The resulting atlas.
     */
    static generateAtlas(source: Texture, options?: {
        target?: Texture;
        size?: number;
        numReflectionSamples?: number;
        numAmbientSamples?: number;
    }): Texture;
    /**
     * Generate the environment lighting atlas from prefiltered cubemap data.
     *
     * @param {Texture[]} sources - Array of 6 prefiltered textures.
     * @param {object} [options] - The options object.
     * @param {Texture} [options.target] - The target texture. If one is not provided then a
     * new texture will be created and returned.
     * @param {number} [options.size] - Size of the target texture to create. Only used if
     * target isn't specified. Defaults to 512.
     * @param {boolean} [options.legacyAmbient] - Enable generating legacy ambient lighting.
     * Default is false.
     * @param {number} [options.numSamples] - Number of samples to use when generating ambient
     * lighting. Default is 2048.
     * @returns {Texture} The resulting atlas texture.
     */
    static generatePrefilteredAtlas(sources: Texture[], options?: {
        target?: Texture;
        size?: number;
        legacyAmbient?: boolean;
        numSamples?: number;
    }): Texture;
}

/**
 * @import { GraphicsDevice } from '../../platform/graphics/graphics-device.js'
 * @import { Texture } from '../../platform/graphics/texture.js'
 */
/**
 * Render pass that counts digit occurrences per group (Pass 0 of radix sort).
 * Outputs to R32F prefix sums texture.
 *
 * Has two variants:
 * - sourceLinear=true: First pass, reads from user's linear-layout texture
 * - sourceLinear=false: Subsequent passes, reads from internal Morton-layout texture
 *
 * @category Graphics
 * @ignore
 */
declare class RenderPassRadixSortCount extends RenderPassShaderQuad {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {boolean} sourceLinear - Whether to read from linear-layout source texture.
     * @param {number} bitsPerStep - Bits per radix step (usually 4).
     * @param {number} groupSize - Log2 of group size (usually 4 for 16 elements).
     * @param {number} currentBit - Current bit offset for this pass.
     */
    constructor(device: GraphicsDevice, sourceLinear: boolean, bitsPerStep: number, groupSize: number, currentBit: number);
    /**
     * Whether this pass reads from linear-layout source texture (first pass).
     *
     * @type {boolean}
     */
    sourceLinear: boolean;
    /**
     * Bits per radix step (usually 4).
     *
     * @type {number}
     */
    bitsPerStep: number;
    /**
     * Log2 of group size (usually 4 for 16 elements).
     *
     * @type {number}
     */
    groupSize: number;
    /**
     * Current bit offset for this pass.
     *
     * @type {number}
     */
    currentBit: number;
    /**
     * Dynamic params updated per frame.
     *
     * @type {{elementCount: number, imageElementsLog2: number}}
     * @private
     */
    private _dynamicParams;
    /** Uniform scope handle for the keys texture. */
    keysTextureId: ScopeId;
    /** Uniform scope handle for the bits-per-step constant. */
    bitsPerStepId: ScopeId;
    /** Uniform scope handle for the group size constant. */
    groupSizeId: ScopeId;
    /** Uniform scope handle for the element count. */
    elementCountId: ScopeId;
    /** Uniform scope handle for log2 of total texture elements. */
    imageElementsLog2Id: ScopeId;
    /** Uniform scope handle for the current bit offset. */
    currentBitId: ScopeId;
    /**
     * Sets the keys texture to read from.
     *
     * @param {Texture} keysTexture - The keys texture (R32U).
     */
    setKeysTexture(keysTexture: Texture): void;
    /** The keys texture most recently passed to {@link setKeysTexture}. */
    _keysTexture: Texture;
    /**
     * Sets dynamic parameters (called each frame).
     *
     * @param {number} elementCount - Number of elements to sort.
     * @param {number} imageElementsLog2 - Log2 of total texture elements.
     */
    setDynamicParams(elementCount: number, imageElementsLog2: number): void;
}

/**
 * @import { GraphicsDevice } from '../../platform/graphics/graphics-device.js'
 * @import { Texture } from '../../platform/graphics/texture.js'
 */
/**
 * Render pass that reorders elements using binary search through mipmap hierarchy
 * (Pass 1 of radix sort). Uses MRT to output both keys (R32U) and indices (R32U).
 *
 * Has multiple variants:
 * - sourceLinear=true: First pass, reads keys from user's linear-layout texture
 * - sourceLinear=false: Subsequent passes, reads keys from internal Morton-layout texture
 * - outputLinear=true: Outputs indices in linear layout (simpler for consumers)
 *
 * @category Graphics
 * @ignore
 */
declare class RenderPassRadixSortReorder extends RenderPassShaderQuad {
    /**
     * @param {GraphicsDevice} device - The graphics device.
     * @param {boolean} sourceLinear - Whether to read from linear-layout source texture.
     * @param {boolean} outputLinear - Whether to output indices in linear layout.
     * @param {number} bitsPerStep - Bits per radix step (usually 4).
     * @param {number} groupSize - Log2 of group size (usually 4 for 16 elements).
     * @param {number} currentBit - Current bit offset for this pass.
     */
    constructor(device: GraphicsDevice, sourceLinear: boolean, outputLinear: boolean, bitsPerStep: number, groupSize: number, currentBit: number);
    /**
     * Whether this pass reads from linear-layout source texture (first pass).
     *
     * @type {boolean}
     */
    sourceLinear: boolean;
    /**
     * Whether to output indices in linear layout.
     *
     * @type {boolean}
     */
    outputLinear: boolean;
    /**
     * Bits per radix step (usually 4).
     *
     * @type {number}
     */
    bitsPerStep: number;
    /**
     * Log2 of group size (usually 4 for 16 elements).
     *
     * @type {number}
     */
    groupSize: number;
    /**
     * Current bit offset for this pass.
     *
     * @type {number}
     */
    currentBit: number;
    /**
     * Dynamic params updated per frame.
     *
     * @type {{elementCount: number, imageElementsLog2: number, imageSize: number}}
     * @private
     */
    private _dynamicParams;
    /** Uniform scope handle for the keys texture. */
    keysTextureId: ScopeId;
    /** Uniform scope handle for the indices texture. */
    indicesTextureId: ScopeId;
    /** Uniform scope handle for the prefix sums texture. */
    prefixSumsId: ScopeId;
    /** Uniform scope handle for the bits-per-step constant. */
    bitsPerStepId: ScopeId;
    /** Uniform scope handle for the group size constant. */
    groupSizeId: ScopeId;
    /** Uniform scope handle for the element count. */
    elementCountId: ScopeId;
    /** Uniform scope handle for log2 of total texture elements. */
    imageElementsLog2Id: ScopeId;
    /** Uniform scope handle for the current bit offset. */
    currentBitId: ScopeId;
    /** Uniform scope handle for the internal texture size. */
    imageSizeId: ScopeId;
    /**
     * Sets the keys texture to read from.
     *
     * @param {Texture} keysTexture - The keys texture (R32U).
     */
    setKeysTexture(keysTexture: Texture): void;
    /** The keys texture most recently passed to {@link setKeysTexture}. */
    _keysTexture: Texture;
    /**
     * Sets the indices texture to read from.
     *
     * @param {Texture} indicesTexture - The indices texture (R32U).
     */
    setIndicesTexture(indicesTexture: Texture): void;
    /** The indices texture most recently passed to {@link setIndicesTexture}. */
    _indicesTexture: Texture;
    /**
     * Sets the prefix sums texture.
     *
     * @param {Texture} prefixSums - The prefix sums texture (R32F with mipmaps).
     */
    setPrefixSumsTexture(prefixSums: Texture): void;
    /** The prefix sums texture most recently passed to {@link setPrefixSumsTexture}. */
    _prefixSums: Texture;
    /**
     * Sets dynamic parameters (called each frame).
     *
     * @param {number} elementCount - Number of elements to sort.
     * @param {number} imageElementsLog2 - Log2 of total texture elements.
     * @param {number} imageSize - Size of the internal texture (power of 2).
     */
    setDynamicParams(elementCount: number, imageElementsLog2: number, imageSize: number): void;
}

/**
 * A frame pass that performs GPU-based radix sort using mipmap-based prefix sums.
 *
 * This implementation is based on:
 * - VRChat Gaussian Splatting by MichaelMoroz: https://github.com/MichaelMoroz/VRChatGaussianSplatting
 * - Mipmap prefix sum trick by d4rkpl4y3r: https://github.com/d4rkc0d3r/CompactSparseTextureDemo
 *
 * ## Algorithm Overview
 *
 * The sort uses a 4-bit radix (16 buckets) and processes keys in multiple passes,
 * one pass per 4-bit chunk. Each pass consists of:
 *
 * 1. **Count Pass**: For each digit (0-15), count how many keys in each group have that digit.
 *    Output is an R32F texture where each pixel stores a count. Groups are 16 elements.
 *
 * 2. **Mipmap Generation**: Generate mipmaps for the count texture using hardware mipmap
 *    generation. This creates a quadtree of counts that enables efficient binary search.
 *
 * 3. **Reorder Pass**: For each output position, binary search through the mipmap hierarchy
 *    to find which source element maps to it. The mipmap structure enables O(log N) lookup
 *    per element instead of O(N) linear scan.
 *
 * ## Mipmap Prefix Sum Trick
 *
 * The key insight is that mipmaps naturally form a quadtree of averages. By writing counts
 * (e.g., 1.0 for active pixels) into an R32F texture with auto-generated mipmaps:
 *
 * - Each mip level stores the average of the 4 pixels below it
 * - To reconstruct actual counts, multiply by 4^level (i.e., `1 << (level * 2)`)
 * - This gives us a hierarchical prefix sum structure
 *
 * Binary search traversal:
 * - Start at maxMipLevel and work down to level 0
 * - At each level, check 3 quadrants (can skip 4th - if not in first 3, must be in 4th)
 * - Order: bottom-left → bottom-right → top-left → top-right (Z-order/Morton curve)
 * - Accumulate prefix sums while descending to find the target element
 *
 * The Z-order traversal ensures stable sorting: if element A comes before B in the input,
 * it remains before B in the output.
 *
 * ## Internal Data Layout
 *
 * - Internal keys/indices use Morton order (Z-order curve) for better texture cache locality
 * - Source keys texture uses linear (row-major) layout
 * - Output sorted indices use linear layout for simple consumer access
 *
 * ## Complexity
 *
 * - Time: O(N log N) per pass due to mipmap binary search
 * - Passes: ceil(numBits / 4) passes for numBits-bit keys
 * - Memory: 2x keys textures + 2x indices textures + 1x prefix sums texture (all power-of-2)
 *
 * @category Graphics
 * @ignore
 */
declare class FramePassRadixSort extends FramePass {
    /**
     * The current sorted indices texture (R32U). Access sorted indices using Morton lookup.
     *
     * @type {Texture|null}
     */
    _currentIndices: Texture | null;
    /**
     * Current number of radix passes.
     *
     * @type {number}
     */
    _numPasses: number;
    /**
     * Current internal texture size (power of 2).
     *
     * @type {number}
     */
    _internalSize: number;
    /**
     * Internal keys texture 0 (ping-pong buffer).
     *
     * @type {Texture|null}
     */
    _keys0: Texture | null;
    /**
     * Internal keys texture 1 (ping-pong buffer).
     *
     * @type {Texture|null}
     */
    _keys1: Texture | null;
    /**
     * Internal indices texture 0 (ping-pong buffer).
     *
     * @type {Texture|null}
     */
    _indices0: Texture | null;
    /**
     * Internal indices texture 1 (ping-pong buffer).
     *
     * @type {Texture|null}
     */
    _indices1: Texture | null;
    /**
     * Prefix sums texture (R32F with mipmaps).
     *
     * @type {Texture|null}
     */
    _prefixSums: Texture | null;
    /**
     * Sort render target 0 (MRT for keys + indices).
     *
     * @type {RenderTarget|null}
     */
    _sortRT0: RenderTarget | null;
    /**
     * Sort render target 1 (MRT for keys + indices).
     *
     * @type {RenderTarget|null}
     */
    _sortRT1: RenderTarget | null;
    /**
     * Prefix sums render target.
     *
     * @type {RenderTarget|null}
     */
    _prefixSumsRT: RenderTarget | null;
    /**
     * Count passes for each radix iteration.
     *
     * @type {RenderPassRadixSortCount[]}
     */
    _countPasses: RenderPassRadixSortCount[];
    /**
     * Reorder passes for each radix iteration.
     *
     * @type {RenderPassRadixSortReorder[]}
     */
    _reorderPasses: RenderPassRadixSortReorder[];
    /**
     * Number of elements to sort (set by setup()).
     *
     * @type {number}
     */
    _elementCount: number;
    /**
     * The source keys texture (set by setup()).
     *
     * @type {Texture|null}
     */
    _keysTexture: Texture | null;
    /**
     * Gets the sorted indices texture (R32U, linear layout). Use `.width` for texture dimensions.
     * Access with: `texelFetch(texture, ivec2(index % width, index / width), 0).r`
     *
     * @type {Texture|null}
     */
    get sortedIndices(): Texture | null;
    /**
     * Sets up the sort for the current frame.
     *
     * Note: The source keys texture is read-only and can be any size.
     * The sorted indices will be in a separate power-of-2 texture.
     *
     * @param {Texture} keysTexture - R32U texture containing sort keys (linear layout, any size).
     * @param {number} elementCount - Number of elements to sort.
     * @param {number} [numBits] - Number of bits to sort (1-24). More bits = more passes.
     * Defaults to 16.
     */
    setup(keysTexture: Texture, elementCount: number, numBits?: number): void;
    /**
     * Calculates the required power-of-2 texture size for the given element count.
     *
     * @param {number} elementCount - Number of elements.
     * @returns {number} Power-of-2 size.
     * @private
     */
    private _calculateInternalSize;
    /**
     * Creates or resizes internal textures.
     *
     * @param {number} size - Power-of-2 size for textures.
     * @private
     */
    private _resizeInternalTextures;
    /**
     * Creates a texture for radix sort.
     *
     * @param {string} name - Texture name.
     * @param {number} size - Texture size.
     * @param {number} format - Pixel format (PIXELFORMAT_R32U or PIXELFORMAT_R32F).
     * @param {boolean} [mipmaps] - Whether to generate mipmaps. Defaults to false.
     * @returns {Texture} The created texture.
     * @private
     */
    private _createTexture;
    /**
     * Destroys internal textures and render targets.
     *
     * @private
     */
    private _destroyInternalTextures;
    /**
     * Creates the sort passes based on numBits.
     * Sets up beforePasses with the complete pass sequence (count, mipmap, reorder for each iteration).
     *
     * @private
     */
    private _createPasses;
    /**
     * Destroys all sort passes.
     *
     * @private
     */
    private _destroyPasses;
    /**
     * Executes the GPU radix sort. This is a convenience method that combines setup, frameUpdate,
     * and rendering all passes in one call.
     *
     * @param {Texture} keysTexture - R32U texture containing sort keys (linear layout, any size).
     * @param {number} elementCount - Number of elements to sort.
     * @param {number} [numBits] - Number of bits to sort (1-24). More bits = more passes. Defaults to 16.
     * @returns {Texture} The sorted indices texture (R32U, linear layout).
     */
    sort(keysTexture: Texture, elementCount: number, numBits?: number): Texture;
}

/**
 * This function reprojects textures between cubemap, equirectangular and octahedral formats. The
 * function can read and write textures with pixel data in RGBE, RGBM, linear and sRGB formats.
 * When specularPower is specified it will perform a phong-weighted convolution of the source (for
 * generating gloss maps).
 *
 * @param {Texture} source - The source texture.
 * @param {Texture} target - The target texture.
 * @param {object} [options] - The options object.
 * @param {number} [options.specularPower] - Optional specular power. When specular power is
 * specified, the source is convolved by a phong-weighted kernel raised to the specified power.
 * Otherwise the function performs a standard resample.
 * @param {number} [options.numSamples] - Optional number of samples (default is 1024).
 * @param {number} [options.face] - Optional cubemap face to update (default is update all faces).
 * @param {string} [options.distribution] - Specify convolution distribution - 'none', 'lambert',
 * 'phong', 'ggx'. Default depends on specularPower.
 * @param {Vec4} [options.rect] - Optional viewport rectangle.
 * @param {number} [options.seamPixels] - Optional number of seam pixels to render.
 * @returns {boolean} True if the reprojection was applied and false otherwise (e.g. if rect is
 * empty).
 * @category Graphics
 */
declare function reprojectTexture(source: Texture, target: Texture, options?: {
    specularPower?: number;
    numSamples?: number;
    face?: number;
    distribution?: string;
    rect?: Vec4;
    seamPixels?: number;
}): boolean;

/**
 * LitMaterial comprises a shader chunk implementing the material "front end" (the shader program
 * providing the material surface properties like diffuse, opacity, normals etc) and a set of
 * flags which control the material "back end" (the shader program calculating the lighting,
 * shadows, reflections, fogging etc).
 *
 * The front end and back end together form a complete PBR shader.
 *
 * @ignore
 */
declare class LitMaterial extends Material {
    usedUvs: boolean[];
    /** Front end shader chunk (GLSL). */
    shaderChunkGLSL: any;
    /** Front end shader chunk (WGSL). */
    shaderChunkWGSL: any;
    useLighting: boolean;
    useFog: boolean;
    useTonemap: boolean;
    useSkybox: boolean;
    ambientSH: any;
    pixelSnap: boolean;
    nineSlicedMode: any;
    twoSidedLighting: boolean;
    occludeDirect: boolean;
    occludeSpecular: number;
    occludeSpecularIntensity: number;
    opacityFadesSpecular: boolean;
    opacityDither: string;
    opacityShadowDither: string;
    shadowCatcher: boolean;
    ggxSpecular: boolean;
    fresnelModel: number;
    dynamicRefraction: boolean;
    // 'has*' flags indicate which surface features the front end provides to the back end.
    hasAo: boolean;
    hasSpecular: boolean;
    hasSpecularityFactor: boolean;
    hasLighting: boolean;
    hasHeights: boolean;
    hasNormals: boolean;
    hasSheen: boolean;
    hasRefraction: boolean;
    // NOTE: spelling 'Irridescence' (rather than 'Iridescence') is preserved from the public API.
    hasIrridescence: boolean;
    hasMetalness: boolean;
    hasClearCoat: boolean;
    hasClearCoatNormals: boolean;
    getShaderVariant(params: any): Shader;
}

/**
 * Shared superclass of {@link CapsuleGeometry}, {@link ConeGeometry} and {@link CylinderGeometry}.
 * Use those classes instead of this one.
 */
declare class ConeBaseGeometry extends Geometry {
    /**
     * Create a new ConeBaseGeometry instance.
     *
     * Parameter meanings mirror the documented options of the subclasses:
     *
     * @param {*} baseRadius - The radius at the base of the shape.
     * @param {*} peakRadius - The radius at the peak of the shape.
     * @param {*} height - The length of the body of the shape.
     * @param {*} heightSegments - The number of divisions along the length of the shape.
     * @param {*} capSegments - The number of divisions around the tubular body of the shape.
     * @param {*} roundedCaps - Presumably whether to generate rounded (hemispherical) end caps,
     * as used by {@link CapsuleGeometry} — TODO confirm against implementation.
     */
    constructor(baseRadius: any, peakRadius: any, height: any, heightSegments: any, capSegments: any, roundedCaps: any);
    /** Generated index data. */
    indices: any[];
}

/**
 * A procedural capsule-shaped geometry.
 *
 * Typically, you would:
 *
 * 1. Create a CapsuleGeometry instance.
 * 2. Generate a {@link Mesh} from the geometry.
 * 3. Create a {@link MeshInstance} referencing the mesh.
 * 4. Create an {@link Entity} with a {@link RenderComponent} and assign the {@link MeshInstance} to it.
 * 5. Add the entity to the {@link Scene}.
 *
 * ```javascript
 * // Create a mesh instance
 * const geometry = new pc.CapsuleGeometry();
 * const mesh = pc.Mesh.fromGeometry(app.graphicsDevice, geometry);
 * const material = new pc.StandardMaterial();
 * const meshInstance = new pc.MeshInstance(mesh, material);
 *
 * // Create an entity
 * const entity = new pc.Entity();
 * entity.addComponent('render', {
 *     meshInstances: [meshInstance]
 * });
 *
 * // Add the entity to the scene hierarchy
 * app.scene.root.addChild(entity);
 * ```
 *
 * @category Graphics
 */
declare class CapsuleGeometry extends ConeBaseGeometry {
    /**
     * Create a new CapsuleGeometry instance.
     *
     * By default, the constructor creates a capsule standing vertically centered on the XZ-plane
     * with a radius of 0.3, a height of 1.0, 1 height segment and 20 cap segments (the `sides`
     * option). The capsule is created with UVs in the range of 0 to 1.
     *
     * @param {object} [opts] - Options object.
     * @param {number} [opts.radius] - The radius of the tube forming the body of the capsule.
     * Defaults to 0.3.
     * @param {number} [opts.height] - The length of the body of the capsule from tip to tip.
     * Defaults to 1.
     * @param {number} [opts.heightSegments] - The number of divisions along the tubular length of
     * the capsule. Defaults to 1.
     * @param {number} [opts.sides] - The number of divisions around the tubular body of the capsule.
     * Defaults to 20.
     * @param {boolean} [opts.calculateTangents] - Generate tangent information. Defaults to false.
     * @example
     * const geometry = new pc.CapsuleGeometry({
     *     radius: 1,
     *     height: 2,
     *     heightSegments: 2,
     *     sides: 20
     * });
     */
    constructor(opts?: {
        radius?: number;
        height?: number;
        heightSegments?: number;
        sides?: number;
        calculateTangents?: boolean;
    });
}

/**
 * A procedural cone-shaped geometry.
 *
 * Typically, you would:
 *
 * 1. Create a ConeGeometry instance.
 * 2. Generate a {@link Mesh} from the geometry.
 * 3. Create a {@link MeshInstance} referencing the mesh.
 * 4. Create an {@link Entity} with a {@link RenderComponent} and assign the {@link MeshInstance} to it.
 * 5. Add the entity to the {@link Scene}.
 *
 * ```javascript
 * // Create a mesh instance
 * const geometry = new pc.ConeGeometry();
 * const mesh = pc.Mesh.fromGeometry(app.graphicsDevice, geometry);
 * const material = new pc.StandardMaterial();
 * const meshInstance = new pc.MeshInstance(mesh, material);
 *
 * // Create an entity
 * const entity = new pc.Entity();
 * entity.addComponent('render', {
 *     meshInstances: [meshInstance]
 * });
 *
 * // Add the entity to the scene hierarchy
 * app.scene.root.addChild(entity);
 * ```
 *
 * @category Graphics
 */
declare class ConeGeometry extends ConeBaseGeometry {
    /**
     * Create a new ConeGeometry instance.
     *
     * By default, the constructor creates a cone standing vertically centered on the XZ-plane with
     * a base radius of 0.5, a height of 1.0, 5 height segments and 18 cap segments. The cone is
     * created with UVs in the range of 0 to 1.
     *
     * @param {object} [opts] - Options object.
     * @param {number} [opts.baseRadius] - The base radius of the cone. Defaults to 0.5.
     * @param {number} [opts.peakRadius] - The peak radius of the cone. Defaults to 0 (a pointed
     * tip).
     * @param {number} [opts.height] - The length of the body of the cone. Defaults to 1.
     * @param {number} [opts.heightSegments] - The number of divisions along the length of the cone.
     * Defaults to 5.
     * @param {number} [opts.capSegments] - The number of divisions around the tubular body of the
     * cone. Defaults to 18.
     * @param {boolean} [opts.calculateTangents] - Generate tangent information. Defaults to false.
     * @example
     * const geometry = new pc.ConeGeometry({
     *     baseRadius: 1,
     *     height: 2,
     *     heightSegments: 2,
     *     capSegments: 20
     * });
     */
    constructor(opts?: {
        baseRadius?: number;
        peakRadius?: number;
        height?: number;
        heightSegments?: number;
        capSegments?: number;
        calculateTangents?: boolean;
    });
}

/**
 * A procedural cylinder-shaped geometry.
 *
 * Typically, you would:
 *
 * 1. Create a CylinderGeometry instance.
 * 2. Generate a {@link Mesh} from the geometry.
 * 3. Create a {@link MeshInstance} referencing the mesh.
 * 4. Create an {@link Entity} with a {@link RenderComponent} and assign the {@link MeshInstance} to it.
 * 5. Add the entity to the {@link Scene}.
 *
 * ```javascript
 * // Create a mesh instance
 * const geometry = new pc.CylinderGeometry();
 * const mesh = pc.Mesh.fromGeometry(app.graphicsDevice, geometry);
 * const material = new pc.StandardMaterial();
 * const meshInstance = new pc.MeshInstance(mesh, material);
 *
 * // Create an entity
 * const entity = new pc.Entity();
 * entity.addComponent('render', {
 *     meshInstances: [meshInstance]
 * });
 *
 * // Add the entity to the scene hierarchy
 * app.scene.root.addChild(entity);
 * ```
 *
 * @category Graphics
 */
declare class CylinderGeometry extends ConeBaseGeometry {
    /**
     * Create a new CylinderGeometry instance.
     *
     * By default, the constructor creates a cylinder standing vertically centered on the XZ-plane
     * with a radius of 0.5, a height of 1.0, 5 height segments and 20 cap segments. The cylinder
     * is created with UVs in the range of 0 to 1.
     *
     * @param {object} [opts] - Options object.
     * @param {number} [opts.radius] - The radius of the tube forming the body of the cylinder.
     * Defaults to 0.5.
     * @param {number} [opts.height] - The length of the body of the cylinder. Defaults to 1.
     * @param {number} [opts.heightSegments] - The number of divisions along the length of the
     * cylinder. Defaults to 5.
     * @param {number} [opts.capSegments] - The number of divisions around the tubular body of the
     * cylinder. Defaults to 20.
     * @param {boolean} [opts.calculateTangents] - Generate tangent information. Defaults to false.
     * @example
     * const geometry = new pc.CylinderGeometry({
     *     radius: 1,
     *     height: 2,
     *     heightSegments: 2,
     *     capSegments: 10
     * });
     */
    constructor(opts?: {
        radius?: number;
        height?: number;
        heightSegments?: number;
        capSegments?: number;
        calculateTangents?: boolean;
    });
}

/**
 * A procedural sphere-shaped geometry.
 *
 * Typically, you would:
 *
 * 1. Create a SphereGeometry instance.
 * 2. Generate a {@link Mesh} from the geometry.
 * 3. Create a {@link MeshInstance} referencing the mesh.
 * 4. Create an {@link Entity} with a {@link RenderComponent} and assign the {@link MeshInstance} to it.
 * 5. Add the entity to the {@link Scene}.
 *
 * ```javascript
 * // Create a mesh instance
 * const geometry = new pc.SphereGeometry();
 * const mesh = pc.Mesh.fromGeometry(app.graphicsDevice, geometry);
 * const material = new pc.StandardMaterial();
 * const meshInstance = new pc.MeshInstance(mesh, material);
 *
 * // Create an entity
 * const entity = new pc.Entity();
 * entity.addComponent('render', {
 *     meshInstances: [meshInstance]
 * });
 *
 * // Add the entity to the scene hierarchy
 * app.scene.root.addChild(entity);
 * ```
 *
 * @category Graphics
 */
declare class SphereGeometry extends Geometry {
    /**
     * Create a new SphereGeometry instance.
     *
     * By default, the constructor creates a sphere centered on the object space origin with a
     * radius of 0.5, 16 latitude bands and 16 longitude bands. The sphere is created with UVs in
     * the range of 0 to 1.
     *
     * @param {object} [opts] - Options object.
     * @param {number} [opts.radius] - The radius of the sphere. Defaults to 0.5.
     * @param {number} [opts.latitudeBands] - The number of divisions along the latitudinal axis of
     * the sphere. Defaults to 16.
     * @param {number} [opts.longitudeBands] - The number of divisions along the longitudinal axis of
     * the sphere. Defaults to 16.
     * @param {boolean} [opts.calculateTangents] - Generate tangent information. Defaults to false.
     * @example
     * const geometry = new pc.SphereGeometry({
     *     radius: 1,
     *     latitudeBands: 32,
     *     longitudeBands: 32
     * });
     */
    constructor(opts?: {
        radius?: number;
        latitudeBands?: number;
        longitudeBands?: number;
        calculateTangents?: boolean;
    });
}

/**
 * A procedural dome-shaped geometry.
 *
 * Typically, you would:
 *
 * 1. Create a DomeGeometry instance.
 * 2. Generate a {@link Mesh} from the geometry.
 * 3. Create a {@link MeshInstance} referencing the mesh.
 * 4. Create an {@link Entity} with a {@link RenderComponent} and assign the {@link MeshInstance} to it.
 * 5. Add the entity to the {@link Scene}.
 *
 * ```javascript
 * // Create a mesh instance
 * const geometry = new pc.DomeGeometry();
 * const mesh = pc.Mesh.fromGeometry(app.graphicsDevice, geometry);
 * const material = new pc.StandardMaterial();
 * const meshInstance = new pc.MeshInstance(mesh, material);
 *
 * // Create an entity
 * const entity = new pc.Entity();
 * entity.addComponent('render', {
 *     meshInstances: [meshInstance]
 * });
 *
 * // Add the entity to the scene hierarchy
 * app.scene.root.addChild(entity);
 * ```
 *
 * @category Graphics
 */
declare class DomeGeometry extends SphereGeometry {
    /**
     * Create a new DomeGeometry instance.
     *
     * By default, the constructor creates a dome with a radius of 0.5, 16 latitude bands and 16
     * longitude bands. The dome is created with UVs in the range of 0 to 1. Note that unlike
     * {@link SphereGeometry}, no radius option is exposed — the radius cannot be configured.
     *
     * @param {object} [opts] - Options object.
     * @param {number} [opts.latitudeBands] - The number of divisions along the latitudinal axis of
     * the sphere. Defaults to 16.
     * @param {number} [opts.longitudeBands] - The number of divisions along the longitudinal axis of
     * the sphere. Defaults to 16.
     * @example
     * const geometry = new pc.DomeGeometry({
     *     latitudeBands: 32,
     *     longitudeBands: 32
     * });
     */
    constructor(opts?: {
        latitudeBands?: number;
        longitudeBands?: number;
    });
}

/**
 * A procedural box-shaped geometry.
 *
 * Typically, you would:
 *
 * 1. Create a BoxGeometry instance.
 * 2. Generate a {@link Mesh} from the geometry.
 * 3. Create a {@link MeshInstance} referencing the mesh.
 * 4. Create an {@link Entity} with a {@link RenderComponent} and assign the {@link MeshInstance} to it.
 * 5. Add the entity to the {@link Scene}.
 *
 * ```javascript
 * // Create a mesh instance
 * const geometry = new pc.BoxGeometry();
 * const mesh = pc.Mesh.fromGeometry(app.graphicsDevice, geometry);
 * const material = new pc.StandardMaterial();
 * const meshInstance = new pc.MeshInstance(mesh, material);
 *
 * // Create an entity
 * const entity = new pc.Entity();
 * entity.addComponent('render', {
 *     meshInstances: [meshInstance]
 * });
 *
 * // Add the entity to the scene hierarchy
 * app.scene.root.addChild(entity);
 * ```
 *
 * @category Graphics
 */
declare class BoxGeometry extends Geometry {
    /**
     * Create a new BoxGeometry instance.
     *
     * By default, the constructor creates a box centered on the object space origin with a width,
     * length and height of 1 unit and 1 segment in either axis (2 triangles per face). The box is
     * created with UVs in the range of 0 to 1 on each face.
     *
     * @param {object} [opts] - Options object.
     * @param {Vec3} [opts.halfExtents] - The half dimensions of the box in each axis. Defaults to
     * [0.5, 0.5, 0.5].
     * @param {number} [opts.widthSegments] - The number of divisions along the X axis of the box.
     * Defaults to 1.
     * @param {number} [opts.lengthSegments] - The number of divisions along the Z axis of the box.
     * Defaults to 1.
     * @param {number} [opts.heightSegments] - The number of divisions along the Y axis of the box.
     * Defaults to 1.
     * @param {boolean} [opts.calculateTangents] - Generate tangent information. Defaults to false.
     * @param {number} [opts.yOffset] - Move the box vertically by given offset in local space. Pass
     * 0.5 to generate the box with pivot point at the bottom face. Defaults to 0.
     * @example
     * const geometry = new pc.BoxGeometry({
     *     halfExtents: new pc.Vec3(1, 1, 1),
     *     widthSegments: 2,
     *     lengthSegments: 2,
     *     heightSegments: 2
     * });
     */
    constructor(opts?: {
        halfExtents?: Vec3;
        widthSegments?: number;
        lengthSegments?: number;
        heightSegments?: number;
        calculateTangents?: boolean;
        yOffset?: number;
    });
    /** Generated vertex positions. */
    positions: any[];
    /** Generated vertex normals. */
    normals: any[];
    /** Generated texture coordinates (set 0). */
    uvs: any[];
    /** Generated texture coordinates (set 1). */
    uvs1: any[];
    /** Generated triangle indices. */
    indices: any[];
}

/**
 * A procedural plane-shaped geometry.
 *
 * Typically, you would:
 *
 * 1. Create a PlaneGeometry instance.
 * 2. Generate a {@link Mesh} from the geometry.
 * 3. Create a {@link MeshInstance} referencing the mesh.
 * 4. Create an {@link Entity} with a {@link RenderComponent} and assign the {@link MeshInstance} to it.
 * 5. Add the entity to the {@link Scene}.
 *
 * ```javascript
 * // Create a mesh instance
 * const geometry = new pc.PlaneGeometry();
 * const mesh = pc.Mesh.fromGeometry(app.graphicsDevice, geometry);
 * const material = new pc.StandardMaterial();
 * const meshInstance = new pc.MeshInstance(mesh, material);
 *
 * // Create an entity
 * const entity = new pc.Entity();
 * entity.addComponent('render', {
 *     meshInstances: [meshInstance]
 * });
 *
 * // Add the entity to the scene hierarchy
 * app.scene.root.addChild(entity);
 * ```
 *
 * @category Graphics
 */
declare class PlaneGeometry extends Geometry {
    /**
     * Create a new PlaneGeometry instance.
     *
     * By default, the constructor creates a plane centered on the object space origin with a width
     * and length of 1 and 5 segments in either axis (5 × 5 segments × 2 = 50 triangles). The
     * normal vector of the plane is aligned along the positive Y axis. The plane is created with
     * UVs in the range of 0 to 1.
     *
     * @param {object} [opts] - Options object.
     * @param {Vec2} [opts.halfExtents] - The half dimensions of the plane in the X and Z axes.
     * Defaults to [0.5, 0.5].
     * @param {number} [opts.widthSegments] - The number of divisions along the X axis of the plane.
     * Defaults to 5.
     * @param {number} [opts.lengthSegments] - The number of divisions along the Z axis of the plane.
     * Defaults to 5.
     * @param {boolean} [opts.calculateTangents] - Generate tangent information. Defaults to false.
     * @example
     * const geometry = new pc.PlaneGeometry({
     *     halfExtents: new pc.Vec2(1, 1),
     *     widthSegments: 10,
     *     lengthSegments: 10
     * });
     */
    constructor(opts?: {
        halfExtents?: Vec2;
        widthSegments?: number;
        lengthSegments?: number;
        calculateTangents?: boolean;
    });
}

/**
 * A procedural torus-shaped geometry.
 *
 * Typically, you would:
 *
 * 1. Create a TorusGeometry instance.
 * 2. Generate a {@link Mesh} from the geometry.
 * 3. Create a {@link MeshInstance} referencing the mesh.
 * 4. Create an {@link Entity} with a {@link RenderComponent} and assign the {@link MeshInstance} to it.
 * 5. Add the entity to the {@link Scene}.
 *
 * ```javascript
 * // Create a mesh instance
 * const geometry = new pc.TorusGeometry();
 * const mesh = pc.Mesh.fromGeometry(app.graphicsDevice, geometry);
 * const material = new pc.StandardMaterial();
 * const meshInstance = new pc.MeshInstance(mesh, material);
 *
 * // Create an entity
 * const entity = new pc.Entity();
 * entity.addComponent('render', {
 *     meshInstances: [meshInstance]
 * });
 *
 * // Add the entity to the scene hierarchy
 * app.scene.root.addChild(entity);
 * ```
 *
 * @category Graphics
 */
declare class TorusGeometry extends Geometry {
    /**
     * Create a new TorusGeometry instance.
     *
     * By default, the constructor creates a torus in the XZ-plane with a tube radius of 0.2, a ring
     * radius of 0.3, 30 segments and 20 sides. The torus is created with UVs in the range of 0 to 1.
     *
     * @param {number} [opts.tubeRadius] - The radius of the tube forming the body of the torus.
     * Defaults to 0.2.
     * @param {number} [opts.ringRadius] - The radius from the centre of the torus to the centre of the
     * tube. Defaults to 0.3.
     * @param {number} [opts.sectorAngle] - The sector angle in degrees of the ring of the torus.
     * Defaults to 360.
     * @param {number} [opts.segments] - The number of radial divisions forming cross-sections of the
     * torus ring. Defaults to 30.
     * @param {number} [opts.sides] - The number of divisions around the tubular body of the torus ring.
     * Defaults to 20.
     * @param {boolean} [opts.calculateTangents] - Generate tangent information. Defaults to false.
     * @example
     * const geometry = new pc.TorusGeometry({
     *     tubeRadius: 1,
     *     ringRadius: 2,
     *     sectorAngle: 360,
     *     segments: 30,
     *     sides: 20
     * });
     */
    constructor(opts?: {
        tubeRadius?: number;
        ringRadius?: number;
        sectorAngle?: number;
        segments?: number;
        sides?: number;
        calculateTangents?: boolean;
    });
}

declare class ShaderGenerator {
    /**
     * Computes a hash code for a set of shader defines.
     *
     * @param {Map<string, string>} defines - The set of defines to be used in the shader.
     * @returns {number} The hash code of the defines.
     */
    static definesHash(defines: Map<string, string>): number;
}

/**
 * @import { ShaderGenerator } from './programs/shader-generator.js'
 */
/**
 * A class responsible for creation and caching of required shaders.
 * There is a two level cache. The first level generates the shader based on the provided options.
 * The second level processes this generated shader using processing options - in most cases
 * modifies it to support uniform buffers.
 *
 * @ignore
 */
declare class ProgramLibrary {
    constructor(device: any, standardMaterial: any);
    /**
     * A cache of shaders processed using processing options.
     *
     * @type {Map<string, Shader>}
     */
    processedCache: Map<string, Shader>;
    /**
     * A cache of shader definitions before processing.
     *
     * @type {Map<number, object>}
     */
    definitionsCache: Map<number, object>;
    /**
     * Named shader generators.
     *
     * @type {Map<string, ShaderGenerator>}
     */
    _generators: Map<string, ShaderGenerator>;
    _device: any;
    _isClearingCache: boolean;
    _precached: boolean;
    _programsCollection: any[];
    _defaultStdMatOption: StandardMaterialOptions;
    _defaultStdMatOptionMin: StandardMaterialOptions;
    destroy(): void;
    /** Registers a shader generator under the given name. */
    register(name: any, generator: any): void;
    /** Removes the shader generator registered under the given name. */
    unregister(name: any): void;
    /** Returns true if a shader generator is registered under the given name. */
    isRegistered(name: any): boolean;
    /**
     * Returns a generated shader definition for the specified options. The key is used to cache
     * the shader definition.
     *
     * @param {ShaderGenerator} generator - The generator to use.
     * @param {string} name - The unique name of the shader generator.
     * @param {number} key - A unique key representing the shader options.
     * @param {object} options - The shader options.
     * @returns {object} - The shader definition.
     */
    generateShaderDefinition(generator: ShaderGenerator, name: string, key: number, options: object): object;
    /** Returns the processed shader cached under the given key, if any. */
    getCachedShader(key: any): Shader;
    /** Stores a processed shader in the cache under the given key. */
    setCachedShader(key: any, shader: any): void;
    getProgram(name: any, options: any, processingOptions: any, userMaterialId: any): Shader;
    storeNewProgram(name: any, options: any): void;
    // NOTE(review): presumably logs the stored programs for debugging — confirm against source.
    dumpPrograms(): void;
    clearCache(): void;
    /**
     * Remove shader from the cache. This function does not destroy it, that is the responsibility
     * of the caller.
     *
     * @param {Shader} shader - The shader to be removed.
     */
    removeFromCache(shader: Shader): void;
    _getDefaultStdMatOptions(pass: any): StandardMaterialOptions;
    precompile(cache: any): void;
}

/**
 * Static shader chunk helpers.
 *
 * @ignore
 */
declare class ChunkUtils {
    // NOTE(review): presumably returns the shader decode function for the given texture
    // encoding (e.g. RGBE/RGBM/linear/sRGB) — confirm against implementation.
    static decodeFunc(encoding: any): any;
    // NOTE(review): presumably returns the matching shader encode function — confirm.
    static encodeFunc(encoding: any): any;
}

/**
 * @import { GraphicsDevice } from '../../platform/graphics/graphics-device.js'
 * @import { ShaderMaterial } from '../materials/shader-material.js'
 * @import { GSplatFormat } from './gsplat-format.js'
 */
/**
 * A container for procedural Gaussian Splat data. This class allows you to create splat data
 * programmatically using either a built-in format or a custom format with your own texture
 * streams and read code.
 *
 * A default format is provided via {@link GSplatFormat.createDefaultFormat} which uses float
 * textures for easy CPU population.
 *
 * @example
 * // Example 1: Using the default format (easy CPU population)
 * const format = pc.GSplatFormat.createDefaultFormat(device);
 * const container = new pc.GSplatContainer(device, 100, format);
 *
 * // Float format textures are straightforward to fill
 * const centerTex = container.getTexture('dataCenter');
 * const pixels = centerTex.lock();
 * // pixels is Float32Array, fill with [x, y, z, 0, x, y, z, 0, ...]
 * centerTex.unlock();
 *
 * // Set bounding box and centers (required for culling/sorting)
 * container.aabb = new pc.BoundingBox();
 * container.centers.set([x0, y0, z0, x1, y1, z1, ...]);  // xyz per splat
 *
 * // Add to scene
 * entity.addComponent('gsplat', { resource: container, unified: true });
 *
 * @example
 * // Example 2: Using a custom format
 * const format = new pc.GSplatFormat(device, [
 *     { name: 'data', format: pc.PIXELFORMAT_RGBA32F }
 * ], {
 *     // Shader code to read splat attributes from the texture
 *     readGLSL: `
 *         vec4 d = loadData();
 *         splatCenter = d.xyz;
 *         splatColor = vec4(1.0);
 *         splatScale = vec3(d.w);
 *         splatRotation = vec4(0, 0, 0, 1);
 *     `,
 *     readWGSL: `
 *         let d = loadData();
 *         splatCenter = d.xyz;
 *         splatColor = vec4f(1.0);
 *         splatScale = vec3f(d.w);
 *         splatRotation = vec4f(0, 0, 0, 1);
 *     `
 * });
 *
 * const container = new pc.GSplatContainer(device, 100, format);
 *
 * @category Graphics
 */
declare class GSplatContainer extends GSplatResourceBase {
    /**
     * Creates a new GSplatContainer instance.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {number} maxSplats - Maximum number of splats this container can hold.
     * @param {GSplatFormat} format - The format descriptor with streams and read code. Use
     * {@link GSplatFormat.createDefaultFormat} for the built-in format, or create a custom
     * {@link GSplatFormat}.
     */
    constructor(device: GraphicsDevice, maxSplats: number, format: GSplatFormat);
    /**
     * Maximum number of splats this container can hold.
     *
     * Internal note: We cannot (easily) implement resizing of the container, due to textures
     * needing to be constant for the world state in GsplatInfo. This is a non-issue for gpu based
     * sorting of course, but not for cpu based sorting. The workaround is to recreate the
     * container when the size changes.
     *
     * @type {number}
     * @private
     */
    private _maxSplats;
    /**
     * Current number of splats to render.
     *
     * @type {number}
     * @private
     */
    private _numSplats;
    /**
     * Maximum number of splats this container can hold.
     *
     * @type {number}
     */
    get maxSplats(): number;
    /**
     * Gets the number of splats to render.
     *
     * @type {number}
     */
    get numSplats(): number;
    /**
     * Updates the container after modifying texture data and centers. Call this after filling
     * data to signal that the container contents have changed.
     *
     * @param {number} [numSplats] - Number of splats to render. Defaults to current value.
     * Must be between 0 and {@link maxSplats}.
     * @param {boolean} [centersUpdated] - Whether the centers array was modified. Set to
     * false when only numSplats changes but center positions remain the same, to avoid the cost
     * of re-cloning centers in the sorter (can be significant for large containers).
     */
    update(numSplats?: number, centersUpdated?: boolean): void;
    /**
     * Configures material defines for this container.
     *
     * @param {Map<string, string>} defines - The defines map to configure.
     * @ignore
     */
    configureMaterialDefines(defines: Map<string, string>): void;
    /**
     * Configures a material to use this container's data.
     *
     * @param {ShaderMaterial} material - The material to configure.
     * @param {*} workBufferModifier - NOTE(review): undocumented upstream — presumably a hook
     * modifying the work buffer setup; confirm against implementation.
     * @param {*} formatDeclarations - NOTE(review): undocumented upstream — presumably shader
     * declarations for the format's streams; confirm against implementation.
     * @ignore
     */
    configureMaterial(material: ShaderMaterial, workBufferModifier: any, formatDeclarations: any): void;
}

/**
 * Configuration object specifying a data binding for GSplatProcessor.
 * Defines where to read from (source) or write to (destination) including
 * the resource, component for instance textures, and which streams to access.
 */
type GSplatProcessorBinding = {
    /**
     * - The resource to read from or write to.
     */
    resource?: GSplatResourceBase;
    /**
     * - Component for instance textures. If provided, the
     * resource is automatically resolved from the component.
     */
    component?: GSplatComponent;
    /**
     * - Names of streams to read/write. For destination, this is
     * required. For source, if omitted, all format streams except destination streams are used
     * automatically, providing getCenter/getColor/etc functions. Specify explicitly to limit
     * which streams are bound.
     */
    streams?: string[];
};
/**
 * @import { GraphicsDevice } from '../../platform/graphics/graphics-device.js'
 * @import { GSplatResourceBase } from '../../scene/gsplat/gsplat-resource-base.js'
 * @import { GSplatStreamDescriptor } from '../../scene/gsplat/gsplat-format.js'
 * @import { Texture as TextureType } from '../../platform/graphics/texture.js'
 * @import { StorageBuffer } from '../../platform/graphics/storage-buffer.js'
 * @import { GSplatComponent } from '../components/gsplat/component.js'
 */
/**
 * @typedef {object} GSplatProcessorBinding
 * Configuration object specifying a data binding for GSplatProcessor.
 * Defines where to read from (source) or write to (destination) including
 * the resource, component for instance textures, and which streams to access.
 * @property {GSplatResourceBase} [resource] - Resource to read/write from.
 * @property {GSplatComponent} [component] - Component for instance textures. If provided,
 * resource is automatically resolved from the component.
 * @property {string[]} [streams] - Names of streams to read/write. For destination, this is
 * required. For source, if omitted, all format streams except destination streams are used
 * automatically, providing getCenter/getColor/etc. functions. Specify explicitly to limit
 * which streams are bound.
 */
/**
 * GSplatProcessor enables GPU-based processing of Gaussian Splat data using custom shader code.
 * Gaussian Splats store per-splat attributes (position, rotation, scale, color, spherical harmonics)
 * in texture streams. This processor reads from source streams and writes results to destination
 * streams, enabling operations like painting, selection marking, or custom data transforms.
 *
 * Custom streams can be added to loaded gsplat resources via {@link GSplatFormat#addExtraStreams},
 * or you can create fully procedural splat data using {@link GSplatContainer}.
 *
 * The source and destination can reference the same resource or component, as long as the read and
 * write streams don't overlap (you cannot read and write the same stream in one pass).
 *
 * By default (when source streams are not specified), the processor provides access to the format's
 * built-in getCenter(), getRotation(), getScale(), and getColor() functions for reading splat data.
 * Note: getCenter() must be called first as it loads shared data used by the other functions.
 *
 * Custom uniforms can be passed to the shader via {@link setParameter}, including scalar values,
 * vectors, and additional textures for effects like brush patterns or lookup tables.
 *
 * The following built-in uniforms are available in processing shaders:
 * - `srcNumSplats` (uint) - Number of splats in source resource
 * - `dstNumSplats` (uint) - Number of splats in destination resource
 *
 * @example
 * // Create a processor that reads splat positions and writes to a customColor texture
 * const processor = new pc.GSplatProcessor(
 *     app.graphicsDevice,
 *     { component: entity.gsplat },  // source: all streams auto-bound
 *     { component: entity.gsplat, streams: ['customColor'] }, // destination: customColor stream only
 *     {
 *         processGLSL: `
 *             uniform vec4 uPaintSphere;
 *             uniform vec4 uPaintColor;
 *
 *             void process() {
 *                 vec3 center = getCenter();
 *                 float dist = distance(center, uPaintSphere.xyz);
 *                 if (dist < uPaintSphere.w) {
 *                     writeCustomColor(uPaintColor);
 *                 } else {
 *                     writeCustomColor(vec4(0.0));
 *                 }
 *             }
 *         `,
 *         processWGSL: `
 *             uniform uPaintSphere: vec4f;
 *             uniform uPaintColor: vec4f;
 *
 *             fn process() {
 *                 let center = getCenter();
 *                 let dist = distance(center, uniform.uPaintSphere.xyz);
 *                 if (dist < uniform.uPaintSphere.w) {
 *                     writeCustomColor(uniform.uPaintColor);
 *                 } else {
 *                     writeCustomColor(vec4f(0.0));
 *                 }
 *             }
 *         `
 *     }
 * );
 *
 * // Set uniforms and execute
 * processor.setParameter('uPaintSphere', [0, 1, 0, 0.5]);
 * processor.setParameter('uPaintColor', [1, 0, 0, 1]);
 * processor.process();
 *
 * @category Graphics
 */
declare class GSplatProcessor {
    /**
     * Creates a new GSplatProcessor instance.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {GSplatProcessorBinding} source - Source configuration specifying where to read from.
     * Can specify resource directly or component (for instance textures).
     * @param {GSplatProcessorBinding} destination - Destination configuration specifying where to write.
     * Can specify resource directly or component (for instance textures).
     * @param {object} options - Shader options for the processing logic.
     * @param {string} [options.processGLSL] - GLSL code at module scope. Must define a `void process()`
     * function that implements the processing logic. Can include uniform declarations and helper functions.
     * @param {string} [options.processWGSL] - WGSL code at module scope. Must define a `fn process()`
     * function that implements the processing logic. Can include uniform declarations and helper functions.
     */
    constructor(device: GraphicsDevice, source: GSplatProcessorBinding, destination: GSplatProcessorBinding, options: {
        processGLSL?: string;
        processWGSL?: string;
    });
    /**
     * @type {GraphicsDevice}
     * @private
     */
    private _device;
    /**
     * Source binding configuration.
     *
     * @type {GSplatProcessorBinding}
     * @private
     */
    private _source;
    /**
     * Destination binding configuration.
     *
     * @type {GSplatProcessorBinding}
     * @private
     */
    private _destination;
    /**
     * Source resource (resolved from binding).
     *
     * @type {GSplatResourceBase}
     * @private
     */
    private _srcResource;
    /**
     * Destination resource (resolved from binding).
     *
     * @type {GSplatResourceBase}
     * @private
     */
    private _dstResource;
    /**
     * @type {GSplatStreamDescriptor[]}
     * @private
     */
    private _dstStreamDescriptors;
    /**
     * Set of destination stream names for quick lookup.
     *
     * @type {Set<string>}
     * @private
     */
    private _dstStreamNames;
    /**
     * Whether to use all input streams (no specific source streams requested).
     *
     * @type {boolean}
     * @private
     */
    private _useAllInputStreams;
    /**
     * Pre-resolved source textures to bind during process().
     *
     * @type {Array<{name: string, texture: TextureType}>}
     * @private
     */
    private _srcTextures;
    /**
     * @type {RenderTarget|null}
     * @private
     */
    private _renderTarget;
    /**
     * @type {QuadRender|null}
     * @private
     */
    private _quadRender;
    /**
     * @type {RenderPassShaderQuad|null}
     * @private
     */
    private _renderPass;
    /**
     * Shader parameters set by the user.
     *
     * @type {Map<string, { scopeId: object, data: number|number[]|ArrayBufferView|TextureType|StorageBuffer }>}
     * @private
     */
    private _parameters;
    /**
     * The blend state to use when processing. Allows accumulation of results
     * (e.g., additive blending for painting). Defaults to no blending.
     *
     * @type {BlendState}
     */
    blendState: BlendState;
    /**
     * Destroys this processor and releases all resources.
     */
    destroy(): void;
    /**
     * Resolves a texture for the given stream name from a binding configuration.
     *
     * Resolution order:
     * 1. Component instance texture (if component provided and stream is instance-level)
     * 2. Resource texture
     *
     * @param {GSplatProcessorBinding} binding - The binding configuration.
     * @param {string} name - The stream name.
     * @param {GSplatResourceBase} resource - The resolved resource.
     * @returns {TextureType|null} The resolved texture, or null if not found.
     * @private
     */
    private _resolveTexture;
    /**
     * Creates the MRT render target for destination streams.
     *
     * @private
     */
    private _createRenderTarget;
    /**
     * Creates the shader and QuadRender for processing.
     *
     * @param {object} options - Shader options.
     * @private
     */
    private _createShader;
    /**
     * Sets a shader parameter for this processor. Parameters are applied during processing.
     *
     * @param {string} name - The name of the parameter (uniform name in shader).
     * @param {number|number[]|ArrayBufferView|TextureType|StorageBuffer} data - The value for the parameter.
     */
    setParameter(name: string, data: number | number[] | ArrayBufferView | Texture | StorageBuffer): void;
    /**
     * Gets a shader parameter value previously set with {@link setParameter}.
     *
     * @param {string} name - The name of the parameter.
     * @returns {number|number[]|ArrayBufferView|TextureType|StorageBuffer|undefined} The parameter value, or undefined if not set.
     */
    getParameter(name: string): number | number[] | ArrayBufferView | Texture | StorageBuffer | undefined;
    /**
     * Removes a shader parameter.
     *
     * @param {string} name - The name of the parameter to remove.
     */
    deleteParameter(name: string): void;
    /**
     * Executes the processing, reading from source streams and writing to destination streams.
     */
    process(): void;
}

declare class GSplatSogResource extends GSplatResourceBase {
    /**
     * Populates the parameters map with dequantization uniforms for SOG format.
     *
     * @private
     */
    private _populateParameters;
    configureMaterialDefines(defines: any): void;
}

/**
 * Callback used by {@link script.createLoadingScreen}.
 */
type CreateScreenCallback = (app: AppBase) => void;
declare namespace script {
    let app: any;
    /**
     * Handles the creation of the loading screen of the application. A script can subscribe to the
     * events of a {@link AppBase} to show a loading screen, progress bar etc. In order for
     * this to work you need to set the project's loading screen script to the script that calls
     * this method.
     *
     * @param {CreateScreenCallback} callback - A function which can set up and tear down a
     * customized loading screen.
     * @example
     * pc.script.createLoadingScreen((app) => {
     *     const showSplashScreen = () => {};
     *     const hideSplashScreen = () => {};
     *     const showProgress = (progress) => {};
     *     app.on("preload:start", showSplashScreen);
     *     app.on("preload:progress", showProgress);
     *     app.on("start", hideSplashScreen);
     * });
     */
    function createLoadingScreen(callback: CreateScreenCallback): void;
}

/**
 * Used to manage layout calculations for {@link LayoutGroupComponent}s.
 *
 * @ignore
 */
declare class LayoutCalculator {
    calculateLayout(elements: any, options: any): any;
}

/**
 * @import { AppBase } from './app-base.js'
 * @import { Entity } from './entity.js'
 */
/**
 * Create a Template resource from raw database data.
 */
declare class Template {
    /**
     * Create a new Template instance.
     *
     * @param {AppBase} app - The application.
     * @param {object} data - Asset data from the database.
     */
    constructor(app: AppBase, data: object);
    /**
     * @type {AppBase}
     * @private
     */
    private _app;
    /** @private */
    private _data;
    /**
     * @type {Entity|null}
     * @private
     */
    private _templateRoot;
    /**
     * Create an instance of this template.
     *
     * @returns {Entity} The root entity of the created instance.
     */
    instantiate(): Entity;
    /** @private */
    private _parseTemplate;
    set data(value: any);
    get data(): any;
}

/**
 * Implementation of {@link AnimBinder} for animating a skeleton in the graph-node hierarchy.
 *
 * @ignore
 */
declare class DefaultAnimBinder {
    static createAnimTarget(func: any, type: any, valueCount: any, node: any, propertyPath: any, componentType: any): AnimTarget;
    constructor(graph: any);
    graph: any;
    _mask: any;
    nodes: {};
    targetCache: {};
    visitedFallbackGraphPaths: {};
    nodeCounts: {};
    activeNodes: any[];
    handlers: {
        localPosition: (node: any) => AnimTarget;
        localRotation: (node: any) => AnimTarget;
        localScale: (node: any) => AnimTarget;
        weight: (node: any, weightName: any) => AnimTarget;
        materialTexture: (node: any, textureName: any) => AnimTarget;
    };
    _isPathInMask: (path: any, checkMaskValue: any) => boolean;
    _isPathActive(path: any): boolean;
    findNode(path: any): any;
    resolve(path: any): any;
    unresolve(path: any): void;
    update(deltaTime: any): void;
    assignMask(mask: any): boolean;
}

/**
 * An asset resource which represents an anim state graph. It can be loaded into an anim component using the {@link AnimComponent#loadStateGraph} method.
 *
 * ## Usage
 * Scripts can retrieve an AnimStateGraph instance from assets of type 'animstategraph'. An AnimStateGraph can then be loaded into an anim component as follows:
 * ```javascript
 * const animStateGraph = app.assets.get(ASSET_ID).resource;
 * const entity = new pc.Entity();
 * entity.addComponent('anim');
 * entity.anim.loadStateGraph(animStateGraph);
 * ```
 *
 * @category Animation
 */
declare class AnimStateGraph {
    /**
     * Create an AnimStateGraph instance from JSON data.
     *
     * @param {object} data - The JSON data to create the AnimStateGraph from.
     * @ignore
     */
    constructor(data: object);
    _layers: any;
    _parameters: {};
    get parameters(): {};
    get layers(): any;
}

/**
 * @import { AssetRegistry } from './asset-registry.js'
 */
/**
 * Used to load a group of assets and fires a callback when all assets are loaded.
 *
 * ```javascript
 * const assets = [
 *     new Asset('model', 'container', { url: `http://example.com/asset.glb` }),
 *     new Asset('styling', 'css', { url: `http://example.com/asset.css` })
 * ];
 * const assetListLoader = new AssetListLoader(assets, app.assets);
 * assetListLoader.load((err, failed) => {
 *     if (err) {
 *         console.error(`${failed.length} assets failed to load`);
 *     } else {
 *         console.log(`${assets.length} assets loaded`);
 *     }
 * });
 * ```
 *
 * @category Asset
 */
declare class AssetListLoader extends EventHandler {
    /**
     * Create a new AssetListLoader using a list of assets to load and the asset registry used to
     * load and manage them.
     *
     * @param {Asset[]|number[]} assetList - An array of {@link Asset} objects to load or an array
     * of Asset IDs to load.
     * @param {AssetRegistry} assetRegistry - The application's asset registry.
     * @example
     * const assetListLoader = new pc.AssetListLoader([
     *     new pc.Asset("texture1", "texture", { url: 'http://example.com/my/assets/here/texture1.png') }),
     *     new pc.Asset("texture2", "texture", { url: 'http://example.com/my/assets/here/texture2.png') })
     * ], app.assets);
     */
    constructor(assetList: Asset[] | number[], assetRegistry: AssetRegistry);
    /**
     * @type {Set<Asset>}
     * @private
     */
    private _assets;
    /**
     * @type {Set<Asset>}
     * @private
     */
    private _loadingAssets;
    /**
     * @type {Set<Asset>}
     * @private
     */
    private _waitingAssets;
    /** @private */
    private _loading;
    /** @private */
    private _loaded;
    /**
     * Array of assets that failed to load.
     *
     * @type {Asset[]}
     * @private
     */
    private _failed;
    _registry: AssetRegistry;
    /**
     * Removes all references to this asset list loader.
     */
    destroy(): void;
    _assetHasDependencies(asset: any): any;
    /**
     * Start loading asset list and call `done()` when all assets have loaded or failed to load.
     *
     * @param {Function} done - Callback called when all assets in the list are loaded. Passed
     * `(err, failed)` where `err` is `undefined` if no errors are encountered and failed contains
     * an array of assets that failed to load.
     * @param {object} [scope] - Scope to use when calling callback.
     */
    load(done: Function, scope?: object): void;
    _callback: Function;
    _scope: any;
    /**
     * Sets a callback which will be called when all assets in the list have been loaded.
     *
     * @param {Function} done - Callback called when all assets in the list are loaded.
     * @param {object} [scope] - Scope to use when calling callback.
     */
    ready(done: Function, scope?: object): void;
    _loadingComplete(): void;
    _onLoad(asset: any): void;
    _onError(err: any, asset: any): void;
    _onAddAsset(asset: any): void;
    _waitForAsset(assetId: any): void;
}

/**
 * @import { Asset } from './asset.js'
 * @import { AssetRegistry } from './asset-registry.js'
 * @import { EventHandle } from '../../core/event-handle.js'
 */
/**
 * An object that manages the case where an object holds a reference to an asset and needs to be
 * notified when changes occur in the asset. e.g. notifications include load, add and remove
 * events.
 *
 * @category Asset
 */
declare class AssetReference {
    /**
     * Create a new AssetReference instance.
     *
     * @param {string} propertyName - The name of the property that the asset is stored under,
     * passed into callbacks to enable updating.
     * @param {Asset|object} parent - The parent object that contains the asset reference, passed
     * into callbacks to enable updating. Currently an asset, but could be component or other.
     * @param {AssetRegistry} registry - The asset registry that stores all assets.
     * @param {object} callbacks - A set of functions called when the asset state changes: load,
     * add, remove.
     * @param {object} [callbacks.load] - The function called when the asset loads
     * load(propertyName, parent, asset).
     * @param {object} [callbacks.add] - The function called when the asset is added to the
     * registry add(propertyName, parent, asset).
     * @param {object} [callbacks.remove] - The function called when the asset is remove from the
     * registry remove(propertyName, parent, asset).
     * @param {object} [callbacks.unload] - The function called when the asset is unloaded
     * unload(propertyName, parent, asset).
     * @param {object} [scope] - The scope to call the callbacks in.
     * @example
     * const reference = new pc.AssetReference('textureAsset', this, this.app.assets, {
     *     load: this.onTextureAssetLoad,
     *     add: this.onTextureAssetAdd,
     *     remove: this.onTextureAssetRemove
     * }, this);
     * reference.id = this.textureAsset.id;
     */
    constructor(propertyName: string, parent: Asset | object, registry: AssetRegistry, callbacks: {
        load?: object;
        add?: object;
        remove?: object;
        unload?: object;
    }, scope?: object);
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLoadById;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtUnloadById;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtAddById;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtRemoveById;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtLoadByUrl;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtAddByUrl;
    /**
     * @type {EventHandle|null}
     * @private
     */
    private _evtRemoveByUrl;
    propertyName: string;
    parent: any;
    _scope: any;
    _registry: AssetRegistry;
    /**
     * Sets the asset id which this references. One of either id or url must be set to
     * initialize an asset reference.
     *
     * @type {number}
     */
    set id(value: number);
    /**
     * Gets the asset id which this references.
     *
     * @type {number}
     */
    get id(): number;
    /**
     * Sets the asset url which this references. One of either id or url must be called to
     * initialize an asset reference.
     *
     * @type {string|null}
     */
    set url(value: string | null);
    /**
     * Gets the asset url which this references.
     *
     * @type {string|null}
     */
    get url(): string | null;
    asset: any;
    _onAssetLoad: any;
    _onAssetAdd: any;
    _onAssetRemove: any;
    _onAssetUnload: any;
    _id: number;
    _url: string;
    _bind(): void;
    _unbind(): void;
    _onLoad(asset: any): void;
    _onAdd(asset: any): void;
    _onRemove(asset: any): void;
    _onUnload(asset: any): void;
}

/**
 * Represents the resource of a Bundle Asset, which contains an index that maps URLs to DataViews.
 *
 * @ignore
 */
declare class Bundle extends EventHandler {
    /**
     * Fired when a file has been added to a Bundle.
     *
     * @event
     * @example
     * bundle.on("add", (url, data) => {
     *     console.log("file added: " + url);
     * });
     */
    static EVENT_ADD: string;
    /**
     * Fired when all files of a Bundle has been loaded.
     *
     * @event
     * @example
     * bundle.on("load", () => {
     *     console.log("All Bundle files has been loaded");
     * });
     */
    static EVENT_LOAD: string;
    /**
     * Index of file url to to DataView.
     * @type {Map<string, DataView>}
     * @private
     */
    private _index;
    /**
     * If Bundle has all files loaded.
     * @type {boolean}
     * @private
     */
    private _loaded;
    /**
     * Add file to a Bundle.
     *
     * @param {string} url - A url of a file.
     * @param {DataView} data - A DataView of a file.
     * @ignore
     */
    addFile(url: string, data: DataView): void;
    /**
     * Returns true if the specified URL exists in the loaded bundle.
     *
     * @param {string} url - The original file URL. Make sure you have called decodeURIComponent on
     * the URL first.
     * @returns {boolean} True of false.
     */
    has(url: string): boolean;
    /**
     * Returns a DataView for the specified URL.
     *
     * @param {string} url - The original file URL. Make sure you have called decodeURIComponent on
     * the URL first.
     * @returns {DataView|null} A DataView.
     */
    get(url: string): DataView | null;
    /**
     * Destroys the bundle.
     */
    destroy(): void;
    /**
     * True if all files of a Bundle are loaded.
     * @type {boolean}
     */
    set loaded(value: boolean);
    get loaded(): boolean;
}

/**
 * Picker object used to select mesh instances from screen coordinates. It can also optionally
 * capture depth information to determine world positions of picked points.
 *
 * The picker works by rendering mesh instances to an offscreen render target with unique IDs
 * encoded as colors. When queried, it reads back the pixel data to identify which mesh instance
 * was at the specified screen coordinates. If depth picking is enabled, it also captures depth
 * values to compute world positions.
 *
 * **Main API methods:**
 * - {@link Picker#prepare} - Renders the pick buffer (call once per frame before picking)
 * - {@link Picker#getSelectionAsync} - Get mesh instances in a screen area
 * - {@link Picker#getWorldPointAsync} - Get world position at screen coordinates (requires depth)
 *
 * **Performance considerations:**
 * The picker resolution can be set lower than the screen resolution for better performance,
 * though this reduces picking precision and may miss small objects.
 *
 * @example
 * // Create a picker with depth picking enabled at quarter resolution
 * const picker = new pc.Picker(app, canvas.width * 0.25, canvas.height * 0.25, true);
 *
 * // In your update loop, prepare the picker
 * picker.resize(canvas.width * 0.25, canvas.height * 0.25);
 * picker.prepare(camera, scene);
 *
 * // Pick mesh instances in an area
 * picker.getSelectionAsync(x, y, width, height).then((meshInstances) => {
 *     meshInstances.forEach((meshInstance) => {
 *         console.log('Picked:', meshInstance.node.name);
 *     });
 * });
 *
 * // Pick world position (requires depth enabled)
 * picker.getWorldPointAsync(x, y).then((worldPoint) => {
 *     if (worldPoint) {
 *         console.log(worldPoint);
 *     }
 * });
 *
 * @see {@link http://playcanvas.github.io/#/graphics/area-picker|Area Picker Example}
 * @see {@link https://playcanvas.github.io/#gaussian-splatting/picking|Gaussian Splatting Picking Example}
 *
 * @category Graphics
 */
declare class Picker {
    /**
     * Create a new Picker instance.
     *
     * @param {AppBase} app - The application managing this picker instance.
     * @param {number} width - The width of the pick buffer in pixels.
     * @param {number} height - The height of the pick buffer in pixels.
     * @param {boolean} [depth] - Whether to enable depth picking. When enabled, depth
     * information is captured alongside mesh IDs using MRT. Defaults to false.
     */
    constructor(app: AppBase, width: number, height: number, depth?: boolean);
    /**
     * @type {import('../../platform/graphics/graphics-device.js').GraphicsDevice}
     * @private
     */
    private device;
    /**
     * @type {RenderPassPicker}
     * @private
     */
    private renderPass;
    /**
     * @type {boolean}
     * @private
     */
    private depth;
    /** @type {number} */
    width: number;
    /** @type {number} */
    height: number;
    /**
     * Internal render target.
     *
     * @type {RenderTarget|null}
     * @private
     */
    private renderTarget;
    /**
     * Color buffer texture for pick IDs.
     *
     * @type {Texture|null}
     * @private
     */
    private colorBuffer;
    /**
     * Optional depth buffer texture for depth picking.
     *
     * @type {Texture|null}
     * @private
     */
    private depthBuffer;
    /**
     * Internal render target for reading the depth buffer.
     *
     * @type {RenderTarget|null}
     * @private
     */
    private renderTargetDepth;
    /**
     * Mapping table from ids to MeshInstances or GSplatComponents.
     *
     * @type {Map<number, MeshInstance | GSplatComponent>}
     * @private
     */
    private mapping;
    /**
     * When the device is destroyed, this allows us to ignore async results.
     *
     * @type {boolean}
     * @private
     */
    private deviceValid;
    /**
     * Frees resources associated with this picker.
     */
    destroy(): void;
    /**
     * Return the list of mesh instances selected by the specified rectangle in the previously
     * prepared pick buffer. The rectangle using top-left coordinate system.
     *
     * Note: This function is not supported on WebGPU. Use {@link Picker#getSelectionAsync} instead.
     * Note: This function is blocks the main thread while reading pixels from GPU memory. It's
     * recommended to use {@link Picker#getSelectionAsync} instead.
     *
     * @param {number} x - The left edge of the rectangle.
     * @param {number} y - The top edge of the rectangle.
     * @param {number} [width] - The width of the rectangle. Defaults to 1.
     * @param {number} [height] - The height of the rectangle. Defaults to 1.
     * @returns {(MeshInstance | GSplatComponent)[]} An array of mesh instances or gsplat components
     * that are in the selection.
     * @example
     * // Get the selection at the point (10,20)
     * const selection = picker.getSelection(10, 20);
     * @example
     * // Get all models in rectangle with corners at (10,20) and (20,40)
     * const selection = picker.getSelection(10, 20, 10, 20);
     */
    getSelection(x: number, y: number, width?: number, height?: number): (MeshInstance | GSplatComponent)[];
    /**
     * Return the list of mesh instances selected by the specified rectangle in the previously
     * prepared pick buffer. The rectangle uses top-left coordinate system.
     *
     * This method is asynchronous and does not block the execution.
     *
     * @param {number} x - The left edge of the rectangle.
     * @param {number} y - The top edge of the rectangle.
     * @param {number} [width] - The width of the rectangle. Defaults to 1.
     * @param {number} [height] - The height of the rectangle. Defaults to 1.
     * @returns {Promise<(MeshInstance | GSplatComponent)[]>} - Promise that resolves with an
     * array of mesh instances or gsplat components that are in the selection.
     * @example
     * // Get the mesh instances at the rectangle with start at (10,20) and size of (5,5)
     * picker.getSelectionAsync(10, 20, 5, 5).then((meshInstances) => {
     *    console.log(meshInstances);
     * });
     */
    getSelectionAsync(x: number, y: number, width?: number, height?: number): Promise<(MeshInstance | GSplatComponent)[]>;
    /**
     * Helper method to read pixels from a texture asynchronously.
     *
     * @param {Texture} texture - The texture to read from.
     * @param {number} x - The x coordinate.
     * @param {number} y - The y coordinate.
     * @param {number} width - The width of the rectangle.
     * @param {number} height - The height of the rectangle.
     * @param {RenderTarget} renderTarget - The render target to use for reading.
     * @returns {Promise<Uint8Array>} Promise resolving to the pixel data.
     * @private
     */
    private _readTexture;
    /**
     * Return the world position of the mesh instance picked at the specified screen coordinates.
     *
     * @param {number} x - The x coordinate of the pixel to pick.
     * @param {number} y - The y coordinate of the pixel to pick.
     * @returns {Promise<Vec3|null>} Promise that resolves with the world position of the picked point,
     * or null if no depth is available or nothing was picked.
     * @example
     * // Get the world position at screen coordinates (100, 50)
     * picker.getWorldPointAsync(100, 50).then((worldPoint) => {
     *     if (worldPoint) {
     *         console.log('World position:', worldPoint);
     *         // Use the world position
     *     } else {
     *         console.log('No object at this position');
     *     }
     * });
     */
    getWorldPointAsync(x: number, y: number): Promise<Vec3 | null>;
    /**
     * Return the depth value of the mesh instance picked at the specified screen coordinates.
     *
     * @param {number} x - The x coordinate of the pixel to pick.
     * @param {number} y - The y coordinate of the pixel to pick.
     * @returns {Promise<number|null>} Promise that resolves with the linear normalized depth value
     * of the picked point (0 = near plane, 1 = far plane), or null if depth picking is not enabled
     * or no object was picked.
     * @ignore
     */
    getPointDepthAsync(x: number, y: number): Promise<number | null>;
    sanitizeRect(x: any, y: any, width: any, height: any): Vec4;
    decodePixels(pixels: any, mapping: any): any[];
    createTexture(name: any): Texture;
    allocateRenderTarget(): void;
    releaseRenderTarget(): void;
    /**
     * Primes the pick buffer with a rendering of the specified models from the point of view of
     * the supplied camera. Once the pick buffer has been prepared, {@link Picker#getSelection} can
     * be called multiple times on the same picker object. Therefore, if the models or camera do
     * not change in any way, {@link Picker#prepare} does not need to be called again.
     *
     * @param {CameraComponent} camera - The camera component used to render the scene.
     * @param {Scene} scene - The scene containing the pickable mesh instances.
     * @param {Layer[]} [layers] - Layers from which objects will be picked. If not supplied, all
     * layers of the specified camera will be used.
     */
    prepare(camera: CameraComponent, scene: Scene, layers?: Layer[]): void;
    /**
     * Sets the resolution of the pick buffer. The pick buffer resolution does not need to match
     * the resolution of the corresponding frame buffer used for general rendering of the 3D scene.
     * However, the lower the resolution of the pick buffer, the less accurate the selection
     * results returned by {@link Picker#getSelection}. On the other hand, smaller pick buffers
     * will yield greater performance, so there is a trade off.
     *
     * @param {number} width - The width of the pick buffer in pixels.
     * @param {number} height - The height of the pick buffer in pixels.
     */
    resize(width: number, height: number): void;
}

/**
 * A render pass implementing rendering of mesh instances into a pick buffer.
 *
 * @ignore
 */
declare class RenderPassPicker extends RenderPass {
    /**
     * Create a new RenderPassPicker.
     *
     * @param {*} device - The graphics device used for rendering.
     * @param {*} renderer - The renderer used to draw the mesh instances.
     */
    constructor(device: any, renderer: any);
    /** @type {BindGroup[]} */
    viewBindGroups: BindGroup[];
    /** @type {BlendState} */
    blendState: BlendState;
    /** @type {CameraComponent} */
    camera: CameraComponent;
    /** @type {Scene} */
    scene: Scene;
    /** @type {Layer[]} */
    layers: Layer[];
    /** @type {Map<number, MeshInstance | GSplatComponent>} */
    mapping: Map<number, MeshInstance | GSplatComponent>;
    /** @type {boolean} */
    depth: boolean;
    /** @type {number[]} */
    _qualifiedLayerIndices: number[];
    /** @type {Map<number, MeshInstance|null>} */
    _pickMeshInstances: Map<number, MeshInstance | null>;
    renderer: any;
    /**
     * @param {CameraComponent} camera - The camera component used for picking.
     * @param {Scene} scene - The scene to pick from.
     * @param {Layer[]} layers - The layers to pick from.
     * @param {Map<number, MeshInstance | GSplatComponent>} mapping - Map to store ID to object mappings.
     * @param {boolean} depth - Whether to render depth information.
     */
    update(camera: CameraComponent, scene: Scene, layers: Layer[], mapping: Map<number, MeshInstance | GSplatComponent>, depth: boolean): void;
    // NOTE(review): semantics not visible in this file — presumably a fallback world-clusters
    // object used when no lighting clusters apply; confirm against the renderer implementation.
    emptyWorldClusters: any;
}

/**
 * Initialize the Basis transcode worker.
 *
 * @param {object} [config] - The Basis configuration.
 * @param {string} [config.glueUrl] - URL of glue script.
 * @param {string} [config.wasmUrl] - URL of the wasm module.
 * @param {string} [config.fallbackUrl] - URL of the fallback script to use when wasm modules
 * aren't supported.
 * @param {boolean} [config.lazyInit] - Wait for first transcode request before initializing Basis
 * (default is false). Otherwise initialize Basis immediately.
 * @param {number} [config.numWorkers] - Number of workers to use for transcoding (default is 1).
 * While it is possible to improve transcode performance using multiple workers, this will likely
 * depend on the runtime platform. For example, desktop will likely benefit from more workers
 * compared to mobile. Also keep in mind that it takes time to initialize workers and increasing
 * this value could impact application startup time. Make sure to test your application performance
 * on all target platforms when changing this parameter.
 * @param {boolean} [config.eagerWorkers] - Use eager workers (default is true). When enabled, jobs
 * are assigned to workers immediately, independent of their work load. This can result in
 * unbalanced workloads, however there is no delay between jobs. If disabled, new jobs are assigned
 * to workers only when their previous job has completed. This will result in balanced workloads
 * across workers, however workers can be idle for a short time between jobs.
 * @param {string[]} [config.rgbPriority] - Array of texture compression formats in priority order
 * for textures without alpha. The supported compressed formats are: 'astc', 'atc', 'dxt', 'etc1',
 * 'etc2', 'pvr'.
 * @param {string[]} [config.rgbaPriority] - Array of texture compression formats in priority order
 * for textures with alpha. The supported compressed formats are: 'astc', 'atc', 'dxt', 'etc1',
 * 'etc2', 'pvr'.
 * @param {number} [config.maxRetries] - Number of http load retry attempts. Defaults to 5.
 * @returns {void}
 */
declare function basisInitialize(config?: {
    glueUrl?: string;
    wasmUrl?: string;
    fallbackUrl?: string;
    lazyInit?: boolean;
    numWorkers?: number;
    eagerWorkers?: boolean;
    rgbPriority?: string[];
    rgbaPriority?: string[];
    maxRetries?: number;
}): void;

/**
 * Resource handler used for loading {@link AnimClip} resources.
 *
 * @ignore
 */
declare class AnimClipHandler extends ResourceHandler {
    constructor(app: any);
    /** Begins loading the resource at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /** Converts raw clip data into an {@link AnimTrack}. */
    open(url: any, data: any): AnimTrack;
}

/**
 * Resource handler used for loading {@link AnimStateGraph} resources.
 *
 * @ignore
 */
declare class AnimStateGraphHandler extends ResourceHandler {
    constructor(app: any);
    /** Begins loading the resource at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /** Converts raw state graph data into an {@link AnimStateGraph}. */
    open(url: any, data: any): AnimStateGraph;
}

/**
 * @import { AppBase } from '../app-base.js'
 */
/**
 * Resource handler used for loading {@link Animation} resources.
 *
 * @category Animation
 */
declare class AnimationHandler extends ResourceHandler {
    /**
     * @param {AppBase} app - The running {@link AppBase}.
     * @ignore
     */
    constructor(app: AppBase);
    device: GraphicsDevice;
    assets: AssetRegistry;
    /** Begins loading animation data from `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any, asset: any): void;
    /** Converts raw animation data into a runtime animation resource. */
    open(url: any, data: any, asset: any): any;
    /** Parses version 3 of the animation data format into an {@link Animation}. */
    _parseAnimationV3(data: any): Animation;
    /** Parses version 4 of the animation data format into an {@link Animation}. */
    _parseAnimationV4(data: any): Animation;
}

/**
 * Resource handler used for loading {@link Sound} resources.
 *
 * @category Sound
 */
declare class AudioHandler extends ResourceHandler {
    /**
     * Create a new AudioHandler instance.
     *
     * @param {AppBase} app - The running {@link AppBase}.
     * @ignore
     */
    constructor(app: AppBase);
    manager: SoundManager;
    /** Returns whether the audio format implied by `url` is supported on this platform. */
    _isSupported(url: any): boolean;
    /** Begins loading the audio resource at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /**
     * Loads an audio asset using an AudioContext by URL and calls success or error with the
     * created resource or error respectively.
     *
     * @param {string} url - The url of the audio asset.
     * @param {Function} success - Function to be called if the audio asset was loaded or if we
     * just want to continue without errors even if the audio is not loaded.
     * @param {Function} error - Function to be called if there was an error while loading the
     * audio asset.
     * @private
     */
    private _createSound;
}

/**
 * Resource handler that loads raw binary resources as {@link ArrayBuffer} data.
 *
 * @ignore
 */
declare class BinaryHandler extends ResourceHandler {
    constructor(app: any);
    /** Begins loading the resource at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /**
     * Parses raw DataView and returns ArrayBuffer.
     *
     * @param {DataView} data - The raw data as a DataView
     * @returns {ArrayBuffer} The parsed resource data.
     */
    openBinary(data: DataView): ArrayBuffer;
}

/**
 * @import { AppBase } from '../app-base.js'
 */
/**
 * Loads Bundle Assets.
 *
 * @ignore
 */
declare class BundleHandler extends ResourceHandler {
    /**
     * Create a new BundleHandler instance.
     *
     * @param {AppBase} app - The running {@link AppBase}.
     */
    constructor(app: AppBase);
    _assets: AssetRegistry;
    /** Fetches `url`, retrying on failure up to `retries` times. */
    _fetchRetries(url: any, options: any, retries?: number): Promise<any>;
    /** Begins loading the bundle at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /**
     * Open the bundle.
     *
     * @param {string} url - The URL of the resource to open.
     * @param {Bundle} bundle - Bundle to open.
     * @returns {Bundle} The bundle.
     */
    open(url: string, bundle: Bundle): Bundle;
}

/**
 * Resource handler that loads CSS resources as plain text.
 *
 * @ignore
 */
declare class CssHandler extends ResourceHandler {
    constructor(app: any);
    /**
     * TextDecoder for decoding binary data.
     *
     * @type {TextDecoder|null}
     * @private
     */
    private decoder;
    /** Begins loading the resource at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /**
     * Parses raw DataView and returns string.
     *
     * @param {DataView} data - The raw data as a DataView
     * @returns {string} The parsed resource data.
     */
    openBinary(data: DataView): string;
}

/**
 * @import { AppBase } from '../app-base.js'
 */
/**
 * Resource handler used for loading cubemap {@link Texture} resources.
 *
 * @category Graphics
 */
declare class CubemapHandler extends ResourceHandler {
    /**
     * Create a new CubemapHandler instance.
     *
     * @param {AppBase} app - The running {@link AppBase}.
     * @ignore
     */
    constructor(app: AppBase);
    _device: GraphicsDevice;
    _registry: AssetRegistry;
    _loader: ResourceLoader;
    /** Begins loading the cubemap at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any, asset: any): void;
    /** Converts raw cubemap data into a runtime resource. */
    open(url: any, data: any, asset: any): any;
    /** Post-processing step invoked with the asset and the asset registry. */
    patch(asset: any, registry: any): void;
    // Internal helpers for tracking the face/texture assets that make up a cubemap.
    getAssetIds(cubemapAsset: any): any[];
    compareAssetIds(assetIdA: any, assetIdB: any): boolean;
    update(cubemapAsset: any, assetIds: any, assets: any): void;
    cmpArrays(arr1: any, arr2: any): boolean;
    resolveId(value: any): any;
    loadAssets(cubemapAsset: any, callback: any): void;
}

/**
 * Resource handler for folder assets (no data to load).
 *
 * @ignore
 */
declare class FolderHandler extends ResourceHandler {
    constructor(app: any);
    /** Begins loading the resource at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
}

/**
 * Resource handler used for loading {@link Font} resources.
 *
 * @category User Interface
 */
declare class FontHandler extends ResourceHandler {
    /**
     * Create a new FontHandler instance.
     *
     * @param {AppBase} app - The running {@link AppBase}.
     * @ignore
     */
    constructor(app: AppBase);
    _loader: ResourceLoader;
    /** Begins loading the font at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any, asset: any): void;
    /** Loads the font's texture pages referenced by the font data. */
    _loadTextures(url: any, data: any, callback: any): void;
    /** Converts raw font data into a {@link Font}. */
    open(url: any, data: any, asset: any): Font;
    /** Post-processing step invoked with the asset and the asset registry. */
    patch(asset: any, assets: any): void;
}

/**
 * @import { AppBase } from '../app-base.js'
 */
/**
 * Resource handler used for loading entity hierarchy resources.
 *
 * @ignore
 */
declare class HierarchyHandler extends ResourceHandler {
    /**
     * @param {AppBase} app - The running {@link AppBase}.
     */
    constructor(app: AppBase);
    /** Begins loading the resource at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /** Converts raw hierarchy data into an {@link Entity}. */
    open(url: any, data: any): Entity;
}

/**
 * Resource handler that loads HTML resources as plain text.
 *
 * @ignore
 */
declare class HtmlHandler extends ResourceHandler {
    constructor(app: any);
    /**
     * TextDecoder for decoding binary data.
     *
     * @type {TextDecoder|null}
     * @private
     */
    private decoder;
    /** Begins loading the resource at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /**
     * Parses raw DataView and returns string.
     *
     * @param {DataView} data - The raw data as a DataView
     * @returns {string} The parsed resource data.
     */
    openBinary(data: DataView): string;
}

/**
 * Resource handler that loads JSON resources as parsed objects.
 *
 * @ignore
 */
declare class JsonHandler extends ResourceHandler {
    constructor(app: any);
    /**
     * TextDecoder for decoding binary data.
     *
     * @type {TextDecoder|null}
     * @private
     */
    private decoder;
    /** Begins loading the resource at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /**
     * Parses raw DataView and returns the parsed JSON object.
     *
     * @param {DataView} data - The raw data as a DataView
     * @returns {object} The parsed resource data.
     */
    openBinary(data: DataView): object;
}

/**
 * Validates raw {@link StandardMaterial} data, optionally removing invalid properties.
 *
 * @ignore
 */
declare class StandardMaterialValidator {
    // When true, invalid properties are removed from the data during validation.
    removeInvalid: boolean;
    // Set by validate(); true when the data passed validation.
    valid: boolean;
    // Per-property validators for enumerated material values.
    enumValidators: {
        occludeSpecular: (value: any) => boolean;
        cull: (value: any) => boolean;
        blendType: (value: any) => boolean;
        depthFunc: (value: any) => boolean;
    };
    /** Records that `key` in `data` failed validation. */
    setInvalid(key: any, data: any): void;
    /** Validates the material data, returning true when it is valid. */
    validate(data: any): boolean;
    /** Builds a validator function that accepts only the given set of values. */
    _createEnumValidator(values: any): (value: any) => boolean;
}

/**
 * Convert incoming JSON data into a {@link StandardMaterial}.
 *
 * @ignore
 */
declare class JsonStandardMaterialParser {
    _validator: StandardMaterialValidator;
    /** Parses the raw JSON input into a {@link StandardMaterial}. */
    parse(input: any): StandardMaterial;
    /**
     * Initialize material properties from the material data block e.g. Loading from server.
     *
     * @param {StandardMaterial} material - The material to be initialized.
     * @param {object} data - The data block that is used to initialize.
     */
    initialize(material: StandardMaterial, data: object): void;
    /** Migrates older material data formats to the current format. */
    migrate(data: any): any;
    /** Validates the material data via the internal validator. */
    _validate(data: any): any;
}

/**
 * Resource handler used for loading {@link Material} resources.
 *
 * @category Graphics
 */
declare class MaterialHandler extends ResourceHandler {
    /**
     * Create a new MaterialHandler instance.
     *
     * @param {AppBase} app - The running {@link AppBase}.
     * @ignore
     */
    constructor(app: AppBase);
    _assets: AssetRegistry;
    _device: GraphicsDevice;
    _parser: JsonStandardMaterialParser;
    /** Begins loading the material at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /** Converts raw material data into a {@link StandardMaterial}. */
    open(url: any, data: any): StandardMaterial;
    /** Post-processing step invoked with the asset and the asset registry. */
    patch(asset: any, assets: any): void;
    // Internal helpers that bind texture/cubemap assets to material parameters and react
    // to asset load/add/remove/unload events.
    _onAssetUnload(asset: any): void;
    _assignTexture(parameterName: any, materialAsset: any, texture: any): void;
    _getPlaceholderTexture(parameterName: any): any;
    _assignPlaceholderTexture(parameterName: any, materialAsset: any): void;
    _onTextureLoad(parameterName: any, materialAsset: any, textureAsset: any): void;
    _onTextureAdd(parameterName: any, materialAsset: any, textureAsset: any): void;
    _onTextureRemoveOrUnload(parameterName: any, materialAsset: any, textureAsset: any): void;
    _assignCubemap(parameterName: any, materialAsset: any, textures: any): void;
    _onCubemapLoad(parameterName: any, materialAsset: any, cubemapAsset: any): void;
    _onCubemapAdd(parameterName: any, materialAsset: any, cubemapAsset: any): void;
    _onCubemapRemoveOrUnload(parameterName: any, materialAsset: any, cubemapAsset: any): void;
    _bindAndAssignAssets(materialAsset: any, assets: any): void;
}

/**
 * Callback used by {@link ModelHandler#addParser} to decide on which parser to use. Returns true
 * if the associated parser should handle the given data.
 */
type AddParserCallback = (url: string, data: object) => boolean;
/**
 * @import { AppBase } from '../app-base.js'
 */
/**
 * @callback AddParserCallback
 * Callback used by {@link ModelHandler#addParser} to decide on which parser to use.
 * @param {string} url - The resource url.
 * @param {object} data - The raw model data.
 * @returns {boolean} Return true if this parser should be used to parse the data into a
 * {@link Model}.
 */
/**
 * Resource handler used for loading {@link Model} resources.
 *
 * @category Graphics
 */
declare class ModelHandler extends ResourceHandler {
    /**
     * Create a new ModelHandler instance.
     *
     * @param {AppBase} app - The running {@link AppBase}.
     * @ignore
     */
    constructor(app: AppBase);
    // Registered parser/decider pairs consulted in order by open().
    _parsers: any[];
    device: GraphicsDevice;
    assets: AssetRegistry;
    defaultMaterial: StandardMaterial;
    /** Begins loading the model at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any, asset: any): void;
    /** Converts raw model data into a runtime model using the first matching parser. */
    open(url: any, data: any): any;
    /** Post-processing step invoked with the asset and the asset registry. */
    patch(asset: any, assets: any): void;
    /**
     * Add a parser that converts raw data into a {@link Model}. Default parser is for JSON models.
     *
     * @param {object} parser - See JsonModelParser for example.
     * @param {AddParserCallback} decider - Function that decides on which parser to use. Function
     * should take (url, data) arguments and return true if this parser should be used to parse the
     * data into a {@link Model}. The first parser to return true is used.
     */
    addParser(parser: object, decider: AddParserCallback): void;
}

/**
 * Parser for bundled SOG gaussian-splat resources.
 *
 * @ignore
 */
declare class SogBundleParser {
    /**
     * @param {AppBase} app - The app instance.
     * @param {number} [maxRetries] - Maximum amount of retries.
     */
    constructor(app: any, maxRetries?: number);
    /** @type {AppBase} */
    app: AppBase;
    /** @type {number} */
    maxRetries: number;
    /**
     * @param {object} url - The URL of the resource to load.
     * @param {string} url.load - The URL to use for loading the resource.
     * @param {string} url.original - The original URL useful for identifying the resource type.
     * @param {ResourceHandlerCallback} callback - The callback used when
     * the resource is loaded or an error occurs.
     * @param {Asset} asset - Container asset.
     */
    load(url: {
        load: string;
        original: string;
    }, callback: ResourceHandlerCallback, asset: Asset): Promise<void>;
}

/**
 * @import { AppBase } from '../app-base.js'
 * @import { ResourceHandlerCallback } from '../handlers/handler.js'
 */
/**
 * Parser for SOG gaussian-splat resources described by a JSON manifest.
 *
 * @ignore
 */
declare class SogParser {
    /**
     * @param {AppBase} app - The app instance.
     * @param {number} maxRetries - Maximum amount of retries.
     */
    constructor(app: AppBase, maxRetries: number);
    /** @type {AppBase} */
    app: AppBase;
    /** @type {number} */
    maxRetries: number;
    /**
     * Checks if loading should be aborted due to asset unload or invalid device.
     *
     * @param {Asset} asset - The asset being loaded.
     * @param {boolean} unloaded - Whether the asset was unloaded during async loading.
     * @returns {boolean} True if loading should be aborted.
     * @private
     */
    private _shouldAbort;
    /** Loads the textures referenced by the SOG manifest; `callback` is invoked when done. */
    loadTextures(url: any, callback: any, asset: any, meta: any): Promise<void>;
    /**
     * @param {object} url - The URL of the resource to load.
     * @param {string} url.load - The URL to use for loading the resource.
     * @param {string} url.original - The original URL useful for identifying the resource type.
     * @param {ResourceHandlerCallback} callback - The callback used when
     * the resource is loaded or an error occurs.
     * @param {Asset} asset - Container asset.
     */
    load(url: {
        load: string;
        original: string;
    }, callback: ResourceHandlerCallback, asset: Asset): void;
}

/**
 * @import { AppBase } from '../app-base.js'
 * @import { ResourceHandlerCallback } from '../handlers/handler.js'
 */
/**
 * Parser for octree-organized gaussian-splat resources.
 *
 * @ignore
 */
declare class GSplatOctreeParser {
    /**
     * @param {AppBase} app - The app instance.
     * @param {number} maxRetries - Maximum amount of retries.
     */
    constructor(app: AppBase, maxRetries: number);
    /** @type {AppBase} */
    app: AppBase;
    /** @type {number} */
    maxRetries: number;
    /**
     * @param {object} url - The URL of the resource to load.
     * @param {string} url.load - The URL to use for loading the resource.
     * @param {string} url.original - The original URL useful for identifying the resource type.
     * @param {ResourceHandlerCallback} callback - The callback used when
     * the resource is loaded or an error occurs.
     * @param {object} asset - Container asset. NOTE(review): sibling parsers declare this as
     * {Asset}; the looser {object} here looks like an upstream JSDoc inconsistency — confirm.
     */
    load(url: {
        load: string;
        original: string;
    }, callback: ResourceHandlerCallback, asset: object): void;
}

/**
 * @import { AppBase } from '../app-base.js'
 */
/**
 * Resource handler used for loading gaussian-splat resources.
 *
 * @ignore
 */
declare class GSplatHandler extends ResourceHandler {
    /**
     * Create a new GSplatHandler instance.
     *
     * @param {AppBase} app - The running {@link AppBase}.
     * @ignore
     */
    constructor(app: AppBase);
    // Parsers selected by file extension via _getParser().
    parsers: {
        ply: PlyParser;
        sog: SogBundleParser;
        json: SogParser;
        octree: GSplatOctreeParser;
    };
    /** Strips query parameters from `url`. */
    _getUrlWithoutParams(url: any): any;
    /** Returns the parser matching the url's file extension. */
    _getParser(url: any): any;
    /** Begins loading the resource at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any, asset: any): void;
    /** Converts raw splat data into a runtime resource. */
    open(url: any, data: any, asset: any): any;
}

/**
 * @import { Mesh } from './mesh.js'
 */
/**
 * A `Render` contains an array of meshes that are referenced by a single hierarchy node in a GLB
 * scene, and are accessible using the {@link ContainerResource#renders} property. A `Render` is
 * the resource of a Render Asset. They are usually created by the GLB loader and not created by
 * hand.
 *
 * @ignore
 */
declare class Render extends EventHandler {
    /**
     * Fired when the meshes are set on the render. The handler is passed an array of
     * {@link Mesh} objects.
     *
     * @event
     * @example
     * render.on('set:meshes', (meshes) => {
     *     console.log(`Render has ${meshes.length} meshes`);
     * });
     */
    static EVENT_SETMESHES: string;
    /**
     * Meshes are reference counted, and this class owns the references and is responsible for
     * releasing the meshes when they are no longer referenced.
     *
     * @type {Array<Mesh|null>|null}
     * @private
     */
    private _meshes;
    /**
     * Sets the meshes that the render contains.
     *
     * @type {Array<Mesh|null>|null}
     */
    set meshes(value: Array<Mesh | null> | null);
    /**
     * Gets the meshes that the render contains.
     *
     * @type {Array<Mesh|null>|null}
     */
    get meshes(): Array<Mesh | null> | null;
    destroy(): void;
    /**
     * Decrement references to meshes. Destroy the ones with zero references.
     */
    decRefMeshes(): void;
    /**
     * Increments ref count on all meshes.
     */
    incRefMeshes(): void;
}

/**
 * Resource handler used for loading {@link Render} resources.
 *
 * @category Graphics
 */
declare class RenderHandler extends ResourceHandler {
    /**
     * Create a new RenderHandler instance.
     *
     * @param {AppBase} app - The running {@link AppBase}.
     * @ignore
     */
    constructor(app: AppBase);
    _registry: AssetRegistry;
    /** Converts raw data into a {@link Render}. */
    open(url: any, data: any): Render;
    /** Post-processing step invoked with the asset and the asset registry. */
    patch(asset: any, registry: any): void;
}

/**
 * Resource handler for loading JavaScript files dynamically.  Two types of JavaScript files can be
 * loaded, PlayCanvas scripts which contain calls to {@link createScript}, or regular JavaScript
 * files, such as third-party libraries.
 *
 * @category Script
 */
declare class ScriptHandler extends ResourceHandler {
    /**
     * Create a new ScriptHandler instance.
     *
     * @param {AppBase} app - The running {@link AppBase}.
     * @ignore
     */
    constructor(app: AppBase);
    _scripts: {};
    _cache: {};
    /** Clears the internal script cache. */
    clearCache(): void;
    /** Begins loading the script at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /** Returns the loaded script resource for `url`. */
    open(url: any, data: any): any;
    /** Post-processing step invoked with the asset and the asset registry. */
    patch(asset: any, assets: any): void;
    // Internal loaders for classic scripts and ES modules respectively.
    _loadScript(url: any, callback: any): void;
    _loadModule(url: any, callback: any): void;
}

/**
 * @import { AppBase } from '../app-base.js'
 */
/**
 * Resource handler used for loading {@link Scene} resources.
 *
 * @category Graphics
 */
declare class SceneHandler extends ResourceHandler {
    /**
     * Create a new SceneHandler instance.
     *
     * @param {AppBase} app - The running {@link AppBase}.
     * @ignore
     */
    constructor(app: AppBase);
    /** Begins loading the scene at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /** Converts raw scene data into a {@link Scene}. */
    open(url: any, data: any): Scene;
}

/**
 * Resource handler used for loading scene settings resources.
 *
 * @ignore
 */
declare class SceneSettingsHandler extends ResourceHandler {
    constructor(app: any);
    /** Begins loading the resource at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /** Returns the scene settings extracted from the raw data. */
    open(url: any, data: any): any;
}

/**
 * Resource handler that loads shader source resources as plain text.
 *
 * @ignore
 */
declare class ShaderHandler extends ResourceHandler {
    constructor(app: any);
    /**
     * TextDecoder for decoding binary data.
     *
     * @type {TextDecoder|null}
     * @private
     */
    private decoder;
    /** Begins loading the resource at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /**
     * Parses raw DataView and returns string.
     *
     * @param {DataView} data - The raw data as a DataView
     * @returns {string} The parsed resource data.
     */
    openBinary(data: DataView): string;
}

/**
 * Resource handler used for loading {@link Sprite} resources.
 *
 * @category Graphics
 */
declare class SpriteHandler extends ResourceHandler {
    /**
     * Create a new SpriteHandler instance.
     *
     * @param {AppBase} app - The running {@link AppBase}.
     * @ignore
     */
    constructor(app: AppBase);
    _assets: AssetRegistry;
    _device: GraphicsDevice;
    /** Begins loading the sprite at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /** Converts raw sprite data into a {@link Sprite}. */
    open(url: any, data: any): Sprite;
    /** Post-processing step invoked with the asset and the asset registry. */
    patch(asset: any, assets: any): void;
    /** Updates the sprite's texture atlas reference from the asset data. */
    _updateAtlas(asset: any): void;
    /** Reacts to changes of the sprite asset's attributes. */
    _onAssetChange(asset: any, attribute: any, value: any, oldValue: any): void;
}

/**
 * Resource handler used for loading {@link Template} resources.
 *
 * @ignore
 */
declare class TemplateHandler extends ResourceHandler {
    constructor(app: any);
    /**
     * TextDecoder for decoding binary data.
     *
     * @type {TextDecoder|null}
     * @private
     */
    private decoder;
    /** Begins loading the resource at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /** Converts raw template data into a {@link Template}. */
    open(url: any, data: any): Template;
    /**
     * Parses raw DataView and returns a Template.
     *
     * @param {DataView} data - The raw data as a DataView
     * @returns {Template} The parsed resource data.
     */
    openBinary(data: DataView): Template;
    /** Post-processing step invoked with the asset and the asset registry. */
    patch(asset: any, registry: any): void;
}

/**
 * Resource handler that loads text resources as plain strings.
 *
 * @ignore
 */
declare class TextHandler extends ResourceHandler {
    constructor(app: any);
    /**
     * TextDecoder for decoding binary data.
     *
     * @type {TextDecoder|null}
     * @private
     */
    private decoder;
    /** Begins loading the resource at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /**
     * Parses raw DataView and returns string.
     *
     * @param {DataView} data - The raw data as a DataView
     * @returns {string} The parsed resource data.
     */
    openBinary(data: DataView): string;
}

/**
 * @import { Asset } from '../../asset/asset.js'
 * @import { GraphicsDevice } from '../../../platform/graphics/graphics-device.js'
 * @import { ResourceHandlerCallback } from '../../../framework/handlers/handler.js'
 * @import { Texture } from '../../../platform/graphics/texture.js'
 */
/**
 * Interface to a texture parser. Implementations of this interface handle the loading and opening
 * of texture assets.
 */
declare class TextureParser {
    /**
     * Load the texture from the remote URL. When loaded (or failed), use the callback to return
     * the raw resource data (or error).
     *
     * @param {object} url - The URL of the resource to load.
     * @param {string} url.load - The URL to use for loading the resource.
     * @param {string} url.original - The original URL useful for identifying the resource type.
     * @param {ResourceHandlerCallback} callback - The callback used when the resource is loaded or
     * an error occurs.
     * @param {Asset} [asset] - Optional asset that is passed by ResourceLoader.
     */
    load(url: {
        load: string;
        original: string;
    }, callback: ResourceHandlerCallback, asset?: Asset): void;
    /**
     * Convert raw resource data into a {@link Texture}.
     *
     * @param {string} url - The URL of the resource to open.
     * @param {*} data - The raw resource data passed by callback from {@link ResourceHandler#load}.
     * @param {GraphicsDevice} device - The graphics device.
     * @returns {Texture} The parsed resource data.
     */
    open(url: string, data: any, device: GraphicsDevice): Texture;
}

/**
 * Parser for browser-supported image formats.
 */
declare class ImgParser extends TextureParser {
    constructor(registry: any, device: any);
    crossOrigin: string;
    maxRetries: number;
    device: any;
    /** Begins loading the image at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any, asset: any): void;
    /** Converts the loaded image into a {@link Texture}. */
    open(url: any, data: any, device: any, textureOptions?: {}): Texture;
    // Internal loaders: HTMLImageElement path, ImageBitmap path, and blob-to-ImageBitmap decode.
    _loadImage(url: any, originalUrl: any, crossOrigin: any, callback: any, asset: any): void;
    _loadImageBitmap(url: any, originalUrl: any, crossOrigin: any, callback: any, asset: any): void;
    _loadImageBitmapFromBlob(blob: any, callback: any): void;
}

/**
 * Legacy texture parser for dds files.
 */
declare class DdsParser extends TextureParser {
    constructor(registry: any);
    maxRetries: number;
    /** Begins loading the dds file at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any, asset: any): void;
    /** Converts raw dds data into a {@link Texture}. */
    open(url: any, data: any, device: any, textureOptions?: {}): Texture;
}

/**
 * Texture parser for ktx files.
 */
declare class KtxParser extends TextureParser {
    constructor(registry: any);
    maxRetries: number;
    /** Begins loading the ktx file at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any, asset: any): void;
    /** Converts raw ktx data into a {@link Texture}. */
    open(url: any, data: any, device: any, textureOptions?: {}): Texture;
    /** Parses the ktx container, returning format, dimensions, mip levels and cubemap flag. */
    parse(data: any): {
        format: any;
        width: number;
        height: number;
        levels: any[][];
        cubemap: boolean;
    };
}

/**
 * Texture parser for ktx2 files.
 */
declare class Ktx2Parser extends TextureParser {
    constructor(registry: any, device: any);
    maxRetries: number;
    device: any;
    /** Begins loading the ktx2 file at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any, asset: any): void;
    /** Converts raw ktx2 data into a {@link Texture}. */
    open(url: any, data: any, device: any, textureOptions?: {}): Texture;
    /** Parses the ktx2 container; `callback` receives the result asynchronously. */
    parse(arraybuffer: any, url: any, callback: any, asset: any): any;
}

/**
 * Parser for basis files.
 */
declare class BasisParser extends TextureParser {
    constructor(registry: any, device: any);
    device: any;
    maxRetries: number;
    /** Begins loading the basis file at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any, asset: any): void;
    /** Converts transcoded basis data into a {@link Texture}. */
    open(url: any, data: any, device: any, textureOptions?: {}): Texture;
}

/**
 * Texture parser for hdr files.
 */
declare class HdrParser extends TextureParser {
    constructor(registry: any);
    maxRetries: number;
    /** Begins loading the hdr file at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any, asset: any): void;
    /** Converts raw hdr data into a {@link Texture}. */
    open(url: any, data: any, device: any, textureOptions?: {}): Texture;
    /** Parses the hdr container, returning dimensions and pixel level data. */
    parse(data: any): {
        width: number;
        height: number;
        levels: Uint8Array<any>[];
    };
    // Internal pixel readers (RLE-capable and flat variants — confirm against implementation).
    _readPixels(readStream: any, width: any, height: any, flipY: any): Uint8Array<any>;
    _readPixelsFlat(readStream: any, width: any, height: any): Uint8Array<any>;
}

/**
 * Resource handler used for loading 2D and 3D {@link Texture} resources.
 *
 * @category Graphics
 */
declare class TextureHandler extends ResourceHandler {
    /**
     * Create a new TextureHandler instance.
     *
     * @param {AppBase} app - The running {@link AppBase}.
     * @ignore
     */
    constructor(app: AppBase);
    _device: GraphicsDevice;
    _assets: AssetRegistry;
    // Fallback parser for browser-supported image formats.
    imgParser: ImgParser;
    // Parsers selected by file extension via _getParser().
    parsers: {
        dds: DdsParser;
        ktx: KtxParser;
        ktx2: Ktx2Parser;
        basis: BasisParser;
        hdr: HdrParser;
    };
    set crossOrigin(value: string);
    get crossOrigin(): string;
    /** Strips query parameters from `url`. */
    _getUrlWithoutParams(url: any): any;
    /** Returns the parser matching the url's file extension. */
    _getParser(url: any): any;
    /** Builds texture creation options (e.g. profiler hint) from the asset. */
    _getTextureOptions(asset: any): {
        profilerHint: number;
    };
    /** Begins loading the texture at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any, asset: any): void;
    /** Converts raw texture data into a runtime texture via the selected parser. */
    open(url: any, data: any, asset: any): any;
    /** Post-processing step invoked with the asset and the asset registry. */
    patch(asset: any, assets: any): void;
}

/**
 * Resource handler used for loading {@link TextureAtlas} resources.
 *
 * @category Graphics
 */
declare class TextureAtlasHandler extends ResourceHandler {
    /**
     * Create a new TextureAtlasHandler instance.
     *
     * @param {AppBase} app - The running {@link AppBase}.
     * @ignore
     */
    constructor(app: AppBase);
    _loader: ResourceLoader;
    /** Begins loading the atlas at `url`; `callback` is invoked on completion or error. */
    load(url: any, callback: any): void;
    /** Converts raw atlas data into a {@link TextureAtlas}. */
    open(url: any, data: any, asset: any): TextureAtlas;
    /** Post-processing step invoked with the asset and the asset registry. */
    patch(asset: any, assets: any): void;
    /** Reacts to changes of the atlas asset's attributes. */
    _onAssetChange(asset: any, attribute: any, value: any): void;
}

/**
 * Merge the contents of two objects into a single object.
 *
 * @param {object} target - The target object of the merge.
 * @param {object} ex - The object that is merged with target.
 * @returns {object} The target object.
 * @example
 * const A = {
 *     a: function () {
 *         console.log('a');
 *     }
 * };
 * const B = {
 *     b: function () {
 *         console.log('b');
 *     }
 * };
 *
 * extend(A, B);
 * A.a();
 * // logs "a"
 * A.b();
 * // logs "b"
 * @ignore
 */
declare function extend(target: object, ex: object): object;
/**
 * The engine revision number. This is the Git hash of the last commit made to the branch
 * from which the engine was built. (The literal below is a placeholder, presumably substituted
 * at build time.)
 */
declare const revision: "$_CURRENT_SDK_REVISION";
/**
 * The engine version number. This is in semantic versioning format (MAJOR.MINOR.PATCH).
 * (The literal below is a placeholder, presumably substituted at build time.)
 */
declare const version: "$_CURRENT_SDK_VERSION";

/**
 * Create a URI object from constituent parts.
 *
 * @param {object} options - Parts of the URI to build.
 * @param {string} [options.scheme] - The URI scheme (e.g. http).
 * @param {string} [options.authority] - The URI authority (e.g. `www.example.com`).
 * @param {string} [options.host] - Combination of scheme and authority (e.g. `http://www.example.com`).
 * @param {string} [options.path] - The URI path (e.g. /users/example).
 * @param {string} [options.hostpath] - Combination of scheme, authority and path (e.g. `http://www.example.com/users/example`).
 * @param {string} [options.query] - The query section, after the ? (e.g. `http://example.com?**key=value&another=123**`).
 * @param {string} [options.fragment] - The fragment section, after the # (e.g. `http://example.com#**fragment/data**`).
 * @returns {string} A URI string.
 * @ignore
 */
declare function createURI(options: {
    scheme?: string;
    authority?: string;
    host?: string;
    path?: string;
    hostpath?: string;
    query?: string;
    fragment?: string;
}): string;
/**
 * A URI object.
 *
 * @ignore
 */
declare class URI {
    /**
     * Create a new URI instance.
     *
     * @param {string} uri - URI string.
     */
    constructor(uri: string);
    /**
     * The scheme. (e.g. http).
     *
     * @type {string}
     */
    scheme: string;
    /**
     * The authority. (e.g. `www.example.com`).
     *
     * @type {string}
     */
    authority: string;
    /**
     * The path. (e.g. /users/example).
     *
     * @type {string}
     */
    path: string;
    /**
     * The query, the section after a ?. (e.g. search=value).
     *
     * @type {string}
     */
    query: string;
    /**
     * The fragment, the section after a #.
     *
     * @type {string}
     */
    fragment: string;
    /**
     * Convert URI back to string.
     *
     * @returns {string} The URI as a string.
     */
    toString(): string;
    /**
     * Returns the query parameters as an Object.
     *
     * @returns {object} The URI's query parameters converted to an Object.
     * @example
     * const s = "http://example.com?a=1&b=2&c=3";
     * const uri = new pc.URI(s);
     * const q = uri.getQuery();
     * console.log(q.a); // logs "1"
     * console.log(q.b); // logs "2"
     * console.log(q.c); // logs "3"
     */
    getQuery(): object;
    /**
     * Set the query section of the URI from a Object.
     *
     * @param {object} params - Key-Value pairs to encode into the query string.
     * @example
     * const s = "http://example.com";
     * const uri = new pc.URI(s);
     * uri.setQuery({
     *     "a": 1,
     *     "b": 2
     * });
     * console.log(uri.toString()); // logs "http://example.com?a=1&b=2"
     */
    setQuery(params: object): void;
}

/**
 * Callback used by {@link Http#get}, {@link Http#post}, {@link Http#put}, {@link Http#del}, and
 * {@link Http#request}.
 */
type HttpResponseCallback = (err: number | string | Error | null, response?: any) => void;
/**
 * The default, shared {@link Http} instance.
 */
declare const http: Http;
/**
 * @import { EventHandler } from '../../core/event-handler.js';
 */
/**
 * @callback HttpResponseCallback
 * Callback used by {@link Http#get}, {@link Http#post}, {@link Http#put}, {@link Http#del}, and
 * {@link Http#request}.
 * @param {number|string|Error|null} err - The error code, message, or exception in the case where
 * the request fails.
 * @param {any} [response] - The response data if no errors were encountered. Format depends on
 * response type: text, Object, ArrayBuffer, XML.
 * @returns {void}
 */
/**
 * Used to send and receive HTTP requests.
 */
declare class Http {
    /**
     * Map of named content types to HTTP Content-Type header values.
     * NOTE(review): the exact MIME strings are defined in the implementation — confirm there.
     */
    static ContentType: {
        AAC: string;
        BASIS: string;
        BIN: string;
        DDS: string;
        FORM_URLENCODED: string;
        GIF: string;
        GLB: string;
        JPEG: string;
        JSON: string;
        MP3: string;
        MP4: string;
        OGG: string;
        OPUS: string;
        PNG: string;
        TEXT: string;
        WAV: string;
        XML: string;
    };
    /**
     * Named response types that can be passed as `options.responseType` to the request methods.
     * NOTE(review): the exact string values are defined in the implementation — confirm there.
     */
    static ResponseType: {
        TEXT: string;
        ARRAY_BUFFER: string;
        BLOB: string;
        DOCUMENT: string;
        JSON: string;
    };
    /**
     * File extensions treated as binary content.
     * NOTE(review): inferred from the name; presumably consulted when guessing a response type
     * from a URL — confirm against the implementation.
     */
    static binaryExtensions: string[];
    /**
     * Base delay used for retry backoff.
     * NOTE(review): units are presumably milliseconds, consistent with options.maxRetryDelay —
     * confirm.
     */
    static retryDelay: number;
    /**
     * Perform an HTTP GET request to the given url with additional options such as headers,
     * retries, credentials, etc.
     *
     * @param {string} url - The URL to make the request to.
     * @param {object} options - Additional options.
     * @param {Object<string, string>} [options.headers] - HTTP headers to add to the request.
     * @param {boolean} [options.async] - Make the request asynchronously. Defaults to true.
     * @param {boolean} [options.cache] - If false, then add a timestamp to the request to prevent caching.
     * @param {boolean} [options.withCredentials] - Send cookies with this request. Defaults to false.
     * @param {string} [options.responseType] - Override the response type.
     * @param {Document|object} [options.postdata] - Data to send in the body of the request.
     * Some content types are handled automatically. If postdata is an XML Document, it is handled. If
     * the Content-Type header is set to 'application/json' then the postdata is JSON stringified.
     * Otherwise, by default, the data is sent as form-urlencoded.
     * @param {boolean} [options.retry] - If true then if the request fails it will be retried with an exponential backoff.
     * @param {number} [options.maxRetries] - If options.retry is true this specifies the maximum number of retries. Defaults to 5.
     * @param {number} [options.maxRetryDelay] - If options.retry is true this specifies the maximum amount of time to wait between retries in milliseconds. Defaults to 5000.
     * @param {EventHandler} [options.progress] - Object to use for firing progress events.
     * @param {HttpResponseCallback} callback - The callback used when the response has returned. Passed (err, data)
     * where data is the response (format depends on response type: text, Object, ArrayBuffer, XML) and
     * err is the error code.
     * @example
     * pc.http.get("http://example.com/", {
     *     "retry": true,
     *     "maxRetries": 5
     * }, (err, response) => {
     *     console.log(response);
     * });
     * @returns {XMLHttpRequest} The request object.
     */
    get(url: string, options: {
        headers?: {
            [x: string]: string;
        };
        async?: boolean;
        cache?: boolean;
        withCredentials?: boolean;
        responseType?: string;
        postdata?: Document | object;
        retry?: boolean;
        maxRetries?: number;
        maxRetryDelay?: number;
        progress?: EventHandler;
    }, callback: HttpResponseCallback): XMLHttpRequest;
    /**
     * Perform an HTTP POST request to the given url with additional options such as headers,
     * retries, credentials, etc.
     *
     * @param {string} url - The URL to make the request to.
     * @param {object} data - Data to send in the body of the request.
     * Some content types are handled automatically. If postdata is an XML Document, it is handled.
     * If the Content-Type header is set to 'application/json' then the postdata is JSON
     * stringified. Otherwise, by default, the data is sent as form-urlencoded.
     * @param {object} options - Additional options.
     * @param {Object<string, string>} [options.headers] - HTTP headers to add to the request.
     * @param {boolean} [options.async] - Make the request asynchronously. Defaults to true.
     * @param {boolean} [options.cache] - If false, then add a timestamp to the request to prevent caching.
     * @param {boolean} [options.withCredentials] - Send cookies with this request. Defaults to false.
     * @param {string} [options.responseType] - Override the response type.
     * @param {boolean} [options.retry] - If true then if the request fails it will be retried with an exponential backoff.
     * @param {number} [options.maxRetries] - If options.retry is true this specifies the maximum
     * number of retries. Defaults to 5.
     * @param {number} [options.maxRetryDelay] - If options.retry is true this specifies the
     * maximum amount of time to wait between retries in milliseconds. Defaults to 5000.
     * @param {HttpResponseCallback} callback - The callback used when the response has returned.
     * Passed (err, data) where data is the response (format depends on response type: text,
     * Object, ArrayBuffer, XML) and err is the error code.
     * @example
     * pc.http.post("http://example.com/", {
     *     "name": "Alex"
     * }, {
     *     "retry": true,
     *     "maxRetries": 5
     * }, (err, response) => {
     *     console.log(response);
     * });
     * @returns {XMLHttpRequest} The request object.
     */
    post(url: string, data: object, options: {
        headers?: {
            [x: string]: string;
        };
        async?: boolean;
        cache?: boolean;
        withCredentials?: boolean;
        responseType?: string;
        retry?: boolean;
        maxRetries?: number;
        maxRetryDelay?: number;
    }, callback: HttpResponseCallback): XMLHttpRequest;
    /**
     * Perform an HTTP PUT request to the given url with additional options such as headers,
     * retries, credentials, etc.
     *
     * @param {string} url - The URL to make the request to.
     * @param {Document|object} data - Data to send in the body of the request. Some content types
     * are handled automatically. If postdata is an XML Document, it is handled. If the
     * Content-Type header is set to 'application/json' then the postdata is JSON stringified.
     * Otherwise, by default, the data is sent as form-urlencoded.
     * @param {object} options - Additional options.
     * @param {Object<string, string>} [options.headers] - HTTP headers to add to the request.
     * @param {boolean} [options.async] - Make the request asynchronously. Defaults to true.
     * @param {boolean} [options.cache] - If false, then add a timestamp to the request to prevent caching.
     * @param {boolean} [options.withCredentials] - Send cookies with this request. Defaults to false.
     * @param {string} [options.responseType] - Override the response type.
     * @param {boolean} [options.retry] - If true then if the request fails it will be retried with
     * an exponential backoff.
     * @param {number} [options.maxRetries] - If options.retry is true this specifies the maximum
     * number of retries. Defaults to 5.
     * @param {number} [options.maxRetryDelay] - If options.retry is true this specifies the
     * maximum amount of time to wait between retries in milliseconds. Defaults to 5000.
     * @param {HttpResponseCallback} callback - The callback used when the response has returned.
     * Passed (err, data) where data is the response (format depends on response type: text,
     * Object, ArrayBuffer, XML) and err is the error code.
     * @example
     * pc.http.put("http://example.com/", {
     *     "name": "Alex"
     * }, {
     *     "retry": true,
     *     "maxRetries": 5
     * }, (err, response) => {
     *     console.log(response);
     * });
     * @returns {XMLHttpRequest} The request object.
     */
    put(url: string, data: Document | object, options: {
        headers?: {
            [x: string]: string;
        };
        async?: boolean;
        cache?: boolean;
        withCredentials?: boolean;
        responseType?: string;
        retry?: boolean;
        maxRetries?: number;
        maxRetryDelay?: number;
    }, callback: HttpResponseCallback): XMLHttpRequest;
    /**
     * Perform an HTTP DELETE request to the given url with additional options such as headers,
     * retries, credentials, etc.
     *
     * @param {string} url - The URL to make the request to.
     * @param {object} options - Additional options.
     * @param {Object<string, string>} [options.headers] - HTTP headers to add to the request.
     * @param {boolean} [options.async] - Make the request asynchronously. Defaults to true.
     * @param {boolean} [options.cache] - If false, then add a timestamp to the request to prevent caching.
     * @param {boolean} [options.withCredentials] - Send cookies with this request. Defaults to false.
     * @param {string} [options.responseType] - Override the response type.
     * @param {Document|object} [options.postdata] - Data to send in the body of the request.
     * Some content types are handled automatically. If postdata is an XML Document, it is handled.
     * If the Content-Type header is set to 'application/json' then the postdata is JSON
     * stringified. Otherwise, by default, the data is sent as form-urlencoded.
     * @param {boolean} [options.retry] - If true then if the request fails it will be retried with
     * an exponential backoff.
     * @param {number} [options.maxRetries] - If options.retry is true this specifies the maximum
     * number of retries. Defaults to 5.
     * @param {number} [options.maxRetryDelay] - If options.retry is true this specifies the
     * maximum amount of time to wait between retries in milliseconds. Defaults to 5000.
     * @param {HttpResponseCallback} callback - The callback used when the response has returned.
     * Passed (err, data) where data is the response (format depends on response type: text,
     * Object, ArrayBuffer, XML) and err is the error code.
     * @example
     * pc.http.del("http://example.com/", {
     *     "retry": true,
     *     "maxRetries": 5
     * }, (err, response) => {
     *     console.log(response);
     * });
     * @returns {XMLHttpRequest} The request object.
     */
    del(url: string, options: {
        headers?: {
            [x: string]: string;
        };
        async?: boolean;
        cache?: boolean;
        withCredentials?: boolean;
        responseType?: string;
        postdata?: Document | object;
        retry?: boolean;
        maxRetries?: number;
        maxRetryDelay?: number;
    }, callback: HttpResponseCallback): XMLHttpRequest;
    /**
     * Make a general purpose HTTP request with additional options such as headers, retries,
     * credentials, etc.
     *
     * @param {string} method - The HTTP method "GET", "POST", "PUT", "DELETE".
     * @param {string} url - The url to make the request to.
     * @param {object} options - Additional options.
     * @param {Object<string, string>} [options.headers] - HTTP headers to add to the request.
     * @param {boolean} [options.async] - Make the request asynchronously. Defaults to true.
     * @param {boolean} [options.cache] - If false, then add a timestamp to the request to prevent caching.
     * @param {boolean} [options.withCredentials] - Send cookies with this request. Defaults to false.
     * @param {boolean} [options.retry] - If true then if the request fails it will be retried with
     * an exponential backoff.
     * @param {number} [options.maxRetries] - If options.retry is true this specifies the maximum
     * number of retries. Defaults to 5.
     * @param {number} [options.maxRetryDelay] - If options.retry is true this specifies the
     * maximum amount of time to wait between retries in milliseconds. Defaults to 5000.
     * @param {string} [options.responseType] - Override the response type.
     * @param {Document|object} [options.postdata] - Data to send in the body of the request.
     * Some content types are handled automatically. If postdata is an XML Document, it is handled.
     * If the Content-Type header is set to 'application/json' then the postdata is JSON
     * stringified. Otherwise, by default, the data is sent as form-urlencoded.
     * @param {HttpResponseCallback} callback - The callback used when the response has returned.
     * Passed (err, data) where data is the response (format depends on response type: text,
     * Object, ArrayBuffer, XML) and err is the error code.
     * @example
     * pc.http.request("get", "http://example.com/", {
     *     "retry": true,
     *     "maxRetries": 5
     * }, (err, response) => {
     *     console.log(response);
     * });
     * @returns {XMLHttpRequest} The request object.
     */
    request(method: string, url: string, options: {
        headers?: {
            [x: string]: string;
        };
        async?: boolean;
        cache?: boolean;
        withCredentials?: boolean;
        retry?: boolean;
        maxRetries?: number;
        maxRetryDelay?: number;
        responseType?: string;
        postdata?: Document | object;
    }, callback: HttpResponseCallback): XMLHttpRequest;
    /** Guesses the response type to use for the given URL. @private */
    _guessResponseType(url: any): string;
    /** Returns true if the given content type denotes binary data. @private */
    _isBinaryContentType(contentType: any): boolean;
    /** Returns true if the given response type denotes binary data. @private */
    _isBinaryResponseType(responseType: any): boolean;
    /** Internal XMLHttpRequest readyState change handler. @private */
    _onReadyStateChange(method: any, url: any, options: any, xhr: any): void;
    /** Internal handler invoked on a successful response. @private */
    _onSuccess(method: any, url: any, options: any, xhr: any): void;
    /** Internal handler invoked on a failed request. @private */
    _onError(method: any, url: any, options: any, xhr: any): void;
}

/**
 * Generates normal information from the specified positions and triangle indices.
 *
 * @param {number[]} positions - An array of 3-dimensional vertex positions (flattened x, y, z
 * triplets).
 * @param {number[]} indices - An array of triangle indices.
 * @returns {number[]} An array of 3-dimensional vertex normals (flattened x, y, z triplets).
 * @example
 * const normals = pc.calculateNormals(positions, indices);
 * @category Graphics
 */
declare function calculateNormals(positions: number[], indices: number[]): number[];
/**
 * Generates tangent information from the specified positions, normals, texture coordinates and
 * triangle indices.
 *
 * @param {number[]} positions - An array of 3-dimensional vertex positions (flattened x, y, z
 * triplets).
 * @param {number[]} normals - An array of 3-dimensional vertex normals.
 * @param {number[]} uvs - An array of 2-dimensional vertex texture coordinates.
 * @param {number[]} indices - An array of triangle indices.
 * @returns {number[]} An array of 3-dimensional vertex tangents.
 * @example
 * const tangents = pc.calculateTangents(positions, normals, uvs, indices);
 * @category Graphics
 */
declare function calculateTangents(positions: number[], normals: number[], uvs: number[], indices: number[]): number[];

/**
 * @import { BindGroupFormat } from './bind-group-format.js'
 * @import { GraphicsDevice } from './graphics-device.js'
 * @import { UniformBufferFormat } from './uniform-buffer-format.js'
 * @import { VertexFormat } from './vertex-format.js'
 */
/**
 * Options to drive shader processing to add support for bind groups and uniform buffers.
 *
 * @ignore
 */
declare class ShaderProcessorOptions {
    /**
     * Constructs shader processing options, used to process the shader for uniform buffer support.
     *
     * @param {UniformBufferFormat} [viewUniformFormat] - Format of the uniform buffer.
     * @param {BindGroupFormat} [viewBindGroupFormat] - Format of the bind group.
     * @param {VertexFormat} [vertexFormat] - Format of the vertex buffer.
     */
    constructor(viewUniformFormat?: UniformBufferFormat, viewBindGroupFormat?: BindGroupFormat, vertexFormat?: VertexFormat);
    /** @type {UniformBufferFormat[]} */
    uniformFormats: UniformBufferFormat[];
    /** @type {BindGroupFormat[]} */
    bindGroupFormats: BindGroupFormat[];
    /** @type {VertexFormat[]} */
    vertexFormat: VertexFormat[];
    /**
     * Check whether a uniform with the specified name exists.
     *
     * @param {string} name - The name of the uniform.
     * @returns {boolean} - Returns true if the uniform exists, false otherwise.
     */
    hasUniform(name: string): boolean;
    /**
     * Check whether a texture uniform with the specified name exists.
     *
     * @param {string} name - The name of the texture uniform.
     * @returns {boolean} - Returns true if the texture uniform exists, false otherwise.
     */
    hasTexture(name: string): boolean;
    /**
     * Returns the vertex element matching the given semantic, if any.
     * NOTE(review): parameter and return are untyped upstream — presumably looks up an element of
     * the vertex format; confirm against the implementation.
     */
    getVertexElement(semantic: any): any;
    /**
     * Generate unique key representing the processing options.
     *
     * @param {GraphicsDevice} device - The device.
     * @returns {string} - Returns the key.
     */
    generateKey(device: GraphicsDevice): string;
}

/**
 * Static utility functions for creating and processing shaders.
 */
declare class ShaderUtils {
    /**
     * Creates a shader. When the active graphics device is WebGL, the provided GLSL vertex and
     * fragment source code is used. For WebGPU, if WGSL vertex and fragment source code is
     * supplied, it is used directly; otherwise, the system automatically translates the provided
     * GLSL code into WGSL. In the case of GLSL shaders, additional blocks are appended to both the
     * vertex and fragment source code to support extended features and maintain compatibility.
     * These additions include the shader version declaration, precision qualifiers, and commonly
     * used extensions, and therefore should be excluded from the user-supplied GLSL source.
     * Note: The shader has access to all registered shader chunks via the `#include` directive.
     * Any provided includes will be applied as overrides on top of those.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {object} options - Object for passing optional arguments.
     * @param {string} options.uniqueName - Unique name for the shader. If a shader with this name
     * already exists, it will be returned instead of a new shader instance.
     * @param {Object<string, string>} options.attributes - Object detailing the mapping of vertex
     * shader attribute names to semantics SEMANTIC_*. This enables the engine to match vertex
     * buffer data to the shader attributes.
     * @param {boolean} [options.useTransformFeedback] - Whether to use transform feedback. Defaults
     * to false. Only supported by WebGL.
     * @param {string} [options.vertexChunk] - The name of the vertex shader chunk to use.
     * @param {string} [options.vertexGLSL] - The vertex shader code in GLSL. Ignored if vertexChunk
     * is provided.
     * @param {string} [options.vertexWGSL] - The vertex shader code in WGSL. Ignored if vertexChunk
     * is provided.
     * @param {string} [options.fragmentChunk] - The name of the fragment shader chunk to use.
     * @param {string} [options.fragmentGLSL] - The fragment shader code in GLSL. Ignored if
     * fragmentChunk is provided.
     * @param {string} [options.fragmentWGSL] - The fragment shader code in WGSL. Ignored if
     * fragmentChunk is provided.
     * @param {Map<string, string>} [options.vertexIncludes] - A map containing key-value pairs of
     * include names and their content. These are used for resolving #include directives in the
     * vertex shader source.
     * @param {Map<string, string>} [options.vertexDefines] - A map containing key-value pairs of
     * define names and their values. These are used for resolving #ifdef style of directives in the
     * vertex code.
     * @param {Map<string, string>} [options.fragmentIncludes] - A map containing key-value pairs
     * of include names and their content. These are used for resolving #include directives in the
     * fragment shader source.
     * @param {Map<string, string>} [options.fragmentDefines] - A map containing key-value pairs of
     * define names and their values. These are used for resolving #ifdef style of directives in the
     * fragment code.
     * @param {string | string[]} [options.fragmentOutputTypes] - Fragment shader output types,
     * which default to vec4. Passing a string will set the output type for all color attachments.
     * Passing an array will set the output type for each color attachment.
     * @returns {Shader} The newly created shader.
     */
    static createShader(device: GraphicsDevice, options: {
        uniqueName: string;
        attributes: {
            [x: string]: string;
        };
        useTransformFeedback?: boolean;
        vertexChunk?: string;
        vertexGLSL?: string;
        vertexWGSL?: string;
        fragmentChunk?: string;
        fragmentGLSL?: string;
        fragmentWGSL?: string;
        vertexIncludes?: Map<string, string>;
        vertexDefines?: Map<string, string>;
        fragmentIncludes?: Map<string, string>;
        fragmentDefines?: Map<string, string>;
        fragmentOutputTypes?: string | string[];
    }): Shader;
    /**
     * Create a map of defines used for shader generation for a material.
     *
     * @param {Material} material - The material to create the shader defines for.
     * @param {ShaderVariantParams} params - The shader variant parameters.
     * @returns {Map<string, string>} The map of shader defines.
     * @ignore
     */
    static getCoreDefines(material: Material, params: ShaderVariantParams): Map<string, string>;
    /**
     * Process shader using shader processing options, utilizing the cache of the ProgramLibrary.
     *
     * @param {Shader} shader - The shader to be processed.
     * @param {ShaderProcessorOptions} processingOptions - The shader processing options.
     * @returns {Shader} The processed shader.
     * @ignore
     */
    static processShader(shader: Shader, processingOptions: ShaderProcessorOptions): Shader;
    /**
     * Add defines required for correct screenDepthPS chunk functionality for the given camera
     * shader parameters.
     *
     * @param {GraphicsDevice} device - The graphics device.
     * @param {CameraShaderParams} cameraShaderParams - The camera shader parameters.
     * @param {*} defines - The collection that the required defines are added to.
     * NOTE(review): untyped upstream; presumably a Map of define names to values, matching the
     * other define parameters in this class — confirm.
     * @ignore
     */
    static addScreenDepthChunkDefines(device: GraphicsDevice, cameraShaderParams: CameraShaderParams, defines: any): void;
}
declare function createShader(device: any, vsName: any, fsName: any, useTransformFeedback?: boolean, shaderDefinitionOptions?: {}): void;
declare function createShaderFromCode(device: any, vsCode: any, fsCode: any, uniqueName: any, attributes: any, useTransformFeedback?: boolean, shaderDefinitionOptions?: {}): Shader;

/**
 * Initialize the Draco mesh decoder.
 *
 * @param {object} [config] - The Draco decoder configuration.
 * @param {string} [config.jsUrl] - URL of glue script.
 * @param {string} [config.wasmUrl] - URL of the wasm module.
 * @param {number} [config.numWorkers] - Number of workers to use for decoding. Defaults to 1.
 * @param {boolean} [config.lazyInit] - Wait for first decode request before initializing workers.
 * Defaults to false. Otherwise initialize workers immediately.
 */
declare function dracoInitialize(config?: {
    jsUrl?: string;
    wasmUrl?: string;
    numWorkers?: number;
    lazyInit?: boolean;
}): void;
/**
 * Enqueue a buffer for decoding.
 *
 * @param {ArrayBuffer} buffer - The draco data to decode.
 * @param {Function} callback - Callback function to receive decoded result.
 * NOTE(review): the callback signature is not declared here — confirm the argument ordering
 * (e.g. (err, result)) against the implementation.
 * @returns {boolean} True if the draco worker was initialized and false otherwise.
 */
declare function dracoDecode(buffer: ArrayBuffer, callback: Function): boolean;

/**
 * Parser for GLB container resources, exposing the load/open/patch resource-handler interface.
 *
 * NOTE(review): members are untyped upstream; descriptions below are inferred from names — confirm
 * against the implementation.
 */
declare class GlbContainerParser {
    constructor(device: any, assets: any, maxRetries: any);
    // Graphics device passed to the constructor.
    _device: any;
    // Asset registry passed to the constructor.
    _assets: any;
    // Fallback material used when a material is unavailable.
    _defaultMaterial: StandardMaterial;
    // Maximum number of load retries, as passed to the constructor.
    maxRetries: any;
    // Strips query parameters from a URL.
    _getUrlWithoutParams(url: any): any;
    // Loads the resource at the given URL, invoking callback on completion.
    load(url: any, callback: any, asset: any): void;
    // Opens (parses) previously loaded data into a resource.
    open(url: any, data: any, asset: any): any;
    // Patches the asset after opening.
    patch(asset: any, assets: any): void;
}

/**
 * @import { AppBase } from '../app-base.js'
 * @import { Asset } from '../asset/asset.js'
 * @import { Entity } from '../entity.js'
 * @import { MeshInstance } from '../../scene/mesh-instance.js'
 * @import { ResourceHandlerCallback } from './handler.js'
 */
/**
 * Container for a list of animations, textures, materials, renders and a model.
 *
 * @property {Asset[]} renders An array of the Render assets.
 * @property {Asset[]} materials An array of {@link Material} and/or {@link StandardMaterial} assets.
 * @property {Asset[]} textures An array of the {@link Texture} assets.
 * @property {Asset[]} animations An array of the {@link Animation} assets.
 * @interface
 * @category Graphics
 */
declare class ContainerResource {
    /**
     * Instantiates an entity with a model component.
     *
     * @param {object} [options] - The initialization data for the model component type
     * {@link ModelComponent}.
     * @returns {Entity} A single entity with a model component. Model component internally
     * contains a hierarchy based on {@link GraphNode}.
     * @example
     * // load a glb file and instantiate an entity with a model component based on it
     * app.assets.loadFromUrl("statue.glb", "container", (err, asset) => {
     *     const entity = asset.resource.instantiateModelEntity({
     *         castShadows: true
     *     });
     *     app.root.addChild(entity);
     * });
     */
    instantiateModelEntity(options?: object): Entity;
    /**
     * Instantiates an entity with a render component.
     *
     * @param {object} [options] - The initialization data for the render component type
     * {@link RenderComponent}.
     * @returns {Entity} A hierarchy of entities with render components on entities containing
     * renderable geometry.
     * @example
     * // load a glb file and instantiate an entity with a render component based on it
     * app.assets.loadFromUrl("statue.glb", "container", (err, asset) => {
     *     const entity = asset.resource.instantiateRenderEntity({
     *         castShadows: true
     *     });
     *     app.root.addChild(entity);
     *
     *     // find all render components containing mesh instances, and change blend mode on their materials
     *     const renders = entity.findComponents("render");
     *     renders.forEach((render) => {
     *         render.meshInstances.forEach((meshInstance) => {
     *             meshInstance.material.blendType = pc.BLEND_MULTIPLICATIVE;
     *             meshInstance.material.update();
     *         });
     *     });
     * });
     */
    instantiateRenderEntity(options?: object): Entity;
    /**
     * Queries the list of available material variants.
     *
     * @returns {string[]} An array of variant names.
     */
    getMaterialVariants(): string[];
    /**
     * Applies a material variant to an entity hierarchy.
     *
     * @param {Entity} entity - The entity root to which material variants will be applied.
     * @param {string} [name] - The name of the variant, as queried from getMaterialVariants. If
     * null, the variant will be reset to the default.
     * @example
     * // load a glb file and instantiate an entity with a render component based on it
     * app.assets.loadFromUrl("statue.glb", "container", (err, asset) => {
     *     const entity = asset.resource.instantiateRenderEntity({
     *         castShadows: true
     *     });
     *     app.root.addChild(entity);
     *     const materialVariants = asset.resource.getMaterialVariants();
     *     asset.resource.applyMaterialVariant(entity, materialVariants[0]);
     * });
     */
    applyMaterialVariant(entity: Entity, name?: string): void;
    /**
     * Applies a material variant to a set of mesh instances. Compared to
     * {@link ContainerResource#applyMaterialVariant}, this method allows for setting the variant
     * on a specific set of mesh instances instead of the whole entity.
     *
     * @param {MeshInstance[]} instances - An array of mesh instances.
     * @param {string} [name] - The name of the variant, as queried by getMaterialVariants. If null,
     * the variant will be reset to the default.
     * @example
     * // load a glb file and instantiate an entity with a render component based on it
     * app.assets.loadFromUrl("statue.glb", "container", (err, asset) => {
     *     const entity = asset.resource.instantiateRenderEntity({
     *         castShadows: true
     *     });
     *     app.root.addChild(entity);
     *     const materialVariants = asset.resource.getMaterialVariants();
     *     const renders = entity.findComponents("render");
     *     for (let i = 0; i < renders.length; i++) {
     *         const renderComponent = renders[i];
     *         asset.resource.applyMaterialVariantInstances(renderComponent.meshInstances, materialVariants[0]);
     *     }
     * });
     */
    applyMaterialVariantInstances(instances: MeshInstance[], name?: string): void;
}
/**
 * Loads files that contain multiple resources. For example glTF files can contain textures, models
 * and animations.
 *
 * For glTF files, the asset options object can be used to pass load time callbacks for handling
 * the various resources at different stages of loading. The table below lists the resource types
 * and the corresponding supported process functions.
 *
 * | resource   | preprocess | process | processAsync | postprocess |
 * | ---------- | :--------: | :-----: | :----------: | :---------: |
 * | global     |      √     |         |              |      √      |
 * | node       |      √     |    √    |              |      √      |
 * | light      |      √     |    √    |              |      √      |
 * | camera     |      √     |    √    |              |      √      |
 * | animation  |      √     |         |              |      √      |
 * | material   |      √     |    √    |              |      √      |
 * | image      |      √     |         |      √       |      √      |
 * | texture    |      √     |         |      √       |      √      |
 * | buffer     |      √     |         |      √       |      √      |
 * | bufferView |      √     |         |      √       |      √      |
 *
 * Additional options that can be passed for glTF files:
 * [options.morphPreserveData] - When true, the morph target keeps its data passed using the options,
 * allowing the clone operation.
 * [options.morphPreferHighPrecision] - When true, high precision storage for morph targets should
 * be preferred. This is faster to create and allows higher precision, but takes more memory and
 * might be slower to render. Defaults to false.
 * [options.skipMeshes] - When true, the meshes from the container are not created. This can be
 * useful if you only need access to textures or animations and similar.
 *
 * For example, to receive a texture preprocess callback:
 *
 * ```javascript
 * const containerAsset = new pc.Asset(filename, 'container', { url: url, filename: filename }, null, {
 *     texture: {
 *         preprocess: (gltfTexture) => {
 *             console.log("texture preprocess");
 *         }
 *     }
 * });
 * ```
 *
 * @category Graphics
 */
declare class ContainerHandler extends ResourceHandler {
    /**
     * Create a new ContainerResource instance.
     *
     * @param {AppBase} app - The running {@link AppBase}.
     * @ignore
     */
    constructor(app: AppBase);
    /**
     * The parser used for GLB container files.
     *
     * @type {GlbContainerParser}
     */
    glbContainerParser: GlbContainerParser;
    // Registry of additional parsers. NOTE(review): keying scheme is not visible here —
    // presumably keyed by file extension or format; confirm against _getParser.
    parsers: {};
    // Maximum number of load retries. NOTE(review): presumably forwarded to the parsers,
    // which expose the same property — confirm.
    set maxRetries(value: any);
    get maxRetries(): any;
    /**
     * @param {string} url - The resource URL.
     * @returns {string} The URL with query parameters removed.
     * @private
     */
    private _getUrlWithoutParams;
    /**
     * @param {string} url - The resource URL.
     * @returns {*} A suitable parser to parse the resource.
     * @private
     */
    private _getParser;
}

/**
 * Create and register a new {@link ScriptType}. It returns a new class type (constructor
 * function), which is auto-registered to {@link ScriptRegistry} using its name. This is the main
 * interface for creating Script Types, used to define custom logic in JavaScript that drives
 * interaction for entities.
 *
 * @param {string} name - Unique Name of a Script Type. If a Script Type with the same name has
 * already been registered and the new one has a `swap` method defined in its prototype, then it
 * will perform hot swapping of existing Script Instances on entities using this new Script Type.
 * Note: There is a reserved list of names that cannot be used, such as list below as well as some
 * starting from `_` (underscore): system, entity, create, destroy, swap, move, scripts, onEnable,
 * onDisable, onPostStateChange, has, on, off, fire, once, hasEvent, worker.
 * @param {AppBase} [app] - Optional application handler, to choose which {@link ScriptRegistry}
 * to add a script to. By default it will use `Application.getApplication()` to get current
 * {@link AppBase}.
 * @returns {typeof ScriptType|null} A class type (constructor function) that inherits {@link ScriptType},
 * which the developer is meant to further extend by adding attributes and prototype methods.
 * Returns null if there was an error.
 * @example
 * const Turning = pc.createScript('turn');
 *
 * // define 'speed' attribute that is available in Editor UI
 * Turning.attributes.add('speed', {
 *     type: 'number',
 *     default: 180,
 *     placeholder: 'deg/s'
 * });
 *
 * // runs every tick
 * Turning.prototype.update = function (dt) {
 *     this.entity.rotate(0, this.speed * dt, 0);
 * };
 * @category Script
 */
declare function createScript(name: string, app?: AppBase): typeof ScriptType | null;
// Declaration merging: exposes `reservedAttributes` as a property of the createScript function
// (i.e. `createScript.reservedAttributes`).
declare namespace createScript {
    export { reservedAttributes };
}
/**
 * Register an existing class type as a Script Type to {@link ScriptRegistry}. Useful when defining
 * an ES6 script class that extends {@link ScriptType} (see example).
 *
 * @param {typeof ScriptType} script - The existing class type (constructor function) to be
 * registered as a Script Type. Class must extend {@link ScriptType} (see example). Please note: A
 * class created using {@link createScript} is auto-registered, and should therefore not be passed
 * into {@link registerScript} (which would result in swapping out all related script instances).
 * @param {string} [name] - Optional unique name of the Script Type. By default it will use the
 * same name as the existing class. If a Script Type with the same name has already been registered
 * and the new one has a `swap` method defined in its prototype, then it will perform hot swapping
 * of existing Script Instances on entities using this new Script Type. Note: There is a reserved
 * list of names that cannot be used, such as list below as well as some starting from `_`
 * (underscore): system, entity, create, destroy, swap, move, scripts, onEnable, onDisable,
 * onPostStateChange, has, on, off, fire, once, hasEvent.
 * @param {AppBase} [app] - Optional application handler, to choose which {@link ScriptRegistry}
 * to register the script type to. By default it will use `Application.getApplication()` to get
 * current {@link AppBase}.
 * @example
 * // define an ES6 script class
 * class PlayerController extends pc.ScriptType {
 *
 *     initialize() {
 *         // called once on initialize
 *     }
 *
 *     update(dt) {
 *         // called each tick
 *     }
 * }
 *
 * // register the class as a script
 * pc.registerScript(PlayerController);
 *
 * // declare script attributes (Must be after pc.registerScript())
 * PlayerController.attributes.add('attribute1', {type: 'number'});
 * @category Script
 */
declare function registerScript(script: typeof ScriptType, name?: string, app?: AppBase): void;
/**
 * Returns the set of reserved script names that cannot be used when creating or registering
 * Script Types (e.g. 'system', 'entity', 'create', 'destroy').
 *
 * @returns {Set<string>} The set of reserved script names.
 */
declare function getReservedScriptNames(): Set<string>;

/**
 * Attribute names that are reserved on scripts and cannot be declared via script attributes.
 * NOTE(review): declared as `{}` in these generated typings — the actual key/value shape is not
 * visible here; verify against the implementation.
 */
declare const reservedAttributes: {};

export { ABSOLUTE_URL, ACTION_GAMEPAD, ACTION_KEYBOARD, ACTION_MOUSE, ADDRESS_CLAMP_TO_EDGE, ADDRESS_MIRRORED_REPEAT, ADDRESS_REPEAT, AMBIENTSRC_AMBIENTSH, AMBIENTSRC_CONSTANT, AMBIENTSRC_ENVALATLAS, ANIM_BLEND_1D, ANIM_BLEND_2D_CARTESIAN, ANIM_BLEND_2D_DIRECTIONAL, ANIM_BLEND_DIRECT, ANIM_CONTROL_STATES, ANIM_EQUAL_TO, ANIM_GREATER_THAN, ANIM_GREATER_THAN_EQUAL_TO, ANIM_INTERRUPTION_NEXT, ANIM_INTERRUPTION_NEXT_PREV, ANIM_INTERRUPTION_NONE, ANIM_INTERRUPTION_PREV, ANIM_INTERRUPTION_PREV_NEXT, ANIM_LAYER_ADDITIVE, ANIM_LAYER_OVERWRITE, ANIM_LESS_THAN, ANIM_LESS_THAN_EQUAL_TO, ANIM_NOT_EQUAL_TO, ANIM_PARAMETER_BOOLEAN, ANIM_PARAMETER_FLOAT, ANIM_PARAMETER_INTEGER, ANIM_PARAMETER_TRIGGER, ANIM_STATE_ANY, ANIM_STATE_END, ANIM_STATE_START, ASPECT_AUTO, ASPECT_MANUAL, ASSET_ANIMATION, ASSET_AUDIO, ASSET_CONTAINER, ASSET_CSS, ASSET_CUBEMAP, ASSET_HTML, ASSET_IMAGE, ASSET_JSON, ASSET_MATERIAL, ASSET_MODEL, ASSET_SCRIPT, ASSET_SHADER, ASSET_TEXT, ASSET_TEXTURE, ASSET_TEXTUREATLAS, AXIS_KEY, AXIS_MOUSE_X, AXIS_MOUSE_Y, AXIS_PAD_L_X, AXIS_PAD_L_Y, AXIS_PAD_R_X, AXIS_PAD_R_Y, AnimBinder, AnimClip, AnimClipHandler, AnimComponent, AnimComponentLayer, AnimComponentSystem, AnimController, AnimCurve, AnimData, AnimEvaluator, AnimEvents, AnimSnapshot, AnimStateGraph, AnimStateGraphHandler, AnimTarget, AnimTrack, Animation, AnimationComponent, AnimationComponentSystem, AnimationHandler, AnimationKey, AnimationNode, AppBase, AppOptions, Application, Asset, AssetListLoader, AssetReference, AssetRegistry, AudioHandler, AudioListenerComponent, AudioListenerComponentSystem, BAKE_COLOR, BAKE_COLORDIR, BINDGROUP_MESH, BINDGROUP_MESH_UB, BINDGROUP_VIEW, BLENDEQUATION_ADD, BLENDEQUATION_MAX, BLENDEQUATION_MIN, BLENDEQUATION_REVERSE_SUBTRACT, BLENDEQUATION_SUBTRACT, BLENDMODE_CONSTANT, BLENDMODE_CONSTANT_ALPHA, BLENDMODE_CONSTANT_COLOR, BLENDMODE_DST_ALPHA, BLENDMODE_DST_COLOR, BLENDMODE_ONE, BLENDMODE_ONE_MINUS_CONSTANT, BLENDMODE_ONE_MINUS_CONSTANT_ALPHA, BLENDMODE_ONE_MINUS_CONSTANT_COLOR, 
BLENDMODE_ONE_MINUS_DST_ALPHA, BLENDMODE_ONE_MINUS_DST_COLOR, BLENDMODE_ONE_MINUS_SRC_ALPHA, BLENDMODE_ONE_MINUS_SRC_COLOR, BLENDMODE_SRC_ALPHA, BLENDMODE_SRC_ALPHA_SATURATE, BLENDMODE_SRC_COLOR, BLENDMODE_ZERO, BLEND_ADDITIVE, BLEND_ADDITIVEALPHA, BLEND_MAX, BLEND_MIN, BLEND_MULTIPLICATIVE, BLEND_MULTIPLICATIVE2X, BLEND_NONE, BLEND_NORMAL, BLEND_PREMULTIPLIED, BLEND_SCREEN, BLEND_SUBTRACTIVE, BLUR_BOX, BLUR_GAUSSIAN, BODYFLAG_KINEMATIC_OBJECT, BODYFLAG_NORESPONSE_OBJECT, BODYFLAG_STATIC_OBJECT, BODYGROUP_DEFAULT, BODYGROUP_DYNAMIC, BODYGROUP_ENGINE_1, BODYGROUP_ENGINE_2, BODYGROUP_ENGINE_3, BODYGROUP_KINEMATIC, BODYGROUP_NONE, BODYGROUP_STATIC, BODYGROUP_TRIGGER, BODYGROUP_USER_1, BODYGROUP_USER_2, BODYGROUP_USER_3, BODYGROUP_USER_4, BODYGROUP_USER_5, BODYGROUP_USER_6, BODYGROUP_USER_7, BODYGROUP_USER_8, BODYMASK_ALL, BODYMASK_NONE, BODYMASK_NOT_STATIC, BODYMASK_NOT_STATIC_KINEMATIC, BODYMASK_STATIC, BODYSTATE_ACTIVE_TAG, BODYSTATE_DISABLE_DEACTIVATION, BODYSTATE_DISABLE_SIMULATION, BODYSTATE_ISLAND_SLEEPING, BODYSTATE_WANTS_DEACTIVATION, BODYTYPE_DYNAMIC, BODYTYPE_KINEMATIC, BODYTYPE_STATIC, BUFFERUSAGE_COPY_DST, BUFFERUSAGE_COPY_SRC, BUFFERUSAGE_INDEX, BUFFERUSAGE_INDIRECT, BUFFERUSAGE_READ, BUFFERUSAGE_STORAGE, BUFFERUSAGE_UNIFORM, BUFFERUSAGE_VERTEX, BUFFERUSAGE_WRITE, BUFFER_DYNAMIC, BUFFER_GPUDYNAMIC, BUFFER_STATIC, BUFFER_STREAM, BUTTON_TRANSITION_MODE_SPRITE_CHANGE, BUTTON_TRANSITION_MODE_TINT, Batch, BatchGroup, BatchManager, BinaryHandler, BindGroupFormat, BindStorageBufferFormat, BindStorageTextureFormat, BindTextureFormat, BindUniformBufferFormat, BlendState, BoundingBox, BoundingSphere, BoxGeometry, Bundle, BundleHandler, BundleRegistry, ButtonComponent, ButtonComponentSystem, CHUNKAPI_1_51, CHUNKAPI_1_55, CHUNKAPI_1_56, CHUNKAPI_1_57, CHUNKAPI_1_58, CHUNKAPI_1_60, CHUNKAPI_1_62, CHUNKAPI_1_65, CHUNKAPI_1_70, CHUNKAPI_2_1, CHUNKAPI_2_3, CHUNKAPI_2_5, CHUNKAPI_2_6, CHUNKAPI_2_7, CHUNKAPI_2_8, CLEARFLAG_COLOR, CLEARFLAG_DEPTH, CLEARFLAG_STENCIL, 
CUBEFACE_NEGX, CUBEFACE_NEGY, CUBEFACE_NEGZ, CUBEFACE_POSX, CUBEFACE_POSY, CUBEFACE_POSZ, CUBEPROJ_BOX, CUBEPROJ_NONE, CULLFACE_BACK, CULLFACE_FRONT, CULLFACE_FRONTANDBACK, CULLFACE_NONE, CURVE_LINEAR, CURVE_SMOOTHSTEP, CURVE_SPLINE, CURVE_STEP, Camera, CameraComponent, CameraComponentSystem, CameraFrame, CameraFrameOptions, CanvasFont, CapsuleGeometry, ChunkUtils, CollisionComponent, CollisionComponentSystem, Color, Component, ComponentSystem, ComponentSystemRegistry, Compute, ComputeRadixSort, ConeGeometry, ContactPoint, ContactResult, ContainerHandler, ContainerResource, Controller, CssHandler, CubemapHandler, Curve, CurveSet, CylinderGeometry, DETAILMODE_ADD, DETAILMODE_MAX, DETAILMODE_MIN, DETAILMODE_MUL, DETAILMODE_OVERLAY, DETAILMODE_SCREEN, DEVICETYPE_NULL, DEVICETYPE_WEBGL2, DEVICETYPE_WEBGPU, DEVICETYPE_WEBGPU_BARE, DISPLAYFORMAT_HDR, DISPLAYFORMAT_LDR, DISPLAYFORMAT_LDR_SRGB, DISTANCE_EXPONENTIAL, DISTANCE_INVERSE, DISTANCE_LINEAR, DITHER_BAYER8, DITHER_BLUENOISE, DITHER_IGNNOISE, DITHER_NONE, DefaultAnimBinder, DepthState, DomeGeometry, DrawCommands, DualGestureSource, ELEMENTTYPE_GROUP, ELEMENTTYPE_IMAGE, ELEMENTTYPE_TEXT, EMITTERSHAPE_BOX, EMITTERSHAPE_SPHERE, EVENT_CULL_END, EVENT_GAMEPADCONNECTED, EVENT_GAMEPADDISCONNECTED, EVENT_KEYDOWN, EVENT_KEYUP, EVENT_MOUSEDOWN, EVENT_MOUSEMOVE, EVENT_MOUSEUP, EVENT_MOUSEWHEEL, EVENT_POSTCULL, EVENT_POSTRENDER, EVENT_POSTRENDER_LAYER, EVENT_PRECULL, EVENT_PRERENDER, EVENT_PRERENDER_LAYER, EVENT_SELECT, EVENT_SELECTEND, EVENT_SELECTSTART, EVENT_TOUCHCANCEL, EVENT_TOUCHEND, EVENT_TOUCHMOVE, EVENT_TOUCHSTART, ElementComponent, ElementComponentSystem, ElementDragHelper, ElementInput, ElementInputEvent, ElementMouseEvent, ElementSelectEvent, ElementTouchEvent, Entity, EnvLighting, EventHandle, EventHandler, FILLMODE_FILL_WINDOW, FILLMODE_KEEP_ASPECT, FILLMODE_NONE, FILTER_LINEAR, FILTER_LINEAR_MIPMAP_LINEAR, FILTER_LINEAR_MIPMAP_NEAREST, FILTER_NEAREST, FILTER_NEAREST_MIPMAP_LINEAR, FILTER_NEAREST_MIPMAP_NEAREST, 
FITMODE_CONTAIN, FITMODE_COVER, FITMODE_STRETCH, FITTING_BOTH, FITTING_NONE, FITTING_SHRINK, FITTING_STRETCH, FOG_EXP, FOG_EXP2, FOG_LINEAR, FOG_NONE, FONT_BITMAP, FONT_MSDF, FRESNEL_NONE, FRESNEL_SCHLICK, FRONTFACE_CCW, FRONTFACE_CW, FUNC_ALWAYS, FUNC_EQUAL, FUNC_GREATER, FUNC_GREATEREQUAL, FUNC_LESS, FUNC_LESSEQUAL, FUNC_NEVER, FUNC_NOTEQUAL, FloatPacking, FlyController, FocusController, FogParams, FolderHandler, Font, FontHandler, ForwardRenderer, FramePass, FramePassBloom, FramePassCameraFrame, FramePassColorGrab, FramePassDof, FramePassRadixSort, Frustum, GAMMA_NONE, GAMMA_SRGB, GIZMOAXIS_FACE, GIZMOAXIS_X, GIZMOAXIS_XY, GIZMOAXIS_XYZ, GIZMOAXIS_XZ, GIZMOAXIS_Y, GIZMOAXIS_YZ, GIZMOAXIS_Z, GIZMOSPACE_LOCAL, GIZMOSPACE_WORLD, GSPLATDATA_COMPACT, GSPLATDATA_LARGE, GSPLAT_DEBUG_HEATMAP, GSPLAT_DEBUG_LOD, GSPLAT_DEBUG_NONE, GSPLAT_DEBUG_SH_UPDATE, GSPLAT_FORWARD, GSPLAT_RENDERER_AUTO, GSPLAT_RENDERER_COMPUTE, GSPLAT_RENDERER_RASTER_CPU_SORT, GSPLAT_RENDERER_RASTER_GPU_SORT, GSPLAT_SHADOW, GSPLAT_STREAM_INSTANCE, GSPLAT_STREAM_RESOURCE, GSplatComponent, GSplatComponentSystem, GSplatContainer, GSplatData, GSplatFormat, GSplatHandler, GSplatInstance, GSplatProcessor, GSplatResource, GSplatResourceBase, GSplatSogData, GSplatSogResource, GamePads, GamepadSource, Geometry, Gizmo, GltfExporter, GraphNode, GraphicsDevice, HierarchyHandler, HtmlHandler, Http, I18n, INDEXFORMAT_UINT16, INDEXFORMAT_UINT32, INDEXFORMAT_UINT8, INTERPOLATION_CUBIC, INTERPOLATION_LINEAR, INTERPOLATION_STEP, ImageElement, IndexBuffer, IndexedList, InputConsumer, InputController, InputDelta, InputFrame, InputSource, JointComponent, JointComponentSystem, JsonHandler, JsonStandardMaterialParser, KEY_0, KEY_1, KEY_2, KEY_3, KEY_4, KEY_5, KEY_6, KEY_7, KEY_8, KEY_9, KEY_A, KEY_ADD, KEY_ALT, KEY_B, KEY_BACKSPACE, KEY_BACK_SLASH, KEY_C, KEY_CAPS_LOCK, KEY_CLOSE_BRACKET, KEY_COMMA, KEY_CONTEXT_MENU, KEY_CONTROL, KEY_D, KEY_DECIMAL, KEY_DELETE, KEY_DIVIDE, KEY_DOWN, KEY_E, KEY_END, KEY_ENTER, KEY_EQUAL, 
KEY_ESCAPE, KEY_F, KEY_F1, KEY_F10, KEY_F11, KEY_F12, KEY_F2, KEY_F3, KEY_F4, KEY_F5, KEY_F6, KEY_F7, KEY_F8, KEY_F9, KEY_G, KEY_H, KEY_HOME, KEY_I, KEY_INSERT, KEY_J, KEY_K, KEY_L, KEY_LEFT, KEY_M, KEY_META, KEY_MULTIPLY, KEY_N, KEY_NUMPAD_0, KEY_NUMPAD_1, KEY_NUMPAD_2, KEY_NUMPAD_3, KEY_NUMPAD_4, KEY_NUMPAD_5, KEY_NUMPAD_6, KEY_NUMPAD_7, KEY_NUMPAD_8, KEY_NUMPAD_9, KEY_O, KEY_OPEN_BRACKET, KEY_P, KEY_PAGE_DOWN, KEY_PAGE_UP, KEY_PAUSE, KEY_PERIOD, KEY_PRINT_SCREEN, KEY_Q, KEY_R, KEY_RETURN, KEY_RIGHT, KEY_S, KEY_SEMICOLON, KEY_SEPARATOR, KEY_SHIFT, KEY_SLASH, KEY_SPACE, KEY_SUBTRACT, KEY_T, KEY_TAB, KEY_U, KEY_UP, KEY_V, KEY_W, KEY_WINDOWS, KEY_X, KEY_Y, KEY_Z, Kernel, Key, Keyboard, KeyboardEvent, KeyboardMouseSource, LAYERID_DEPTH, LAYERID_IMMEDIATE, LAYERID_SKYBOX, LAYERID_UI, LAYERID_WORLD, LAYER_GIZMO, LAYER_HUD, LAYER_WORLD, LIGHTFALLOFF_INVERSESQUARED, LIGHTFALLOFF_LINEAR, LIGHTSHAPE_DISK, LIGHTSHAPE_PUNCTUAL, LIGHTSHAPE_RECT, LIGHTSHAPE_SPHERE, LIGHTTYPE_COUNT, LIGHTTYPE_DIRECTIONAL, LIGHTTYPE_OMNI, LIGHTTYPE_POINT, LIGHTTYPE_SPOT, LIGHT_COLOR_DIVIDER, Layer, LayerComposition, LayoutCalculator, LayoutChildComponent, LayoutChildComponentSystem, LayoutGroupComponent, LayoutGroupComponentSystem, Light, LightComponent, LightComponentSystem, LightingParams, Lightmapper, LitMaterial, LitOptions, LitShaderOptions, LocalizedAsset, MASK_AFFECT_DYNAMIC, MASK_AFFECT_LIGHTMAPPED, MASK_BAKE, MOTION_FREE, MOTION_LIMITED, MOTION_LOCKED, MOUSEBUTTON_LEFT, MOUSEBUTTON_MIDDLE, MOUSEBUTTON_NONE, MOUSEBUTTON_RIGHT, Mat3, Mat4, Material, MaterialHandler, Mesh, MeshInstance, MiniStats, Model, ModelComponent, ModelComponentSystem, ModelHandler, Morph, MorphInstance, MorphTarget, Mouse, MouseEvent, MultiTouchSource, Node, NullGraphicsDevice, ORIENTATION_HORIZONTAL, ORIENTATION_VERTICAL, OrbitController, OrientedBox, OutlineRenderer, PAD_1, PAD_2, PAD_3, PAD_4, PAD_DOWN, PAD_FACE_1, PAD_FACE_2, PAD_FACE_3, PAD_FACE_4, PAD_LEFT, PAD_L_SHOULDER_1, PAD_L_SHOULDER_2, 
PAD_L_STICK_BUTTON, PAD_L_STICK_X, PAD_L_STICK_Y, PAD_RIGHT, PAD_R_SHOULDER_1, PAD_R_SHOULDER_2, PAD_R_STICK_BUTTON, PAD_R_STICK_X, PAD_R_STICK_Y, PAD_SELECT, PAD_START, PAD_UP, PAD_VENDOR, PARTICLEMODE_CPU, PARTICLEMODE_GPU, PARTICLEORIENTATION_EMITTER, PARTICLEORIENTATION_SCREEN, PARTICLEORIENTATION_WORLD, PARTICLESORT_DISTANCE, PARTICLESORT_NEWER_FIRST, PARTICLESORT_NONE, PARTICLESORT_OLDER_FIRST, PIXELFORMAT_111110F, PIXELFORMAT_A8, PIXELFORMAT_ASTC_4x4, PIXELFORMAT_ASTC_4x4_SRGB, PIXELFORMAT_ATC_RGB, PIXELFORMAT_ATC_RGBA, PIXELFORMAT_BC6F, PIXELFORMAT_BC6UF, PIXELFORMAT_BC7, PIXELFORMAT_BC7_SRGBA, PIXELFORMAT_BGRA8, PIXELFORMAT_DEPTH, PIXELFORMAT_DEPTH16, PIXELFORMAT_DEPTHSTENCIL, PIXELFORMAT_DXT1, PIXELFORMAT_DXT1_SRGB, PIXELFORMAT_DXT3, PIXELFORMAT_DXT3_SRGBA, PIXELFORMAT_DXT5, PIXELFORMAT_DXT5_SRGBA, PIXELFORMAT_ETC1, PIXELFORMAT_ETC2_RGB, PIXELFORMAT_ETC2_RGBA, PIXELFORMAT_ETC2_SRGB, PIXELFORMAT_ETC2_SRGBA, PIXELFORMAT_L8, PIXELFORMAT_L8_A8, PIXELFORMAT_LA8, PIXELFORMAT_PVRTC_2BPP_RGBA_1, PIXELFORMAT_PVRTC_2BPP_RGB_1, PIXELFORMAT_PVRTC_4BPP_RGBA_1, PIXELFORMAT_PVRTC_4BPP_RGB_1, PIXELFORMAT_R16F, PIXELFORMAT_R16I, PIXELFORMAT_R16U, PIXELFORMAT_R32F, PIXELFORMAT_R32I, PIXELFORMAT_R32U, PIXELFORMAT_R4_G4_B4_A4, PIXELFORMAT_R5_G5_B5_A1, PIXELFORMAT_R5_G6_B5, PIXELFORMAT_R8, PIXELFORMAT_R8I, PIXELFORMAT_R8U, PIXELFORMAT_R8_G8_B8, PIXELFORMAT_R8_G8_B8_A8, PIXELFORMAT_RG16F, PIXELFORMAT_RG16I, PIXELFORMAT_RG16U, PIXELFORMAT_RG32F, PIXELFORMAT_RG32I, PIXELFORMAT_RG32U, PIXELFORMAT_RG8, PIXELFORMAT_RG8I, PIXELFORMAT_RG8S, PIXELFORMAT_RG8U, PIXELFORMAT_RGB10A2, PIXELFORMAT_RGB10A2U, PIXELFORMAT_RGB16F, PIXELFORMAT_RGB32F, PIXELFORMAT_RGB565, PIXELFORMAT_RGB8, PIXELFORMAT_RGB9E5, PIXELFORMAT_RGBA16F, PIXELFORMAT_RGBA16I, PIXELFORMAT_RGBA16U, PIXELFORMAT_RGBA32F, PIXELFORMAT_RGBA32I, PIXELFORMAT_RGBA32U, PIXELFORMAT_RGBA4, PIXELFORMAT_RGBA5551, PIXELFORMAT_RGBA8, PIXELFORMAT_RGBA8I, PIXELFORMAT_RGBA8S, PIXELFORMAT_RGBA8U, PIXELFORMAT_SBGRA8, PIXELFORMAT_SRGB, 
PIXELFORMAT_SRGB8, PIXELFORMAT_SRGBA, PIXELFORMAT_SRGBA8, PRIMITIVE_LINELOOP, PRIMITIVE_LINES, PRIMITIVE_LINESTRIP, PRIMITIVE_POINTS, PRIMITIVE_TRIANGLES, PRIMITIVE_TRIFAN, PRIMITIVE_TRISTRIP, PROJECTION_ORTHOGRAPHIC, PROJECTION_PERSPECTIVE, ParticleEmitter, ParticleSystemComponent, ParticleSystemComponentSystem, Picker, Plane, PlaneGeometry, Pose, PostEffect, PostEffectQueue, ProgramLibrary, QuadRender, Quat, REFLECTIONSRC_CUBEMAP, REFLECTIONSRC_ENVATLAS, REFLECTIONSRC_ENVATLASHQ, REFLECTIONSRC_NONE, REFLECTIONSRC_SPHEREMAP, RENDERSTYLE_POINTS, RENDERSTYLE_SOLID, RENDERSTYLE_WIREFRAME, RESOLUTION_AUTO, RESOLUTION_FIXED, RIGIDBODY_ACTIVE_TAG, RIGIDBODY_CF_KINEMATIC_OBJECT, RIGIDBODY_CF_NORESPONSE_OBJECT, RIGIDBODY_CF_STATIC_OBJECT, RIGIDBODY_DISABLE_DEACTIVATION, RIGIDBODY_DISABLE_SIMULATION, RIGIDBODY_ISLAND_SLEEPING, RIGIDBODY_TYPE_DYNAMIC, RIGIDBODY_TYPE_KINEMATIC, RIGIDBODY_TYPE_STATIC, RIGIDBODY_WANTS_DEACTIVATION, Ray, RaycastResult, ReadStream, RenderComponent, RenderComponentSystem, RenderHandler, RenderPass, RenderPassCompose, RenderPassDepthAwareBlur, RenderPassDownsample, RenderPassForward, RenderPassPicker, RenderPassPrepass, RenderPassShaderQuad, RenderPassSsao, RenderPassTAA, RenderPassUpsample, RenderTarget, ResourceHandler, ResourceLoader, RigidBodyComponent, RigidBodyComponentSystem, RotateGizmo, SAMPLETYPE_DEPTH, SAMPLETYPE_FLOAT, SAMPLETYPE_INT, SAMPLETYPE_UINT, SAMPLETYPE_UNFILTERABLE_FLOAT, SCALEMODE_BLEND, SCALEMODE_NONE, SCROLLBAR_VISIBILITY_SHOW_ALWAYS, SCROLLBAR_VISIBILITY_SHOW_WHEN_REQUIRED, SCROLL_MODE_BOUNCE, SCROLL_MODE_CLAMP, SCROLL_MODE_INFINITE, SEMANTIC_ATTR0, SEMANTIC_ATTR1, SEMANTIC_ATTR10, SEMANTIC_ATTR11, SEMANTIC_ATTR12, SEMANTIC_ATTR13, SEMANTIC_ATTR14, SEMANTIC_ATTR15, SEMANTIC_ATTR2, SEMANTIC_ATTR3, SEMANTIC_ATTR4, SEMANTIC_ATTR5, SEMANTIC_ATTR6, SEMANTIC_ATTR7, SEMANTIC_ATTR8, SEMANTIC_ATTR9, SEMANTIC_BLENDINDICES, SEMANTIC_BLENDWEIGHT, SEMANTIC_COLOR, SEMANTIC_NORMAL, SEMANTIC_POSITION, SEMANTIC_TANGENT, SEMANTIC_TEXCOORD, 
SEMANTIC_TEXCOORD0, SEMANTIC_TEXCOORD1, SEMANTIC_TEXCOORD2, SEMANTIC_TEXCOORD3, SEMANTIC_TEXCOORD4, SEMANTIC_TEXCOORD5, SEMANTIC_TEXCOORD6, SEMANTIC_TEXCOORD7, SHADERDEF_BATCH, SHADERDEF_DIRLM, SHADERDEF_INSTANCING, SHADERDEF_LM, SHADERDEF_LMAMBIENT, SHADERDEF_MORPH_NORMAL, SHADERDEF_MORPH_POSITION, SHADERDEF_MORPH_TEXTURE_BASED_INT, SHADERDEF_NOSHADOW, SHADERDEF_SCREENSPACE, SHADERDEF_SKIN, SHADERDEF_TANGENTS, SHADERDEF_UV0, SHADERDEF_UV1, SHADERDEF_VCOLOR, SHADERLANGUAGE_GLSL, SHADERLANGUAGE_WGSL, SHADERPASS_ALBEDO, SHADERPASS_AO, SHADERPASS_EMISSION, SHADERPASS_FORWARD, SHADERPASS_GLOSS, SHADERPASS_LIGHTING, SHADERPASS_METALNESS, SHADERPASS_OPACITY, SHADERPASS_SPECULARITY, SHADERPASS_UV0, SHADERPASS_WORLDNORMAL, SHADERSTAGE_COMPUTE, SHADERSTAGE_FRAGMENT, SHADERSTAGE_VERTEX, SHADERTAG_MATERIAL, SHADER_DEPTH_PICK, SHADER_FORWARD, SHADER_PICK, SHADER_PREPASS, SHADER_SHADOW, SHADOWCAMERA_NAME, SHADOWUPDATE_NONE, SHADOWUPDATE_REALTIME, SHADOWUPDATE_THISFRAME, SHADOW_CASCADE_0, SHADOW_CASCADE_1, SHADOW_CASCADE_2, SHADOW_CASCADE_3, SHADOW_CASCADE_ALL, SHADOW_PCF1, SHADOW_PCF1_16F, SHADOW_PCF1_32F, SHADOW_PCF3, SHADOW_PCF3_16F, SHADOW_PCF3_32F, SHADOW_PCF5, SHADOW_PCF5_16F, SHADOW_PCF5_32F, SHADOW_PCSS_32F, SHADOW_VSM16, SHADOW_VSM32, SHADOW_VSM_16F, SHADOW_VSM_32F, SKYTYPE_BOX, SKYTYPE_DOME, SKYTYPE_INFINITE, SORTMODE_BACK2FRONT, SORTMODE_CUSTOM, SORTMODE_FRONT2BACK, SORTMODE_MANUAL, SORTMODE_MATERIALMESH, SORTMODE_NONE, SPECOCC_AO, SPECOCC_GLOSSDEPENDENT, SPECOCC_NONE, SPRITETYPE_ANIMATED, SPRITETYPE_SIMPLE, SPRITE_RENDERMODE_SIMPLE, SPRITE_RENDERMODE_SLICED, SPRITE_RENDERMODE_TILED, SSAOTYPE_COMBINE, SSAOTYPE_LIGHTING, SSAOTYPE_NONE, STENCILOP_DECREMENT, STENCILOP_DECREMENTWRAP, STENCILOP_INCREMENT, STENCILOP_INCREMENTWRAP, STENCILOP_INVERT, STENCILOP_KEEP, STENCILOP_REPLACE, STENCILOP_ZERO, ScaleGizmo, Scene, SceneHandler, SceneRegistry, SceneRegistryItem, SceneSettingsHandler, ScopeId, ScopeSpace, ScreenComponent, ScreenComponentSystem, Script, ScriptAttributes, 
ScriptComponent, ScriptComponentSystem, ScriptHandler, ScriptRegistry, ScriptType, ScrollViewComponent, ScrollViewComponentSystem, ScrollbarComponent, ScrollbarComponentSystem, Shader, ShaderChunks, ShaderHandler, ShaderMaterial, ShaderPass, ShaderUtils, SingleContactResult, SingleGestureSource, Skeleton, Skin, SkinBatchInstance, SkinInstance, Sky, SortedLoopArray, Sound, SoundComponent, SoundComponentSystem, SoundInstance, SoundInstance3d, SoundManager, SoundSlot, SphereGeometry, Sprite, SpriteAnimationClip, SpriteComponent, SpriteComponentSystem, SpriteHandler, StandardMaterial, StandardMaterialOptions, StencilParameters, StorageBuffer, TEXHINT_ASSET, TEXHINT_LIGHTMAP, TEXHINT_NONE, TEXHINT_SHADOWMAP, TEXPROPERTY_ADDRESS_U, TEXPROPERTY_ADDRESS_V, TEXPROPERTY_ADDRESS_W, TEXPROPERTY_ALL, TEXPROPERTY_ANISOTROPY, TEXPROPERTY_COMPARE_FUNC, TEXPROPERTY_COMPARE_ON_READ, TEXPROPERTY_MAG_FILTER, TEXPROPERTY_MIN_FILTER, TEXTUREDIMENSION_1D, TEXTUREDIMENSION_2D, TEXTUREDIMENSION_2D_ARRAY, TEXTUREDIMENSION_3D, TEXTUREDIMENSION_CUBE, TEXTUREDIMENSION_CUBE_ARRAY, TEXTURELOCK_NONE, TEXTURELOCK_READ, TEXTURELOCK_WRITE, TEXTUREPROJECTION_CUBE, TEXTUREPROJECTION_EQUIRECT, TEXTUREPROJECTION_NONE, TEXTUREPROJECTION_OCTAHEDRAL, TEXTURETYPE_DEFAULT, TEXTURETYPE_RGBE, TEXTURETYPE_RGBM, TEXTURETYPE_RGBP, TEXTURETYPE_SWIZZLEGGGR, TONEMAP_ACES, TONEMAP_ACES2, TONEMAP_FILMIC, TONEMAP_HEJL, TONEMAP_LINEAR, TONEMAP_NEUTRAL, TONEMAP_NONE, TRACEID_ASSETS, TRACEID_BINDGROUPFORMAT_ALLOC, TRACEID_BINDGROUP_ALLOC, TRACEID_COMPUTEPIPELINE_ALLOC, TRACEID_ELEMENT, TRACEID_GPU_TIMINGS, TRACEID_OCTREE_RESOURCES, TRACEID_PIPELINELAYOUT_ALLOC, TRACEID_RENDERPIPELINE_ALLOC, TRACEID_RENDER_ACTION, TRACEID_RENDER_FRAME, TRACEID_RENDER_FRAME_TIME, TRACEID_RENDER_PASS, TRACEID_RENDER_PASS_DETAIL, TRACEID_RENDER_QUEUE, TRACEID_RENDER_TARGET_ALLOC, TRACEID_SHADER_ALLOC, TRACEID_SHADER_COMPILE, TRACEID_TEXTURES, TRACEID_TEXTURE_ALLOC, TRACEID_VRAM_IB, TRACEID_VRAM_SB, TRACEID_VRAM_TEXTURE, TRACEID_VRAM_VB, 
TYPE_FLOAT16, TYPE_FLOAT32, TYPE_INT16, TYPE_INT32, TYPE_INT8, TYPE_UINT16, TYPE_UINT32, TYPE_UINT8, Tags, Template, TemplateHandler, TextElement, TextHandler, Texture, TextureAtlas, TextureAtlasHandler, TextureHandler, TextureUtils, TextureView, TorusGeometry, Touch, TouchDevice, TouchEvent, Tracing, TransformFeedback, TransformGizmo, TranslateGizmo, Tri, UNIFORMTYPE_BOOL, UNIFORMTYPE_BOOLARRAY, UNIFORMTYPE_BVEC2, UNIFORMTYPE_BVEC2ARRAY, UNIFORMTYPE_BVEC3, UNIFORMTYPE_BVEC3ARRAY, UNIFORMTYPE_BVEC4, UNIFORMTYPE_BVEC4ARRAY, UNIFORMTYPE_FLOAT, UNIFORMTYPE_FLOATARRAY, UNIFORMTYPE_INT, UNIFORMTYPE_INTARRAY, UNIFORMTYPE_ITEXTURE2D, UNIFORMTYPE_ITEXTURE2D_ARRAY, UNIFORMTYPE_ITEXTURE3D, UNIFORMTYPE_ITEXTURECUBE, UNIFORMTYPE_IVEC2, UNIFORMTYPE_IVEC2ARRAY, UNIFORMTYPE_IVEC3, UNIFORMTYPE_IVEC3ARRAY, UNIFORMTYPE_IVEC4, UNIFORMTYPE_IVEC4ARRAY, UNIFORMTYPE_MAT2, UNIFORMTYPE_MAT3, UNIFORMTYPE_MAT4, UNIFORMTYPE_MAT4ARRAY, UNIFORMTYPE_TEXTURE2D, UNIFORMTYPE_TEXTURE2D_ARRAY, UNIFORMTYPE_TEXTURE2D_SHADOW, UNIFORMTYPE_TEXTURE3D, UNIFORMTYPE_TEXTURECUBE, UNIFORMTYPE_TEXTURECUBE_SHADOW, UNIFORMTYPE_UINT, UNIFORMTYPE_UINTARRAY, UNIFORMTYPE_UTEXTURE2D, UNIFORMTYPE_UTEXTURE2D_ARRAY, UNIFORMTYPE_UTEXTURE3D, UNIFORMTYPE_UTEXTURECUBE, UNIFORMTYPE_UVEC2, UNIFORMTYPE_UVEC2ARRAY, UNIFORMTYPE_UVEC3, UNIFORMTYPE_UVEC3ARRAY, UNIFORMTYPE_UVEC4, UNIFORMTYPE_UVEC4ARRAY, UNIFORMTYPE_VEC2, UNIFORMTYPE_VEC2ARRAY, UNIFORMTYPE_VEC3, UNIFORMTYPE_VEC3ARRAY, UNIFORMTYPE_VEC4, UNIFORMTYPE_VEC4ARRAY, UNIFORM_BUFFER_DEFAULT_SLOT_NAME, UNUSED_UNIFORM_NAME, URI, UniformBufferFormat, UniformFormat, UsdzExporter, VIEW_CENTER, VIEW_LEFT, VIEW_RIGHT, Vec2, Vec3, Vec4, VertexBuffer, VertexFormat, VertexIterator, ViewCube, WORKBUFFER_UPDATE_ALWAYS, WORKBUFFER_UPDATE_AUTO, WORKBUFFER_UPDATE_ONCE, WasmModule, WebglGraphicsDevice, WebgpuGraphicsDevice, WorldClusters, XRDEPTHSENSINGFORMAT_F32, XRDEPTHSENSINGFORMAT_L8A8, XRDEPTHSENSINGFORMAT_R16U, XRDEPTHSENSINGUSAGE_CPU, XRDEPTHSENSINGUSAGE_GPU, XREYE_LEFT, XREYE_NONE, 
XREYE_RIGHT, XRHAND_LEFT, XRHAND_NONE, XRHAND_RIGHT, XRPAD_A, XRPAD_B, XRPAD_SQUEEZE, XRPAD_STICK_BUTTON, XRPAD_STICK_X, XRPAD_STICK_Y, XRPAD_TOUCHPAD_BUTTON, XRPAD_TOUCHPAD_X, XRPAD_TOUCHPAD_Y, XRPAD_TRIGGER, XRSPACE_BOUNDEDFLOOR, XRSPACE_LOCAL, XRSPACE_LOCALFLOOR, XRSPACE_UNBOUNDED, XRSPACE_VIEWER, XRTARGETRAY_GAZE, XRTARGETRAY_POINTER, XRTARGETRAY_SCREEN, XRTRACKABLE_MESH, XRTRACKABLE_PLANE, XRTRACKABLE_POINT, XRTYPE_AR, XRTYPE_INLINE, XRTYPE_VR, XrAnchor, XrAnchors, XrDomOverlay, XrFinger, XrHand, XrHitTest, XrHitTestSource, XrImageTracking, XrInput, XrInputSource, XrJoint, XrLightEstimation, XrManager, XrMeshDetection, XrPlane, XrPlaneDetection, XrTrackedImage, XrView, XrViews, ZoneComponent, ZoneComponentSystem, ambientSrcNames, app, basisInitialize, bindGroupNames, blendNames, calculateNormals, calculateTangents, createBox, createCapsule, createCone, createCylinder, createGraphicsDevice, createMesh, createPlane, createScript, createShader, createShaderFromCode, createSphere, createTorus, createURI, cubemaProjectionNames, ditherNames, dracoDecode, dracoInitialize, drawFullscreenQuad, drawQuadWithShader, extend, fresnelNames, gammaNames, getGlslShaderType, getPixelFormatArrayType, getReservedScriptNames, getTouchTargetCoords, getWgslShaderType, guid, http, indexFormatByteSize, isCompressedPixelFormat, isIntegerPixelFormat, isSrgbPixelFormat, lightFalloffNames, lightShapeNames, lightTypeNames, math, now, path, pixelFormatGammaToLinear, pixelFormatInfo, pixelFormatLinearToGamma, platform, primitiveGlslToWgslTypeMap, reflectionSrcNames, registerScript, reprojectTexture, requiresManualGamma, revision, script, semanticToLocation, shaderChunks, shadowTypeInfo, specularOcclusionNames, spriteRenderModeNames, string, tonemapNames, typedArrayIndexFormats, typedArrayIndexFormatsByteSize, typedArrayToType, typedArrayTypes, typedArrayTypesByteSize, uniformTypeToName, uniformTypeToNameMapWGSL, uniformTypeToNameWGSL, uniformTypeToStorage, version, vertexTypesNames };
export as namespace pc;
export as namespace pcx;
